Project import
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..cd7efad
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,101 @@
+#
+#    Copyright (c) 2016 Nest, Inc.
+#    All rights reserved.
+#
+#    This document is the property of Nest. It is considered
+#    confidential and proprietary information.
+#
+#    This document may not be reproduced or transmitted in any form,
+#    in whole or in part, without the express written permission of
+#    Nest.
+#
+#    Description:
+#      This file is the makefile for nl-unit-test.
+#
+
+include pre.mak
+
+PackageRoot               := .
+
+PackageName               := nl-unit-test
+
+PackageSeparator          :=
+
+PackageSourceDir          := $(PackageRoot)/$(PackageName)
+
+PackageBuildMakefile       = $(call GenerateBuildPaths,Makefile)
+
+SOURCEDIRS                      = $(PackageName)
+$(PackageName)_RULE_TARGET      = $(PackageBuildMakefile)
+
+
+#ResultDirectory := $(ResultDirectory)/nl-unit-test
+
+all: $(PackageDefaultGoal)
+
+# Generate the package license contents.
+
+$(PackageSourceDir)/COPYING: $(BuildDirectory)/source
+
+$(PackageLicenseFile): $(PackageSourceDir)/COPYING
+	$(copy-result)
+
+
+# Prepare the sources.
+
+$(BuildDirectory)/source: | $(PackageSourceDir)
+	$(Verbose)touch $(BuildDirectory)/source
+
+# Patch the sources, if necessary.
+
+$(BuildDirectory)/patch: $(BuildDirectory)/source
+	$(Verbose)touch $(BuildDirectory)/patch
+
+# Generate the package build makefile.
+
+$(PackageBuildMakefile): | $(PackageSourceDir) $(BuildDirectory) $(ResultDirectory)
+	$(Verbose)cd $(BuildDirectory) && \
+	$(LOGCURDIR)/$(PackageSourceDir)/configure \
+	CC="$(CC) $(CPPOPTFLAGS)" CXX="$(CXX) $(CPPOPTFLAGS)" \
+	CPPFLAGS="$(call ToolGenerateIncludeArgument,$(LinuxIncludePath))" \
+	--build=$(HostTuple) 	\
+	--host=$(TargetTuple) 	\
+	--prefix=''				
+
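+# For illustration only: HostTuple, TargetTuple, and LOGCURDIR are assumed to
+# be provided by pre.mak, so the concrete values below are hypothetical. With,
+# say, a HostTuple of x86_64-unknown-linux-gnu and a TargetTuple of
+# arm-unknown-linux-gnueabi, the invocation above would expand to roughly:
+#
+#   cd $(BuildDirectory) && \
+#   <top>/nl-unit-test/configure \
+#       CC="arm-unknown-linux-gnueabi-gcc <optimization flags>" \
+#       --build=x86_64-unknown-linux-gnu \
+#       --host=arm-unknown-linux-gnueabi \
+#       --prefix=''
+#
+# that is, a standard autotools cross-configuration with an empty prefix so
+# that the later staged install lands directly under $(ResultDirectory).
+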
+# Configure the source for building.
+
+$(BuildDirectory)/configure: $(BuildDirectory)/source $(PackageBuildMakefile)
+	$(Verbose)touch $(BuildDirectory)/configure
+
+# Build the source.
+#
+# We have to unset MAKEFLAGS since they confuse the package build otherwise.
+
+$(BuildDirectory)/build: $(BuildDirectory)/configure
+	$(Verbose)unset MAKEFLAGS && \
+	$(MAKE) -C $(BuildDirectory) \
+	all
+	$(Verbose)touch $(BuildDirectory)/build
+
+# Stage the build to a temporary installation area.
+#
+# We have to unset MAKEFLAGS since they confuse the package build otherwise.
+
+$(BuildDirectory)/stage: $(BuildDirectory)/build | $(ResultDirectory)
+	$(Verbose)unset MAKEFLAGS && \
+	$(MAKE) -C $(BuildDirectory) \
+	DESTDIR=$(ResultDirectory) \
+	install
+	$(Verbose)touch $(BuildDirectory)/stage
+
+.PHONY: stage
+stage: $(BuildDirectory)/stage
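+
+# Illustration only: BuildDirectory, ResultDirectory, and Verbose are assumed
+# to come from pre.mak. Each step above touches a same-named stamp file in
+# $(BuildDirectory), so 'make stage' re-runs only the steps whose stamps are
+# out of date. A small convenience target such as the following (not part of
+# the original wrapper) can help confirm where those stamps and the staged
+# results will land:
+
+.PHONY: print-paths
+print-paths:
+	$(Verbose)echo "BuildDirectory  = $(BuildDirectory)"
+	$(Verbose)echo "ResultDirectory = $(ResultDirectory)"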
+
+clean:
+	$(Verbose)$(RM) $(RMFLAGS) -r $(BuildDirectory)
+	$(Verbose)$(RM) $(RMFLAGS) -r $(ResultDirectory)
+
+include post.mak
\ No newline at end of file
diff --git a/nl-unit-test/.default-version b/nl-unit-test/.default-version
new file mode 100644
index 0000000..d3827e7
--- /dev/null
+++ b/nl-unit-test/.default-version
@@ -0,0 +1 @@
+1.0
diff --git a/nl-unit-test/CONTRIBUTING.md b/nl-unit-test/CONTRIBUTING.md
new file mode 100644
index 0000000..2827b7d
--- /dev/null
+++ b/nl-unit-test/CONTRIBUTING.md
@@ -0,0 +1,27 @@
+Want to contribute? Great! First, read this page (including the small print at the end).
+
+### Before you contribute
+Before we can use your code, you must sign the
+[Google Individual Contributor License Agreement](https://cla.developers.google.com/about/google-individual)
+(CLA), which you can do online. The CLA is necessary mainly because you own the
+copyright to your changes, even after your contribution becomes part of our
+codebase, so we need your permission to use and distribute your code. We also
+need to be sure of various other things—for instance that you'll tell us if you
+know that your code infringes on other people's patents. You don't have to sign
+the CLA until after you've submitted your code for review and a member has
+approved it, but you must do it before we can put your code into our codebase.
+
+Before you start working on a larger contribution, you should get in touch with
+us first through the issue tracker with your idea so that we can help out and
+possibly guide you. Coordinating up front makes it much easier to avoid
+frustration later on.
+
+### Code reviews
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose.
+
+### The small print
+Contributions made by corporations are covered by a different agreement than
+the one above, the
+[Software Grant and Corporate Contributor License Agreement](https://cla.developers.google.com/about/google-corporate).
diff --git a/nl-unit-test/LICENSE b/nl-unit-test/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/nl-unit-test/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/nl-unit-test/Makefile.am b/nl-unit-test/Makefile.am
new file mode 100644
index 0000000..9d954e0
--- /dev/null
+++ b/nl-unit-test/Makefile.am
@@ -0,0 +1,155 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the Nest Labs
+#      Unit Test (nlunittest) library.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+AM_MAKEFLAGS                        = --no-print-directory
+
+SUBDIRS                             = \
+    src                               \
+    doc                               \
+    $(NULL)
+
+EXTRA_DIST                          = \
+    CONTRIBUTING.md                   \
+    LICENSE                           \
+    README.md                         \
+    .default-version                  \
+    bootstrap                         \
+    bootstrap-configure               \
+    $(srcdir)/third_party             \
+    $(NULL)
+
+BUILT_SOURCES                       = \
+    .local-version                    \
+    $(NULL)
+
+dist_doc_DATA                       = \
+    $(NULL)
+
+DISTCLEANFILES                      = \
+    .local-version                    \
+    $(NULL)
+
+#
+# Package version files:
+#
+# .default-version - The default package version. This file is ALWAYS checked
+#                    in and should always represent the current baseline
+#                    version of the package.
+#
+# .dist-version    - The distributed package version. This file is NEVER
+#                    checked in within the upstream repository, is auto-
+#                    generated, and is only found in the package distribution.
+#
+# .local-version   - The current source code controlled package version. This
+#                    file is NEVER checked in within the upstream repository,
+#                    is auto-generated, and can always be found in both the
+#                    build tree and distribution.
+#
+# When present, the .local-version file is preferred first, the
+# .dist-version second, and the .default-version last.
+#
+
+VERSION_FILE                      := $(if $(wildcard $(builddir)/.local-version),$(builddir)/.local-version,$(if $(wildcard $(srcdir)/.dist-version),$(srcdir)/.dist-version,$(srcdir)/.default-version))
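+
+# Illustration only (this target is not part of the upstream template): a
+# quick way to confirm which of the three files the precedence above selected
+# in a given tree -- for example, a fresh checkout with no .local-version or
+# .dist-version falls back to .default-version.
+.PHONY: print-version-file
+print-version-file:
+	@echo "VERSION_FILE = $(VERSION_FILE)"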
+
+#
+# Override autotool's default notion of the package version variables.
+# This ensures that when we create a source distribution that the
+# version is always the current version, not the version when the
+# package was bootstrapped.
+#
+
+NLUNITTEST_VERSION                ?= $(shell cat $(VERSION_FILE) 2> /dev/null)
+
+PACKAGE_VERSION                    = $(NLUNITTEST_VERSION)
+VERSION                            = $(PACKAGE_VERSION)
+
+#
+# check-file-.local-version
+#
+# Speculatively regenerate .local-version and check to see if it needs
+# to be updated.
+#
+# If NLUNITTEST_VERSION has been supplied anywhere other than in this file
+# (which is implicitly the contents of .local-version), then use that;
+# otherwise, attempt to generate it from the SCM system.
+#
+# This is called from $(call check-file,.local-version).
+#
+define check-file-.local-version
+if [ "$(origin NLUNITTEST_VERSION)" != "file" ]; then  \
+    echo "$(NLUNITTEST_VERSION)" > "$(2)";             \
+else                                                   \
+    $(abs_top_nlbuild_autotools_dir)/scripts/mkversion \
+        -b "$(NLUNITTEST_VERSION)" "$(top_srcdir)"     \
+        > "$(2)";                                      \
+fi
+endef
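+
+# For illustration: because the definition above prefers a NLUNITTEST_VERSION
+# supplied from outside this file, a release could pin the version from the
+# command line (the version value here is only an example):
+#
+#   make NLUNITTEST_VERSION=1.0.1 dist
+#
+# Otherwise the nlbuild-autotools mkversion script derives the version from
+# the SCM metadata in $(top_srcdir).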
+
+#
+# check-file-.dist-version
+#
+# Speculatively regenerate .dist-version and check to see if it needs
+# to be updated.
+#
+# This is called from $(call check-file,.dist-version).
+#
+define check-file-.dist-version
+cat "$(1)" > "$(2)"
+endef
+
+#
+# A convenience target to allow package users to easily rerun the
+# package configuration according to the current configuration.
+#
+.PHONY: reconfigure
+reconfigure: $(builddir)/config.status
+	$(AM_V_at)$(<) --recheck
+
+#
+# Version file regeneration rules.
+#
+.PHONY: force
+
+$(builddir)/.local-version: $(srcdir)/.default-version force
+
+$(distdir)/.dist-version: $(builddir)/.local-version force
+
+$(distdir)/.dist-version $(builddir)/.local-version:
+	$(call check-file,$(@F))
+
+dist distcheck: $(BUILT_SOURCES)
+
+dist-hook: $(distdir)/.dist-version
+
+#
+# Top-level convenience target for making a documentation-only
+# distribution whose results appear at the top level of the build tree
+# in the same fashion that the distribution would be for 'make dist'.
+#
+
+.PHONY: docdist
+docdist: $(BUILT_SOURCES)
+	$(MAKE) -C doc docdistdir=$(abs_builddir) $(@)
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/Makefile.in b/nl-unit-test/Makefile.in
new file mode 100644
index 0000000..f9da144
--- /dev/null
+++ b/nl-unit-test/Makefile.in
@@ -0,0 +1,1025 @@
+# Makefile.in generated by automake 1.14.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the Nest Labs
+#      Unit Test (nlunittest) library.
+#
+
+VPATH = @srcdir@
+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
+am__make_running_with_option = \
+  case $${target_option-} in \
+      ?) ;; \
+      *) echo "am__make_running_with_option: internal error: invalid" \
+              "target option '$${target_option-}' specified" >&2; \
+         exit 1;; \
+  esac; \
+  has_opt=no; \
+  sane_makeflags=$$MAKEFLAGS; \
+  if $(am__is_gnu_make); then \
+    sane_makeflags=$$MFLAGS; \
+  else \
+    case $$MAKEFLAGS in \
+      *\\[\ \	]*) \
+        bs=\\; \
+        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+          | sed "s/$$bs$$bs[$$bs $$bs	]*//g"`;; \
+    esac; \
+  fi; \
+  skip_next=no; \
+  strip_trailopt () \
+  { \
+    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+  }; \
+  for flg in $$sane_makeflags; do \
+    test $$skip_next = yes && { skip_next=no; continue; }; \
+    case $$flg in \
+      *=*|--*) continue;; \
+        -*I) strip_trailopt 'I'; skip_next=yes;; \
+      -*I?*) strip_trailopt 'I';; \
+        -*O) strip_trailopt 'O'; skip_next=yes;; \
+      -*O?*) strip_trailopt 'O';; \
+        -*l) strip_trailopt 'l'; skip_next=yes;; \
+      -*l?*) strip_trailopt 'l';; \
+      -[dEDm]) skip_next=yes;; \
+      -[JT]) skip_next=yes;; \
+    esac; \
+    case $$flg in \
+      *$$target_option*) has_opt=yes; break;; \
+    esac; \
+  done; \
+  test $$has_opt = yes
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+target_triplet = @target@
+subdir = .
+DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \
+	$(top_srcdir)/configure $(am__configure_deps) \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs \
+	$(dist_doc_DATA) \
+	third_party/nlbuild-autotools/repo/autoconf/ar-lib \
+	third_party/nlbuild-autotools/repo/autoconf/compile \
+	third_party/nlbuild-autotools/repo/autoconf/config.guess \
+	third_party/nlbuild-autotools/repo/autoconf/config.sub \
+	third_party/nlbuild-autotools/repo/autoconf/depcomp \
+	third_party/nlbuild-autotools/repo/autoconf/install-sh \
+	third_party/nlbuild-autotools/repo/autoconf/missing \
+	third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs \
+	third_party/nlbuild-autotools/repo/autoconf/py-compile \
+	third_party/nlbuild-autotools/repo/autoconf/ltmain.sh \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/compile \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/config.guess \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/config.sub \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/install-sh \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/ltmain.sh \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/missing
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
+ configure.lineno config.status.lineno
+mkinstalldirs = $(SHELL) \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs
+CONFIG_HEADER = $(top_builddir)/src/nlunittest-config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+AM_V_P = $(am__v_P_@AM_V@)
+am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+am__v_P_0 = false
+am__v_P_1 = :
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo "  GEN     " $@;
+am__v_GEN_1 = 
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+am__v_at_1 = 
+SOURCES =
+DIST_SOURCES =
+RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \
+	ctags-recursive dvi-recursive html-recursive info-recursive \
+	install-data-recursive install-dvi-recursive \
+	install-exec-recursive install-html-recursive \
+	install-info-recursive install-pdf-recursive \
+	install-ps-recursive install-recursive installcheck-recursive \
+	installdirs-recursive pdf-recursive ps-recursive \
+	tags-recursive uninstall-recursive
+am__can_run_installinfo = \
+  case $$AM_UPDATE_INFO_DIR in \
+    n|no|NO) false;; \
+    *) (install-info --version) >/dev/null 2>&1;; \
+  esac
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+  for p in $$list; do echo "$$p $$p"; done | \
+  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+    if (++n[$$2] == $(am__install_max)) \
+      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+    END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+  test -z "$$files" \
+    || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+    || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+         $(am__cd) "$$dir" && rm -f $$files; }; \
+  }
+am__installdirs = "$(DESTDIR)$(docdir)"
+DATA = $(dist_doc_DATA)
+RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive	\
+  distclean-recursive maintainer-clean-recursive
+am__recursive_targets = \
+  $(RECURSIVE_TARGETS) \
+  $(RECURSIVE_CLEAN_TARGETS) \
+  $(am__extra_recursive_targets)
+AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \
+	cscope distdir dist dist-all distcheck
+am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
+# Read a list of newline-separated strings from the standard input,
+# and print each of them once, without duplicates.  Input order is
+# *not* preserved.
+am__uniquify_input = $(AWK) '\
+  BEGIN { nonempty = 0; } \
+  { items[$$0] = 1; nonempty = 1; } \
+  END { if (nonempty) { for (i in items) print i; }; } \
+'
+# Make sure the list of sources is unique.  This is necessary because,
+# e.g., the same source file might be shared among _SOURCES variables
+# for different programs/libraries.
+am__define_uniq_tagged_files = \
+  list='$(am__tagged_files)'; \
+  unique=`for i in $$list; do \
+    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+  done | $(am__uniquify_input)`
+ETAGS = etags
+CTAGS = ctags
+CSCOPE = cscope
+DIST_SUBDIRS = $(SUBDIRS)
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+distdir = $(PACKAGE)-$(VERSION)
+top_distdir = $(distdir)
+am__remove_distdir = \
+  if test -d "$(distdir)"; then \
+    find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \
+      && rm -rf "$(distdir)" \
+      || { sleep 5 && rm -rf "$(distdir)"; }; \
+  else :; fi
+am__post_remove_distdir = $(am__remove_distdir)
+am__relativize = \
+  dir0=`pwd`; \
+  sed_first='s,^\([^/]*\)/.*$$,\1,'; \
+  sed_rest='s,^[^/]*/*,,'; \
+  sed_last='s,^.*/\([^/]*\)$$,\1,'; \
+  sed_butlast='s,/*[^/]*$$,,'; \
+  while test -n "$$dir1"; do \
+    first=`echo "$$dir1" | sed -e "$$sed_first"`; \
+    if test "$$first" != "."; then \
+      if test "$$first" = ".."; then \
+        dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \
+        dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \
+      else \
+        first2=`echo "$$dir2" | sed -e "$$sed_first"`; \
+        if test "$$first2" = "$$first"; then \
+          dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \
+        else \
+          dir2="../$$dir2"; \
+        fi; \
+        dir0="$$dir0"/"$$first"; \
+      fi; \
+    fi; \
+    dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \
+  done; \
+  reldir="$$dir2"
+DIST_ARCHIVES = $(distdir).tar.gz
+GZIP_ENV = --best
+DIST_TARGETS = dist-gzip
+distuninstallcheck_listfiles = find . -type f -print
+am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \
+  | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$'
+distcleancheck_listfiles = find . -type f -print
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CMP = @CMP@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
+DOT = @DOT@
+DOXYGEN = @DOXYGEN@
+DOXYGEN_USE_DOT = @DOXYGEN_USE_DOT@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBNLUNITTEST_VERSION_AGE = @LIBNLUNITTEST_VERSION_AGE@
+LIBNLUNITTEST_VERSION_CURRENT = @LIBNLUNITTEST_VERSION_CURRENT@
+LIBNLUNITTEST_VERSION_INFO = @LIBNLUNITTEST_VERSION_INFO@
+LIBNLUNITTEST_VERSION_REVISION = @LIBNLUNITTEST_VERSION_REVISION@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAINT = @MAINT@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJCOPY = @OBJCOPY@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = $(NLUNITTEST_VERSION)
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PERL = @PERL@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = $(PACKAGE_VERSION)
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_nlbuild_autotools_dir = @abs_top_nlbuild_autotools_dir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+nlbuild_autotools_stem = @nlbuild_autotools_stem@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target = @target@
+target_alias = @target_alias@
+target_cpu = @target_cpu@
+target_os = @target_os@
+target_vendor = @target_vendor@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+AM_MAKEFLAGS = --no-print-directory
+SUBDIRS = \
+    src                               \
+    doc                               \
+    $(NULL)
+
+EXTRA_DIST = \
+    CONTRIBUTING.md                   \
+    LICENSE                           \
+    README.md                         \
+    .default-version                  \
+    bootstrap                         \
+    bootstrap-configure               \
+    $(srcdir)/third_party             \
+    $(NULL)
+
+BUILT_SOURCES = \
+    .local-version                    \
+    $(NULL)
+
+dist_doc_DATA = \
+    $(NULL)
+
+DISTCLEANFILES = \
+    .local-version                    \
+    $(NULL)
+
+
+#
+# Package version files:
+#
+# .default-version - The default package version. This file is ALWAYS checked
+#                    in and should always represent the current baseline
+#                    version of the package.
+#
+# .dist-version    - The distributed package version. This file is NEVER
+#                    checked in within the upstream repository, is auto-
+#                    generated, and is only found in the package distribution.
+#
+# .local-version   - The current source code controlled package version. This
+#                    file is NEVER checked in within the upstream repository,
+#                    is auto-generated, and can always be found in both the
+#                    build tree and distribution.
+#
+# When present, the .local-version file is preferred first, the
+# .dist-version second, and the .default-version last.
+#
+VERSION_FILE := $(if $(wildcard $(builddir)/.local-version),$(builddir)/.local-version,$(if $(wildcard $(srcdir)/.dist-version),$(srcdir)/.dist-version,$(srcdir)/.default-version))
+all: $(BUILT_SOURCES)
+	$(MAKE) $(AM_MAKEFLAGS) all-recursive
+
+.SUFFIXES:
+am--refresh: Makefile
+	@:
+$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \
+	      $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \
+	$(am__cd) $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    echo ' $(SHELL) ./config.status'; \
+	    $(SHELL) ./config.status;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	$(SHELL) ./config.status --recheck
+
+$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
+	$(am__cd) $(srcdir) && $(AUTOCONF)
+$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
+	$(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
+$(am__aclocal_m4_deps):
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+distclean-libtool:
+	-rm -f libtool config.lt
+install-dist_docDATA: $(dist_doc_DATA)
+	@$(NORMAL_INSTALL)
+	@list='$(dist_doc_DATA)'; test -n "$(docdir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(docdir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(docdir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(docdir)'"; \
+	  $(INSTALL_DATA) $$files "$(DESTDIR)$(docdir)" || exit $$?; \
+	done
+
+uninstall-dist_docDATA:
+	@$(NORMAL_UNINSTALL)
+	@list='$(dist_doc_DATA)'; test -n "$(docdir)" || list=; \
+	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+	dir='$(DESTDIR)$(docdir)'; $(am__uninstall_files_from_dir)
+
+# This directory's subdirectories are mostly independent; you can cd
+# into them and run 'make' without going through this Makefile.
+# To change the values of 'make' variables: instead of editing Makefiles,
+# (1) if the variable is set in 'config.status', edit 'config.status'
+#     (which will cause the Makefiles to be regenerated when you run 'make');
+# (2) otherwise, pass the desired values on the 'make' command line.
+$(am__recursive_targets):
+	@fail=; \
+	if $(am__make_keepgoing); then \
+	  failcom='fail=yes'; \
+	else \
+	  failcom='exit 1'; \
+	fi; \
+	dot_seen=no; \
+	target=`echo $@ | sed s/-recursive//`; \
+	case "$@" in \
+	  distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
+	  *) list='$(SUBDIRS)' ;; \
+	esac; \
+	for subdir in $$list; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    dot_seen=yes; \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done; \
+	if test "$$dot_seen" = "no"; then \
+	  $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
+	fi; test -z "$$fail"
+
+ID: $(am__tagged_files)
+	$(am__define_uniq_tagged_files); mkid -fID $$unique
+tags: tags-recursive
+TAGS: tags
+
+tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+	set x; \
+	here=`pwd`; \
+	if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
+	  include_option=--etags-include; \
+	  empty_fix=.; \
+	else \
+	  include_option=--include; \
+	  empty_fix=; \
+	fi; \
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    test ! -f $$subdir/TAGS || \
+	      set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \
+	  fi; \
+	done; \
+	$(am__define_uniq_tagged_files); \
+	shift; \
+	if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  if test $$# -gt 0; then \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      "$$@" $$unique; \
+	  else \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      $$unique; \
+	  fi; \
+	fi
+ctags: ctags-recursive
+
+CTAGS: ctags
+ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+	$(am__define_uniq_tagged_files); \
+	test -z "$(CTAGS_ARGS)$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && $(am__cd) $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) "$$here"
+cscope: cscope.files
+	test ! -s cscope.files \
+	  || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS)
+clean-cscope:
+	-rm -f cscope.files
+cscope.files: clean-cscope cscopelist
+cscopelist: cscopelist-recursive
+
+cscopelist-am: $(am__tagged_files)
+	list='$(am__tagged_files)'; \
+	case "$(srcdir)" in \
+	  [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
+	  *) sdir=$(subdir)/$(srcdir) ;; \
+	esac; \
+	for i in $$list; do \
+	  if test -f "$$i"; then \
+	    echo "$(subdir)/$$i"; \
+	  else \
+	    echo "$$sdir/$$i"; \
+	  fi; \
+	done >> $(top_builddir)/cscope.files
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+	-rm -f cscope.out cscope.in.out cscope.po.out cscope.files
+
+distdir: $(DISTFILES)
+	$(am__remove_distdir)
+	test -d "$(distdir)" || mkdir "$(distdir)"
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d "$(distdir)/$$file"; then \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+	  else \
+	    test -f "$(distdir)/$$file" \
+	    || cp -p $$d/$$file "$(distdir)/$$file" \
+	    || exit 1; \
+	  fi; \
+	done
+	@list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    $(am__make_dryrun) \
+	      || test -d "$(distdir)/$$subdir" \
+	      || $(MKDIR_P) "$(distdir)/$$subdir" \
+	      || exit 1; \
+	    dir1=$$subdir; dir2="$(distdir)/$$subdir"; \
+	    $(am__relativize); \
+	    new_distdir=$$reldir; \
+	    dir1=$$subdir; dir2="$(top_distdir)"; \
+	    $(am__relativize); \
+	    new_top_distdir=$$reldir; \
+	    echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \
+	    echo "     am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \
+	    ($(am__cd) $$subdir && \
+	      $(MAKE) $(AM_MAKEFLAGS) \
+	        top_distdir="$$new_top_distdir" \
+	        distdir="$$new_distdir" \
+		am__remove_distdir=: \
+		am__skip_length_check=: \
+		am__skip_mode_fix=: \
+	        distdir) \
+	      || exit 1; \
+	  fi; \
+	done
+	$(MAKE) $(AM_MAKEFLAGS) \
+	  top_distdir="$(top_distdir)" distdir="$(distdir)" \
+	  dist-hook
+	-test -n "$(am__skip_mode_fix)" \
+	|| find "$(distdir)" -type d ! -perm -755 \
+		-exec chmod u+rwx,go+rx {} \; -o \
+	  ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -400 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
+	|| chmod -R a+r "$(distdir)"
+dist-gzip: distdir
+	tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+	$(am__post_remove_distdir)
+
+dist-bzip2: distdir
+	tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2
+	$(am__post_remove_distdir)
+
+dist-lzip: distdir
+	tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz
+	$(am__post_remove_distdir)
+
+dist-xz: distdir
+	tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz
+	$(am__post_remove_distdir)
+
+dist-tarZ: distdir
+	@echo WARNING: "Support for shar distribution archives is" \
+	               "deprecated." >&2
+	@echo WARNING: "It will be removed altogether in Automake 2.0" >&2
+	tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
+	$(am__post_remove_distdir)
+
+dist-shar: distdir
+	@echo WARNING: "Support for distribution archives compressed with" \
+		       "legacy program 'compress' is deprecated." >&2
+	@echo WARNING: "It will be removed altogether in Automake 2.0" >&2
+	shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
+	$(am__post_remove_distdir)
+
+dist-zip: distdir
+	-rm -f $(distdir).zip
+	zip -rq $(distdir).zip $(distdir)
+	$(am__post_remove_distdir)
+
+dist dist-all:
+	$(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:'
+	$(am__post_remove_distdir)
+
+# This target untars the dist file and tries a VPATH configuration.  Then
+# it guarantees that the distribution is self-contained by making another
+# tarfile.
+distcheck: dist
+	case '$(DIST_ARCHIVES)' in \
+	*.tar.gz*) \
+	  GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\
+	*.tar.bz2*) \
+	  bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\
+	*.tar.lz*) \
+	  lzip -dc $(distdir).tar.lz | $(am__untar) ;;\
+	*.tar.xz*) \
+	  xz -dc $(distdir).tar.xz | $(am__untar) ;;\
+	*.tar.Z*) \
+	  uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
+	*.shar.gz*) \
+	  GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\
+	*.zip*) \
+	  unzip $(distdir).zip ;;\
+	esac
+	chmod -R a-w $(distdir)
+	chmod u+w $(distdir)
+	mkdir $(distdir)/_build $(distdir)/_inst
+	chmod a-w $(distdir)
+	test -d $(distdir)/_build || exit 0; \
+	dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
+	  && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
+	  && am__cwd=`pwd` \
+	  && $(am__cd) $(distdir)/_build \
+	  && ../configure \
+	    $(AM_DISTCHECK_CONFIGURE_FLAGS) \
+	    $(DISTCHECK_CONFIGURE_FLAGS) \
+	    --srcdir=.. --prefix="$$dc_install_base" \
+	  && $(MAKE) $(AM_MAKEFLAGS) \
+	  && $(MAKE) $(AM_MAKEFLAGS) dvi \
+	  && $(MAKE) $(AM_MAKEFLAGS) check \
+	  && $(MAKE) $(AM_MAKEFLAGS) install \
+	  && $(MAKE) $(AM_MAKEFLAGS) installcheck \
+	  && $(MAKE) $(AM_MAKEFLAGS) uninstall \
+	  && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
+	        distuninstallcheck \
+	  && chmod -R a-w "$$dc_install_base" \
+	  && ({ \
+	       (cd ../.. && umask 077 && mkdir "$$dc_destdir") \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
+	            distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
+	      } || { rm -rf "$$dc_destdir"; exit 1; }) \
+	  && rm -rf "$$dc_destdir" \
+	  && $(MAKE) $(AM_MAKEFLAGS) dist \
+	  && rm -rf $(DIST_ARCHIVES) \
+	  && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \
+	  && cd "$$am__cwd" \
+	  || exit 1
+	$(am__post_remove_distdir)
+	@(echo "$(distdir) archives ready for distribution: "; \
+	  list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
+	  sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
+distuninstallcheck:
+	@test -n '$(distuninstallcheck_dir)' || { \
+	  echo 'ERROR: trying to run $@ with an empty' \
+	       '$$(distuninstallcheck_dir)' >&2; \
+	  exit 1; \
+	}; \
+	$(am__cd) '$(distuninstallcheck_dir)' || { \
+	  echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \
+	  exit 1; \
+	}; \
+	test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \
+	   || { echo "ERROR: files left after uninstall:" ; \
+	        if test -n "$(DESTDIR)"; then \
+	          echo "  (check DESTDIR support)"; \
+	        fi ; \
+	        $(distuninstallcheck_listfiles) ; \
+	        exit 1; } >&2
+distcleancheck: distclean
+	@if test '$(srcdir)' = . ; then \
+	  echo "ERROR: distcleancheck can only run from a VPATH build" ; \
+	  exit 1 ; \
+	fi
+	@test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
+	  || { echo "ERROR: files left in build directory after distclean:" ; \
+	       $(distcleancheck_listfiles) ; \
+	       exit 1; } >&2
+check-am: all-am
+check: $(BUILT_SOURCES)
+	$(MAKE) $(AM_MAKEFLAGS) check-recursive
+all-am: Makefile $(DATA)
+installdirs: installdirs-recursive
+installdirs-am:
+	for dir in "$(DESTDIR)$(docdir)"; do \
+	  test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+	done
+install: $(BUILT_SOURCES)
+	$(MAKE) $(AM_MAKEFLAGS) install-recursive
+install-exec: install-exec-recursive
+install-data: install-data-recursive
+uninstall: uninstall-recursive
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-recursive
+install-strip:
+	if test -z '$(STRIP)'; then \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	      install; \
+	else \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	    "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+	fi
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+	-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+	-test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+	-test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES)
+clean: clean-recursive
+
+clean-am: clean-generic clean-libtool mostlyclean-am
+
+distclean: distclean-recursive
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+	-rm -f Makefile
+distclean-am: clean-am distclean-generic distclean-libtool \
+	distclean-tags
+
+dvi: dvi-recursive
+
+dvi-am:
+
+html: html-recursive
+
+html-am:
+
+info: info-recursive
+
+info-am:
+
+install-data-am: install-dist_docDATA
+
+install-dvi: install-dvi-recursive
+
+install-dvi-am:
+
+install-exec-am:
+
+install-html: install-html-recursive
+
+install-html-am:
+
+install-info: install-info-recursive
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-recursive
+
+install-pdf-am:
+
+install-ps: install-ps-recursive
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-recursive
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+	-rm -rf $(top_srcdir)/autom4te.cache
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-recursive
+
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool
+
+pdf: pdf-recursive
+
+pdf-am:
+
+ps: ps-recursive
+
+ps-am:
+
+uninstall-am: uninstall-dist_docDATA
+
+.MAKE: $(am__recursive_targets) all check install install-am \
+	install-strip
+
+.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \
+	am--refresh check check-am clean clean-cscope clean-generic \
+	clean-libtool cscope cscopelist-am ctags ctags-am dist \
+	dist-all dist-bzip2 dist-gzip dist-hook dist-lzip dist-shar \
+	dist-tarZ dist-xz dist-zip distcheck distclean \
+	distclean-generic distclean-libtool distclean-tags \
+	distcleancheck distdir distuninstallcheck dvi dvi-am html \
+	html-am info info-am install install-am install-data \
+	install-data-am install-dist_docDATA install-dvi \
+	install-dvi-am install-exec install-exec-am install-html \
+	install-html-am install-info install-info-am install-man \
+	install-pdf install-pdf-am install-ps install-ps-am \
+	install-strip installcheck installcheck-am installdirs \
+	installdirs-am maintainer-clean maintainer-clean-generic \
+	mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \
+	ps ps-am tags tags-am uninstall uninstall-am \
+	uninstall-dist_docDATA
+
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+#
+# Override autotool's default notion of the package version variables.
+# This ensures that when we create a source distribution that the
+# version is always the current version, not the version when the
+# package was bootstrapped.
+#
+
+NLUNITTEST_VERSION                ?= $(shell cat $(VERSION_FILE) 2> /dev/null)
+
+#
+# check-file-.local-version
+#
+# Speculatively regenerate .local-version and check to see if it needs
+# to be updated.
+#
+# If NLUNITTEST_VERSION has been supplied anywhere other than in this file
+# (which is implicitly the contents of .local-version), then use that;
+# otherwise, attempt to generate it from the SCM system.
+#
+# This is called from $(call check-file,.local-version).
+#
+define check-file-.local-version
+if [ "$(origin NLUNITTEST_VERSION)" != "file" ]; then  \
+    echo "$(NLUNITTEST_VERSION)" > "$(2)";             \
+else                                                   \
+    $(abs_top_nlbuild_autotools_dir)/scripts/mkversion \
+        -b "$(NLUNITTEST_VERSION)" "$(top_srcdir)"     \
+        > "$(2)";                                      \
+fi
+endef
+
+#
+# check-file-.dist-version
+#
+# Speculatively regenerate .dist-version and check to see if it needs
+# to be updated.
+#
+# This is called from $(call check-file,.dist-version).
+#
+define check-file-.dist-version
+cat "$(1)" > "$(2)"
+endef
+
+#
+# A convenience target to allow package users to easily rerun the
+# package configuration according to the current configuration.
+#
+.PHONY: reconfigure
+reconfigure: $(builddir)/config.status
+	$(AM_V_at)$(<) --recheck
+
+#
+# Version file regeneration rules.
+#
+.PHONY: force
+
+$(builddir)/.local-version: $(srcdir)/.default-version force
+
+$(distdir)/.dist-version: $(builddir)/.local-version force
+
+$(distdir)/.dist-version $(builddir)/.local-version:
+	$(call check-file,$(@F))
+
+dist distcheck: $(BUILT_SOURCES)
+
+dist-hook: $(distdir)/.dist-version
+
+#
+# Top-level convenience target for making a documentation-only
+# distribution whose results appear at the top level of the build tree
+# in the same fashion that the distribution would be for 'make dist'.
+#
+
+.PHONY: docdist
+docdist: $(BUILT_SOURCES)
+	$(MAKE) -C doc docdistdir=$(abs_builddir) $(@)
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/nl-unit-test/README.md b/nl-unit-test/README.md
new file mode 100644
index 0000000..82d354b
--- /dev/null
+++ b/nl-unit-test/README.md
@@ -0,0 +1,25 @@
+Nest Labs Unit Test
+===================
+
+Nest Labs Unit Test (nlunittest) is designed to provide a
+simple, portable unit test suite framework.
+
+It should be capable of running on just about any system, no
+matter how constrained, as it depends only on the C Standard Library.
+
+It is recognized and acknowledged that not every piece of code,
+subsystem, library, thread, or process can be easily unit tested.
+However, unit tests are a Good Thing&trade; and will give you the
+confidence to make changes and to prove to both yourself and your
+colleagues that you have things **right**, insofar as your
+unit tests cover **right**.
+
+Unit tests will be combined with other methods of verification to
+fully cover the various test requirements in the system. The
+purpose of the unit test is to isolate your module's methods and
+ensure that they have the proper output, affect state properly,
+and handle errors appropriately. This ensures each of the building
+blocks of the system functions correctly. These tests can also be
+used for regression testing so that any code changes that affect a
+module's output will be automatically flagged.
+
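+Example
+-------
+
+The following sketch is illustrative only; it assumes the `nlunit-test.h`
+interface (`NL_TEST_DEF`, `NL_TEST_SENTINEL`, `NL_TEST_ASSERT`,
+`nlTestRunner`, and `nlTestRunnerStats`). Consult the installed header
+for the authoritative API.
+
+```c
+#include <nlunit-test.h>
+
+/* A test function receives the suite and an optional context pointer. */
+static void TestAddition(nlTestSuite *inSuite, void *inContext)
+{
+    NL_TEST_ASSERT(inSuite, 1 + 1 == 2);
+}
+
+/* The test table is terminated by a sentinel entry. */
+static const nlTest sTests[] = {
+    NL_TEST_DEF("addition", TestAddition),
+    NL_TEST_SENTINEL()
+};
+
+int main(void)
+{
+    /* Remaining suite members (setup, teardown, counters) default to zero. */
+    nlTestSuite theSuite = { "arithmetic", &sTests[0], NULL, NULL };
+
+    nlTestRunner(&theSuite, NULL);
+
+    /* Exit non-zero if any test in the suite failed. */
+    return nlTestRunnerStats(&theSuite);
+}
+```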
diff --git a/nl-unit-test/aclocal.m4 b/nl-unit-test/aclocal.m4
new file mode 100644
index 0000000..389df86
--- /dev/null
+++ b/nl-unit-test/aclocal.m4
@@ -0,0 +1,10375 @@
+# generated automatically by aclocal 1.14.1 -*- Autoconf -*-
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])])
+m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.68],,
+[m4_warning([this file was generated for autoconf 2.68.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+# Copyright (C) 2002-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_AUTOMAKE_VERSION(VERSION)
+# ----------------------------
+# Automake X.Y traces this macro to ensure aclocal.m4 has been
+# generated from the m4 files accompanying Automake X.Y.
+# (This private macro should not be called outside this file.)
+AC_DEFUN([AM_AUTOMAKE_VERSION],
+[am__api_version='1.14'
+dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
+dnl require some minimum version.  Point them to the right macro.
+m4_if([$1], [1.14.1], [],
+      [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
+])
+
+# _AM_AUTOCONF_VERSION(VERSION)
+# -----------------------------
+# aclocal traces this macro to find the Autoconf version.
+# This is a private macro too.  Using m4_define simplifies
+# the logic in aclocal, which can simply ignore this definition.
+m4_define([_AM_AUTOCONF_VERSION], [])
+
+# AM_SET_CURRENT_AUTOMAKE_VERSION
+# -------------------------------
+# Call AM_AUTOMAKE_VERSION and _AM_AUTOCONF_VERSION so they can be traced.
+# This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
+AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
+[AM_AUTOMAKE_VERSION([1.14.1])dnl
+m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
+
+# AM_AUX_DIR_EXPAND                                         -*- Autoconf -*-
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets
+# $ac_aux_dir to '$srcdir/foo'.  In other projects, it is set to
+# '$srcdir', '$srcdir/..', or '$srcdir/../..'.
+#
+# Of course, Automake must honor this variable whenever it calls a
+# tool from the auxiliary directory.  The problem is that $srcdir (and
+# therefore $ac_aux_dir as well) can be either absolute or relative,
+# depending on how configure is run.  This is pretty annoying, since
+# it makes $ac_aux_dir quite unusable in subdirectories: in the top
+# source directory, any form will work fine, but in subdirectories a
+# relative path needs to be adjusted first.
+#
+# $ac_aux_dir/missing
+#    fails when called from a subdirectory if $ac_aux_dir is relative
+# $top_srcdir/$ac_aux_dir/missing
+#    fails if $ac_aux_dir is absolute,
+#    fails when called from a subdirectory in a VPATH build with
+#          a relative $ac_aux_dir
+#
+# The reason for the latter failure is that $top_srcdir and $ac_aux_dir
+# are both prefixed by $srcdir.  In an in-source build this is usually
+# harmless because $srcdir is '.', but things will break when you
+# start a VPATH build or use an absolute $srcdir.
+#
+# So we could use something similar to $top_srcdir/$ac_aux_dir/missing,
+# iff we strip the leading $srcdir from $ac_aux_dir.  That would be:
+#   am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"`
+# and then we would define $MISSING as
+#   MISSING="\${SHELL} $am_aux_dir/missing"
+# This will work as long as MISSING is not called from configure, because
+# unfortunately $(top_srcdir) has no meaning in configure.
+# However there are other variables, like CC, which are often used in
+# configure, and could therefore not use this "fixed" $ac_aux_dir.
+#
+# Another solution, used here, is to always expand $ac_aux_dir to an
+# absolute PATH.  The drawback is that using absolute paths prevents a
+# configured tree from being moved without reconfiguration.
+
+AC_DEFUN([AM_AUX_DIR_EXPAND],
+[dnl Rely on autoconf to set up CDPATH properly.
+AC_PREREQ([2.50])dnl
+# expand $ac_aux_dir to an absolute path
+am_aux_dir=`cd $ac_aux_dir && pwd`
+])
+
+# AM_CONDITIONAL                                            -*- Autoconf -*-
+
+# Copyright (C) 1997-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_CONDITIONAL(NAME, SHELL-CONDITION)
+# -------------------------------------
+# Define a conditional.
+AC_DEFUN([AM_CONDITIONAL],
+[AC_PREREQ([2.52])dnl
+ m4_if([$1], [TRUE],  [AC_FATAL([$0: invalid condition: $1])],
+       [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl
+AC_SUBST([$1_TRUE])dnl
+AC_SUBST([$1_FALSE])dnl
+_AM_SUBST_NOTMAKE([$1_TRUE])dnl
+_AM_SUBST_NOTMAKE([$1_FALSE])dnl
+m4_define([_AM_COND_VALUE_$1], [$2])dnl
+if $2; then
+  $1_TRUE=
+  $1_FALSE='#'
+else
+  $1_TRUE='#'
+  $1_FALSE=
+fi
+AC_CONFIG_COMMANDS_PRE(
+[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then
+  AC_MSG_ERROR([[conditional "$1" was never defined.
+Usually this means the macro was only invoked conditionally.]])
+fi])])
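+
+# For instance (an illustrative usage note, not part of the macro above),
+# a configure.ac fragment such as
+#
+#   AM_CONDITIONAL([DEBUG], [test "x$enable_debug" = xyes])
+#
+# lets a Makefile.am guard rules with "if DEBUG" ... "endif".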
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+
+# There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be
+# written in clear, in which case automake, when reading aclocal.m4,
+# will think it sees a *use*, and therefore will trigger all its
+# C support machinery.  Also note that it means that autoscan, seeing
+# CC etc. in the Makefile, will ask for an AC_PROG_CC use...
+
+
+# _AM_DEPENDENCIES(NAME)
+# ----------------------
+# See how the compiler implements dependency checking.
+# NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GCJ".
+# We try a few techniques and use that to set a single cache variable.
+#
+# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was
+# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular
+# dependency, and given that the user is not expected to run this macro,
+# just rely on AC_PROG_CC.
+AC_DEFUN([_AM_DEPENDENCIES],
+[AC_REQUIRE([AM_SET_DEPDIR])dnl
+AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl
+AC_REQUIRE([AM_MAKE_INCLUDE])dnl
+AC_REQUIRE([AM_DEP_TRACK])dnl
+
+m4_if([$1], [CC],   [depcc="$CC"   am_compiler_list=],
+      [$1], [CXX],  [depcc="$CXX"  am_compiler_list=],
+      [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'],
+      [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'],
+      [$1], [UPC],  [depcc="$UPC"  am_compiler_list=],
+      [$1], [GCJ],  [depcc="$GCJ"  am_compiler_list='gcc3 gcc'],
+                    [depcc="$$1"   am_compiler_list=])
+
+AC_CACHE_CHECK([dependency style of $depcc],
+               [am_cv_$1_dependencies_compiler_type],
+[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named 'D' -- because '-MD' means "put the output
+  # in D".
+  rm -rf conftest.dir
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_$1_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp`
+  fi
+  am__universal=false
+  m4_case([$1], [CC],
+    [case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac],
+    [CXX],
+    [case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac])
+
+  for depmode in $am_compiler_list; do
+    # Setup a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with
+      # Solaris 10 /bin/sh.
+      echo '/* dummy */' > sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    # We check with '-c' and '-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle '-M -o', and we need to detect this.  Also, some Intel
+    # versions had trouble with output in subdirs.
+    am__obj=sub/conftest.${OBJEXT-o}
+    am__minus_obj="-o $am__obj"
+    case $depmode in
+    gcc)
+      # This depmode causes a compiler race in universal mode.
+      test "$am__universal" = false || continue
+      ;;
+    nosideeffect)
+      # After this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested.
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    msvc7 | msvc7msys | msvisualcpp | msvcmsys)
+      # This compiler won't grok '-c -o', but also, the minuso test has
+      # not run yet.  These depmodes are late enough in the game, and
+      # so weak that their functioning should not be impacted.
+      am__obj=conftest.${OBJEXT-o}
+      am__minus_obj=
+      ;;
+    none) break ;;
+    esac
+    if depmode=$depmode \
+       source=sub/conftest.c object=$am__obj \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options, it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_$1_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_$1_dependencies_compiler_type=none
+fi
+])
+AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type])
+AM_CONDITIONAL([am__fastdep$1], [
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_$1_dependencies_compiler_type" = gcc3])
+])
+
+
+# AM_SET_DEPDIR
+# -------------
+# Choose a directory name for dependency files.
+# This macro is AC_REQUIREd in _AM_DEPENDENCIES.
+AC_DEFUN([AM_SET_DEPDIR],
+[AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl
+])
+
+
+# AM_DEP_TRACK
+# ------------
+AC_DEFUN([AM_DEP_TRACK],
+[AC_ARG_ENABLE([dependency-tracking], [dnl
+AS_HELP_STRING(
+  [--enable-dependency-tracking],
+  [do not reject slow dependency extractors])
+AS_HELP_STRING(
+  [--disable-dependency-tracking],
+  [speeds up one-time build])])
+if test "x$enable_dependency_tracking" != xno; then
+  am_depcomp="$ac_aux_dir/depcomp"
+  AMDEPBACKSLASH='\'
+  am__nodep='_no'
+fi
+AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno])
+AC_SUBST([AMDEPBACKSLASH])dnl
+_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl
+AC_SUBST([am__nodep])dnl
+_AM_SUBST_NOTMAKE([am__nodep])dnl
+])
+
+# Generate code to set up dependency tracking.              -*- Autoconf -*-
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+
+# _AM_OUTPUT_DEPENDENCY_COMMANDS
+# ------------------------------
+AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
+[{
+  # Older Autoconf quotes --file arguments for eval, but not when files
+  # are listed without --file.  Let's play safe and only enable the eval
+  # if we detect the quoting.
+  case $CONFIG_FILES in
+  *\'*) eval set x "$CONFIG_FILES" ;;
+  *)   set x $CONFIG_FILES ;;
+  esac
+  shift
+  for mf
+  do
+    # Strip MF so we end up with the name of the file.
+    mf=`echo "$mf" | sed -e 's/:.*$//'`
+    # Check whether this is an Automake generated Makefile or not.
+    # We used to match only the files named 'Makefile.in', but
+    # some people rename them; so instead we look at the file content.
+    # Grep'ing the first line is not enough: some people post-process
+    # each Makefile.in and add a new line on top of each file to say so.
+    # Grep'ing the whole file is not good either: AIX grep has a line
+    # limit of 2048, but all seds we know of understand at least 4000.
+    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
+      dirpart=`AS_DIRNAME("$mf")`
+    else
+      continue
+    fi
+    # Extract the definition of DEPDIR, am__include, and am__quote
+    # from the Makefile without running 'make'.
+    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+    test -z "$DEPDIR" && continue
+    am__include=`sed -n 's/^am__include = //p' < "$mf"`
+    test -z "$am__include" && continue
+    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+    # Find all dependency output files, they are included files with
+    # $(DEPDIR) in their names.  We invoke sed twice because it is the
+    # simplest approach to changing $(DEPDIR) to its actual value in the
+    # expansion.
+    for file in `sed -n "
+      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
+      # Make sure the directory exists.
+      test -f "$dirpart/$file" && continue
+      fdir=`AS_DIRNAME(["$file"])`
+      AS_MKDIR_P([$dirpart/$fdir])
+      # echo "creating $dirpart/$file"
+      echo '# dummy' > "$dirpart/$file"
+    done
+  done
+}
+])# _AM_OUTPUT_DEPENDENCY_COMMANDS
+
+
+# AM_OUTPUT_DEPENDENCY_COMMANDS
+# -----------------------------
+# This macro should only be invoked once -- use via AC_REQUIRE.
+#
+# This code is only required when automatic dependency tracking
+# is enabled.  FIXME.  This creates each '.P' file that we will
+# need in order to bootstrap the dependency handling code.
+AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
+[AC_CONFIG_COMMANDS([depfiles],
+     [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
+     [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
+])
+
+# Do all the work for Automake.                             -*- Autoconf -*-
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This macro actually does too much.  Some checks are only needed if
+# your package does certain things.  But this isn't really a big deal.
+
+dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O.
+m4_define([AC_PROG_CC],
+m4_defn([AC_PROG_CC])
+[_AM_PROG_CC_C_O
+])
+
+# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
+# AM_INIT_AUTOMAKE([OPTIONS])
+# -----------------------------------------------
+# The call with PACKAGE and VERSION arguments is the old style
+# call (pre autoconf-2.50), which is being phased out.  PACKAGE
+# and VERSION should now be passed to AC_INIT and removed from
+# the call to AM_INIT_AUTOMAKE.
+# We support both call styles for the transition.  After
+# the next Automake release, Autoconf can make the AC_INIT
+# arguments mandatory, and then we can depend on a new Autoconf
+# release and drop the old call support.
+AC_DEFUN([AM_INIT_AUTOMAKE],
+[AC_PREREQ([2.65])dnl
+dnl Autoconf wants to disallow AM_ names.  We explicitly allow
+dnl the ones we care about.
+m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
+AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
+AC_REQUIRE([AC_PROG_INSTALL])dnl
+if test "`cd $srcdir && pwd`" != "`pwd`"; then
+  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
+  # is not polluted with repeated "-I."
+  AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
+  # test to see if srcdir already configured
+  if test -f $srcdir/config.status; then
+    AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
+  fi
+fi
+
+# test whether we have cygpath
+if test -z "$CYGPATH_W"; then
+  if (cygpath --version) >/dev/null 2>/dev/null; then
+    CYGPATH_W='cygpath -w'
+  else
+    CYGPATH_W=echo
+  fi
+fi
+AC_SUBST([CYGPATH_W])
+
+# Define the identity of the package.
+dnl Distinguish between old-style and new-style calls.
+m4_ifval([$2],
+[AC_DIAGNOSE([obsolete],
+             [$0: two- and three-argument forms are deprecated.])
+m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
+ AC_SUBST([PACKAGE], [$1])dnl
+ AC_SUBST([VERSION], [$2])],
+[_AM_SET_OPTIONS([$1])dnl
+dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
+m4_if(
+  m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]),
+  [ok:ok],,
+  [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
+ AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
+ AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl
+
+_AM_IF_OPTION([no-define],,
+[AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package])
+ AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl
+
+# Some tools Automake needs.
+AC_REQUIRE([AM_SANITY_CHECK])dnl
+AC_REQUIRE([AC_ARG_PROGRAM])dnl
+AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}])
+AM_MISSING_PROG([AUTOCONF], [autoconf])
+AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}])
+AM_MISSING_PROG([AUTOHEADER], [autoheader])
+AM_MISSING_PROG([MAKEINFO], [makeinfo])
+AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
+AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
+AC_REQUIRE([AC_PROG_MKDIR_P])dnl
+# For better backward compatibility.  To be removed once Automake 1.9.x
+# dies out for good.  For more background, see:
+# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
+# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
+AC_SUBST([mkdir_p], ['$(MKDIR_P)'])
+# We need awk for the "check" target.  The system "awk" is bad on
+# some platforms.
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([AC_PROG_MAKE_SET])dnl
+AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
+	      [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
+			     [_AM_PROG_TAR([v7])])])
+_AM_IF_OPTION([no-dependencies],,
+[AC_PROVIDE_IFELSE([AC_PROG_CC],
+		  [_AM_DEPENDENCIES([CC])],
+		  [m4_define([AC_PROG_CC],
+			     m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_CXX],
+		  [_AM_DEPENDENCIES([CXX])],
+		  [m4_define([AC_PROG_CXX],
+			     m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_OBJC],
+		  [_AM_DEPENDENCIES([OBJC])],
+		  [m4_define([AC_PROG_OBJC],
+			     m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_OBJCXX],
+		  [_AM_DEPENDENCIES([OBJCXX])],
+		  [m4_define([AC_PROG_OBJCXX],
+			     m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl
+])
+AC_REQUIRE([AM_SILENT_RULES])dnl
+dnl The testsuite driver may need to know about EXEEXT, so add the
+dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen.  This
+dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below.
+AC_CONFIG_COMMANDS_PRE(dnl
+[m4_provide_if([_AM_COMPILER_EXEEXT],
+  [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl
+
+# POSIX will say in a future version that running "rm -f" with no argument
+# is OK; and we want to be able to make that assumption in our Makefile
+# recipes.  So use an aggressive probe to check that the usage we want is
+# actually supported "in the wild" to an acceptable degree.
+# See automake bug#10828.
+# To make any issue more visible, cause the running configure to be aborted
+# by default if the 'rm' program in use doesn't match our expectations; the
+# user can still override this though.
+if rm -f && rm -fr && rm -rf; then : OK; else
+  cat >&2 <<'END'
+Oops!
+
+Your 'rm' program seems unable to run without file operands specified
+on the command line, even when the '-f' option is present.  This is contrary
+to the behaviour of most rm programs out there, and not conforming with
+the upcoming POSIX standard: <http://austingroupbugs.net/view.php?id=542>
+
+Please tell bug-automake@gnu.org about your system, including the value
+of your $PATH and any error possibly output before this message.  This
+can help us improve future automake versions.
+
+END
+  if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then
+    echo 'Configuration will proceed anyway, since you have set the' >&2
+    echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2
+    echo >&2
+  else
+    cat >&2 <<'END'
+Aborting the configuration process, to ensure you take notice of the issue.
+
+You can download and install GNU coreutils to get an 'rm' implementation
+that behaves properly: <http://www.gnu.org/software/coreutils/>.
+
+If you want to complete the configuration process using your problematic
+'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
+to "yes", and re-run configure.
+
+END
+    AC_MSG_ERROR([Your 'rm' program is bad, sorry.])
+  fi
+fi])
+
+dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion.  Do not
+dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further
+dnl mangled by Autoconf and run in a shell conditional statement.
+m4_define([_AC_COMPILER_EXEEXT],
+m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])])
+
+# When config.status generates a header, we must update the stamp-h file.
+# This file resides in the same directory as the config header
+# that is generated.  The stamp files are numbered to have different names.
+
+# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
+# loop where config.status creates the headers, so we can generate
+# our stamp files there.
+AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
+[# Compute $1's index in $config_headers.
+_am_arg=$1
+_am_stamp_count=1
+for _am_header in $config_headers :; do
+  case $_am_header in
+    $_am_arg | $_am_arg:* )
+      break ;;
+    * )
+      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
+  esac
+done
+echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_SH
+# ------------------
+# Define $install_sh.
+AC_DEFUN([AM_PROG_INSTALL_SH],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+if test x"${install_sh}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
+  *)
+    install_sh="\${SHELL} $am_aux_dir/install-sh"
+  esac
+fi
+AC_SUBST([install_sh])])
+
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# Check whether the underlying file-system supports filenames
+# with a leading dot.  For instance MS-DOS doesn't.
+AC_DEFUN([AM_SET_LEADING_DOT],
+[rm -rf .tst 2>/dev/null
+mkdir .tst 2>/dev/null
+if test -d .tst; then
+  am__leading_dot=.
+else
+  am__leading_dot=_
+fi
+rmdir .tst 2>/dev/null
+AC_SUBST([am__leading_dot])])
+
+# Add --enable-maintainer-mode option to configure.         -*- Autoconf -*-
+# From Jim Meyering
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_MAINTAINER_MODE([DEFAULT-MODE])
+# ----------------------------------
+# Control maintainer-specific portions of Makefiles.
+# Default is to disable them, unless 'enable' is passed literally.
+# For symmetry, 'disable' may be passed as well.  Anyway, the user
+# can override the default with the --enable/--disable switch.
+AC_DEFUN([AM_MAINTAINER_MODE],
+[m4_case(m4_default([$1], [disable]),
+       [enable], [m4_define([am_maintainer_other], [disable])],
+       [disable], [m4_define([am_maintainer_other], [enable])],
+       [m4_define([am_maintainer_other], [enable])
+        m4_warn([syntax], [unexpected argument to AM@&t@_MAINTAINER_MODE: $1])])
+AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles])
+  dnl maintainer-mode's default is 'disable' unless 'enable' is passed
+  AC_ARG_ENABLE([maintainer-mode],
+    [AS_HELP_STRING([--]am_maintainer_other[-maintainer-mode],
+      am_maintainer_other[ make rules and dependencies not useful
+      (and sometimes confusing) to the casual installer])],
+    [USE_MAINTAINER_MODE=$enableval],
+    [USE_MAINTAINER_MODE=]m4_if(am_maintainer_other, [enable], [no], [yes]))
+  AC_MSG_RESULT([$USE_MAINTAINER_MODE])
+  AM_CONDITIONAL([MAINTAINER_MODE], [test $USE_MAINTAINER_MODE = yes])
+  MAINT=$MAINTAINER_MODE_TRUE
+  AC_SUBST([MAINT])dnl
+]
+)
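+
+# For example (an illustrative usage note, not part of the macro above),
+# calling
+#
+#   AM_MAINTAINER_MODE([enable])
+#
+# in configure.ac turns the maintainer-only rules on by default while
+# still honoring a user's --disable-maintainer-mode.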
+
+# Check to see how 'make' treats includes.	            -*- Autoconf -*-
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_MAKE_INCLUDE()
+# -----------------
+# Check to see how make treats includes.
+AC_DEFUN([AM_MAKE_INCLUDE],
+[am_make=${MAKE-make}
+cat > confinc << 'END'
+am__doit:
+	@echo this is the am__doit target
+.PHONY: am__doit
+END
+# If we don't find an include directive, just comment out the code.
+AC_MSG_CHECKING([for style of include used by $am_make])
+am__include="#"
+am__quote=
+_am_result=none
+# First try GNU make style include.
+echo "include confinc" > confmf
+# Ignore all kinds of additional output from 'make'.
+case `$am_make -s -f confmf 2> /dev/null` in #(
+*the\ am__doit\ target*)
+  am__include=include
+  am__quote=
+  _am_result=GNU
+  ;;
+esac
+# Now try BSD make style include.
+if test "$am__include" = "#"; then
+   echo '.include "confinc"' > confmf
+   case `$am_make -s -f confmf 2> /dev/null` in #(
+   *the\ am__doit\ target*)
+     am__include=.include
+     am__quote="\""
+     _am_result=BSD
+     ;;
+   esac
+fi
+AC_SUBST([am__include])
+AC_SUBST([am__quote])
+AC_MSG_RESULT([$_am_result])
+rm -f confinc confmf
+])
+
+# Fake the existence of programs that GNU maintainers use.  -*- Autoconf -*-
+
+# Copyright (C) 1997-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_MISSING_PROG(NAME, PROGRAM)
+# ------------------------------
+AC_DEFUN([AM_MISSING_PROG],
+[AC_REQUIRE([AM_MISSING_HAS_RUN])
+$1=${$1-"${am_missing_run}$2"}
+AC_SUBST($1)])
+
+# AM_MISSING_HAS_RUN
+# ------------------
+# Define MISSING if not defined so far and test if it is modern enough.
+# If it is, set am_missing_run to use it, otherwise, to nothing.
+AC_DEFUN([AM_MISSING_HAS_RUN],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+AC_REQUIRE_AUX_FILE([missing])dnl
+if test x"${MISSING+set}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+  *)
+    MISSING="\${SHELL} $am_aux_dir/missing" ;;
+  esac
+fi
+# Use eval to expand $SHELL
+if eval "$MISSING --is-lightweight"; then
+  am_missing_run="$MISSING "
+else
+  am_missing_run=
+  AC_MSG_WARN(['missing' script is too old or missing])
+fi
+])
+
+# Helper functions for option handling.                     -*- Autoconf -*-
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_MANGLE_OPTION(NAME)
+# -----------------------
+AC_DEFUN([_AM_MANGLE_OPTION],
+[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])])
+
+# _AM_SET_OPTION(NAME)
+# --------------------
+# Set option NAME.  Presently that only means defining a flag for this option.
+AC_DEFUN([_AM_SET_OPTION],
+[m4_define(_AM_MANGLE_OPTION([$1]), [1])])
+
+# _AM_SET_OPTIONS(OPTIONS)
+# ------------------------
+# OPTIONS is a space-separated list of Automake options.
+AC_DEFUN([_AM_SET_OPTIONS],
+[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])])
+
+# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET])
+# -------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+AC_DEFUN([_AM_IF_OPTION],
+[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_PROG_CC_C_O
+# ---------------
+# Like AC_PROG_CC_C_O, but changed for automake.  We rewrite AC_PROG_CC
+# to automatically call this.
+AC_DEFUN([_AM_PROG_CC_C_O],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+AC_REQUIRE_AUX_FILE([compile])dnl
+AC_LANG_PUSH([C])dnl
+AC_CACHE_CHECK(
+  [whether $CC understands -c and -o together],
+  [am_cv_prog_cc_c_o],
+  [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])])
+  # Make sure it works both with $CC and with simple cc.
+  # Following AC_PROG_CC_C_O, we do the test twice because some
+  # compilers refuse to overwrite an existing .o file with -o,
+  # though they will create one.
+  am_cv_prog_cc_c_o=yes
+  for am_i in 1 2; do
+    if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \
+         && test -f conftest2.$ac_objext; then
+      : OK
+    else
+      am_cv_prog_cc_c_o=no
+      break
+    fi
+  done
+  rm -f core conftest*
+  unset am_i])
+if test "$am_cv_prog_cc_c_o" != yes; then
+   # Losing compiler, so override with the script.
+   # FIXME: It is wrong to rewrite CC.
+   # But if we don't then we get into trouble of one sort or another.
+   # A longer-term fix would be to have automake use am__CC in this case,
+   # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
+   CC="$am_aux_dir/compile $CC"
+fi
+AC_LANG_POP([C])])
+
+# For backward compatibility.
+AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])])
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_RUN_LOG(COMMAND)
+# -------------------
+# Run COMMAND, save the exit status in ac_status, and log it.
+# (This has been adapted from Autoconf's _AC_RUN_LOG macro.)
+AC_DEFUN([AM_RUN_LOG],
+[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD
+   ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   (exit $ac_status); }])
+
+# Check to make sure that the build environment is sane.    -*- Autoconf -*-
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_SANITY_CHECK
+# ---------------
+AC_DEFUN([AM_SANITY_CHECK],
+[AC_MSG_CHECKING([whether build environment is sane])
+# Reject unsafe characters in $srcdir or the absolute working directory
+# name.  Accept space and tab only in the latter.
+am_lf='
+'
+case `pwd` in
+  *[[\\\"\#\$\&\'\`$am_lf]]*)
+    AC_MSG_ERROR([unsafe absolute working directory name]);;
+esac
+case $srcdir in
+  *[[\\\"\#\$\&\'\`$am_lf\ \	]]*)
+    AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);;
+esac
+
+# Do 'set' in a subshell so we don't clobber the current shell's
+# arguments.  Must try -L first in case configure is actually a
+# symlink; some systems play weird games with the mod time of symlinks
+# (eg FreeBSD returns the mod time of the symlink's containing
+# directory).
+if (
+   am_has_slept=no
+   for am_try in 1 2; do
+     echo "timestamp, slept: $am_has_slept" > conftest.file
+     set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
+     if test "$[*]" = "X"; then
+	# -L didn't work.
+	set X `ls -t "$srcdir/configure" conftest.file`
+     fi
+     if test "$[*]" != "X $srcdir/configure conftest.file" \
+	&& test "$[*]" != "X conftest.file $srcdir/configure"; then
+
+	# If neither matched, then we have a broken ls.  This can happen
+	# if, for instance, CONFIG_SHELL is bash and it inherits a
+	# broken ls alias from the environment.  This has actually
+	# happened.  Such a system could not be considered "sane".
+	AC_MSG_ERROR([ls -t appears to fail.  Make sure there is not a broken
+  alias in your environment])
+     fi
+     if test "$[2]" = conftest.file || test $am_try -eq 2; then
+       break
+     fi
+     # Just in case.
+     sleep 1
+     am_has_slept=yes
+   done
+   test "$[2]" = conftest.file
+   )
+then
+   # Ok.
+   :
+else
+   AC_MSG_ERROR([newly created file is older than distributed files!
+Check your system clock])
+fi
+AC_MSG_RESULT([yes])
+# If we didn't sleep, we still need to ensure time stamps of config.status and
+# generated files are strictly newer.
+am_sleep_pid=
+if grep 'slept: no' conftest.file >/dev/null 2>&1; then
+  ( sleep 1 ) &
+  am_sleep_pid=$!
+fi
+AC_CONFIG_COMMANDS_PRE(
+  [AC_MSG_CHECKING([that generated files are newer than configure])
+   if test -n "$am_sleep_pid"; then
+     # Hide warnings about reused PIDs.
+     wait $am_sleep_pid 2>/dev/null
+   fi
+   AC_MSG_RESULT([done])])
+rm -f conftest.file
+])
+
+# Copyright (C) 2009-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_SILENT_RULES([DEFAULT])
+# --------------------------
+# Enable less verbose build rules; with the default set to DEFAULT
+# ("yes" being less verbose, "no" or empty being verbose).
+AC_DEFUN([AM_SILENT_RULES],
+[AC_ARG_ENABLE([silent-rules], [dnl
+AS_HELP_STRING(
+  [--enable-silent-rules],
+  [less verbose build output (undo: "make V=1")])
+AS_HELP_STRING(
+  [--disable-silent-rules],
+  [verbose build output (undo: "make V=0")])dnl
+])
+case $enable_silent_rules in @%:@ (((
+  yes) AM_DEFAULT_VERBOSITY=0;;
+   no) AM_DEFAULT_VERBOSITY=1;;
+    *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);;
+esac
+dnl
+dnl A few 'make' implementations (e.g., NonStop OS and NextStep)
+dnl do not support nested variable expansions.
+dnl See automake bug#9928 and bug#10237.
+am_make=${MAKE-make}
+AC_CACHE_CHECK([whether $am_make supports nested variables],
+   [am_cv_make_support_nested_variables],
+   [if AS_ECHO([['TRUE=$(BAR$(V))
+BAR0=false
+BAR1=true
+V=1
+am__doit:
+	@$(TRUE)
+.PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then
+  am_cv_make_support_nested_variables=yes
+else
+  am_cv_make_support_nested_variables=no
+fi])
+if test $am_cv_make_support_nested_variables = yes; then
+  dnl Using '$V' instead of '$(V)' breaks IRIX make.
+  AM_V='$(V)'
+  AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
+else
+  AM_V=$AM_DEFAULT_VERBOSITY
+  AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY
+fi
+AC_SUBST([AM_V])dnl
+AM_SUBST_NOTMAKE([AM_V])dnl
+AC_SUBST([AM_DEFAULT_V])dnl
+AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl
+AC_SUBST([AM_DEFAULT_VERBOSITY])dnl
+AM_BACKSLASH='\'
+AC_SUBST([AM_BACKSLASH])dnl
+_AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl
+])
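+
+# For example (an illustrative usage note, not part of the macro above),
+# a configure.ac can default to terse build output with
+#
+#   AM_SILENT_RULES([yes])
+#
+# and a user can still restore full command echoing with "make V=1".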
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_STRIP
+# ---------------------
+# One issue with vendor 'install' (even GNU) is that you can't
+# specify the program used to strip binaries.  This is especially
+# annoying in cross-compiling environments, where the build's strip
+# is unlikely to handle the host's binaries.
+# Fortunately install-sh will honor a STRIPPROG variable, so we
+# always use install-sh in "make install-strip", and initialize
+# STRIPPROG with the value of the STRIP variable (set by the user).
+AC_DEFUN([AM_PROG_INSTALL_STRIP],
+[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
+# Installed binaries are usually stripped using 'strip' when the user
+# runs "make install-strip".  However 'strip' might not be the right
+# tool to use in cross-compilation environments, therefore Automake
+# will honor the 'STRIP' environment variable to overrule this program.
+dnl Don't test for $cross_compiling = yes, because it might be 'maybe'.
+if test "$cross_compiling" != no; then
+  AC_CHECK_TOOL([STRIP], [strip], :)
+fi
+INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+AC_SUBST([INSTALL_STRIP_PROGRAM])])
+
+# Copyright (C) 2006-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_SUBST_NOTMAKE(VARIABLE)
+# ---------------------------
+# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in.
+# This macro is traced by Automake.
+AC_DEFUN([_AM_SUBST_NOTMAKE])
+
+# AM_SUBST_NOTMAKE(VARIABLE)
+# --------------------------
+# Public sister of _AM_SUBST_NOTMAKE.
+AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])
+
+# Check how to create a tarball.                            -*- Autoconf -*-
+
+# Copyright (C) 2004-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_PROG_TAR(FORMAT)
+# --------------------
+# Check how to create a tarball in format FORMAT.
+# FORMAT should be one of 'v7', 'ustar', or 'pax'.
+#
+# Substitute a variable $(am__tar) that is a command
+# writing to stdout a FORMAT-tarball containing the directory
+# $tardir.
+#     tardir=directory && $(am__tar) > result.tar
+#
+# Substitute a variable $(am__untar) that extracts such
+# a tarball read from stdin.
+#     $(am__untar) < result.tar
+#
+AC_DEFUN([_AM_PROG_TAR],
+[# Always define AMTAR for backward compatibility.  Yes, it's still used
+# in the wild :-(  We should find a proper way to deprecate it ...
+AC_SUBST([AMTAR], ['$${TAR-tar}'])
+
+# We'll loop over all known methods to create a tar archive until one works.
+_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none'
+
+m4_if([$1], [v7],
+  [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'],
+
+  [m4_case([$1],
+    [ustar],
+     [# The POSIX 1988 'ustar' format is defined with fixed-size fields.
+      # There is notably a 21-bit limit for the UID and the GID.  In fact,
+      # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343
+      # and bug#13588).
+      am_max_uid=2097151 # 2^21 - 1
+      am_max_gid=$am_max_uid
+      # The $UID and $GID variables are not portable, so we need to resort
+      # to the POSIX-mandated id(1) utility.  Errors in the 'id' calls
+      # below are definitely unexpected, so allow the users to see them
+      # (that is, avoid stderr redirection).
+      am_uid=`id -u || echo unknown`
+      am_gid=`id -g || echo unknown`
+      AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format])
+      if test $am_uid -le $am_max_uid; then
+         AC_MSG_RESULT([yes])
+      else
+         AC_MSG_RESULT([no])
+         _am_tools=none
+      fi
+      AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format])
+      if test $am_gid -le $am_max_gid; then
+         AC_MSG_RESULT([yes])
+      else
+        AC_MSG_RESULT([no])
+        _am_tools=none
+      fi],
+
+  [pax],
+    [],
+
+  [m4_fatal([Unknown tar format])])
+
+  AC_MSG_CHECKING([how to create a $1 tar archive])
+
+  # Go ahead even if we have the value already cached.  We do so because we
+  # need to set the values for the 'am__tar' and 'am__untar' variables.
+  _am_tools=${am_cv_prog_tar_$1-$_am_tools}
+
+  for _am_tool in $_am_tools; do
+    case $_am_tool in
+    gnutar)
+      for _am_tar in tar gnutar gtar; do
+        AM_RUN_LOG([$_am_tar --version]) && break
+      done
+      am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"'
+      am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"'
+      am__untar="$_am_tar -xf -"
+      ;;
+    plaintar)
+      # Must skip GNU tar: if it does not support --format= it doesn't create
+      # ustar tarball either.
+      (tar --version) >/dev/null 2>&1 && continue
+      am__tar='tar chf - "$$tardir"'
+      am__tar_='tar chf - "$tardir"'
+      am__untar='tar xf -'
+      ;;
+    pax)
+      am__tar='pax -L -x $1 -w "$$tardir"'
+      am__tar_='pax -L -x $1 -w "$tardir"'
+      am__untar='pax -r'
+      ;;
+    cpio)
+      am__tar='find "$$tardir" -print | cpio -o -H $1 -L'
+      am__tar_='find "$tardir" -print | cpio -o -H $1 -L'
+      am__untar='cpio -i -H $1 -d'
+      ;;
+    none)
+      am__tar=false
+      am__tar_=false
+      am__untar=false
+      ;;
+    esac
+
+    # If the value was cached, stop now.  We just wanted to have am__tar
+    # and am__untar set.
+    test -n "${am_cv_prog_tar_$1}" && break
+
+    # tar/untar a dummy directory, and stop if the command works.
+    rm -rf conftest.dir
+    mkdir conftest.dir
+    echo GrepMe > conftest.dir/file
+    AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar])
+    rm -rf conftest.dir
+    if test -s conftest.tar; then
+      AM_RUN_LOG([$am__untar <conftest.tar])
+      AM_RUN_LOG([cat conftest.dir/file])
+      grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
+    fi
+  done
+  rm -rf conftest.dir
+
+  AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool])
+  AC_MSG_RESULT([$am_cv_prog_tar_$1])])
+
+AC_SUBST([am__tar])
+AC_SUBST([am__untar])
+]) # _AM_PROG_TAR
+
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+# _AX_CHECK_COMPILER_OPTION_WITH_VAR(language, variable, option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - The variable to add the checked compiler option to.
+#   option   - The compiler flag to check.
+#
+# Add, if supported, the specified compiler flag for the compiler selected
+# for the specified language to the provided variable.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_AX_CHECK_COMPILER_OPTION_WITH_VAR],
+[
+    AC_LANG_PUSH($1)
+    AC_MSG_CHECKING([whether the _AC_LANG compiler understands $3])
+    SAVE_[]_AC_LANG_PREFIX[]FLAGS=${_AC_LANG_PREFIX[]FLAGS}
+    SAVE_$2=${$2}
+    _AC_LANG_PREFIX[]FLAGS=$3
+    AC_TRY_COMPILE(,[;],AC_MSG_RESULT([yes]); _AC_LANG_PREFIX[]FLAGS="${SAVE_[]_AC_LANG_PREFIX[]FLAGS}"; $2="${SAVE_$2} $3",AC_MSG_RESULT([no]); _AC_LANG_PREFIX[]FLAGS=${SAVE_[]_AC_LANG_PREFIX[]FLAGS}; $2=${SAVE_$2});
+    unset SAVE_[]_AC_LANG_PREFIX[]FLAGS
+    unset SAVE_$2
+    AC_LANG_POP($1)
+])
+
+#
+# _AX_CHECK_COMPILER_OPTION(language, option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   option   - The compiler flag to check.
+#
+# Add, if supported, the specified compiler flag for the compiler selected
+# for the specified language.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_AX_CHECK_COMPILER_OPTION],
+[
+    AC_LANG_PUSH($1)
+    AC_MSG_CHECKING([whether the _AC_LANG compiler understands $2])
+    SAVE_[]_AC_LANG_PREFIX[]FLAGS=${_AC_LANG_PREFIX[]FLAGS}
+    _AC_LANG_PREFIX[]FLAGS=$2
+    AC_TRY_COMPILE(,[;],AC_MSG_RESULT([yes]); _AC_LANG_PREFIX[]FLAGS="${SAVE_[]_AC_LANG_PREFIX[]FLAGS} $2",AC_MSG_RESULT([no]); _AC_LANG_PREFIX[]FLAGS=${SAVE_[]_AC_LANG_PREFIX[]FLAGS});
+    unset SAVE_[]_AC_LANG_PREFIX[]FLAGS
+    AC_LANG_POP($1)
+])
+
+#
+# AX_CHECK_COMPILER_OPTION(language, [variable,] option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - If supplied, the variable to add the checked compiler option
+#              to.
+#   option   - The compiler flag to check.
+#
+# Add, if supported, the specified compiler flag for the compiler selected
+# for the specified language, optionally saving it to the specified variable.
+# ----------------------------------------------------------------------------
+AC_DEFUN([AX_CHECK_COMPILER_OPTION],
+[
+    ifelse($#,
+        3,
+        [_AX_CHECK_COMPILER_OPTION_WITH_VAR($1, $2, $3)],
+        [_AX_CHECK_COMPILER_OPTION($1, $2)])
+])
+
+#
+# AX_CHECK_COMPILER_OPTIONS(language, [variable,] option ...)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - If supplied, the variable to add the checked compiler option
+#              to.
+#   options  - The compiler flags to check.
+#
+# Add, if supported, the specified compiler flags for the compiler selected
+# for the specified language, optionally saving them to the specified variable.
+# ----------------------------------------------------------------------------
+AC_DEFUN([AX_CHECK_COMPILER_OPTIONS],
+[
+    ifelse($#,
+        3,
+        [
+            for ax_compiler_option in [$3]; do
+                _AX_CHECK_COMPILER_OPTION_WITH_VAR([$1], [$2], $ax_compiler_option)
+            done
+	],
+        [
+            for ax_compiler_option in [$2]; do
+                _AX_CHECK_COMPILER_OPTION([$1], $ax_compiler_option)
+            done
+	])
+])
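+
+# For example (an illustrative usage note, not part of the macros above),
+# a configure.ac might probe warning flags for both compilers, either
+# appending them directly or collecting them in a named variable:
+#
+#   AX_CHECK_COMPILER_OPTION([C], [-Wall])
+#   AX_CHECK_COMPILER_OPTIONS([C++], [CXXFLAGS], [-Wall -Wextra -Wshadow])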
+
+# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+m4_define([_LT_COPYING], [dnl
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+])
+
+# serial 57 LT_INIT
+
+
+# LT_PREREQ(VERSION)
+# ------------------
+# Complain and exit if this libtool version is less than VERSION.
+m4_defun([LT_PREREQ],
+[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
+       [m4_default([$3],
+		   [m4_fatal([Libtool version $1 or higher is required],
+		             63)])],
+       [$2])])
+
+
+# _LT_CHECK_BUILDDIR
+# ------------------
+# Complain if the absolute build directory name contains unusual characters
+m4_defun([_LT_CHECK_BUILDDIR],
+[case `pwd` in
+  *\ * | *\	*)
+    AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
+esac
+])
+
+
+# LT_INIT([OPTIONS])
+# ------------------
+AC_DEFUN([LT_INIT],
+[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT
+AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_BEFORE([$0], [LT_LANG])dnl
+AC_BEFORE([$0], [LT_OUTPUT])dnl
+AC_BEFORE([$0], [LTDL_INIT])dnl
+m4_require([_LT_CHECK_BUILDDIR])dnl
+
+dnl Autoconf doesn't catch unexpanded LT_ macros by default:
+m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
+m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
+dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
+dnl unless we require an AC_DEFUNed macro:
+AC_REQUIRE([LTOPTIONS_VERSION])dnl
+AC_REQUIRE([LTSUGAR_VERSION])dnl
+AC_REQUIRE([LTVERSION_VERSION])dnl
+AC_REQUIRE([LTOBSOLETE_VERSION])dnl
+m4_require([_LT_PROG_LTMAIN])dnl
+
+_LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}])
+
+dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+AC_SUBST(LIBTOOL)dnl
+
+_LT_SETUP
+
+# Only expand once:
+m4_define([LT_INIT])
+])# LT_INIT
+
+# Old names:
+AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
+AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
+dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
+
+
+# _LT_CC_BASENAME(CC)
+# -------------------
+# Calculate cc_basename.  Skip known compiler wrappers and cross-prefix.
+m4_defun([_LT_CC_BASENAME],
+[for cc_temp in $1""; do
+  case $cc_temp in
+    compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
+    distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
+    \-*) ;;
+    *) break;;
+  esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+])
+
+
+# _LT_FILEUTILS_DEFAULTS
+# ----------------------
+# It is okay to use these file commands and assume they have been set
+# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'.
+m4_defun([_LT_FILEUTILS_DEFAULTS],
+[: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+])# _LT_FILEUTILS_DEFAULTS
+
+
+# _LT_SETUP
+# ---------
+m4_defun([_LT_SETUP],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+
+_LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl
+dnl
+_LT_DECL([], [host_alias], [0], [The host system])dnl
+_LT_DECL([], [host], [0])dnl
+_LT_DECL([], [host_os], [0])dnl
+dnl
+_LT_DECL([], [build_alias], [0], [The build system])dnl
+_LT_DECL([], [build], [0])dnl
+_LT_DECL([], [build_os], [0])dnl
+dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+dnl
+AC_REQUIRE([AC_PROG_LN_S])dnl
+test -z "$LN_S" && LN_S="ln -s"
+_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
+dnl
+AC_REQUIRE([LT_CMD_MAX_LEN])dnl
+_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
+_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
+m4_require([_LT_CMD_RELOAD])dnl
+m4_require([_LT_CHECK_MAGIC_METHOD])dnl
+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
+m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_WITH_SYSROOT])dnl
+
+_LT_CONFIG_LIBTOOL_INIT([
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes INIT.
+if test -n "\${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+])
+if test -n "${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+_LT_CHECK_OBJDIR
+
+m4_require([_LT_TAG_COMPILER])dnl
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test "X${COLLECT_NAMES+set}" != Xset; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+_LT_CC_BASENAME([$compiler])
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    _LT_PATH_MAGIC
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+LT_SUPPORTED_TAG([CC])
+_LT_LANG_C_CONFIG
+_LT_LANG_DEFAULT_CONFIG
+_LT_CONFIG_COMMANDS
+])# _LT_SETUP
+
+
+# _LT_PREPARE_SED_QUOTE_VARS
+# --------------------------
+# Define a few sed substitution that help us do robust quoting.
+m4_defun([_LT_PREPARE_SED_QUOTE_VARS],
+[# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\([["`\\]]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+])
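+
+# For example, piping the (hypothetical) string   echo "$HOME" `pwd`
+# through $SED "$sed_quote_subst" yields   echo \"\$HOME\" \`pwd\`,
+# whereas double_quote_subst leaves the variable reference alone:
+#   echo \"$HOME\" \`pwd\`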
+
+# _LT_PROG_LTMAIN
+# ---------------
+# Note that this code is called both from `configure', and `config.status'
+# now that we use AC_CONFIG_COMMANDS to generate libtool.  Notably,
+# `config.status' has no value for ac_aux_dir unless we are using Automake,
+# so we pass a copy along to make sure it has a sensible value anyway.
+m4_defun([_LT_PROG_LTMAIN],
+[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
+_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
+ltmain="$ac_aux_dir/ltmain.sh"
+])# _LT_PROG_LTMAIN
+
+
+
+# So that we can recreate a full libtool script including additional
+# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
+# in macros and then make a single call at the end using the `libtool'
+# label.
+
+
+# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
+# ----------------------------------------
+# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL_INIT],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_INIT])
+
+
+# _LT_CONFIG_LIBTOOL([COMMANDS])
+# ------------------------------
+# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
+
+
+# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
+# -----------------------------------------------------
+m4_defun([_LT_CONFIG_SAVE_COMMANDS],
+[_LT_CONFIG_LIBTOOL([$1])
+_LT_CONFIG_LIBTOOL_INIT([$2])
+])
+
+
+# _LT_FORMAT_COMMENT([COMMENT])
+# -----------------------------
+# Add leading comment marks to the start of each line, and a trailing
+# full-stop to the whole comment if one is not present already.
+m4_define([_LT_FORMAT_COMMENT],
+[m4_ifval([$1], [
+m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
+              [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
+)])
+
+
+
+
+
+# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
+# -------------------------------------------------------------------
+# CONFIGNAME is the name given to the value in the libtool script.
+# VARNAME is the (base) name used in the configure script.
+# VALUE may be 0, 1 or 2 for a computed quote-escaped value based on
+# VARNAME.  Any other value will be used directly.
+m4_define([_LT_DECL],
+[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
+    [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
+	[m4_ifval([$1], [$1], [$2])])
+    lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
+    m4_ifval([$4],
+	[lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
+    lt_dict_add_subkey([lt_decl_dict], [$2],
+	[tagged?], [m4_ifval([$5], [yes], [no])])])
+])
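+
+# For example, the call made from _LT_SETUP above,
+#   _LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])
+# records LN_S under its own name in the generated libtool script, flags
+# its value for the quote-escaping pass (value 1), attaches the bracketed
+# text as the comment above the setting, and leaves it untagged.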
+
+
+# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
+# --------------------------------------------------------
+m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
+
+
+# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_tag_varnames],
+[_lt_decl_filter([tagged?], [yes], $@)])
+
+
+# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
+# ---------------------------------------------------------
+m4_define([_lt_decl_filter],
+[m4_case([$#],
+  [0], [m4_fatal([$0: too few arguments: $#])],
+  [1], [m4_fatal([$0: too few arguments: $#: $1])],
+  [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
+  [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
+  [lt_dict_filter([lt_decl_dict], $@)])[]dnl
+])
+
+
+# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
+# --------------------------------------------------
+m4_define([lt_decl_quote_varnames],
+[_lt_decl_filter([value], [1], $@)])
+
+
+# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_dquote_varnames],
+[_lt_decl_filter([value], [2], $@)])
+
+
+# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_varnames_tagged],
+[m4_assert([$# <= 2])dnl
+_$0(m4_quote(m4_default([$1], [[, ]])),
+    m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
+    m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
+m4_define([_lt_decl_varnames_tagged],
+[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
+
+
+# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_all_varnames],
+[_$0(m4_quote(m4_default([$1], [[, ]])),
+     m4_if([$2], [],
+	   m4_quote(lt_decl_varnames),
+	m4_quote(m4_shift($@))))[]dnl
+])
+m4_define([_lt_decl_all_varnames],
+[lt_join($@, lt_decl_varnames_tagged([$1],
+			lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
+])
+
+
+# _LT_CONFIG_STATUS_DECLARE([VARNAME])
+# ------------------------------------
+# Quote a variable value, and forward it to `config.status' so that its
+# declaration there will have the same value as in `configure'.  VARNAME
+# must have a single quote delimited value for this to work.
+m4_define([_LT_CONFIG_STATUS_DECLARE],
+[$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`'])
+
+
+# _LT_CONFIG_STATUS_DECLARATIONS
+# ------------------------------
+# We delimit libtool config variables with single quotes, so when
+# we write them to config.status, we have to be sure to quote all
+# embedded single quotes properly.  In configure, this macro expands
+# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
+#
+#    <var>='`$ECHO "$<var>" | $SED "$delay_single_quote_subst"`'
+m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
+    [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAGS
+# ----------------
+# Output comment and list of tags supported by the script
+m4_defun([_LT_LIBTOOL_TAGS],
+[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
+available_tags="_LT_TAGS"dnl
+])
+
+
+# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
+# -----------------------------------
+# Extract the dictionary values for VARNAME (optionally with TAG) and
+# expand to a commented shell variable setting:
+#
+#    # Some comment about what VAR is for.
+#    visible_name=$lt_internal_name
+m4_define([_LT_LIBTOOL_DECLARE],
+[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
+					   [description])))[]dnl
+m4_pushdef([_libtool_name],
+    m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
+m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
+    [0], [_libtool_name=[$]$1],
+    [1], [_libtool_name=$lt_[]$1],
+    [2], [_libtool_name=$lt_[]$1],
+    [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
+m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
+])
+
+
+# _LT_LIBTOOL_CONFIG_VARS
+# -----------------------
+# Produce commented declarations of non-tagged libtool config variables
+# suitable for insertion in the LIBTOOL CONFIG section of the `libtool'
+# script.  Tagged libtool config variables (even for the LIBTOOL CONFIG
+# section) are produced by _LT_LIBTOOL_TAG_VARS.
+m4_defun([_LT_LIBTOOL_CONFIG_VARS],
+[m4_foreach([_lt_var],
+    m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAG_VARS(TAG)
+# -------------------------
+m4_define([_LT_LIBTOOL_TAG_VARS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
+
+
+# _LT_TAGVAR(VARNAME, [TAGNAME])
+# ------------------------------
+m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
+
+
+# _LT_CONFIG_COMMANDS
+# -------------------
+# Send accumulated output to $CONFIG_STATUS.  Thanks to the lists of
+# variables for single and double quote escaping we saved from calls
+# to _LT_DECL, we can put quote-escaped variable declarations
+# into `config.status', and then the shell code to quote-escape them in
+# for loops in `config.status'.  Finally, any additional code accumulated
+# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
+m4_defun([_LT_CONFIG_COMMANDS],
+[AC_PROVIDE_IFELSE([LT_OUTPUT],
+	dnl If the libtool generation code has been placed in $CONFIG_LT,
+	dnl instead of duplicating it all over again into config.status,
+	dnl then we will have config.status run $CONFIG_LT later, so it
+	dnl needs to know what name is stored there:
+        [AC_CONFIG_COMMANDS([libtool],
+            [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
+    dnl If the libtool generation code is destined for config.status,
+    dnl expand the accumulated commands and init code now:
+    [AC_CONFIG_COMMANDS([libtool],
+        [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
+])#_LT_CONFIG_COMMANDS
+
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
+[
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+_LT_CONFIG_STATUS_DECLARATIONS
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$[]1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_quote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_dquote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+_LT_OUTPUT_LIBTOOL_INIT
+])
+
+# _LT_GENERATED_FILE_INIT(FILE, [COMMENT])
+# ----------------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable.  If COMMENT is supplied, it is inserted after the
+# `#!' sequence but before initialization text begins.  After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script.  The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_ifdef([AS_INIT_GENERATED],
+[m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])],
+[m4_defun([_LT_GENERATED_FILE_INIT],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[lt_write_fail=0
+cat >$1 <<_ASEOF || lt_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || lt_write_fail=1
+AS_SHELL_SANITIZE
+_AS_PREPARE
+exec AS_MESSAGE_FD>&1
+_ASEOF
+test $lt_write_fail = 0 && chmod +x $1[]dnl
+m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT
+
+# LT_OUTPUT
+# ---------
+# This macro allows early generation of the libtool script (before
+# AC_OUTPUT is called), in case it is used in configure for compilation
+# tests.
+AC_DEFUN([LT_OUTPUT],
+[: ${CONFIG_LT=./config.lt}
+AC_MSG_NOTICE([creating $CONFIG_LT])
+_LT_GENERATED_FILE_INIT(["$CONFIG_LT"],
+[# Run this file to recreate a libtool stub with the current configuration.])
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+lt_cl_silent=false
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+  echo
+  AS_BOX([Running $as_me.])
+} >&AS_MESSAGE_LOG_FD
+
+lt_cl_help="\
+\`$as_me' creates a local libtool stub from the current configuration,
+for use in further configure time tests before the real libtool is
+generated.
+
+Usage: $[0] [[OPTIONS]]
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+  -q, --quiet     do not print progress messages
+  -d, --debug     don't remove temporary files
+
+Report bugs to <bug-libtool@gnu.org>."
+
+lt_cl_version="\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING.
+
+Copyright (C) 2011 Free Software Foundation, Inc.
+This config.lt script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+while test $[#] != 0
+do
+  case $[1] in
+    --version | --v* | -V )
+      echo "$lt_cl_version"; exit 0 ;;
+    --help | --h* | -h )
+      echo "$lt_cl_help"; exit 0 ;;
+    --debug | --d* | -d )
+      debug=: ;;
+    --quiet | --q* | --silent | --s* | -q )
+      lt_cl_silent=: ;;
+
+    -*) AC_MSG_ERROR([unrecognized option: $[1]
+Try \`$[0] --help' for more information.]) ;;
+
+    *) AC_MSG_ERROR([unrecognized argument: $[1]
+Try \`$[0] --help' for more information.]) ;;
+  esac
+  shift
+done
+
+if $lt_cl_silent; then
+  exec AS_MESSAGE_FD>/dev/null
+fi
+_LTEOF
+
+cat >>"$CONFIG_LT" <<_LTEOF
+_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
+_LTEOF
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+AC_MSG_NOTICE([creating $ofile])
+_LT_OUTPUT_LIBTOOL_COMMANDS
+AS_EXIT(0)
+_LTEOF
+chmod +x "$CONFIG_LT"
+
+# configure is writing to config.log, but config.lt does its own redirection,
+# appending to config.log, which fails on DOS, as config.log is still kept
+# open by configure.  Here we exec the FD to /dev/null, effectively closing
+# config.log, so it can be properly (re)opened and appended to by config.lt.
+lt_cl_success=:
+test "$silent" = yes &&
+  lt_config_lt_args="$lt_config_lt_args --quiet"
+exec AS_MESSAGE_LOG_FD>/dev/null
+$SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
+exec AS_MESSAGE_LOG_FD>>config.log
+$lt_cl_success || AS_EXIT(1)
+])# LT_OUTPUT
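+
+# Typical use (illustrative configure.ac fragment), when configure itself
+# needs a working libtool for link tests before AC_OUTPUT runs:
+#
+#   LT_INIT
+#   LT_OUTPUT
+#   # ... configure-time checks that invoke ./libtool ...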
+
+
+# _LT_CONFIG(TAG)
+# ---------------
+# If TAG is the built-in tag, create an initial libtool script with a
+# default configuration from the untagged config vars.  Otherwise add code
+# to config.status for appending the configuration named by TAG from the
+# matching tagged config vars.
+m4_defun([_LT_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_CONFIG_SAVE_COMMANDS([
+  m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
+  m4_if(_LT_TAG, [C], [
+    # See if we are running on zsh, and set the options which allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}" ; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile="${ofile}T"
+    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+    $RM "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+_LT_COPYING
+_LT_LIBTOOL_TAGS
+
+# ### BEGIN LIBTOOL CONFIG
+_LT_LIBTOOL_CONFIG_VARS
+_LT_LIBTOOL_TAG_VARS
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+  _LT_PROG_LTMAIN
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '$q' "$ltmain" >> "$cfgfile" \
+     || (rm -f "$cfgfile"; exit 1)
+
+  _LT_PROG_REPLACE_SHELLFNS
+
+   mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+],
+[cat <<_LT_EOF >> "$ofile"
+
+dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
+dnl in a comment (ie after a #).
+# ### BEGIN LIBTOOL TAG CONFIG: $1
+_LT_LIBTOOL_TAG_VARS(_LT_TAG)
+# ### END LIBTOOL TAG CONFIG: $1
+_LT_EOF
+])dnl /m4_if
+],
+[m4_if([$1], [], [
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    TIMESTAMP='$TIMESTAMP'
+    RM='$RM'
+    ofile='$ofile'], [])
+])dnl /_LT_CONFIG_SAVE_COMMANDS
+])# _LT_CONFIG
+
+
+# LT_SUPPORTED_TAG(TAG)
+# ---------------------
+# Trace this macro to discover what tags are supported by the libtool
+# --tag option, using:
+#    autoconf --trace 'LT_SUPPORTED_TAG:$1'
+AC_DEFUN([LT_SUPPORTED_TAG], [])
+
+
+# C support is built-in for now
+m4_define([_LT_LANG_C_enabled], [])
+m4_define([_LT_TAGS], [])
+
+
+# LT_LANG(LANG)
+# -------------
+# Enable libtool support for the given language if not already enabled.
+AC_DEFUN([LT_LANG],
+[AC_BEFORE([$0], [LT_OUTPUT])dnl
+m4_case([$1],
+  [C],			[_LT_LANG(C)],
+  [C++],		[_LT_LANG(CXX)],
+  [Go],			[_LT_LANG(GO)],
+  [Java],		[_LT_LANG(GCJ)],
+  [Fortran 77],		[_LT_LANG(F77)],
+  [Fortran],		[_LT_LANG(FC)],
+  [Windows Resource],	[_LT_LANG(RC)],
+  [m4_ifdef([_LT_LANG_]$1[_CONFIG],
+    [_LT_LANG($1)],
+    [m4_fatal([$0: unsupported language: "$1"])])])dnl
+])# LT_LANG
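+
+# For example (illustrative), a package that also builds C++ libraries can
+# add the extra tag from configure.ac with
+#   LT_LANG([C++])
+# which the m4_case above maps to _LT_LANG(CXX).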
+
+
+# _LT_LANG(LANGNAME)
+# ------------------
+m4_defun([_LT_LANG],
+[m4_ifdef([_LT_LANG_]$1[_enabled], [],
+  [LT_SUPPORTED_TAG([$1])dnl
+  m4_append([_LT_TAGS], [$1 ])dnl
+  m4_define([_LT_LANG_]$1[_enabled], [])dnl
+  _LT_LANG_$1_CONFIG($1)])dnl
+])# _LT_LANG
+
+
+m4_ifndef([AC_PROG_GO], [
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_GO.  When it is available in    #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+m4_defun([AC_PROG_GO],
+[AC_LANG_PUSH(Go)dnl
+AC_ARG_VAR([GOC],     [Go compiler command])dnl
+AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+AC_CHECK_TOOL(GOC, gccgo)
+if test -z "$GOC"; then
+  if test -n "$ac_tool_prefix"; then
+    AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo])
+  fi
+fi
+if test -z "$GOC"; then
+  AC_CHECK_PROG(GOC, gccgo, gccgo, false)
+fi
+])#m4_defun
+])#m4_ifndef
+
+
+# _LT_LANG_DEFAULT_CONFIG
+# -----------------------
+m4_defun([_LT_LANG_DEFAULT_CONFIG],
+[AC_PROVIDE_IFELSE([AC_PROG_CXX],
+  [LT_LANG(CXX)],
+  [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_F77],
+  [LT_LANG(F77)],
+  [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_FC],
+  [LT_LANG(FC)],
+  [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
+
+dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
+dnl pulling things in needlessly.
+AC_PROVIDE_IFELSE([AC_PROG_GCJ],
+  [LT_LANG(GCJ)],
+  [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
+    [LT_LANG(GCJ)],
+    [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
+      [LT_LANG(GCJ)],
+      [m4_ifdef([AC_PROG_GCJ],
+	[m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([A][M_PROG_GCJ],
+	[m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([LT_PROG_GCJ],
+	[m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
+
+AC_PROVIDE_IFELSE([AC_PROG_GO],
+  [LT_LANG(GO)],
+  [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])])
+
+AC_PROVIDE_IFELSE([LT_PROG_RC],
+  [LT_LANG(RC)],
+  [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
+])# _LT_LANG_DEFAULT_CONFIG
+
+# Obsolete macros:
+AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
+AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
+AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
+AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
+AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
+dnl AC_DEFUN([AC_LIBTOOL_F77], [])
+dnl AC_DEFUN([AC_LIBTOOL_FC], [])
+dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
+dnl AC_DEFUN([AC_LIBTOOL_RC], [])
+
+
+# _LT_TAG_COMPILER
+# ----------------
+m4_defun([_LT_TAG_COMPILER],
+[AC_REQUIRE([AC_PROG_CC])dnl
+
+_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
+_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
+_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
+_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+])# _LT_TAG_COMPILER
+
+
+# _LT_COMPILER_BOILERPLATE
+# ------------------------
+# Check for compiler boilerplate output or warnings with
+# the simple compiler test code.
+m4_defun([_LT_COMPILER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+])# _LT_COMPILER_BOILERPLATE
+
+
+# _LT_LINKER_BOILERPLATE
+# ----------------------
+# Check for linker boilerplate output or warnings with
+# the simple link test code.
+m4_defun([_LT_LINKER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_LINKER_BOILERPLATE
+
+# _LT_REQUIRED_DARWIN_CHECKS
+# --------------------------
+m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
+  case $host_os in
+    rhapsody* | darwin*)
+    AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
+    AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
+    AC_CHECK_TOOL([LIPO], [lipo], [:])
+    AC_CHECK_TOOL([OTOOL], [otool], [:])
+    AC_CHECK_TOOL([OTOOL64], [otool64], [:])
+    _LT_DECL([], [DSYMUTIL], [1],
+      [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
+    _LT_DECL([], [NMEDIT], [1],
+      [Tool to change global to local symbols on Mac OS X])
+    _LT_DECL([], [LIPO], [1],
+      [Tool to manipulate fat objects and archives on Mac OS X])
+    _LT_DECL([], [OTOOL], [1],
+      [ldd/readelf like tool for Mach-O binaries on Mac OS X])
+    _LT_DECL([], [OTOOL64], [1],
+      [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
+
+    AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
+      [lt_cv_apple_cc_single_mod=no
+      if test -z "${LT_MULTI_MODULE}"; then
+	# By default we will add the -single_module flag. You can override
+	# by either setting the environment variable LT_MULTI_MODULE
+	# non-empty at configure time, or by adding -multi_module to the
+	# link flags.
+	rm -rf libconftest.dylib*
+	echo "int foo(void){return 1;}" > conftest.c
+	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
+	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+	# If there is a non-empty error log, and "single_module"
+	# appears in it, assume the flag caused a linker warning
+        if test -s conftest.err && $GREP single_module conftest.err; then
+	  cat conftest.err >&AS_MESSAGE_LOG_FD
+	# Otherwise, if the output was created with a 0 exit code from
+	# the compiler, it worked.
+	elif test -f libconftest.dylib && test $_lt_result -eq 0; then
+	  lt_cv_apple_cc_single_mod=yes
+	else
+	  cat conftest.err >&AS_MESSAGE_LOG_FD
+	fi
+	rm -rf libconftest.dylib*
+	rm -f conftest.*
+      fi])
+
+    AC_CACHE_CHECK([for -exported_symbols_list linker flag],
+      [lt_cv_ld_exported_symbols_list],
+      [lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+	[lt_cv_ld_exported_symbols_list=yes],
+	[lt_cv_ld_exported_symbols_list=no])
+	LDFLAGS="$save_LDFLAGS"
+    ])
+
+    AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load],
+      [lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+      echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+      $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
+      echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
+      $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -s conftest.err && $GREP force_load conftest.err; then
+	cat conftest.err >&AS_MESSAGE_LOG_FD
+      elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then
+	lt_cv_ld_force_load=yes
+      else
+	cat conftest.err >&AS_MESSAGE_LOG_FD
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+    ])
+    case $host_os in
+    rhapsody* | darwin1.[[012]])
+      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x on
+      # If running on 10.5 or later, the deployment target defaults
+      # to the OS version; if on x86 and 10.4, the deployment
+      # target defaults to 10.4.  Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+	10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+	10.[[012]]*)
+	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+	10.*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+    fi
+    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
+])
+
+
+# _LT_DARWIN_LINKER_FEATURES([TAG])
+# ---------------------------------
+# Checks for linker and compiler features on darwin
+m4_defun([_LT_DARWIN_LINKER_FEATURES],
+[
+  m4_require([_LT_REQUIRED_DARWIN_CHECKS])
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_automatic, $1)=yes
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  if test "$lt_cv_ld_force_load" = "yes"; then
+    _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+    m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes],
+                  [FC],  [_LT_TAGVAR(compiler_needs_object, $1)=yes])
+  else
+    _LT_TAGVAR(whole_archive_flag_spec, $1)=''
+  fi
+  _LT_TAGVAR(link_all_deplibs, $1)=yes
+  _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined"
+  case $cc_basename in
+     ifort*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
+  if test "$_lt_dar_can_shared" = "yes"; then
+    output_verbose_link_cmd=func_echo_all
+    _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+    _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+    _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+    _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+    m4_if([$1], [CXX],
+[   if test "$lt_cv_apple_cc_single_mod" != "yes"; then
+      _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}"
+      _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}"
+    fi
+],[])
+  else
+  _LT_TAGVAR(ld_shlibs, $1)=no
+  fi
+])
+
+# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
+# ----------------------------------
+# Links a minimal program and checks the executable
+# for the system default hardcoded library path. In most cases,
+# this is /usr/lib:/lib, but when the MPI compilers are used
+# the locations of the communication and MPI libs are included too.
+# If we don't find anything, use the default library path according
+# to the aix ld manual.
+# Store the results from the different compilers for each TAGNAME.
+# Allow overriding them for all tags through lt_cv_aix_libpath.
+m4_defun([_LT_SYS_MODULE_PATH_AIX],
+[m4_require([_LT_DECL_SED])dnl
+if test "${lt_cv_aix_libpath+set}" = set; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
+  [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
+  lt_aix_libpath_sed='[
+      /Import File Strings/,/^$/ {
+	  /^0/ {
+	      s/^0  *\([^ ]*\) *$/\1/
+	      p
+	  }
+      }]'
+  _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi],[])
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib"
+  fi
+  ])
+  aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
+fi
+])# _LT_SYS_MODULE_PATH_AIX
+
+
+# _LT_SHELL_INIT(ARG)
+# -------------------
+m4_define([_LT_SHELL_INIT],
+[m4_divert_text([M4SH-INIT], [$1
+])])# _LT_SHELL_INIT
+
+
+
+# _LT_PROG_ECHO_BACKSLASH
+# -----------------------
+# Find how we can fake an echo command that does not interpret backslashes.
+# In particular, with Autoconf 2.60 or later we add some code to the start
+# of the generated configure script which will find a shell with a builtin
+# printf (which we can use as an echo command).
+m4_defun([_LT_PROG_ECHO_BACKSLASH],
+[ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+AC_MSG_CHECKING([how to print strings])
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$[]1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*" 
+}
+
+case "$ECHO" in
+  printf*) AC_MSG_RESULT([printf]) ;;
+  print*) AC_MSG_RESULT([print -r]) ;;
+  *) AC_MSG_RESULT([cat]) ;;
+esac
+
+m4_ifdef([_AS_DETECT_SUGGESTED],
+[_AS_DETECT_SUGGESTED([
+  test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test "X`printf %s $ECHO`" = "X$ECHO" \
+      || test "X`print -r -- $ECHO`" = "X$ECHO" )])])
+
+_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+_LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+])# _LT_PROG_ECHO_BACKSLASH
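+
+# For example (illustrative), $ECHO 'a\tb' must print the four characters
+# a \ t b verbatim; a shell builtin echo that interprets \t would corrupt
+# the quoted command strings libtool passes around, which is why print -r --
+# or printf %s\n is preferred above.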
+
+
+# _LT_WITH_SYSROOT
+# ----------------
+AC_DEFUN([_LT_WITH_SYSROOT],
+[AC_MSG_CHECKING([for sysroot])
+AC_ARG_WITH([sysroot],
+[  --with-sysroot[=DIR] Search for dependent libraries within DIR
+                        (or the compiler's sysroot if not specified).],
+[], [with_sysroot=no])
+
+dnl lt_sysroot will always be passed unquoted.  We quote it here
+dnl in case the user passed a directory name.
+lt_sysroot=
+case ${with_sysroot} in #(
+ yes)
+   if test "$GCC" = yes; then
+     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+   fi
+   ;; #(
+ /*)
+   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+   ;; #(
+ no|'')
+   ;; #(
+ *)
+   AC_MSG_RESULT([${with_sysroot}])
+   AC_MSG_ERROR([The sysroot must be an absolute path.])
+   ;;
+esac
+
+ AC_MSG_RESULT([${lt_sysroot:-no}])
+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
+[dependent libraries, and in which our libraries should be installed.])])
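+
+# For example (illustrative), a cross build against a staged root can use
+#   ./configure --with-sysroot=/opt/cross/sysroot
+# or pass plain --with-sysroot to adopt whatever $CC --print-sysroot
+# reports when building with GCC.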
+
+# _LT_ENABLE_LOCK
+# ---------------
+m4_defun([_LT_ENABLE_LOCK],
+[AC_ARG_ENABLE([libtool-lock],
+  [AS_HELP_STRING([--disable-libtool-lock],
+    [avoid locking (might break parallel builds)])])
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+	HPUX_IA64_MODE="32"
+	;;
+      *ELF-64*)
+	HPUX_IA64_MODE="64"
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out which ABI we are using.
+  echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    if test "$lt_cv_prog_gnu_ld" = yes; then
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -melf32bsmip"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -melf32bmipn32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -melf64bmip"
+	;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -32"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -n32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -64"
+	  ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_i386_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_i386"
+	    ;;
+	  ppc64-*linux*|powerpc64-*linux*)
+	    LD="${LD-ld} -m elf32ppclinux"
+	    ;;
+	  s390x-*linux*)
+	    LD="${LD-ld} -m elf_s390"
+	    ;;
+	  sparc64-*linux*)
+	    LD="${LD-ld} -m elf32_sparc"
+	    ;;
+	esac
+	;;
+      *64-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_x86_64_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_x86_64"
+	    ;;
+	  ppc*-*linux*|powerpc*-*linux*)
+	    LD="${LD-ld} -m elf64ppc"
+	    ;;
+	  s390*-*linux*|s390*-*tpf*)
+	    LD="${LD-ld} -m elf64_s390"
+	    ;;
+	  sparc*-*linux*)
+	    LD="${LD-ld} -m elf64_sparc"
+	    ;;
+	esac
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS="$CFLAGS"
+  CFLAGS="$CFLAGS -belf"
+  AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
+    [AC_LANG_PUSH(C)
+     AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
+     AC_LANG_POP])
+  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+    CFLAGS="$SAVE_CFLAGS"
+  fi
+  ;;
+*-*solaris*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*)
+        case $host in
+        i?86-*-solaris*)
+          LD="${LD-ld} -m elf_x86_64"
+          ;;
+        sparc*-*-solaris*)
+          LD="${LD-ld} -m elf64_sparc"
+          ;;
+        esac
+        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
+        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
+          LD="${LD-ld}_sol2"
+        fi
+        ;;
+      *)
+	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+	  LD="${LD-ld} -64"
+	fi
+	;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
+
+need_locks="$enable_libtool_lock"
+])# _LT_ENABLE_LOCK
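+
+# For example (illustrative), a build that wants to skip the hard-link
+# locking altogether (accepting the risk noted in the help string) can run
+#   ./configure --disable-libtool-lock
+# which leaves need_locks=no in the generated libtool script.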
+
+
+# _LT_PROG_AR
+# -----------
+m4_defun([_LT_PROG_AR],
+[AC_CHECK_TOOLS(AR, [ar], false)
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+_LT_DECL([], [AR], [1], [The archiver])
+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
+
+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
+  [lt_cv_ar_at_file=no
+   AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
+     [echo conftest.$ac_objext > conftest.lst
+      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
+      AC_TRY_EVAL([lt_ar_try])
+      if test "$ac_status" -eq 0; then
+	# Ensure the archiver fails upon bogus file names.
+	rm -f conftest.$ac_objext libconftest.a
+	AC_TRY_EVAL([lt_ar_try])
+	if test "$ac_status" -ne 0; then
+          lt_cv_ar_at_file=@
+        fi
+      fi
+      rm -f conftest.* libconftest.a
+     ])
+  ])
+
+if test "x$lt_cv_ar_at_file" = xno; then
+  archiver_list_spec=
+else
+  archiver_list_spec=$lt_cv_ar_at_file
+fi
+_LT_DECL([], [archiver_list_spec], [1],
+  [How to feed a file listing to the archiver])
+])# _LT_PROG_AR
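+
+# When the check above succeeds, archiver_list_spec is set to "@" and the
+# generated libtool can hand long object lists to the archiver through a
+# response file instead of the command line, e.g. (illustrative):
+#   $AR $AR_FLAGS libfoo.a @objects.lst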
+
+
+# _LT_CMD_OLD_ARCHIVE
+# -------------------
+m4_defun([_LT_CMD_OLD_ARCHIVE],
+[_LT_PROG_AR
+
+AC_CHECK_TOOL(STRIP, strip, :)
+test -z "$STRIP" && STRIP=:
+_LT_DECL([], [STRIP], [1], [A symbol stripping program])
+
+AC_CHECK_TOOL(RANLIB, ranlib, :)
+test -z "$RANLIB" && RANLIB=:
+_LT_DECL([], [RANLIB], [1],
+    [Commands used to install an old-style archive])
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
+fi
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+_LT_DECL([], [old_postinstall_cmds], [2])
+_LT_DECL([], [old_postuninstall_cmds], [2])
+_LT_TAGDECL([], [old_archive_cmds], [2],
+    [Commands used to build an old-style archive])
+_LT_DECL([], [lock_old_archive_extraction], [0],
+    [Whether to use a lock for old archive extraction])
+])# _LT_CMD_OLD_ARCHIVE
+
+
+# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#		[OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------------------
+# Check whether the given compiler option works
+AC_DEFUN([_LT_COMPILER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$3"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       $2=yes
+     fi
+   fi
+   $RM conftest*
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$5], , :, [$5])
+else
+    m4_if([$6], , :, [$6])
+fi
+])# _LT_COMPILER_OPTION
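+
+# Illustrative call (hypothetical cache variable and flag):
+#   _LT_COMPILER_OPTION([whether $compiler accepts -fexample],
+#     [lt_cv_prog_compiler_example], [-fexample], [],
+#     [example_flag_works=yes], [example_flag_works=no])
+# The result is cached in the named variable and the success/failure
+# action runs once the check completes.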
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
+
+
+# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#                  [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------
+# Check whether the given linker option works
+AC_DEFUN([_LT_LINKER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS $3"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&AS_MESSAGE_LOG_FD
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         $2=yes
+       fi
+     else
+       $2=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$4], , :, [$4])
+else
+    m4_if([$5], , :, [$5])
+fi
+])# _LT_LINKER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
+
+
+# LT_CMD_MAX_LEN
+# --------------
+AC_DEFUN([LT_CMD_MAX_LEN],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+# find the maximum length of command line arguments
+AC_MSG_CHECKING([the maximum length of command line arguments])
+AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
+  i=0
+  teststring="ABCD"
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum line length reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+    # This has been around since 386BSD, at least.  Likely further.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  os2*)
+    # The test takes a long time on OS/2.
+    lt_cv_sys_max_cmd_len=8192
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[	 ]]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # a 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8 ; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \
+	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+	      test $i != 17 # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+])
+if test -n "$lt_cv_sys_max_cmd_len"; then
+  AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
+else
+  AC_MSG_RESULT(none)
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+_LT_DECL([], [max_cmd_len], [0],
+    [What is the maximum length of a command?])
+])# LT_CMD_MAX_LEN
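+
+# For example, on a host where getconf ARG_MAX reports 2097152 (a common
+# Linux value), the quarter-then-times-three safety margin above gives
+# max_cmd_len = 2097152 / 4 * 3 = 1572864.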
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
+
+
+# _LT_HEADER_DLFCN
+# ----------------
+m4_defun([_LT_HEADER_DLFCN],
+[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
+])# _LT_HEADER_DLFCN
+
+
+# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
+#                      ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
+# ----------------------------------------------------------------
+m4_defun([_LT_TRY_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "$cross_compiling" = yes; then :
+  [$4]
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+[#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+   find out it does not work on some platforms. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}]
+_LT_EOF
+  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) $1 ;;
+      x$lt_dlneed_uscore) $2 ;;
+      x$lt_dlunknown|x*) $3 ;;
+    esac
+  else :
+    # compilation failed
+    $3
+  fi
+fi
+rm -fr conftest*
+])# _LT_TRY_DLOPEN_SELF
+
+
+# LT_SYS_DLOPEN_SELF
+# ------------------
+AC_DEFUN([LT_SYS_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "x$enable_dlopen" != xyes; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
+  case $host_os in
+  beos*)
+    lt_cv_dlopen="load_add_on"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen="LoadLibrary"
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen="dlopen"
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+  # if libdl is installed we need to link against it
+    AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[
+    lt_cv_dlopen="dyld"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ])
+    ;;
+
+  *)
+    AC_CHECK_FUNC([shl_load],
+	  [lt_cv_dlopen="shl_load"],
+      [AC_CHECK_LIB([dld], [shl_load],
+	    [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"],
+	[AC_CHECK_FUNC([dlopen],
+	      [lt_cv_dlopen="dlopen"],
+	  [AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],
+	    [AC_CHECK_LIB([svld], [dlopen],
+		  [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"],
+	      [AC_CHECK_LIB([dld], [dld_link],
+		    [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"])
+	      ])
+	    ])
+	  ])
+	])
+      ])
+    ;;
+  esac
+
+  if test "x$lt_cv_dlopen" != xno; then
+    enable_dlopen=yes
+  else
+    enable_dlopen=no
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS="$CPPFLAGS"
+    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS="$LDFLAGS"
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS="$LIBS"
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    AC_CACHE_CHECK([whether a program can dlopen itself],
+	  lt_cv_dlopen_self, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
+	    lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
+    ])
+
+    if test "x$lt_cv_dlopen_self" = xyes; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
+	  lt_cv_dlopen_self_static, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
+	    lt_cv_dlopen_self_static=no,  lt_cv_dlopen_self_static=cross)
+      ])
+    fi
+
+    CPPFLAGS="$save_CPPFLAGS"
+    LDFLAGS="$save_LDFLAGS"
+    LIBS="$save_LIBS"
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+_LT_DECL([dlopen_support], [enable_dlopen], [0],
+	 [Whether dlopen is supported])
+_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
+	 [Whether dlopen of programs is supported])
+_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
+	 [Whether dlopen of statically linked programs is supported])
+])# LT_SYS_DLOPEN_SELF
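+
+# Note that the checks above only do real work when dlopen support was
+# requested, e.g. (illustrative) with LT_INIT([dlopen]) in configure.ac,
+# which presets enable_dlopen=yes before this macro runs.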
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
+
+
+# _LT_COMPILER_C_O([TAGNAME])
+# ---------------------------
+# Check to see if options -c and -o are simultaneously supported by compiler.
+# This macro does not hard code the compiler like AC_PROG_CC_C_O.
+m4_defun([_LT_COMPILER_C_O],
+[m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if it is not recognized,
+     # so say no if there are warnings.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+     fi
+   fi
+   chmod u+w . 2>&AS_MESSAGE_LOG_FD
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+])
+_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
+	[Does compiler simultaneously support -c and -o options?])
+])# _LT_COMPILER_C_O
+
+
+# _LT_COMPILER_FILE_LOCKS([TAGNAME])
+# ----------------------------------
+# Check to see if we can do hard links to lock some files if needed
+m4_defun([_LT_COMPILER_FILE_LOCKS],
+[m4_require([_LT_ENABLE_LOCK])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_COMPILER_C_O([$1])
+
+hard_links="nottested"
+if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then
+  # do not overwrite the value of need_locks provided by the user
+  AC_MSG_CHECKING([if we can lock with hard links])
+  hard_links=yes
+  $RM conftest*
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  AC_MSG_RESULT([$hard_links])
+  if test "$hard_links" = no; then
+    AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe])
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
+])# _LT_COMPILER_FILE_LOCKS
+
+
+# _LT_CHECK_OBJDIR
+# ----------------
+m4_defun([_LT_CHECK_OBJDIR],
+[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
+[rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null])
+objdir=$lt_cv_objdir
+_LT_DECL([], [objdir], [0],
+         [The name of the directory that contains temporary libtool files])dnl
+m4_pattern_allow([LT_OBJDIR])dnl
+AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/",
+  [Define to the sub-directory in which libtool stores uninstalled libraries.])
+])# _LT_CHECK_OBJDIR
+
+
+# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
+# --------------------------------------
+# Check hardcoding attributes.
+m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
+[AC_MSG_CHECKING([how to hardcode library paths into programs])
+_LT_TAGVAR(hardcode_action, $1)=
+if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
+   test -n "$_LT_TAGVAR(runpath_var, $1)" ||
+   test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then
+
+  # We can hardcode non-existent directories.
+  if test "$_LT_TAGVAR(hardcode_direct, $1)" != no &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink, otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one
+     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no &&
+     test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then
+    # Linking always hardcodes the temporary library directory.
+    _LT_TAGVAR(hardcode_action, $1)=relink
+  else
+    # We can link without hardcoding, and we can hardcode nonexisting dirs.
+    _LT_TAGVAR(hardcode_action, $1)=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  _LT_TAGVAR(hardcode_action, $1)=unsupported
+fi
+AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
+
+if test "$_LT_TAGVAR(hardcode_action, $1)" = relink ||
+   test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+     test "$enable_shared" = no; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
+_LT_TAGDECL([], [hardcode_action], [0],
+    [How to hardcode a shared library path into an executable])
+])# _LT_LINKER_HARDCODE_LIBPATH
+
+
+# _LT_CMD_STRIPLIB
+# ----------------
+m4_defun([_LT_CMD_STRIPLIB],
+[m4_require([_LT_DECL_EGREP])
+striplib=
+old_striplib=
+AC_MSG_CHECKING([whether stripping libraries is possible])
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  AC_MSG_RESULT([yes])
+else
+# FIXME: insert some real tests; host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP" ; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      AC_MSG_RESULT([yes])
+    else
+      AC_MSG_RESULT([no])
+    fi
+    ;;
+  *)
+    AC_MSG_RESULT([no])
+    ;;
+  esac
+fi
+_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
+_LT_DECL([], [striplib], [1])
+])# _LT_CMD_STRIPLIB
+
+
+# _LT_SYS_DYNAMIC_LINKER([TAG])
+# -----------------------------
+# PORTME Fill in your ld.so characteristics
+m4_defun([_LT_SYS_DYNAMIC_LINKER],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_OBJDUMP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+AC_MSG_CHECKING([dynamic linker characteristics])
+m4_if([$1],
+	[], [
+if test "$GCC" = yes; then
+  case $host_os in
+    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+    *) lt_awk_arg="/^libraries:/" ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;;
+    *) lt_sed_strip_eq="s,=/,/,g" ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # if the path contains ";" then we assume it to be the separator
+    # otherwise default to the standard path separator (i.e. ":") - it is
+    # assumed that no part of a normal pathname contains ";" but that should
+    # be okay in the real world where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # Ok, now we have the path, separated by spaces, we can step through it
+  # and add multilib dir if necessary.
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+    else
+      test -d "$lt_sys_path" && \
+	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
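+  # Canonicalize each collected path: the awk program below strips "." and
+  # resolves ".." components, then prints every distinct directory once.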
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+  lt_foo="";
+  lt_count=0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo="/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[[lt_foo]]++; }
+  if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
+}'`
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's,/\([[A-Za-z]]:\),\1,g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi])
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
+case $host_os in
+aix3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='${libname}${release}${shared_ext}$major'
+  ;;
+
+aix[[4-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test "$host_cpu" = ia64; then
+    # AIX 5 supports IA64
+    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependence libraries.  The import file would start with
+    # the line `#! .'.  This would cause the generated library to
+    # depend on `.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[[01]] | aix4.[[01]].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+	   echo ' yes '
+	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+	:
+      else
+	can_build_shared=no
+      fi
+      ;;
+    esac
+    # AIX (on Power*) has no versioning support, so currently we cannot hardcode a
+    # correct soname into the executable.  We could probably add versioning support
+    # to collect2, so additional links may be useful in the future.
+    if test "$aix_use_runtimelinking" = yes; then
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    else
+      # We preserve .a as extension for shared libraries through AIX4.2
+      # and later when we are not doing run time linking.
+      library_names_spec='${libname}${release}.a $libname.a'
+      soname_spec='${libname}${release}${shared_ext}$major'
+    fi
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='${libname}${shared_ext}'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[[45]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # the default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=".dll"
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$cc_basename in
+  yes,*)
+    # gcc
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+m4_if([$1], [],[
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"])
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    esac
+    dynamic_linker='Win32 ld.exe'
+    ;;
+
+  *,cl*)
+    # Native MSVC
+    libname_spec='$name'
+    soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+    library_names_spec='${libname}.dll.lib'
+
+    case $build_os in
+    mingw*)
+      sys_lib_search_path_spec=
+      lt_save_ifs=$IFS
+      IFS=';'
+      for lt_path in $LIB
+      do
+        IFS=$lt_save_ifs
+        # Let DOS variable expansion print the short 8.3 style file name.
+        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+      done
+      IFS=$lt_save_ifs
+      # Convert to MSYS style.
+      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
+      ;;
+    cygwin*)
+      # Convert to unix form, then to dos form, then back to unix form
+      # but this time dos style (no spaces!) so that the unix form looks
+      # like /cygdrive/c/PROGRA~1:/cygdr...
+      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      ;;
+    *)
+      sys_lib_search_path_spec="$LIB"
+      if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
+        # It is most probably a Windows format PATH.
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+      else
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      fi
+      # FIXME: find the short name or the path components, as spaces are
+      # common. (e.g. "Program Files" -> "PROGRA~1")
+      ;;
+    esac
+
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+    dynamic_linker='Win32 link.exe'
+    ;;
+
+  *)
+    # Assume MSVC wrapper
+    library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
+    dynamic_linker='Win32 ld.exe'
+    ;;
+  esac
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+  soname_spec='${libname}${release}${major}$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
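+  # Loadable modules get a .so extension; regular shared libraries use .dylib.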
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+m4_if([$1], [],[
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[[23]].*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2.*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[[01]]* | freebsdelf3.[[01]]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
+  freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+gnu*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+haiku*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    if test "X$HPUX_IA64_MODE" = X32; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+    fi
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+  postinstall_cmds='chmod 555 $lib'
+  # or fails outright, so override atomically:
+  install_override_mode=555
+  ;;
+
+interix[[3-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+	if test "$lt_cv_prog_gnu_ld" = yes; then
+		version_type=linux # correct to gnu/linux during the next big refactor
+	else
+		version_type=irix
+	fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld are patched to set DT_RUNPATH
+  AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath],
+    [lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
+	 LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
+    AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+      [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
+	 [lt_cv_shlibpath_overrides_runpath=yes])])
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+    ])
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Append ld.so.conf contents to the search path
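+  # (the awk fragment expands `include' directives; the sed filter drops
+  # comments, hwcap lines and variable assignments before flattening the
+  # result to a single space-separated list).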
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  Since this was broken with cross compilers,
+  # most powerpc-linux boxes support dynamic linking these days and
+  # people can always --disable-shared, the test was removed, and we
+  # assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec="/usr/lib"
+  need_lib_prefix=no
+  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+  case $host_os in
+    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
+    *)				need_version=no  ;;
+  esac
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    case $host_os in
+      openbsd2.[[89]] | openbsd2.[[89]].*)
+	shlibpath_overrides_runpath=no
+	;;
+      *)
+	shlibpath_overrides_runpath=yes
+	;;
+      esac
+  else
+    shlibpath_overrides_runpath=yes
+  fi
+  ;;
+
+os2*)
+  libname_spec='$name'
+  shrext_cmds=".dll"
+  need_lib_prefix=no
+  library_names_spec='$libname${shared_ext} $libname.a'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=LIBPATH
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test "$with_gnu_ld" = yes; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec ;then
+    version_type=linux # correct to gnu/linux during the next big refactor
+    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+    soname_spec='$libname${shared_ext}.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=freebsd-elf
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test "$with_gnu_ld" = yes; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+	;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+AC_MSG_RESULT([$dynamic_linker])
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+_LT_DECL([], [variables_saved_for_relink], [1],
+    [Variables whose values should be saved in libtool wrapper scripts and
+    restored at link time])
+_LT_DECL([], [need_lib_prefix], [0],
+    [Do we need the "lib" prefix for modules?])
+_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
+_LT_DECL([], [version_type], [0], [Library versioning type])
+_LT_DECL([], [runpath_var], [0],  [Shared library runtime path variable])
+_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
+_LT_DECL([], [shlibpath_overrides_runpath], [0],
+    [Is shlibpath searched before the hard-coded library search path?])
+_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
+_LT_DECL([], [library_names_spec], [1],
+    [[List of archive names.  First name is the real one, the rest are links.
+    The last name is the one that the linker finds with -lNAME]])
+_LT_DECL([], [soname_spec], [1],
+    [[The coded name of the library, if different from the real name]])
+_LT_DECL([], [install_override_mode], [1],
+    [Permission mode override for installation of shared libraries])
+_LT_DECL([], [postinstall_cmds], [2],
+    [Command to use after installation of a shared archive])
+_LT_DECL([], [postuninstall_cmds], [2],
+    [Command to use after uninstallation of a shared archive])
+_LT_DECL([], [finish_cmds], [2],
+    [Commands used to finish a libtool library installation in a directory])
+_LT_DECL([], [finish_eval], [1],
+    [[As "finish_cmds", except a single script fragment to be evaled but
+    not shown]])
+_LT_DECL([], [hardcode_into_libs], [0],
+    [Whether we should hardcode library paths into libraries])
+_LT_DECL([], [sys_lib_search_path_spec], [2],
+    [Compile-time system search path for libraries])
+_LT_DECL([], [sys_lib_dlsearch_path_spec], [2],
+    [Run-time system search path for libraries])
+])# _LT_SYS_DYNAMIC_LINKER
+
+
+# _LT_PATH_TOOL_PREFIX(TOOL)
+# --------------------------
+# find a file program which can recognize a shared library
+AC_DEFUN([_LT_PATH_TOOL_PREFIX],
+[m4_require([_LT_DECL_EGREP])dnl
+AC_MSG_CHECKING([for $1])
+AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
+[case $MAGIC_CMD in
+[[\\/*] |  ?:[\\/]*])
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+dnl $ac_dummy forces splitting on constant user-supplied paths.
+dnl POSIX.2 word splitting is done only on the output of word expansions,
+dnl not every word.  This closes a longstanding sh security hole.
+  ac_dummy="m4_if([$2], , $PATH, [$2])"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/$1; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/$1"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac])
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  AC_MSG_RESULT($MAGIC_CMD)
+else
+  AC_MSG_RESULT(no)
+fi
+_LT_DECL([], [MAGIC_CMD], [0],
+	 [Used to examine libraries when file_magic_cmd begins with "file"])dnl
+])# _LT_PATH_TOOL_PREFIX
+
+# Old name:
+AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
+
+
+# _LT_PATH_MAGIC
+# --------------
+# find a file program which can recognize a shared library
+m4_defun([_LT_PATH_MAGIC],
+[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
+  else
+    MAGIC_CMD=:
+  fi
+fi
+])# _LT_PATH_MAGIC
+
+
+# LT_PATH_LD
+# ----------
+# find the pathname to the GNU or non-GNU linker
+AC_DEFUN([LT_PATH_LD],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PROG_ECHO_BACKSLASH])dnl
+
+AC_ARG_WITH([gnu-ld],
+    [AS_HELP_STRING([--with-gnu-ld],
+	[assume the C compiler uses GNU ld @<:@default=no@:>@])],
+    [test "$withval" = no || with_gnu_ld=yes],
+    [with_gnu_ld=no])dnl
+
+ac_prog=ld
+if test "$GCC" = yes; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  AC_MSG_CHECKING([for ld used by $CC])
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [[\\/]]* | ?:[[\\/]]*)
+      re_direlt='/[[^/]][[^/]]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
+      test -z "$LD" && LD="$ac_prog"
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test "$with_gnu_ld" = yes; then
+  AC_MSG_CHECKING([for GNU ld])
+else
+  AC_MSG_CHECKING([for non-GNU ld])
+fi
+AC_CACHE_VAL(lt_cv_path_LD,
+[if test -z "$LD"; then
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD="$ac_dir/$ac_prog"
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+	test "$with_gnu_ld" != no && break
+	;;
+      *)
+	test "$with_gnu_ld" != yes && break
+	;;
+      esac
+    fi
+  done
+  IFS="$lt_save_ifs"
+else
+  lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi])
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+  AC_MSG_RESULT($LD)
+else
+  AC_MSG_RESULT(no)
+fi
+test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH])
+_LT_PATH_LD_GNU
+AC_SUBST([LD])
+
+_LT_TAGDECL([], [LD], [1], [The linker used to build libraries])
+])# LT_PATH_LD
+
+# Old names:
+AU_ALIAS([AM_PROG_LD], [LT_PATH_LD])
+AU_ALIAS([AC_PROG_LD], [LT_PATH_LD])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_LD], [])
+dnl AC_DEFUN([AC_PROG_LD], [])
+
+
+# _LT_PATH_LD_GNU
+# ---------------
+m4_defun([_LT_PATH_LD_GNU],
+[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld,
+[# I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac])
+with_gnu_ld=$lt_cv_prog_gnu_ld
+])# _LT_PATH_LD_GNU
+
+
+# _LT_CMD_RELOAD
+# --------------
+# find reload flag for linker
+#   -- PORTME Some linkers may need a different reload flag.
+m4_defun([_LT_CMD_RELOAD],
+[AC_CACHE_CHECK([for $LD option to reload object files],
+  lt_cv_ld_reload_flag,
+  [lt_cv_ld_reload_flag='-r'])
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    if test "$GCC" != yes; then
+      reload_cmds=false
+    fi
+    ;;
+  darwin*)
+    if test "$GCC" = yes; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
+_LT_TAGDECL([], [reload_flag], [1], [How to create reloadable object files])dnl
+_LT_TAGDECL([], [reload_cmds], [2])dnl
+])# _LT_CMD_RELOAD
+
+
+# _LT_CHECK_MAGIC_METHOD
+# ----------------------
+# how to check for library dependencies
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_MAGIC_METHOD],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+AC_CACHE_CHECK([how to recognize dependent libraries],
+lt_cv_deplibs_check_method,
+[lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# 'unknown' -- same as none, but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making test program.
+# 'file_magic [[regex]]' -- check by looking for files in library path
+# which responds to the $file_magic_cmd with a given extended regex.
+# If you have `file' or equivalent on your system and you're not sure
+# whether `pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[[4-9]]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[[45]]*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]']
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[[3-9]]*)
+  # PIC code is broken on Interix 3.x; that's why we use |\.a here rather than |_pic\.a
+  lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+])
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+  case $host_os in
+  mingw* | pw32*)
+    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+      want_nocaseglob=yes
+    else
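+      # Build a sed script that rewrites each letter of a glob into a
+      # case-insensitive bracket expression (e.g. 'a' becomes '[aA]').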
+      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
+    fi
+    ;;
+  esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+_LT_DECL([], [deplibs_check_method], [1],
+    [Method to check whether dependent libraries are shared objects])
+_LT_DECL([], [file_magic_cmd], [1],
+    [Command to use when deplibs_check_method = "file_magic"])
+_LT_DECL([], [file_magic_glob], [1],
+    [How to find potential files when deplibs_check_method = "file_magic"])
+_LT_DECL([], [want_nocaseglob], [1],
+    [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
+])# _LT_CHECK_MAGIC_METHOD
+
+
+# LT_PATH_NM
+# ----------
+# find the pathname to a BSD- or MS-compatible name lister
+AC_DEFUN([LT_PATH_NM],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
+[if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM="$NM"
+else
+  lt_nm_to_check="${ac_tool_prefix}nm"
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS="$lt_save_ifs"
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm="$ac_dir/$lt_tmp_nm"
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+	# Check to see if the nm accepts a BSD-compat flag.
+	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
+	#   nm: unknown option "B" ignored
+	# Tru64's nm complains that /dev/null is an invalid object file
+	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+	*/dev/null* | *'Invalid file or object type'*)
+	  lt_cv_path_NM="$tmp_nm -B"
+	  break
+	  ;;
+	*)
+	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+	  */dev/null*)
+	    lt_cv_path_NM="$tmp_nm -p"
+	    break
+	    ;;
+	  *)
+	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+	    continue # so that we can try to find one that supports BSD flags
+	    ;;
+	  esac
+	  ;;
+	esac
+      fi
+    done
+    IFS="$lt_save_ifs"
+  done
+  : ${lt_cv_path_NM=no}
+fi])
+if test "$lt_cv_path_NM" != "no"; then
+  NM="$lt_cv_path_NM"
+else
+  # Didn't find any BSD-compatible name lister; look for dumpbin.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :)
+    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+  AC_SUBST([DUMPBIN])
+  if test "$DUMPBIN" != ":"; then
+    NM="$DUMPBIN"
+  fi
+fi
+test -z "$NM" && NM=nm
+AC_SUBST([NM])
+_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl
+
+AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
+  [lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD)
+  cat conftest.out >&AS_MESSAGE_LOG_FD
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -f conftest*])
+])# LT_PATH_NM
+
+# Old names:
+AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
+AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_NM], [])
+dnl AC_DEFUN([AC_PROG_NM], [])
+
+# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+# --------------------------------
+# how to determine the name of the shared library
+# associated with a specific link library.
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+m4_require([_LT_DECL_DLLTOOL])
+AC_CACHE_CHECK([how to associate runtime and link libraries],
+lt_cv_sharedlib_from_linklib_cmd,
+[lt_cv_sharedlib_from_linklib_cmd='unknown'
+
+case $host_os in
+cygwin* | mingw* | pw32* | cegcc*)
+  # two different shell functions defined in ltmain.sh
+  # decide which to use based on capabilities of $DLLTOOL
+  case `$DLLTOOL --help 2>&1` in
+  *--identify-strict*)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
+    ;;
+  *)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
+    ;;
+  esac
+  ;;
+*)
+  # fallback: assume linklib IS sharedlib
+  lt_cv_sharedlib_from_linklib_cmd="$ECHO"
+  ;;
+esac
+])
+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
+
+_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
+    [Command to associate shared and link libraries])
+])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+
+
+# _LT_PATH_MANIFEST_TOOL
+# ----------------------
+# locate the manifest tool
+m4_defun([_LT_PATH_MANIFEST_TOOL],
+[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
+AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
+  [lt_cv_path_mainfest_tool=no
+  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
+  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
+    lt_cv_path_mainfest_tool=yes
+  fi
+  rm -f conftest*])
+if test "x$lt_cv_path_mainfest_tool" != xyes; then
+  MANIFEST_TOOL=:
+fi
+_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
+])# _LT_PATH_MANIFEST_TOOL
+
+
+# LT_LIB_M
+# --------
+# check for math library
+AC_DEFUN([LT_LIB_M],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+LIBM=
+case $host in
+*-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*)
+  # These systems don't have libm, or don't need it
+  ;;
+*-ncr-sysv4.3*)
+  AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw")
+  AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
+  ;;
+*)
+  AC_CHECK_LIB(m, cos, LIBM="-lm")
+  ;;
+esac
+AC_SUBST([LIBM])
+])# LT_LIB_M
+
+# Old name:
+AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_CHECK_LIBM], [])
+
+
+# _LT_COMPILER_NO_RTTI([TAGNAME])
+# -------------------------------
+m4_defun([_LT_COMPILER_NO_RTTI],
+[m4_require([_LT_TAG_COMPILER])dnl
+
+_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+
+if test "$GCC" = yes; then
+  case $cc_basename in
+  nvcc*)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;;
+  esac
+
+  _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
+    lt_cv_prog_compiler_rtti_exceptions,
+    [-fno-rtti -fno-exceptions], [],
+    [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
+fi
+_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
+	[Compiler flag to turn off builtin functions])
+])# _LT_COMPILER_NO_RTTI
+
+
+# _LT_CMD_GLOBAL_SYMBOLS
+# ----------------------
+m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+
+# Check for a command to grab the raw symbol name followed by the C symbol from nm.
+AC_MSG_CHECKING([command to parse $NM output from $compiler object])
+AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
+[
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[[BCDEGRST]]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[[BCDT]]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[[ABCDGISTW]]'
+  ;;
+hpux*)
+  if test "$host_cpu" = ia64; then
+    symcode='[[ABCDEGRST]]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[[BCDEGRST]]'
+  ;;
+osf*)
+  symcode='[[BCDEGQRST]]'
+  ;;
+solaris*)
+  symcode='[[BDRT]]'
+  ;;
+sco3.2v5*)
+  symcode='[[DT]]'
+  ;;
+sysv4.2uw2*)
+  symcode='[[DT]]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[[ABDT]]'
+  ;;
+sysv4)
+  symcode='[[DFNSTU]]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[[ABCDGIRSTW]]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
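+# For illustration, assuming the pipe constructed below emits lines of the
+# form "T _nm_test_func nm_test_func", the command above would turn such a
+# line into "extern int nm_test_func();", and a data line such as
+# "D _nm_test_var nm_test_var" into "extern char nm_test_var;".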
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
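+# For illustration, with the same assumed input format, a line such as
+# "D _nm_test_var nm_test_var" becomes '  {"nm_test_var", (void *) &nm_test_var},'
+# here, while the lib_prefix variant instead emits the name with "lib"
+# prepended when the raw symbol does not already carry that prefix.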
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function
+    # and D for any global variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK ['"\
+"     {last_section=section; section=\$ 3};"\
+"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx]"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[	 ]]\($symcode$symcode*\)[[	 ]][[	 ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
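+  # (The extra sed appended above simply drops GCC's internal __gnu_lto
+  # marker symbols from the listing.)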
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
+  if AC_TRY_EVAL(ac_compile); then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+	mv -f "$nlist"T "$nlist"
+      else
+	rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+	  cat <<_LT_EOF > conftest.$ac_ext
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT@&t@_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT@&t@_DLSYM_CONST
+#else
+# define LT@&t@_DLSYM_CONST const
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+	  # Now generate the symbol file.
+	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+	  cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+LT@&t@_DLSYM_CONST struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[[]] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+	  cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker.  */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+	  # Now try linking the two files.
+	  mv conftest.$ac_objext conftstm.$ac_objext
+	  lt_globsym_save_LIBS=$LIBS
+	  lt_globsym_save_CFLAGS=$CFLAGS
+	  LIBS="conftstm.$ac_objext"
+	  CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
+	  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
+	    pipe_works=yes
+	  fi
+	  LIBS=$lt_globsym_save_LIBS
+	  CFLAGS=$lt_globsym_save_CFLAGS
+	else
+	  echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
+	fi
+      else
+	echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
+    fi
+  else
+    echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test "$pipe_works" = yes; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+])
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  AC_MSG_RESULT(failed)
+else
+  AC_MSG_RESULT(ok)
+fi
+
+# Response file support.
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+  nm_file_list_spec='@'
+elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
+  nm_file_list_spec='@'
+fi
+
+_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
+    [Take the output of nm and produce a listing of raw symbols and C names])
+_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
+    [Transform the output of nm into a proper C declaration])
+_LT_DECL([global_symbol_to_c_name_address],
+    [lt_cv_sys_global_symbol_to_c_name_address], [1],
+    [Transform the output of nm into a C name address pair])
+_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
+    [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
+    [Transform the output of nm into a C name address pair when lib prefix is needed])
+_LT_DECL([], [nm_file_list_spec], [1],
+    [Specify filename containing input files for $NM])
+]) # _LT_CMD_GLOBAL_SYMBOLS
+
+
+# _LT_COMPILER_PIC([TAGNAME])
+# ---------------------------
+m4_defun([_LT_COMPILER_PIC],
+[m4_require([_LT_TAG_COMPILER])dnl
+_LT_TAGVAR(lt_prog_compiler_wl, $1)=
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+_LT_TAGVAR(lt_prog_compiler_static, $1)=
+
+m4_if([$1], [CXX], [
+  # C++ specific cases for pic, static, wl, etc.
+  if test "$GXX" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+    aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+    mingw* | cygwin* | os2* | pw32* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+    *djgpp*)
+      # DJGPP does not support shared libraries at all
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+      ;;
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+    *qnx* | *nto*)
+      # QNX uses GNU C++, but we need to define the -shared option too, otherwise
+      # it will dump core.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+  else
+    case $host_os in
+      aix[[4-9]]*)
+	# All AIX code is PIC.
+	if test "$host_cpu" = ia64; then
+	  # AIX 5 now supports IA64 processor
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	else
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+	fi
+	;;
+      chorus*)
+	case $cc_basename in
+	cxch68*)
+	  # Green Hills C++ Compiler
+	  # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
+	  ;;
+	esac
+	;;
+      mingw* | cygwin* | os2* | pw32* | cegcc*)
+	# This hack is so that the source file can tell whether it is being
+	# built for inclusion in a dll (and should export symbols for example).
+	m4_if([$1], [GCJ], [],
+	  [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+	;;
+      dgux*)
+	case $cc_basename in
+	  ec++*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  ghcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      freebsd* | dragonfly*)
+	# FreeBSD uses GNU C++
+	;;
+      hpux9* | hpux10* | hpux11*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    if test "$host_cpu" != ia64; then
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	    fi
+	    ;;
+	  aCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    case $host_cpu in
+	    hppa*64*|ia64*)
+	      # +Z the default
+	      ;;
+	    *)
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	      ;;
+	    esac
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      interix*)
+	# This is c89, which is MS Visual C++ (no shared libs)
+	# Does anyone want to do a port?
+	;;
+      irix5* | irix6* | nonstopux*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    # CC pic flag -KPIC is the default.
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+	case $cc_basename in
+	  KCC*)
+	    # KAI C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    ;;
+	  ecpc* )
+	    # old Intel C++ for x86_64 which still supported -KPIC.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  icpc* )
+	    # Intel C++, used to be incompatible with GCC.
+	    # ICC 10 doesn't accept -KPIC any more.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  pgCC* | pgcpp*)
+	    # Portland Group C++ compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  cxx*)
+	    # Compaq C++
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*)
+	    # IBM XL 8.0, 9.0 on PPC and BlueGene
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+      lynxos*)
+	;;
+      m88k*)
+	;;
+      mvs*)
+	case $cc_basename in
+	  cxx*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      netbsd*)
+	;;
+      *qnx* | *nto*)
+        # QNX uses GNU C++, but we need to define the -shared option too, otherwise
+        # it will dump core.
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+        ;;
+      osf3* | osf4* | osf5*)
+	case $cc_basename in
+	  KCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    ;;
+	  RCC*)
+	    # Rational C++ 2.4.1
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  cxx*)
+	    # Digital/Compaq C++
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      psos*)
+	;;
+      solaris*)
+	case $cc_basename in
+	  CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	    ;;
+	  gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sunos4*)
+	case $cc_basename in
+	  CC*)
+	    # Sun C++ 4.x
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  lcc*)
+	    # Lucid
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	esac
+	;;
+      tandem*)
+	case $cc_basename in
+	  NCC*)
+	    # NonStop-UX NCC 3.20
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      vxworks*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+	;;
+    esac
+  fi
+],
+[
+  if test "$GCC" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too, otherwise
+      # it will dump core.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker '
+      if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)"
+      fi
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      else
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC (with -KPIC) is the default.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu)
+      case $cc_basename in
+      # old Intel for x86_64 which still supported -KPIC.
+      ecc*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+	;;
+      nagfor*)
+	# NAG Fortran compiler
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+	# which looks to be a dead project)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+        ;;
+      ccc*)
+        _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+        # All Alpha code is PIC.
+        _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	;;
+      *)
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*)
+	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
+	  ;;
+	*Sun\ F* | *Sun*Fortran*)
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	  ;;
+	*Sun\ C*)
+	  # Sun C 5.9
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  ;;
+        *Intel*\ [[CF]]*Compiler*)
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	  ;;
+	*Portland\ Group*)
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  ;;
+	esac
+	;;
+      esac
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too, otherwise
+      # it will dump core.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # All OSF/1 code is PIC.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    rdos*)
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    unicos*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+    esac
+  fi
+])
+case $host_os in
+  # For platforms which do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+    ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+    ;;
+esac
+
+AC_CACHE_CHECK([for $compiler option to produce PIC],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+  _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
+    [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
+    [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
+    [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
+     "" | " "*) ;;
+     *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
+     esac],
+    [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+     _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
+fi
+_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+	[Additional compiler flags for building library objects])
+
+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+	[How to pass a linker flag through the compiler])
+#
+# Check to make sure the static flag actually works.
+#
+wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
+_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
+  _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
+  $lt_tmp_static_flag,
+  [],
+  [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
+_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+	[Compiler flag to prevent dynamic linking])
+])# _LT_COMPILER_PIC
+
+
+# _LT_LINKER_SHLIBS([TAGNAME])
+# ----------------------------
+# See if the linker supports building shared libraries.
+m4_defun([_LT_LINKER_SHLIBS],
+[AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+m4_if([$1], [CXX], [
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  case $host_os in
+  aix[[4-9]]*)
+    # If we're using GNU nm, then we don't want the "-C" option.
+    # To AIX nm, -C means demangle; to GNU nm it means don't demangle.
+    # Also, AIX nm treats weak defined symbols like other global defined
+    # symbols, whereas GNU nm marks them as "W".
+    if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    else
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    fi
+    ;;
+  pw32*)
+    _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
+    ;;
+  cygwin* | mingw* | cegcc*)
+    case $cc_basename in
+    cl*)
+      _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+      ;;
+    *)
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+      ;;
+    esac
+    ;;
+  *)
+    _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+    ;;
+  esac
+], [
+  runpath_var=
+  _LT_TAGVAR(allow_undefined_flag, $1)=
+  _LT_TAGVAR(always_export_symbols, $1)=no
+  _LT_TAGVAR(archive_cmds, $1)=
+  _LT_TAGVAR(archive_expsym_cmds, $1)=
+  _LT_TAGVAR(compiler_needs_object, $1)=no
+  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+  _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(hardcode_automatic, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+  _LT_TAGVAR(hardcode_minus_L, $1)=no
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  _LT_TAGVAR(inherit_rpath, $1)=no
+  _LT_TAGVAR(link_all_deplibs, $1)=unknown
+  _LT_TAGVAR(module_cmds, $1)=
+  _LT_TAGVAR(module_expsym_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_new_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
+  _LT_TAGVAR(thread_safe_flag_spec, $1)=
+  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+  # include_expsyms should be a list of space-separated symbols to be *always*
+  # included in the symbol list
+  _LT_TAGVAR(include_expsyms, $1)=
+  # exclude_expsyms can be an extended regexp of symbols to exclude
+  # it will be wrapped by ` (' and `)$', so one must not match beginning or
+  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
+  # as well as any symbol that contains `d'.
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid C symbol name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+dnl Note also adjust exclude_expsyms for C++ above.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test "$GCC" != yes; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd*)
+    with_gnu_ld=no
+    ;;
+  esac
+
+  _LT_TAGVAR(ld_shlibs, $1)=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test "$with_gnu_ld" = yes; then
+    case $host_os in
+      aix*)
+	# The AIX port of GNU ld has always aspired to compatibility
+	# with the native linker.  However, as the warning in the GNU ld
+	# block says, versions before 2.19.5* couldn't really create working
+	# shared libraries, regardless of the interface used.
+	case `$LD -v 2>&1` in
+	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+	  *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;;
+	  *\ \(GNU\ Binutils\)\ [[3-9]]*) ;;
+	  *)
+	    lt_use_gnu_ld_interface=yes
+	    ;;
+	esac
+	;;
+      *)
+	lt_use_gnu_ld_interface=yes
+	;;
+    esac
+  fi
+
+  if test "$lt_use_gnu_ld_interface" = yes; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='${wl}'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+    # ancient GNU ld didn't support --whole-archive et al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+    else
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
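+    # ("Anonymous versioning" here means passing the linker a --version-script
+    # whose single version node has no name, i.e. the "{ global: ...; local: *; };"
+    # script generated for archive_expsym_cmds below.)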
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[[3-9]]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test "$host_cpu" != ia64; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	# support --undefined.  This deserves some investigation.  FIXME
+	_LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+      # as there is no search path for DLLs.
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=no
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	# If the export-symbols file already is a .def file (1st line
+	# is EXPORTS), use it as is; otherwise, prepend...
+	_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	  cp $export_symbols $output_objdir/$soname.def;
+	else
+	  echo EXPORTS > $output_objdir/$soname.def;
+	  cat $export_symbols >> $output_objdir/$soname.def;
+	fi~
+	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    haiku*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    interix[[3-9]]*)
+      _LT_TAGVAR(hardcode_direct, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very memory-
+      # consuming and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
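+      # (Arithmetic note: 1342177280 is 0x50000000 and 262144 bytes is 256 KiB,
+      # so the expr below yields one of 2048 aligned bases in the range
+      # 0x50000000 .. 0x6FFC0000.)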
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test "$host_os" = linux-dietlibc; then
+	case $cc_basename in
+	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
+	esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+	 && test "$tmp_diet" = no
+      then
+	tmp_addflag=' $pic_flag'
+	tmp_sharedflag='-shared'
+	case $cc_basename,$host_cpu in
+        pgcc*)				# Portland Group C compiler
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag'
+	  ;;
+	pgf77* | pgf90* | pgf95* | pgfortran*)
+					# Portland Group f77 and f90 compilers
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag -Mnomain' ;;
+	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
+	  tmp_addflag=' -i_dynamic' ;;
+	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
+	  tmp_addflag=' -i_dynamic -nofor_main' ;;
+	ifc* | ifort*)			# Intel Fortran compiler
+	  tmp_addflag=' -nofor_main' ;;
+	lf95*)				# Lahey Fortran 8.1
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+	  tmp_sharedflag='--shared' ;;
+	xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+	  tmp_sharedflag='-qmkshrobj'
+	  tmp_addflag= ;;
+	nvcc*)	# Cuda Compiler Driver 2.2
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  ;;
+	esac
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ C*)			# Sun C 5.9
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  tmp_sharedflag='-G' ;;
+	*Sun\ F*)			# Sun Fortran 8.3
+	  tmp_sharedflag='-G' ;;
+	esac
+	_LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+        if test "x$supports_anon_versioning" = xyes; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	    echo "local: *; };" >> $output_objdir/$libname.ver~
+	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+        fi
+
+	case $cc_basename in
+	xlf* | bgf* | bgxlf* | mpixlf*)
+	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+	  if test "x$supports_anon_versioning" = xyes; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	      echo "local: *; };" >> $output_objdir/$libname.ver~
+	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+	  fi
+	  ;;
+	esac
+      else
+        _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+	wlarc=
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+	;;
+	*)
+	  # For security reasons, it is highly recommended that you always
+	  # use absolute paths for naming shared libraries, and exclude the
+	  # DT_RUNPATH tag from executables and libraries.  But doing so
+	  # requires that you compile everything twice, which is a pain.
+	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+	  else
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	  fi
+	;;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+    esac
+
+    if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then
+      runpath_var=
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+	# Neither direct hardcoding nor static linking is supported with a
+	# broken collect2.
+	_LT_TAGVAR(hardcode_direct, $1)=unsupported
+      fi
+      ;;
+
+    aix[[4-9]]*)
+      if test "$host_cpu" = ia64; then
+	# On IA64, the linker does run time linking by default, so we don't
+	# have to do anything special.
+	aix_use_runtimelinking=no
+	exp_sym_flag='-Bexport'
+	no_entry_flag=""
+      else
+	# If we're using GNU nm, then we don't want the "-C" option.
+	# To AIX nm, -C means demangle; to GNU nm it means don't demangle.
+	# Also, AIX nm treats weak defined symbols like other global
+	# defined symbols, whereas GNU nm marks them as "W".
+	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	else
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	fi
+	aix_use_runtimelinking=no
+
+	# Test if we are trying to use run time linking or normal
+	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
+	# need to do runtime linking.
+	case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	  for ld_flag in $LDFLAGS; do
+	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+	    aix_use_runtimelinking=yes
+	    break
+	  fi
+	  done
+	  ;;
+	esac
+
+	exp_sym_flag='-bexport'
+	no_entry_flag='-bnoentry'
+      fi
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow" add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      _LT_TAGVAR(archive_cmds, $1)=''
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+      if test "$GCC" = yes; then
+	case $host_os in aix4.[[012]]|aix4.[[012]].*)
+	# We only want to do this on AIX 4.2 and lower; the check
+	# below for broken collect2 doesn't work under 4.3+.
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	  # We have reworked collect2
+	  :
+	  else
+	  # We have old collect2
+	  _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	  # It fails to find uninstalled libraries when the uninstalled
+	  # path is not listed in the libpath.  Setting hardcode_minus_L
+	  # to unsupported forces relinking
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+	  ;;
+	esac
+	shared_flag='-shared'
+	if test "$aix_use_runtimelinking" = yes; then
+	  shared_flag="$shared_flag "'${wl}-G'
+	fi
+      else
+	# not using gcc
+	if test "$host_cpu" = ia64; then
+	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	# chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+	else
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag='${wl}-G'
+	  else
+	    shared_flag='${wl}-bM:SRE'
+	  fi
+	fi
+      fi
+
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      if test "$aix_use_runtimelinking" = yes; then
+	# Warning - without using the other runtime loading flags (-brtl),
+	# -berok will link without error, but may produce a broken library.
+	_LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        _LT_SYS_MODULE_PATH_AIX([$1])
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+      else
+	if test "$host_cpu" = ia64; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	  _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+	else
+	 # Determine the default libpath from the value encoded in an
+	 # empty executable.
+	 _LT_SYS_MODULE_PATH_AIX([$1])
+	 _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	  # Warning - without using the other run time loading flags,
+	  # -berok will link without error, but may produce a broken library.
+	  _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	  _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	  if test "$with_gnu_ld" = yes; then
+	    # We only use this code for GNU lds that support --whole-archive.
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	  else
+	    # Exported symbols can be pulled into shared objects from archives
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	  fi
+	  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  # This is similar to how AIX traditionally builds its shared libraries.
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+	fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[[45]]*)
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      case $cc_basename in
+      cl*)
+	# Native MSVC
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	_LT_TAGVAR(always_export_symbols, $1)=yes
+	_LT_TAGVAR(file_list_spec, $1)='@'
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	  else
+	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	  fi~
+	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	  linknames='
+	# The linker will not automatically build a static lib if we build a DLL.
+	# _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	_LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+	_LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
+	# Don't use ranlib
+	_LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+	_LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+	  lt_tool_outputfile="@TOOL_OUTPUT@"~
+	  case $lt_outputfile in
+	    *.exe|*.EXE) ;;
+	    *)
+	      lt_outputfile="$lt_outputfile.exe"
+	      lt_tool_outputfile="$lt_tool_outputfile.exe"
+	      ;;
+	  esac~
+	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	    $RM "$lt_outputfile.manifest";
+	  fi'
+	;;
+      *)
+	# Assume MSVC wrapper
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+	# The linker will automatically build a .lib file if we build a DLL.
+	_LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	# FIXME: Should let the user specify the lib program.
+	_LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+	_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	;;
+      esac
+      ;;
+
+    darwin* | rhapsody*)
+      _LT_DARWIN_LINKER_FEATURES($1)
+      ;;
+
+    dgux*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2.*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    hpux9*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+
+      # hardcode_minus_L: Not really in the search PATH,
+      # but as the default location of the library.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      ;;
+
+    hpux10*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# hardcode_minus_L: Not really in the search PATH,
+	# but as the default location of the library.
+	_LT_TAGVAR(hardcode_minus_L, $1)=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	esac
+      else
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	m4_if($1, [], [
+	  # Older versions of the 11.00 compiler do not understand -b yet
+	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+	  _LT_LINKER_OPTION([if $CC understands -b],
+	    _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b],
+	    [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'],
+	    [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])],
+	  [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'])
+	  ;;
+	esac
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	case $host_cpu in
+	hppa*64*|ia64*)
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  ;;
+	*)
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+
+	  # hardcode_minus_L: Not really in the search PATH,
+	  # but as the default location of the library.
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  ;;
+	esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	# Try to use the -exported_symbol ld option, if it does not
+	# work, assume that -exports_file does not work either and
+	# implicitly export all symbols.
+	# This should be the same for all languages, so no per-tag cache variable.
+	AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
+	  [lt_cv_irix_exported_symbol],
+	  [save_LDFLAGS="$LDFLAGS"
+	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+	   AC_LINK_IFELSE(
+	     [AC_LANG_SOURCE(
+	        [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
+			      [C++], [[int foo (void) { return 0; }]],
+			      [Fortran 77], [[
+      subroutine foo
+      end]],
+			      [Fortran], [[
+      subroutine foo
+      end]])])],
+	      [lt_cv_irix_exported_symbol=yes],
+	      [lt_cv_irix_exported_symbol=no])
+           LDFLAGS="$save_LDFLAGS"])
+	if test "$lt_cv_irix_exported_symbol" = yes; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+	fi
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(inherit_rpath, $1)=yes
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd*)
+      if test -f /usr/libexec/ld.so; then
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	else
+	  case $host_os in
+	   openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*)
+	     _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	     ;;
+	   *)
+	     _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	     ;;
+	  esac
+	fi
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    os2*)
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+      _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+      ;;
+
+    osf3*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    osf4* | osf5*)	# as osf3* with the addition of -msym flag
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+	# Both the C and C++ compilers support -rpath directly
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+      if test "$GCC" = yes; then
+	wlarc='${wl}'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+	case `$CC -V 2>&1` in
+	*"Compilers 5.0"*)
+	  wlarc=''
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+	  ;;
+	*)
+	  wlarc='${wl}'
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+	  ;;
+	esac
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      case $host_os in
+      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+      *)
+	# The compiler driver will combine and reorder linker options,
+	# but understands `-z linker_flag'.  GCC discards it without `$wl',
+	# but is careful enough not to reorder.
+	# Supported since Solaris 2.6 (maybe 2.5.1?)
+	if test "$GCC" = yes; then
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+	else
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	fi
+	;;
+      esac
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    sunos4*)
+      if test "x$host_vendor" = xsequent; then
+	# Use $CC to link under sequent, because it throws in some extra .o
+	# files that make .init and .fini sections work.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+	sni)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
+	;;
+	siemens)
+	  ## LD is ld; it makes a PLAMLIB
+	  ## CC just makes a GrossModule.
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+        ;;
+	motorola)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=no # Motorola manual says yes, but my tests say they lie
+	;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4.3*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	runpath_var=LD_RUN_PATH
+	hardcode_runpath_var=yes
+	_LT_TAGVAR(ld_shlibs, $1)=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We can NOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      _LT_TAGVAR(ld_shlibs, $1)=no
+      ;;
+    esac
+
+    if test x$host_vendor = xsni; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym'
+	;;
+      esac
+    fi
+  fi
+])
+AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
+
+_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
+_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
+_LT_DECL([], [extract_expsyms_cmds], [2],
+    [The commands to extract the exported symbol list from a shared archive])
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
+x|xyes)
+  # Assume -lc should be added
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+
+  if test "$enable_shared" = yes && test "$GCC" = yes; then
+    case $_LT_TAGVAR(archive_cmds, $1) in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc since on some
+      # systems, -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      AC_CACHE_CHECK([whether -lc should be explicitly linked in],
+	[lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1),
+	[$RM conftest*
+	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+	if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
+	  soname=conftest
+	  lib=conftest
+	  libobjs=conftest.$ac_objext
+	  deplibs=
+	  wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
+	  pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
+	  compiler_flags=-v
+	  linker_flags=-v
+	  verstring=
+	  output_objdir=.
+	  libname=conftest
+	  lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
+	  _LT_TAGVAR(allow_undefined_flag, $1)=
+	  if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
+	  then
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	  else
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  fi
+	  _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
+	else
+	  cat conftest.err 1>&5
+	fi
+	$RM conftest*
+	])
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)
+      ;;
+    esac
+  fi
+  ;;
+esac
+
+_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
+    [Whether or not to add -lc for building shared libraries])
+_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
+    [enable_shared_with_static_runtimes], [0],
+    [Whether or not to disallow shared libs when runtime libs are static])
+_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
+    [Compiler flag to allow reflexive dlopens])
+_LT_TAGDECL([], [whole_archive_flag_spec], [1],
+    [Compiler flag to generate shared objects directly from archives])
+_LT_TAGDECL([], [compiler_needs_object], [1],
+    [Whether the compiler copes with passing no objects directly])
+_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
+    [Create an old-style archive from a shared archive])
+_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
+    [Create a temporary old-style archive to link instead of a shared archive])
+_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
+_LT_TAGDECL([], [archive_expsym_cmds], [2])
+_LT_TAGDECL([], [module_cmds], [2],
+    [Commands used to build a loadable module if different from building
+    a shared archive.])
+_LT_TAGDECL([], [module_expsym_cmds], [2])
+_LT_TAGDECL([], [with_gnu_ld], [1],
+    [Whether we are building with GNU ld or not])
+_LT_TAGDECL([], [allow_undefined_flag], [1],
+    [Flag that allows shared libraries with undefined symbols to be built])
+_LT_TAGDECL([], [no_undefined_flag], [1],
+    [Flag that enforces no undefined symbols])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
+    [Flag to hardcode $libdir into a binary during linking.
+    This must work even if $libdir does not exist])
+_LT_TAGDECL([], [hardcode_libdir_separator], [1],
+    [Whether we need a single "-rpath" flag with a separated argument])
+_LT_TAGDECL([], [hardcode_direct], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary])
+_LT_TAGDECL([], [hardcode_direct_absolute], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary and the resulting library dependency is
+    "absolute", i.e impossible to change by setting ${shlibpath_var} if the
+    library is relocated])
+_LT_TAGDECL([], [hardcode_minus_L], [0],
+    [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
+    [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_automatic], [0],
+    [Set to "yes" if building a shared library automatically hardcodes DIR
+    into the library and all subsequent libraries and executables linked
+    against it])
+_LT_TAGDECL([], [inherit_rpath], [0],
+    [Set to yes if linker adds runtime paths of dependent libraries
+    to runtime path list])
+_LT_TAGDECL([], [link_all_deplibs], [0],
+    [Whether libtool must link a program against all its dependency libraries])
+_LT_TAGDECL([], [always_export_symbols], [0],
+    [Set to "yes" if exported symbols are required])
+_LT_TAGDECL([], [export_symbols_cmds], [2],
+    [The commands to list exported symbols])
+_LT_TAGDECL([], [exclude_expsyms], [1],
+    [Symbols that should not be listed in the preloaded symbols])
+_LT_TAGDECL([], [include_expsyms], [1],
+    [Symbols that must always be exported])
+_LT_TAGDECL([], [prelink_cmds], [2],
+    [Commands necessary for linking programs (against libraries) with templates])
+_LT_TAGDECL([], [postlink_cmds], [2],
+    [Commands necessary for finishing linking programs])
+_LT_TAGDECL([], [file_list_spec], [1],
+    [Specify filename containing input files])
+dnl FIXME: Not yet implemented
+dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
+dnl    [Compiler flag to generate thread safe objects])
+])# _LT_LINKER_SHLIBS
+
+
+# _LT_LANG_C_CONFIG([TAG])
+# ------------------------
+# Ensure that the configuration variables for a C compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_C_CONFIG],
+[m4_require([_LT_DECL_EGREP])dnl
+lt_save_CC="$CC"
+AC_LANG_PUSH(C)
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+_LT_TAG_COMPILER
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_SYS_DYNAMIC_LINKER($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+  LT_SYS_DLOPEN_SELF
+  _LT_CMD_STRIPLIB
+
+  # Report which library types will actually be built
+  AC_MSG_CHECKING([if libtool supports shared libraries])
+  AC_MSG_RESULT([$can_build_shared])
+
+  AC_MSG_CHECKING([whether to build shared libraries])
+  test "$can_build_shared" = "no" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test "$enable_shared" = yes && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[[4-9]]*)
+    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+      test "$enable_shared" = yes && enable_static=no
+    fi
+    ;;
+  esac
+  AC_MSG_RESULT([$enable_shared])
+
+  AC_MSG_CHECKING([whether to build static libraries])
+  # Make sure either enable_shared or enable_static is yes.
+  test "$enable_shared" = yes || enable_static=yes
+  AC_MSG_RESULT([$enable_static])
+
+  _LT_CONFIG($1)
+fi
+AC_LANG_POP
+CC="$lt_save_CC"
+])# _LT_LANG_C_CONFIG
+
+
+# _LT_LANG_CXX_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a C++ compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_CXX_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
+    ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
+    (test "X$CXX" != "Xg++"))) ; then
+  AC_PROG_CXXCPP
+else
+  _lt_caught_CXX_error=yes
+fi
+
+AC_LANG_PUSH(C++)
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(compiler_needs_object, $1)=no
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for C++ test sources.
+ac_ext=cpp
+
+# Object file extension for compiled C++ test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the CXX compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_caught_CXX_error" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="int some_variable = 0;"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC=$CC
+  lt_save_CFLAGS=$CFLAGS
+  lt_save_LD=$LD
+  lt_save_GCC=$GCC
+  GCC=$GXX
+  lt_save_with_gnu_ld=$with_gnu_ld
+  lt_save_path_LD=$lt_cv_path_LD
+  if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
+    lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
+  else
+    $as_unset lt_cv_prog_gnu_ld
+  fi
+  if test -n "${lt_cv_path_LDCXX+set}"; then
+    lt_cv_path_LD=$lt_cv_path_LDCXX
+  else
+    $as_unset lt_cv_path_LD
+  fi
+  test -z "${LDCXX+set}" || LD=$LDCXX
+  CC=${CXX-"c++"}
+  CFLAGS=$CXXFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    # We don't want -fno-exception when compiling C++ code, so set the
+    # no_builtin_flag separately
+    if test "$GXX" = yes; then
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
+    else
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+    fi
+
+    if test "$GXX" = yes; then
+      # Set up default GNU C++ configuration
+
+      LT_PATH_LD
+
+      # Check if GNU C++ uses GNU ld as the underlying linker, since the
+      # archiving commands below assume that GNU ld is being used.
+      if test "$with_gnu_ld" = yes; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+        # If archive_cmds runs LD, not CC, wlarc should be empty
+        # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
+        #     investigate it a little bit more. (MM)
+        wlarc='${wl}'
+
+        # ancient GNU ld didn't support --whole-archive et al.
+        if eval "`$CC -print-prog-name=ld` --help 2>&1" |
+	  $GREP 'no-whole-archive' > /dev/null; then
+          _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+        else
+          _LT_TAGVAR(whole_archive_flag_spec, $1)=
+        fi
+      else
+        with_gnu_ld=no
+        wlarc=
+
+        # A generic and very simple default shared library creation
+        # command for GNU C++ for the case where it uses the native
+        # linker, instead of GNU ld.  If possible, this setting should
+        # be overridden to take advantage of the native linker features on
+        # the platform it is being used on.
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+      fi
+
+      # Commands to make compiler produce verbose output that lists
+      # what "hidden" libraries, object files and flags are used when
+      # linking a shared library.
+      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+    else
+      GXX=no
+      with_gnu_ld=no
+      wlarc=
+    fi
+
+    # PORTME: fill in a description of your system's C++ link characteristics
+    AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+    _LT_TAGVAR(ld_shlibs, $1)=yes
+    case $host_os in
+      aix3*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+      aix[[4-9]]*)
+        if test "$host_cpu" = ia64; then
+          # On IA64, the linker does run time linking by default, so we don't
+          # have to do anything special.
+          aix_use_runtimelinking=no
+          exp_sym_flag='-Bexport'
+          no_entry_flag=""
+        else
+          aix_use_runtimelinking=no
+
+          # Test if we are trying to use run time linking or normal
+          # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+          # need to do runtime linking.
+          case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	    for ld_flag in $LDFLAGS; do
+	      case $ld_flag in
+	      *-brtl*)
+	        aix_use_runtimelinking=yes
+	        break
+	        ;;
+	      esac
+	    done
+	    ;;
+          esac
+
+          exp_sym_flag='-bexport'
+          no_entry_flag='-bnoentry'
+        fi
+
+        # When large executables or shared objects are built, AIX ld can
+        # have problems creating the table of contents.  If linking a library
+        # or program results in "error TOC overflow" add -mminimal-toc to
+        # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+        # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+        _LT_TAGVAR(archive_cmds, $1)=''
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+        if test "$GXX" = yes; then
+          case $host_os in aix4.[[012]]|aix4.[[012]].*)
+          # We only want to do this on AIX 4.2 and lower; the check
+          # below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	     strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	    # We have reworked collect2
+	    :
+	  else
+	    # We have old collect2
+	    _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	    # It fails to find uninstalled libraries when the uninstalled
+	    # path is not listed in the libpath.  Setting hardcode_minus_L
+	    # to unsupported forces relinking
+	    _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+          esac
+          shared_flag='-shared'
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag="$shared_flag "'${wl}-G'
+	  fi
+        else
+          # not using gcc
+          if test "$host_cpu" = ia64; then
+	  # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	  # chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+          else
+	    if test "$aix_use_runtimelinking" = yes; then
+	      shared_flag='${wl}-G'
+	    else
+	      shared_flag='${wl}-bM:SRE'
+	    fi
+          fi
+        fi
+
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+        # It seems that -bexpall does not export symbols beginning with
+        # underscore (_), so it is better to generate a list of symbols to
+	# export.
+        _LT_TAGVAR(always_export_symbols, $1)=yes
+        if test "$aix_use_runtimelinking" = yes; then
+          # Warning - without using the other runtime loading flags (-brtl),
+          # -berok will link without error, but may produce a broken library.
+          _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+          # Determine the default libpath from the value encoded in an empty
+          # executable.
+          _LT_SYS_MODULE_PATH_AIX([$1])
+          _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+        else
+          if test "$host_cpu" = ia64; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	    _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+          else
+	    # Determine the default libpath from the value encoded in an
+	    # empty executable.
+	    _LT_SYS_MODULE_PATH_AIX([$1])
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	    # Warning - without using the other run time loading flags,
+	    # -berok will link without error, but may produce a broken library.
+	    _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	    _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	    if test "$with_gnu_ld" = yes; then
+	      # We only use this code for GNU lds that support --whole-archive.
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    else
+	      # Exported symbols can be pulled into shared objects from archives
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	    fi
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	    # This is similar to how AIX traditionally builds its shared
+	    # libraries.
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+          fi
+        fi
+        ;;
+
+      beos*)
+	if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	  # support --undefined.  This deserves some investigation.  FIXME
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      chorus*)
+        case $cc_basename in
+          *)
+	  # FIXME: insert proper C++ library support
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	  ;;
+        esac
+        ;;
+
+      cygwin* | mingw* | pw32* | cegcc*)
+	case $GXX,$cc_basename in
+	,cl* | no,cl*)
+	  # Native MSVC
+	  # hardcode_libdir_flag_spec is actually meaningless, as there is
+	  # no search path for DLLs.
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  _LT_TAGVAR(always_export_symbols, $1)=yes
+	  _LT_TAGVAR(file_list_spec, $1)='@'
+	  # Tell ltmain to make .lib files, not .a files.
+	  libext=lib
+	  # Tell ltmain to make .dll files, not .so files.
+	  shrext_cmds=".dll"
+	  # FIXME: Setting linknames here is a bad hack.
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	      $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	    else
+	      $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	    fi~
+	    $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	    linknames='
+	  # The linker will not automatically build a static lib if we build a DLL.
+	  # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	  # Don't use ranlib
+	  _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+	  _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+	    lt_tool_outputfile="@TOOL_OUTPUT@"~
+	    case $lt_outputfile in
+	      *.exe|*.EXE) ;;
+	      *)
+		lt_outputfile="$lt_outputfile.exe"
+		lt_tool_outputfile="$lt_tool_outputfile.exe"
+		;;
+	    esac~
+	    func_to_tool_file "$lt_outputfile"~
+	    if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	      $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	      $RM "$lt_outputfile.manifest";
+	    fi'
+	  ;;
+	*)
+	  # g++
+	  # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+	  # as there is no search path for DLLs.
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  _LT_TAGVAR(always_export_symbols, $1)=no
+	  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+
+	  if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	    # If the export-symbols file already is a .def file (1st line
+	    # is EXPORTS), use it as is; otherwise, prepend...
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	      cp $export_symbols $output_objdir/$soname.def;
+	    else
+	      echo EXPORTS > $output_objdir/$soname.def;
+	      cat $export_symbols >> $output_objdir/$soname.def;
+	    fi~
+	    $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	  else
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	  fi
+	  ;;
+	esac
+	;;
+      darwin* | rhapsody*)
+        _LT_DARWIN_LINKER_FEATURES($1)
+	;;
+
+      dgux*)
+        case $cc_basename in
+          ec++*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          ghcx*)
+	    # Green Hills C++ Compiler
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      freebsd2.*)
+        # C++ shared libraries reported to be fairly broken before
+	# the switch to ELF
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      freebsd-elf*)
+        _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+        ;;
+
+      freebsd* | dragonfly*)
+        # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
+        # conventions
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+        ;;
+
+      gnu*)
+        ;;
+
+      haiku*)
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        ;;
+
+      hpux9*)
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+				             # but as the default
+				             # location of the library.
+
+        case $cc_basename in
+          CC*)
+            # FIXME: insert proper C++ library support
+            _LT_TAGVAR(ld_shlibs, $1)=no
+            ;;
+          aCC*)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            # Commands to make compiler produce verbose output that lists
+            # what "hidden" libraries, object files and flags are used when
+            # linking a shared library.
+            #
+            # There doesn't appear to be a way to prevent this compiler from
+            # explicitly linking system object files so we need to strip them
+            # from the output so that they don't get included in the library
+            # dependencies.
+            output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+            ;;
+          *)
+            if test "$GXX" = yes; then
+              _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            else
+              # FIXME: insert proper C++ library support
+              _LT_TAGVAR(ld_shlibs, $1)=no
+            fi
+            ;;
+        esac
+        ;;
+
+      hpux10*|hpux11*)
+        if test $with_gnu_ld = no; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+          case $host_cpu in
+            hppa*64*|ia64*)
+              ;;
+            *)
+	      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+              ;;
+          esac
+        fi
+        case $host_cpu in
+          hppa*64*|ia64*)
+            _LT_TAGVAR(hardcode_direct, $1)=no
+            _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+            ;;
+          *)
+            _LT_TAGVAR(hardcode_direct, $1)=yes
+            _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+					         # but as the default
+					         # location of the library.
+            ;;
+        esac
+
+        case $cc_basename in
+          CC*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          aCC*)
+	    case $host_cpu in
+	      hppa*64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      ia64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      *)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	    esac
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test $with_gnu_ld = no; then
+	        case $host_cpu in
+	          hppa*64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          ia64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          *)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	        esac
+	      fi
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      interix[[3-9]]*)
+	_LT_TAGVAR(hardcode_direct, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+	# Instead, shared libraries are loaded at an image base (0x10000000 by
+	# default) and relocated if they conflict, which is a slow, very memory
+	# consuming and fragmenting process.  To avoid this, we pick a random,
+	# 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+	# time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+	;;
+      irix5* | irix6*)
+        case $cc_basename in
+          CC*)
+	    # SGI C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -ar", where "CC" is the IRIX C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test "$with_gnu_ld" = no; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	      else
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+	      fi
+	    fi
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+	    ;;
+        esac
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(inherit_rpath, $1)=yes
+        ;;
+
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib'
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -Bstatic", where "CC" is the KAI C++ compiler.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
+	    ;;
+	  icpc* | ecpc* )
+	    # Intel C++
+	    with_gnu_ld=yes
+	    # version 8.0 and above of icpc choke on multiply defined symbols
+	    # if we add $predep_objects and $postdep_objects, however 7.1 and
+	    # earlier do not add the objects themselves.
+	    case `$CC -V 2>&1` in
+	      *"Version 7."*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	      *)  # Version 8.0 or newer
+	        tmp_idyn=
+	        case $host_cpu in
+		  ia64*) tmp_idyn=' -i_dynamic';;
+		esac
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	    esac
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    ;;
+          pgCC* | pgcpp*)
+            # Portland Group C++ compiler
+	    case `$CC -V` in
+	    *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*)
+	      _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
+		compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
+	      _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
+		$AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
+		$RANLIB $oldlib'
+	      _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    *) # Version 6 and above use weak symbols
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+            ;;
+	  cxx*)
+	    # Compaq C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname  -o $lib ${wl}-retain-symbols-file $wl$export_symbols'
+
+	    runpath_var=LD_RUN_PATH
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed'
+	    ;;
+	  xl* | mpixl* | bgxl*)
+	    # IBM XL 8.0 on PPC, with GNU ld
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    if test "x$supports_anon_versioning" = xyes; then
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+		cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+		echo "local: *; };" >> $output_objdir/$libname.ver~
+		$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+	    fi
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols'
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	      _LT_TAGVAR(compiler_needs_object, $1)=yes
+
+	      # Not sure whether something based on
+	      # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
+	      # would be better.
+	      output_verbose_link_cmd='func_echo_all'
+
+	      # Archives containing C++ object files must be created using
+	      # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	      # necessary to make sure instantiated templates are included
+	      # in the archive.
+	      _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+
+      lynxos*)
+        # FIXME: insert proper C++ library support
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      m88k*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      mvs*)
+        case $cc_basename in
+          cxx*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	  *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	esac
+	;;
+
+      netbsd*)
+        if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable  -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
+	  wlarc=
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	fi
+	# Work around some broken pre-1.5 toolchains
+	output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
+	;;
+
+      *nto* | *qnx*)
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+	;;
+
+      openbsd2*)
+        # C++ shared libraries are fairly broken
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      openbsd*)
+	if test -f /usr/libexec/ld.so; then
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+	  fi
+	  output_verbose_link_cmd=func_echo_all
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      osf3* | osf4* | osf5*)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Archives containing C++ object files must be created using
+	    # the KAI C++ compiler.
+	    case $host in
+	      osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;;
+	      *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
+	    esac
+	    ;;
+          RCC*)
+	    # Rational C++ 2.4.1
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          cxx*)
+	    case $host in
+	      osf3*)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+		;;
+	      *)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
+	          echo "-hidden">> $lib.exp~
+	          $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp  `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~
+	          $RM $lib.exp'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+		;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+	  *)
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	      case $host in
+	        osf3*)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	        *)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	      esac
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	      # Commands to make compiler produce verbose output that lists
+	      # what "hidden" libraries, object files and flags are used when
+	      # linking a shared library.
+	      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      psos*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      sunos4*)
+        case $cc_basename in
+          CC*)
+	    # Sun C++ 4.x
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          lcc*)
+	    # Lucid
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      solaris*)
+        case $cc_basename in
+          CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+            _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
+	    _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag}  -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	      $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	    _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	    case $host_os in
+	      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+	      *)
+		# The compiler driver will combine and reorder linker options,
+		# but understands `-z linker_flag'.
+	        # Supported since Solaris 2.6 (maybe 2.5.1?)
+		_LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	        ;;
+	    esac
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+
+	    output_verbose_link_cmd='func_echo_all'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	    ;;
+          gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+
+	    # The C++ compiler must be used to create the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    # GNU C++ compiler with Solaris linker
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
+	      if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      else
+	        # g++ 2.7 appears to require `-G' NOT `-shared' on this
+	        # platform.
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      fi
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir'
+	      case $host_os in
+		solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+		*)
+		  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+		  ;;
+	      esac
+	    fi
+	    ;;
+        esac
+        ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      case $cc_basename in
+        CC*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+      esac
+      ;;
+
+      sysv5* | sco3.2v5* | sco5v6*)
+	# Note: We can NOT use -z defs as we might desire, because we do not
+	# link with -lc, and that would cause any symbols used from libc to
+	# always be unresolved, which means just about no library would
+	# ever link correctly.  If we're not using GNU ld we use -z text
+	# though, which does catch some bad symbols but isn't as heavy-handed
+	# as -z defs.
+	_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+	_LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+	_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+	_LT_TAGVAR(link_all_deplibs, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+	runpath_var='LD_RUN_PATH'
+
+	case $cc_basename in
+          CC*)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~
+	      '"$_LT_TAGVAR(old_archive_cmds, $1)"
+	    _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~
+	      '"$_LT_TAGVAR(reload_cmds, $1)"
+	    ;;
+	  *)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    ;;
+	esac
+      ;;
+
+      tandem*)
+        case $cc_basename in
+          NCC*)
+	    # NonStop-UX NCC 3.20
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      vxworks*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      *)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+    esac
+
+    AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+    test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+    _LT_TAGVAR(GCC, $1)="$GXX"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+  LDCXX=$LD
+  LD=$lt_save_LD
+  GCC=$lt_save_GCC
+  with_gnu_ld=$lt_save_with_gnu_ld
+  lt_cv_path_LDCXX=$lt_cv_path_LD
+  lt_cv_path_LD=$lt_save_path_LD
+  lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
+  lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
+fi # test "$_lt_caught_CXX_error" != yes
+
+AC_LANG_POP
+])# _LT_LANG_CXX_CONFIG
+
+
+# _LT_FUNC_STRIPNAME_CNF
+# ----------------------
+# func_stripname_cnf prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+#
+# This function is identical to the (non-XSI) version of func_stripname,
+# except this one can be used by m4 code that may be executed by configure,
+# rather than the libtool script.
+m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
+AC_REQUIRE([_LT_DECL_SED])
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
+func_stripname_cnf ()
+{
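+  # When SUFFIX starts with a dot, escape it below so that $SED matches a
+  # literal dot rather than any single character.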
+  case ${2} in
+  .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+  *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+  esac
+} # func_stripname_cnf
+])# _LT_FUNC_STRIPNAME_CNF
+
+# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
+# ---------------------------------
+# Figure out "hidden" library dependencies from verbose
+# compiler output when linking a shared library.
+# Parse the compiler output and extract the necessary
+# objects, libraries and library flags.
+m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
+# Dependencies to place before and after the object being linked:
+_LT_TAGVAR(predep_objects, $1)=
+_LT_TAGVAR(postdep_objects, $1)=
+_LT_TAGVAR(predeps, $1)=
+_LT_TAGVAR(postdeps, $1)=
+_LT_TAGVAR(compiler_lib_search_path, $1)=
+
+dnl we can't use the lt_simple_compile_test_code here,
+dnl because it contains code intended for an executable,
+dnl not a library.  It's possible we should let each
+dnl tag define a new lt_????_link_test_code variable,
+dnl but it's only used here...
+m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
+int a;
+void foo (void) { a = 0; }
+_LT_EOF
+], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
+class Foo
+{
+public:
+  Foo (void) { a = 0; }
+private:
+  int a;
+};
+_LT_EOF
+], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer*4 a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
+public class foo {
+  private int a;
+  public void bar (void) {
+    a = 0;
+  }
+};
+_LT_EOF
+], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF
+package foo
+func foo() {
+}
+_LT_EOF
+])
+
+_lt_libdeps_save_CFLAGS=$CFLAGS
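+# If CC or CFLAGS enables link-time optimization, disable it for this probe;
+# otherwise the verbose link output below may not list the ordinary objects
+# and libraries we are trying to extract.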
+case "$CC $CFLAGS " in #(
+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
+*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
+esac
+
+dnl Parse the compiler output and extract the necessary
+dnl objects, libraries and library flags.
+if AC_TRY_EVAL(ac_compile); then
+  # Parse the compiler output and extract the necessary
+  # objects, libraries and library flags.
+
+  # Sentinel used to keep track of whether or not we are before
+  # the conftest object file.
+  pre_test_object_deps_done=no
+
+  for p in `eval "$output_verbose_link_cmd"`; do
+    case ${prev}${p} in
+
+    -L* | -R* | -l*)
+       # Some compilers place a space between "-{L,R}" and the path.
+       # Remove the space.
+       if test $p = "-L" ||
+          test $p = "-R"; then
+	 prev=$p
+	 continue
+       fi
+
+       # Expand the sysroot to ease extracting the directories later.
+       if test -z "$prev"; then
+         case $p in
+         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
+         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
+         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
+         esac
+       fi
+       case $p in
+       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
+       esac
+       if test "$pre_test_object_deps_done" = no; then
+	 case ${prev} in
+	 -L | -R)
+	   # Internal compiler library paths should come after those
+	   # provided by the user.  The postdeps already come after the
+	   # user-supplied libs so there is no need to process them.
+	   if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}"
+	   else
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}"
+	   fi
+	   ;;
+	 # The "-l" case would never come before the object being
+	 # linked, so don't bother handling this case.
+	 esac
+       else
+	 if test -z "$_LT_TAGVAR(postdeps, $1)"; then
+	   _LT_TAGVAR(postdeps, $1)="${prev}${p}"
+	 else
+	   _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
+	 fi
+       fi
+       prev=
+       ;;
+
+    *.lto.$objext) ;; # Ignore GCC LTO objects
+    *.$objext)
+       # This assumes that the test object file only shows up
+       # once in the compiler output.
+       if test "$p" = "conftest.$objext"; then
+	 pre_test_object_deps_done=yes
+	 continue
+       fi
+
+       if test "$pre_test_object_deps_done" = no; then
+	 if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
+	   _LT_TAGVAR(predep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
+	 fi
+       else
+	 if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
+	   _LT_TAGVAR(postdep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
+	 fi
+       fi
+       ;;
+
+    *) ;; # Ignore the rest.
+
+    esac
+  done
+
+  # Clean up.
+  rm -f a.out a.exe
+else
+  echo "libtool.m4: error: problem compiling $1 test program"
+fi
+
+$RM -f conftest.$objext
+CFLAGS=$_lt_libdeps_save_CFLAGS
+
+# PORTME: override above test on systems where it is broken
+m4_if([$1], [CXX],
+[case $host_os in
+interix[[3-9]]*)
+  # Interix 3.5 installs completely hosed .la files for C++, so rather than
+  # hack all around it, let's just trust "g++" to DTRT.
+  _LT_TAGVAR(predep_objects,$1)=
+  _LT_TAGVAR(postdep_objects,$1)=
+  _LT_TAGVAR(postdeps,$1)=
+  ;;
+
+linux*)
+  case `$CC -V 2>&1 | sed 5q` in
+  *Sun\ C*)
+    # Sun C++ 5.9
+
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+
+solaris*)
+  case $cc_basename in
+  CC* | sunCC*)
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    # Adding this requires a known-good setup of shared libraries for
+    # Sun compiler versions before 5.6, else PIC objects from an old
+    # archive will be linked into the output, leading to subtle bugs.
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+esac
+])
+
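+# If libc already appears among the post-dependencies, the archive commands do
+# not need to add -lc themselves.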
+case " $_LT_TAGVAR(postdeps, $1) " in
+*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
+esac
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=
+if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
+fi
+_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
+    [The directories searched by this compiler when creating a shared library])
+_LT_TAGDECL([], [predep_objects], [1],
+    [Dependencies to place before and after the objects being linked to
+    create a shared library])
+_LT_TAGDECL([], [postdep_objects], [1])
+_LT_TAGDECL([], [predeps], [1])
+_LT_TAGDECL([], [postdeps], [1])
+_LT_TAGDECL([], [compiler_lib_search_path], [1],
+    [The library search path used internally by the compiler when linking
+    a shared library])
+])# _LT_SYS_HIDDEN_LIBDEPS
+
+
+# _LT_LANG_F77_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a Fortran 77 compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_F77_CONFIG],
+[AC_LANG_PUSH(Fortran 77)
+if test -z "$F77" || test "X$F77" = "Xno"; then
+  _lt_disable_F77=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for f77 test sources.
+ac_ext=f
+
+# Object file extension for compiled f77 test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the F77 compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_F77" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${F77-"f77"}
+  CFLAGS=$FFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+  GCC=$G77
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$G77"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC="$lt_save_CC"
+  CFLAGS="$lt_save_CFLAGS"
+fi # test "$_lt_disable_F77" != yes
+
+AC_LANG_POP
+])# _LT_LANG_F77_CONFIG
+
+
+# _LT_LANG_FC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for a Fortran compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_FC_CONFIG],
+[AC_LANG_PUSH(Fortran)
+
+if test -z "$FC" || test "X$FC" = "Xno"; then
+  _lt_disable_FC=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for fc test sources.
+ac_ext=${ac_fc_srcext-f}
+
+# Object file extension for compiled fc test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the FC compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_FC" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${FC-"f95"}
+  CFLAGS=$FCFLAGS
+  compiler=$CC
+  GCC=$ac_cv_fc_compiler_gnu
+
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+fi # test "$_lt_disable_FC" != yes
+
+AC_LANG_POP
+])# _LT_LANG_FC_CONFIG
+
+
+# _LT_LANG_GCJ_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Java Compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GCJ_CONFIG],
+[AC_REQUIRE([LT_PROG_GCJ])dnl
+AC_LANG_SAVE
+
+# Source file extension for Java test sources.
+ac_ext=java
+
+# Object file extension for compiled Java test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="class foo {}"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GCJ-"gcj"}
+CFLAGS=$GCJFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# GCJ did not exist in the era when GCC did not implicitly link libc in.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GCJ_CONFIG
+
+
+# _LT_LANG_GO_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Go compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GO_CONFIG],
+[AC_REQUIRE([LT_PROG_GO])dnl
+AC_LANG_SAVE
+
+# Source file extension for Go test sources.
+ac_ext=go
+
+# Object file extension for compiled Go test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="package main; func main() { }"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='package main; func main() { }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GOC-"gccgo"}
+CFLAGS=$GOFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# Go did not exist in the era when GCC did not implicitly link libc in.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GO_CONFIG
+
+
+# _LT_LANG_RC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the Windows resource compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_RC_CONFIG],
+[AC_REQUIRE([LT_PROG_RC])dnl
+AC_LANG_SAVE
+
+# Source file extension for RC test sources.
+ac_ext=rc
+
+# Object file extension for compiled RC test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
+
+# Code to be used in simple link tests
+lt_simple_link_test_code="$lt_simple_compile_test_code"
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC="$CC"
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=
+CC=${RC-"windres"}
+CFLAGS=
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_CC_BASENAME([$compiler])
+_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+
+if test -n "$compiler"; then
+  :
+  _LT_CONFIG($1)
+fi
+
+GCC=$lt_save_GCC
+AC_LANG_RESTORE
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_RC_CONFIG
+
+
+# LT_PROG_GCJ
+# -----------
+AC_DEFUN([LT_PROG_GCJ],
+[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
+  [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
+    [AC_CHECK_TOOL(GCJ, gcj,)
+      test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2"
+      AC_SUBST(GCJFLAGS)])])[]dnl
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
+
+
+# LT_PROG_GO
+# ----------
+AC_DEFUN([LT_PROG_GO],
+[AC_CHECK_TOOL(GOC, gccgo,)
+])
+
+
+# LT_PROG_RC
+# ----------
+AC_DEFUN([LT_PROG_RC],
+[AC_CHECK_TOOL(RC, windres,)
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_RC], [])
+
+
+# _LT_DECL_EGREP
+# --------------
+# If we don't have a new enough Autoconf to choose the best grep
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_EGREP],
+[AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_REQUIRE([AC_PROG_FGREP])dnl
+test -z "$GREP" && GREP=grep
+_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
+_LT_DECL([], [EGREP], [1], [An ERE matcher])
+_LT_DECL([], [FGREP], [1], [A literal string matcher])
+dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
+AC_SUBST([GREP])
+])
+
+
+# _LT_DECL_OBJDUMP
+# ----------------
+# If we don't have a new enough Autoconf to choose the best objdump
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_OBJDUMP],
+[AC_CHECK_TOOL(OBJDUMP, objdump, false)
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+AC_SUBST([OBJDUMP])
+])
+
+# _LT_DECL_DLLTOOL
+# ----------------
+# Ensure DLLTOOL variable is set.
+m4_defun([_LT_DECL_DLLTOOL],
+[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
+AC_SUBST([DLLTOOL])
+])
+
+# _LT_DECL_SED
+# ------------
+# Check for a fully-functional sed program that truncates
+# as few characters as possible.  Prefer GNU sed if found.
+m4_defun([_LT_DECL_SED],
+[AC_PROG_SED
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
+_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
+    [Sed that helps us avoid accidentally triggering echo(1) options like -n])
+])# _LT_DECL_SED
+
+m4_ifndef([AC_PROG_SED], [
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_SED.  When it is available in   #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+
+m4_defun([AC_PROG_SED],
+[AC_MSG_CHECKING([for a sed that does not truncate output])
+AC_CACHE_VAL(lt_cv_path_SED,
+[# Loop through the user's path and test for sed and gsed.
+# Then use that list of sed's as ones to test for truncation.
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for lt_ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
+        lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
+      fi
+    done
+  done
+done
+IFS=$as_save_IFS
+lt_ac_max=0
+lt_ac_count=0
+# Add /usr/xpg4/bin/sed as it is typically found on Solaris
+# along with /bin/sed that truncates output.
+for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
+  test ! -f $lt_ac_sed && continue
+  cat /dev/null > conftest.in
+  lt_ac_count=0
+  echo $ECHO_N "0123456789$ECHO_C" >conftest.in
+  # Check for GNU sed and select it if it is found.
+  if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
+    lt_cv_path_SED=$lt_ac_sed
+    break
+  fi
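+  # Keep doubling the input file; a sed that truncates long lines will
+  # eventually produce output that no longer matches the input.  The sed
+  # that survives the most doublings wins.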
+  while true; do
+    cat conftest.in conftest.in >conftest.tmp
+    mv conftest.tmp conftest.in
+    cp conftest.in conftest.nl
+    echo >>conftest.nl
+    $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
+    cmp -s conftest.out conftest.nl || break
+    # 10000 chars as input seems more than enough
+    test $lt_ac_count -gt 10 && break
+    lt_ac_count=`expr $lt_ac_count + 1`
+    if test $lt_ac_count -gt $lt_ac_max; then
+      lt_ac_max=$lt_ac_count
+      lt_cv_path_SED=$lt_ac_sed
+    fi
+  done
+done
+])
+SED=$lt_cv_path_SED
+AC_SUBST([SED])
+AC_MSG_RESULT([$SED])
+])#AC_PROG_SED
+])#m4_ifndef
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_SED], [])
+
+
+# _LT_CHECK_SHELL_FEATURES
+# ------------------------
+# Find out whether the shell is Bourne or XSI compatible,
+# or has some other useful features.
+m4_defun([_LT_CHECK_SHELL_FEATURES],
+[AC_MSG_CHECKING([whether the shell understands some XSI constructs])
+# Try some XSI features
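+# (parameter prefix/suffix stripping, shell arithmetic and string-length
+# expansion, which the extended shell function replacements depend on).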
+xsi_shell=no
+( _lt_dummy="a/b/c"
+  test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
+      = c,a/b,b/c, \
+    && eval 'test $(( 1 + 1 )) -eq 2 \
+    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+  && xsi_shell=yes
+AC_MSG_RESULT([$xsi_shell])
+_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell'])
+
+AC_MSG_CHECKING([whether the shell understands "+="])
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \
+    >/dev/null 2>&1 \
+  && lt_shell_append=yes
+AC_MSG_RESULT([$lt_shell_append])
+_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append'])
+
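+# Some shells lack a working `unset'; record whether this one has it.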
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
+
+# test EBCDIC or ASCII
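+# (octal \101 is "A" on an ASCII system but not on an EBCDIC one, so the
+# result of the translation tells the two character sets apart).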
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
+_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
+_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+])# _LT_CHECK_SHELL_FEATURES
+
+
+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY)
+# ------------------------------------------------------
+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and
+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY.
+m4_defun([_LT_PROG_FUNCTION_REPLACE],
+[dnl {
+sed -e '/^$1 ()$/,/^} # $1 /c\
+$1 ()\
+{\
+m4_bpatsubsts([$2], [$], [\\], [^\([	 ]\)], [\\\1])
+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+])
+
+
+# _LT_PROG_REPLACE_SHELLFNS
+# -------------------------
+# Replace existing portable implementations of several shell functions with
+# equivalent extended shell implementations where those features are available.
+m4_defun([_LT_PROG_REPLACE_SHELLFNS],
+[if test x"$xsi_shell" = xyes; then
+  _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac])
+
+  _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl
+    func_basename_result="${1##*/}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+    func_basename_result="${1##*/}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl
+    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+    # positional parameters, so assign one to an ordinary parameter first.
+    func_stripname_result=${3}
+    func_stripname_result=${func_stripname_result#"${1}"}
+    func_stripname_result=${func_stripname_result%"${2}"}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl
+    func_split_long_opt_name=${1%%=*}
+    func_split_long_opt_arg=${1#*=}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl
+    func_split_short_opt_arg=${1#??}
+    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl
+    case ${1} in
+      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+      *)    func_lo2o_result=${1} ;;
+    esac])
+
+  _LT_PROG_FUNCTION_REPLACE([func_xform], [    func_xform_result=${1%.*}.lo])
+
+  _LT_PROG_FUNCTION_REPLACE([func_arith], [    func_arith_result=$(( $[*] ))])
+
+  _LT_PROG_FUNCTION_REPLACE([func_len], [    func_len_result=${#1}])
+fi
+
+if test x"$lt_shell_append" = xyes; then
+  _LT_PROG_FUNCTION_REPLACE([func_append], [    eval "${1}+=\\${2}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl
+    func_quote_for_eval "${2}"
+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \
+    eval "${1}+=\\\\ \\$func_quote_for_eval_result"])
+
+  # Save a `func_append' function call where possible by direct use of '+='
+  sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+else
+  # Save a `func_append' function call even when '+=' is not available
+  sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+fi
+
+if test x"$_lt_function_replace_fail" = x":"; then
+  AC_MSG_WARN([Unable to substitute extended shell functions in $ofile])
+fi
+])
+
+# _LT_PATH_CONVERSION_FUNCTIONS
+# -----------------------------
+# Determine which file name conversion functions should be used by
+# func_to_host_file (and, implicitly, by func_to_host_path).  These are needed
+# for certain cross-compile configurations and native mingw.
+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_MSG_CHECKING([how to convert $build file names to $host format])
+AC_CACHE_VAL(lt_cv_to_host_file_cmd,
+[case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+        ;;
+    esac
+    ;;
+  *-*-cygwin* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_noop
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+        ;;
+    esac
+    ;;
+  * ) # unhandled hosts (and "normal" native builds)
+    lt_cv_to_host_file_cmd=func_convert_file_noop
+    ;;
+esac
+])
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
+         [0], [convert $build file names to $host format])dnl
+
+AC_MSG_CHECKING([how to convert $build file names to toolchain format])
+AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
+[#assume ordinary cross tools, or native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+        ;;
+    esac
+    ;;
+esac
+])
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
+         [0], [convert $build files to toolchain format])dnl
+])# _LT_PATH_CONVERSION_FUNCTIONS
+
+# Helper functions for option handling.                    -*- Autoconf -*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation,
+#   Inc.
+#   Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 7 ltoptions.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+
+
+# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
+# ------------------------------------------
+m4_define([_LT_MANGLE_OPTION],
+[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
+
+
+# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
+# ---------------------------------------
+# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
+# matching handler defined, dispatch to it.  Other OPTION-NAMEs are
+# saved as a flag.
+m4_define([_LT_SET_OPTION],
+[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
+m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
+        _LT_MANGLE_DEFUN([$1], [$2]),
+    [m4_warning([Unknown $1 option `$2'])])[]dnl
+])
+
+
+# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
+# ------------------------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+m4_define([_LT_IF_OPTION],
+[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
+
+
+# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
+# -------------------------------------------------------
+# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
+# are set.
+m4_define([_LT_UNLESS_OPTIONS],
+[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+	    [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
+		      [m4_define([$0_found])])])[]dnl
+m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
+])[]dnl
+])
+
+
+# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
+# ----------------------------------------
+# OPTION-LIST is a space-separated list of Libtool options associated
+# with MACRO-NAME.  If any OPTION has a matching handler declared with
+# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
+# the unknown option and exit.
+m4_defun([_LT_SET_OPTIONS],
+[# Set options
+m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+    [_LT_SET_OPTION([$1], _LT_Option)])
+
+m4_if([$1],[LT_INIT],[
+  dnl
+  dnl Simply set some default values (i.e off) if boolean options were not
+  dnl specified:
+  _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
+  ])
+  _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
+  ])
+  dnl
+  dnl If no reference was made to various pairs of opposing options, then
+  dnl we run the default mode handler for the pair.  For example, if neither
+  dnl `shared' nor `disable-shared' was passed, we enable building of shared
+  dnl archives by default:
+  _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
+  _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
+  		   [_LT_ENABLE_FAST_INSTALL])
+  ])
+])# _LT_SET_OPTIONS
+
+
+
+# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
+# -----------------------------------------
+m4_define([_LT_MANGLE_DEFUN],
+[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
+
+
+# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
+# -----------------------------------------------
+m4_define([LT_OPTION_DEFINE],
+[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
+])# LT_OPTION_DEFINE
+
+
+# dlopen
+# ------
+LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
+])
+
+AU_DEFUN([AC_LIBTOOL_DLOPEN],
+[_LT_SET_OPTION([LT_INIT], [dlopen])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `dlopen' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
+
+
+# win32-dll
+# ---------
+# Declare package support for building win32 dll's.
+LT_OPTION_DEFINE([LT_INIT], [win32-dll],
+[enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+  AC_CHECK_TOOL(AS, as, false)
+  AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+  AC_CHECK_TOOL(OBJDUMP, objdump, false)
+  ;;
+esac
+
+test -z "$AS" && AS=as
+_LT_DECL([], [AS],      [1], [Assembler program])dnl
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
+])# win32-dll
+
+AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+_LT_SET_OPTION([LT_INIT], [win32-dll])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `win32-dll' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
+
+
+# _LT_ENABLE_SHARED([DEFAULT])
+# ----------------------------
+# implement the --enable-shared flag, and support the `shared' and
+# `disable-shared' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_SHARED],
+[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([shared],
+    [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
+	[build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_shared=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
+
+    _LT_DECL([build_libtool_libs], [enable_shared], [0],
+	[Whether or not to build shared libraries])
+])# _LT_ENABLE_SHARED
+
+LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
+])
+
+AC_DEFUN([AC_DISABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], [disable-shared])
+])
+
+AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
+AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_SHARED], [])
+dnl AC_DEFUN([AM_DISABLE_SHARED], [])
+
+
+
+# _LT_ENABLE_STATIC([DEFAULT])
+# ----------------------------
+# implement the --enable-static flag, and support the `static' and
+# `disable-static' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_STATIC],
+[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([static],
+    [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
+	[build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+     enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_static=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
+
+    _LT_DECL([build_old_libs], [enable_static], [0],
+	[Whether or not to build static libraries])
+])# _LT_ENABLE_STATIC
+
+LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
+])
+
+AC_DEFUN([AC_DISABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], [disable-static])
+])
+
+AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
+AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_STATIC], [])
+dnl AC_DEFUN([AM_DISABLE_STATIC], [])
+
+
+
+# _LT_ENABLE_FAST_INSTALL([DEFAULT])
+# ----------------------------------
+# implement the --enable-fast-install flag, and support the `fast-install'
+# and `disable-fast-install' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_FAST_INSTALL],
+[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([fast-install],
+    [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
+    [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_fast_install=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
+
+_LT_DECL([fast_install], [enable_fast_install], [0],
+	 [Whether or not to optimize for fast installation])dnl
+])# _LT_ENABLE_FAST_INSTALL
+
+LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
+
+# Old names:
+AU_DEFUN([AC_ENABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `fast-install' option into LT_INIT's first parameter.])
+])
+
+AU_DEFUN([AC_DISABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `disable-fast-install' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
+dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
+
+
+# _LT_WITH_PIC([MODE])
+# --------------------
+# implement the --with-pic flag, and support the `pic-only' and `no-pic'
+# LT_INIT options.
+# MODE is either `yes' or `no'.  If omitted, it defaults to `both'.
+m4_define([_LT_WITH_PIC],
+[AC_ARG_WITH([pic],
+    [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@],
+	[try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
+    [lt_p=${PACKAGE-default}
+    case $withval in
+    yes|no) pic_mode=$withval ;;
+    *)
+      pic_mode=default
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for lt_pkg in $withval; do
+	IFS="$lt_save_ifs"
+	if test "X$lt_pkg" = "X$lt_p"; then
+	  pic_mode=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [pic_mode=default])
+
+test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
+
+_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
+])# _LT_WITH_PIC
+
+LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
+
+# Old name:
+AU_DEFUN([AC_LIBTOOL_PICMODE],
+[_LT_SET_OPTION([LT_INIT], [pic-only])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `pic-only' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
+
+
+m4_define([_LTDL_MODE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
+		 [m4_define([_LTDL_MODE], [nonrecursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [recursive],
+		 [m4_define([_LTDL_MODE], [recursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [subproject],
+		 [m4_define([_LTDL_MODE], [subproject])])
+
+m4_define([_LTDL_TYPE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [installable],
+		 [m4_define([_LTDL_TYPE], [installable])])
+LT_OPTION_DEFINE([LTDL_INIT], [convenience],
+		 [m4_define([_LTDL_TYPE], [convenience])])
+
+# ltsugar.m4 -- libtool m4 base layer.                         -*-Autoconf-*-
+#
+# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 6 ltsugar.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
+
+
+# lt_join(SEP, ARG1, [ARG2...])
+# -----------------------------
+# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
+# associated separator.
+# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
+# versions in m4sugar had bugs.
+m4_define([lt_join],
+[m4_if([$#], [1], [],
+       [$#], [2], [[$2]],
+       [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
+m4_define([_lt_join],
+[m4_if([$#$2], [2], [],
+       [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
+
+
+# lt_car(LIST)
+# lt_cdr(LIST)
+# ------------
+# Manipulate m4 lists.
+# These macros are necessary as long as we still need to support
+# Autoconf-2.59 which quotes differently.
+m4_define([lt_car], [[$1]])
+m4_define([lt_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+       [$#], 1, [],
+       [m4_dquote(m4_shift($@))])])
+m4_define([lt_unquote], $1)
+
+
+# lt_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'.
+# Note that neither SEPARATOR nor STRING are expanded; they are appended
+# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
+# No SEPARATOR is output if MACRO-NAME was previously undefined (different
+# than defined and empty).
+#
+# This macro is needed until we can rely on Autoconf 2.62, since earlier
+# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
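+#
+# Purely illustrative expansion (the macro name `my_list' is a made-up
+# example, not something defined by libtool):
+#
+#   lt_append([my_list], [a])          => my_list expands to `a'
+#   lt_append([my_list], [b], [, ])    => my_list expands to `a, b'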
+m4_define([lt_append],
+[m4_define([$1],
+	   m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
+
+
+
+# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
+# ----------------------------------------------------------
+# Produce a SEP delimited list of all paired combinations of elements of
+# PREFIX-LIST with SUFFIX1 through SUFFIXn.  Each element of the list
+# has the form PREFIXmINFIXSUFFIXn.
+# Needed until we can rely on m4_combine added in Autoconf 2.62.
+m4_define([lt_combine],
+[m4_if(m4_eval([$# > 3]), [1],
+       [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
+[[m4_foreach([_Lt_prefix], [$2],
+	     [m4_foreach([_Lt_suffix],
+		]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
+	[_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
+
+
+# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
+# -----------------------------------------------------------------------
+# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
+# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
+m4_define([lt_if_append_uniq],
+[m4_ifdef([$1],
+	  [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
+		 [lt_append([$1], [$2], [$3])$4],
+		 [$5])],
+	  [lt_append([$1], [$2], [$3])$4])])
+
+
+# lt_dict_add(DICT, KEY, VALUE)
+# -----------------------------
+m4_define([lt_dict_add],
+[m4_define([$1($2)], [$3])])
+
+
+# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
+# --------------------------------------------
+m4_define([lt_dict_add_subkey],
+[m4_define([$1($2:$3)], [$4])])
+
+
+# lt_dict_fetch(DICT, KEY, [SUBKEY])
+# ----------------------------------
+m4_define([lt_dict_fetch],
+[m4_ifval([$3],
+	m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
+    m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
+
+
+# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
+# -----------------------------------------------------------------
+m4_define([lt_if_dict_fetch],
+[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
+	[$5],
+    [$6])])
+
+
+# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
+# --------------------------------------------------------------
+m4_define([lt_dict_filter],
+[m4_if([$5], [], [],
+  [lt_join(m4_quote(m4_default([$4], [[, ]])),
+           lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
+		      [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
+])
+
+# ltversion.m4 -- version numbers			-*- Autoconf -*-
+#
+#   Copyright (C) 2004 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# @configure_input@
+
+# serial 3337 ltversion.m4
+# This file is part of GNU Libtool
+
+m4_define([LT_PACKAGE_VERSION], [2.4.2])
+m4_define([LT_PACKAGE_REVISION], [1.3337])
+
+AC_DEFUN([LTVERSION_VERSION],
+[macro_version='2.4.2'
+macro_revision='1.3337'
+_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+_LT_DECL(, macro_revision, 0)
+])
+
+# lt~obsolete.m4 -- aclocal satisfying obsolete definitions.    -*-Autoconf-*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004.
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 lt~obsolete.m4
+
+# These exist entirely to fool aclocal when bootstrapping libtool.
+#
+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
+# which have later been changed to m4_define as they aren't part of the
+# exported API, or moved to Autoconf or Automake where they belong.
+#
+# The trouble is, aclocal is a bit thick.  It'll see the old AC_DEFUN
+# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
+# using a macro with the same name in our local m4/libtool.m4 it'll
+# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
+# and doesn't know about Autoconf macros at all.)
+#
+# So we provide this file, which has a silly filename so it's always
+# included after everything else.  This provides aclocal with the
+# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
+# because those macros already exist, or will be overwritten later.
+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. 
+#
+# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
+# Yes, that means every name once taken will need to remain here until
+# we give up compatibility with versions before 1.7, at which point
+# we need to keep only those names which we still refer to.
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
+
+m4_ifndef([AC_LIBTOOL_LINKER_OPTION],	[AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
+m4_ifndef([AC_PROG_EGREP],		[AC_DEFUN([AC_PROG_EGREP])])
+m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_AC_SHELL_INIT],		[AC_DEFUN([_LT_AC_SHELL_INIT])])
+m4_ifndef([_LT_AC_SYS_LIBPATH_AIX],	[AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
+m4_ifndef([_LT_PROG_LTMAIN],		[AC_DEFUN([_LT_PROG_LTMAIN])])
+m4_ifndef([_LT_AC_TAGVAR],		[AC_DEFUN([_LT_AC_TAGVAR])])
+m4_ifndef([AC_LTDL_ENABLE_INSTALL],	[AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
+m4_ifndef([AC_LTDL_PREOPEN],		[AC_DEFUN([AC_LTDL_PREOPEN])])
+m4_ifndef([_LT_AC_SYS_COMPILER],	[AC_DEFUN([_LT_AC_SYS_COMPILER])])
+m4_ifndef([_LT_AC_LOCK],		[AC_DEFUN([_LT_AC_LOCK])])
+m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE],	[AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
+m4_ifndef([_LT_AC_TRY_DLOPEN_SELF],	[AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
+m4_ifndef([AC_LIBTOOL_PROG_CC_C_O],	[AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
+m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
+m4_ifndef([AC_LIBTOOL_OBJDIR],		[AC_DEFUN([AC_LIBTOOL_OBJDIR])])
+m4_ifndef([AC_LTDL_OBJDIR],		[AC_DEFUN([AC_LTDL_OBJDIR])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
+m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP],	[AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
+m4_ifndef([AC_PATH_MAGIC],		[AC_DEFUN([AC_PATH_MAGIC])])
+m4_ifndef([AC_PROG_LD_GNU],		[AC_DEFUN([AC_PROG_LD_GNU])])
+m4_ifndef([AC_PROG_LD_RELOAD_FLAG],	[AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
+m4_ifndef([AC_DEPLIBS_CHECK_METHOD],	[AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
+m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS],	[AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
+m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP],	[AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
+m4_ifndef([LT_AC_PROG_EGREP],		[AC_DEFUN([LT_AC_PROG_EGREP])])
+m4_ifndef([LT_AC_PROG_SED],		[AC_DEFUN([LT_AC_PROG_SED])])
+m4_ifndef([_LT_CC_BASENAME],		[AC_DEFUN([_LT_CC_BASENAME])])
+m4_ifndef([_LT_COMPILER_BOILERPLATE],	[AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
+m4_ifndef([_LT_LINKER_BOILERPLATE],	[AC_DEFUN([_LT_LINKER_BOILERPLATE])])
+m4_ifndef([_AC_PROG_LIBTOOL],		[AC_DEFUN([_AC_PROG_LIBTOOL])])
+m4_ifndef([AC_LIBTOOL_SETUP],		[AC_DEFUN([AC_LIBTOOL_SETUP])])
+m4_ifndef([_LT_AC_CHECK_DLFCN],		[AC_DEFUN([_LT_AC_CHECK_DLFCN])])
+m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER],	[AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
+m4_ifndef([_LT_AC_TAGCONFIG],		[AC_DEFUN([_LT_AC_TAGCONFIG])])
+m4_ifndef([AC_DISABLE_FAST_INSTALL],	[AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
+m4_ifndef([_LT_AC_LANG_CXX],		[AC_DEFUN([_LT_AC_LANG_CXX])])
+m4_ifndef([_LT_AC_LANG_F77],		[AC_DEFUN([_LT_AC_LANG_F77])])
+m4_ifndef([_LT_AC_LANG_GCJ],		[AC_DEFUN([_LT_AC_LANG_GCJ])])
+m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
+m4_ifndef([_LT_AC_LANG_C_CONFIG],	[AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
+m4_ifndef([_LT_AC_LANG_CXX_CONFIG],	[AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
+m4_ifndef([_LT_AC_LANG_F77_CONFIG],	[AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
+m4_ifndef([_LT_AC_LANG_GCJ_CONFIG],	[AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
+m4_ifndef([_LT_AC_LANG_RC_CONFIG],	[AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
+m4_ifndef([AC_LIBTOOL_CONFIG],		[AC_DEFUN([AC_LIBTOOL_CONFIG])])
+m4_ifndef([_LT_AC_FILE_LTDLL_C],	[AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
+m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS],	[AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
+m4_ifndef([_LT_AC_PROG_CXXCPP],		[AC_DEFUN([_LT_AC_PROG_CXXCPP])])
+m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS],	[AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
+m4_ifndef([_LT_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_PROG_F77],		[AC_DEFUN([_LT_PROG_F77])])
+m4_ifndef([_LT_PROG_FC],		[AC_DEFUN([_LT_PROG_FC])])
+m4_ifndef([_LT_PROG_CXX],		[AC_DEFUN([_LT_PROG_CXX])])
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-debug configuration option to the package and controls
+#      whether the package will be built for debug instances of programs
+#      and libraries.
+#
+
+#
+# NL_ENABLE_DEBUG(default)
+#
+#   default - Whether the option should be enabled (yes) or disabled (no)
+#             by default.
+#
+# Adds an --enable-debug configuration option to the package with a
+# default value of 'default' (should be either 'no' or 'yes') and controls
+# whether the package will be built with or without -DDEBUG enabled.
+#
+# The value 'nl_cv_build_debug' will be set to the result. In
+# addition, the contents of CFLAGS, CXXFLAGS, OBJCFLAGS, and
+# OBJCXXFLAGS may be altered by the use of this macro, adding -DDEBUG
+# if this option is asserted.
+#
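+# For illustration only (the default shown and the conditional name are
+# assumptions, not taken from this package), a configure.ac might use:
+#
+#   NL_ENABLE_DEBUG([no])
+#
+#   AM_CONDITIONAL([BUILD_DEBUG], [test "${nl_cv_build_debug}" = "yes"])
+#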
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_DEBUG],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'yes' or 'no'])])
+
+    AC_CACHE_CHECK([whether to build debug instances of programs and libraries],
+        nl_cv_build_debug,
+        [
+            AC_ARG_ENABLE(debug,
+                [AS_HELP_STRING([--enable-debug],[Enable the generation of debug instances @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    no|yes)
+                        nl_cv_build_debug=${enableval}
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-debug])
+                        ;;
+
+                    esac
+                ],
+                [
+                    nl_cv_build_debug=$1
+                ])
+
+            if test "${nl_cv_build_debug}" = "yes"; then
+                CFLAGS="${CFLAGS} -DDEBUG"
+                CXXFLAGS="${CXXFLAGS} -DDEBUG"
+                OBJCFLAGS="${OBJCFLAGS} -DDEBUG"
+                OBJCXXFLAGS="${OBJCXXFLAGS} -DDEBUG"
+            fi
+    ])
+])
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --disable-docs configuration option to the package and controls
+#      whether the package will be built with or without documentation.
+#
+
+#
+# NL_ENABLE_DOCS(default, dot_default)
+#
+#   default     - Whether the option should be automatic (auto), enabled
+#                 (yes), or disabled (no) by default.
+#   dot_default - Whether Doxygen should use (YES) or not use (NO)
+#                 GraphViz dot.
+#
+# Adds an --disable-docs configuration option to the package with a
+# default value of 'default' (should be 'auto', 'no' or 'yes') and
+# controls whether the package will be built with or without Doxygen-based
+# documentation.
+#
+# The value 'nl_cv_build_docs' will be set to the result. In addition:
+#
+#   DOXYGEN         - Will be set to the path of the Doxygen executable.
+#   DOT             - Will be set to the path of the GraphViz dot
+#                     executable.
+#   DOXYGEN_USE_DOT - Will be set to 'NO' or 'YES' depending on whether
+#                     GraphViz dot is available.
+#
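+# For illustration only (the argument values and the conditional name
+# are assumptions, not taken from this package), a configure.ac might
+# invoke:
+#
+#   NL_ENABLE_DOCS([auto],[NO])
+#
+#   AM_CONDITIONAL([BUILD_DOCS], [test "${nl_cv_build_docs}" = "yes"])
+#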
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_DOCS],
+[
+    # Check whether or not the 'default' value is sane.
+
+    m4_case([$1],
+        [auto],[],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'auto', 'yes' or 'no'])])
+
+    # Check whether or not the 'dot_default' value is sane.
+
+    m4_case([$2],
+        [YES],[],
+        [NO],[],
+        [m4_fatal([$0: invalid default value '$2'; must be 'YES' or 'NO'])])
+
+    DOXYGEN_USE_DOT=$2
+
+    AC_ARG_VAR(DOXYGEN, [Doxygen executable])
+    AC_ARG_VAR(DOT,     [GraphViz 'dot' executable, which may be used, when present, to generate Doxygen class graphs])
+
+    AC_PATH_PROG(DOXYGEN, doxygen)
+    AC_PATH_PROG(DOT, dot)
+
+    AC_CACHE_CHECK([whether to build documentation],
+        nl_cv_build_docs,
+        [
+	    AC_ARG_ENABLE(docs,
+		[AS_HELP_STRING([--disable-docs],[Enable building documentation (requires Doxygen) @<:@default=$1@:>@.])],
+		[
+		    case "${enableval}" in 
+
+		    auto|no|yes)
+			nl_cv_build_docs=${enableval}
+			;;
+
+		    *)
+			AC_MSG_ERROR([Invalid value ${enableval} for --disable-docs])
+			;;
+
+		    esac
+		],
+		[nl_cv_build_docs=$1])
+
+	    if test "x${DOXYGEN}" != "x"; then
+		nl_cv_have_doxygen=yes
+	    else
+		nl_cv_have_doxygen=no
+	    fi
+
+	    if test "${nl_cv_build_docs}" = "auto"; then
+		if test "${nl_cv_have_doxygen}" = "no"; then
+		    nl_cv_build_docs=no
+		else
+		    nl_cv_build_docs=yes
+		fi
+	    fi
+
+	    if test "${nl_cv_build_docs}" = "yes"; then
+		if test "${nl_cv_have_doxygen}" = "no"; then
+		    AC_MSG_ERROR([Building docs was explicitly requested but Doxygen cannot be found])
+		elif test "${nl_cv_have_doxygen}" = "yes"; then
+		    if test "x${DOT}" != "x"; then
+			DOXYGEN_USE_DOT=YES
+		    fi
+		fi
+	    fi
+    ])
+
+    AC_SUBST(DOXYGEN_USE_DOT)
+])
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-optimization configuration option to the package and
+#      controls whether the package will be built with or without code
+#      optimization.
+#
+
+#
+# NL_ENABLE_OPTIMIZATION(default)
+#
+#   default - Whether the option should be enabled (yes) or disabled (no)
+#             by default.
+#
+# Adds an --enable-optimization configuration option to the package with a
+# default value of 'default' (should be either 'no' or 'yes') and controls
+# whether the package will be built with or without code optimization.
+#
+# The value 'nl_cv_build_optimized' will be set to the result. In
+# addition, the contents of CFLAGS, CXXFLAGS, OBJCFLAGS, and OBJCXXFLAGS may
+# be altered by the use of this macro, converting -O<something> to -O0.
+#
+# NOTE: The behavior of this is influenced by nl_cv_build_coverage from
+#       NL_ENABLE_COVERAGE
+#
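+# For illustration only (the default values shown are assumptions):
+# because of the note above, NL_ENABLE_COVERAGE should be invoked before
+# this macro so that nl_cv_build_coverage is already set when the check
+# below runs:
+#
+#   NL_ENABLE_COVERAGE([no])
+#   NL_ENABLE_OPTIMIZATION([yes])
+#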
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_OPTIMIZATION],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'yes' or 'no'])])
+
+    AC_CACHE_CHECK([whether to build code-optimized instances of programs and libraries],
+        nl_cv_build_optimized,
+        [
+            AC_ARG_ENABLE(optimization,
+                [AS_HELP_STRING([--enable-optimization],[Enable the generation of code-optimized instances @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    no|yes)
+                        nl_cv_build_optimized=${enableval}
+
+                        if test "${nl_cv_build_coverage}" = "yes" && test "${nl_cv_build_optimized}" = "yes"; then
+                            AC_MSG_ERROR([both --enable-optimization and --enable-coverage cannot be used. Please choose one or the other to enable.])
+                        fi
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-optimization])
+                        ;;
+
+                    esac
+                ],
+                [
+                    if test "${nl_cv_build_coverage}" = "yes"; then
+                        AC_MSG_WARN([--enable-coverage was specified, optimization disabled])
+                        nl_cv_build_optimized=no
+            
+                    else
+                        nl_cv_build_optimized=$1
+            
+                    fi
+                ])
+
+            if test "${nl_cv_build_optimized}" = "no"; then
+                CFLAGS="`echo ${CFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+                CXXFLAGS="`echo ${CXXFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+                OBJCFLAGS="`echo ${OBJCFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+                OBJCXXFLAGS="`echo ${OBJCXXFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+            fi
+    ])
+])
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro for filtering
+#      the autoconf canonical build, host, or target.
+#
+#      Mac OS X / Darwin ends up putting some versioning cruft on the
+#      end of its tuples that most users of these variables rarely
+#      care about.
+#
+
+#
+# _NL_FILTERED_CANONICAL(name)
+#
+#   name - The existing autoconf variable to filter
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'name'
+#   generated from AC_CANONICAL_<NAME> and saves it in
+#   'nl_filtered_<name>'.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([_NL_FILTERED_CANONICAL],
+[
+    AC_CACHE_CHECK([filtered $1 system type],
+        nl_cv_filtered_$1,
+        nl_cv_filtered_$1=`echo ${$1} | sed -e 's/[[[[:digit:].]]]*$//g'`
+        nl_filtered_$1=${nl_cv_filtered_$1})
+])
+
+#
+# NL_FILTERED_CANONICAL_BUILD
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'build'
+#   generated from AC_CANONICAL_BUILD and saves it in
+#   'nl_filtered_build'.
+#
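+#   For illustration only (the case labels are hypothetical), the
+#   filtered tuple can then be matched without the Darwin version
+#   suffix getting in the way:
+#
+#     NL_FILTERED_CANONICAL_BUILD
+#
+#     case "${nl_filtered_build}" in
+#         *-*-darwin*) : ;; # Darwin-specific settings here
+#         *)           : ;;
+#     esac
+#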
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_FILTERED_CANONICAL_BUILD],
+[
+    AC_REQUIRE([AC_CANONICAL_BUILD])
+    _NL_FILTERED_CANONICAL(build)
+])
+
+#
+# NL_FILTERED_CANONICAL_HOST
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'host'
+#   generated from AC_CANONICAL_HOST and saves it in
+#   'nl_filtered_host'.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_FILTERED_CANONICAL_HOST],
+[
+    AC_REQUIRE([AC_CANONICAL_HOST])
+    _NL_FILTERED_CANONICAL(host)
+])
+
+#
+# NL_FILTERED_CANONICAL_TARGET
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'target'
+#   generated from AC_CANONICAL_TARGET and saves it in
+#   'nl_filtered_target'.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_FILTERED_CANONICAL_TARGET],
+[
+    AC_REQUIRE([AC_CANONICAL_TARGET])
+    _NL_FILTERED_CANONICAL(target)
+])
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines GNU autoconf M4-style macros that ensure the
+#      -Werror compiler option for GCC-based or -compatible compilers
+#      do not break some autoconf tests (see
+#      http://lists.gnu.org/archive/html/autoconf-patches/2008-09/msg00014.html).
+#
+#      If -Werror has been passed transform it into -Wno-error for
+#      CPPFLAGS, CFLAGS, CXXFLAGS, OBJCFLAGS, and OBJCXXFLAGS with
+#      NL_SAVE_WERROR. Transform them back again with
+#      NL_RESTORE_WERROR.
+#
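+#      For illustration only (the header checked is an arbitrary
+#      example), the affected checks would typically be bracketed as:
+#
+#        NL_SAVE_WERROR
+#        AC_CHECK_HEADERS([stdint.h])
+#        NL_RESTORE_WERROR
+#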
+
+# 
+# _NL_SAVE_WERROR_FOR_VAR(variable)
+#
+#   variable - The compiler flags variable to scan for the presence of
+#              -Werror and, if present, transform to -Wno-error.
+#
+# This transforms, for the specified compiler flags variable, -Werror
+# to -Wno-error, if it was present. The original state may be
+# restored by invoking _NL_RESTORE_WERROR_FOR_VAR([variable]).
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([_NL_SAVE_WERROR_FOR_VAR],
+[
+    if echo "${$1}" | grep -q '\-Werror'; then
+	$1="`echo ${$1} | sed -e 's,-Werror\([[[:space:]]]\),-Wno-error\1,g'`"
+	nl_had_$1_werror=yes
+    else
+	nl_had_$1_werror=no
+    fi
+])
+
+#
+# _NL_RESTORE_WERROR_FOR_VAR(variable)
+#
+#   variable - The compiler flag for which to restore -Wno-error back
+#              to -Werror if it was originally passed in by the user as
+#              such.
+#
+# This restores, for the specified compiler flags variable, -Werror
+# from -Wno-error, if it was initially set as -Werror at the time
+# _NL_SAVE_WERROR_FOR_VAR([variable]) was invoked.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([_NL_RESTORE_WERROR_FOR_VAR],
+[
+    if test "${nl_had_$1_werror}" = "yes"; then
+	$1="`echo ${$1} | sed -e 's,-Wno-error\([[[:space:]]]\),-Werror\1,g'`"
+    fi
+
+    unset nl_had_$1_werror
+])
+
+# 
+# NL_SAVE_WERROR
+#
+# This transforms, for each of CFLAGS, CXXFLAGS, OBJCFLAGS, and
+# OBJCXXFLAGS, -Werror to -Wno-error, if it was present. The
+# original state may be restored by invoking NL_RESTORE_WERROR.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_SAVE_WERROR],
+[
+    _NL_SAVE_WERROR_FOR_VAR([CPPFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([CFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([CXXFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([OBJCFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([OBJCXXFLAGS])
+])
+
+#
+# NL_RESTORE_WERROR
+#
+# This restores, for each of OBJCXXFLAGS, OBJCFLAGS, CXXFLAGS, and
+# CFLAGS, -Werror from -Wno-error, if it was initially set as -Werror
+# at the time NL_SAVE_WERROR was invoked.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_RESTORE_WERROR],
+[
+    _NL_RESTORE_WERROR_FOR_VAR([OBJCXXFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([OBJCFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([CXXFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([CFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([CPPFLAGS])
+])
+
diff --git a/nl-unit-test/bootstrap b/nl-unit-test/bootstrap
new file mode 100755
index 0000000..37bcdaa
--- /dev/null
+++ b/nl-unit-test/bootstrap
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is a trampoline script to the nlbuild-autotools
+#      bootstrap script and augments it by providing the path to the
+#      nlbuild-autotools repository for this project.
+#
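+#      Typical invocation from the top of the package source tree
+#      (shown for illustration; any arguments given are passed through
+#      to the underlying nlbuild-autotools bootstrap script):
+#
+#        ./bootstrap
+#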
+
+# Set this to the relative location of nlbuild-autotools to this script
+
+nlbuild_autotools_stem="third_party/nlbuild-autotools/repo"
+
+# Establish some key directories
+
+srcdir=`dirname ${0}`
+abs_srcdir=`pwd`
+abs_top_srcdir="${abs_srcdir}"
+
+exec ${srcdir}/${nlbuild_autotools_stem}/scripts/bootstrap -I "${abs_top_srcdir}/${nlbuild_autotools_stem}" "$@"
diff --git a/nl-unit-test/bootstrap-configure b/nl-unit-test/bootstrap-configure
new file mode 120000
index 0000000..627efaa
--- /dev/null
+++ b/nl-unit-test/bootstrap-configure
@@ -0,0 +1 @@
+third_party/nlbuild-autotools/repo/scripts/bootstrap-configure
\ No newline at end of file
diff --git a/nl-unit-test/configure b/nl-unit-test/configure
new file mode 100755
index 0000000..d43d42b
--- /dev/null
+++ b/nl-unit-test/configure
@@ -0,0 +1,16354 @@
+#! /bin/sh
+# Guess values for system-dependent variables and create Makefiles.
+# Generated by GNU Autoconf 2.68 for nlunittest g899ce0d.
+#
+# Report bugs to <developer@nest.com>.
+#
+#
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+#
+#
+# This configure script is free software; the Free Software Foundation
+# gives unlimited permission to copy, distribute and modify it.
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested="  as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
+  as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
+  eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
+  test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1
+
+  test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
+    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\
+      || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1
+test \$(( 1 + 1 )) = 2 || exit 1"
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  break 2
+fi
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org and developer@nest.com
+$0: about your system, including any error possibly output
+$0: before this message. Then install a modern shell, or
+$0: manually run the script under such a shell if you do
+$0: have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+  case $as_dir in #(
+  -*) as_dir=./$as_dir;;
+  esac
+  test -d "$as_dir" || eval $as_mkdir_p || {
+    as_dirs=
+    while :; do
+      case $as_dir in #(
+      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+      *) as_qdir=$as_dir;;
+      esac
+      as_dirs="'$as_qdir' $as_dirs"
+      as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_dir" : 'X\(//\)[^/]' \| \
+	 X"$as_dir" : 'X\(//\)$' \| \
+	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      test -d "$as_dir" && break
+    done
+    test -z "$as_dirs" || eval "mkdir $as_dirs"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+  eval 'as_fn_append ()
+  {
+    eval $1+=\$2
+  }'
+else
+  as_fn_append ()
+  {
+    eval $1=\$$1\$2
+  }
+fi # as_fn_append
+
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+  eval 'as_fn_arith ()
+  {
+    as_val=$(( $* ))
+  }'
+else
+  as_fn_arith ()
+  {
+    as_val=`expr "$@" || test $? -eq 1`
+  }
+fi # as_fn_arith
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  fi
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+  as_dirname=dirname
+else
+  as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+
+  as_lineno_1=$LINENO as_lineno_1a=$LINENO
+  as_lineno_2=$LINENO as_lineno_2a=$LINENO
+  eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
+  test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
+  # Blame Lee E. McMahon (1931-1989) for sed's syntax.  :-)
+  sed -n '
+    p
+    /[$]LINENO/=
+  ' <$as_myself |
+    sed '
+      s/[$]LINENO.*/&-/
+      t lineno
+      b
+      :lineno
+      N
+      :loop
+      s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
+      t loop
+      s/-\n.*//
+    ' >$as_me.lineno &&
+  chmod +x "$as_me.lineno" ||
+    { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
+
+  # Don't try to exec as it changes $[0], causing all sorts of problems
+  # (the dirname of $[0] is not the place where we might find the
+  # original and so on.  Autoconf is especially sensitive to this).
+  . "./$as_me.lineno"
+  # Exit status is that of the last command.
+  exit
+}
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+  case `echo 'xy\c'` in
+  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
+  xy)  ECHO_C='\c';;
+  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
+       ECHO_T='	';;
+  esac;;
+*)
+  ECHO_N='-n';;
+esac
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+  rm -f conf$$.dir/conf$$.file
+else
+  rm -f conf$$.dir
+  mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+  if ln -s conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s='ln -s'
+    # ... but there are two gotchas:
+    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+    # In both cases, we have to default to `cp -p'.
+    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+      as_ln_s='cp -p'
+  elif ln conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s=ln
+  else
+    as_ln_s='cp -p'
+  fi
+else
+  as_ln_s='cp -p'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+
+if mkdir -p . 2>/dev/null; then
+  as_mkdir_p='mkdir -p "$as_dir"'
+else
+  test -d ./-p && rmdir ./-p
+  as_mkdir_p=false
+fi
+
+if test -x / >/dev/null 2>&1; then
+  as_test_x='test -x'
+else
+  if ls -dL / >/dev/null 2>&1; then
+    as_ls_L_option=L
+  else
+    as_ls_L_option=
+  fi
+  as_test_x='
+    eval sh -c '\''
+      if test -d "$1"; then
+	test -d "$1/.";
+      else
+	case $1 in #(
+	-*)set "./$1";;
+	esac;
+	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
+	???[sx]*):;;*)false;;esac;fi
+    '\'' sh
+  '
+fi
+as_executable_p=$as_test_x
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
+
+SHELL=${CONFIG_SHELL-/bin/sh}
+
+
+test -n "$DJDIR" || exec 7<&0 </dev/null
+exec 6>&1
+
+# Name of the host.
+# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
+# so uname gets run too.
+ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
+
+#
+# Initializations.
+#
+ac_default_prefix=/usr/local
+ac_clean_files=
+ac_config_libobj_dir=.
+LIBOBJS=
+cross_compiling=no
+subdirs=
+MFLAGS=
+MAKEFLAGS=
+
+# Identity of this package.
+PACKAGE_NAME='nlunittest'
+PACKAGE_TARNAME='nlunittest'
+PACKAGE_VERSION='g899ce0d'
+PACKAGE_STRING='nlunittest g899ce0d'
+PACKAGE_BUGREPORT='developer@nest.com'
+PACKAGE_URL='https://developer.nest.com/'
+
+ac_unique_file="src/nltest.h"
+ac_no_link=no
+# Factoring default headers for most tests.
+ac_includes_default="\
+#include <stdio.h>
+#ifdef HAVE_SYS_TYPES_H
+# include <sys/types.h>
+#endif
+#ifdef HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+#ifdef STDC_HEADERS
+# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
+#endif
+#ifdef HAVE_STRING_H
+# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
+#  include <memory.h>
+# endif
+# include <string.h>
+#endif
+#ifdef HAVE_STRINGS_H
+# include <strings.h>
+#endif
+#ifdef HAVE_INTTYPES_H
+# include <inttypes.h>
+#endif
+#ifdef HAVE_STDINT_H
+# include <stdint.h>
+#endif
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif"
+
+ac_subst_vars='am__EXEEXT_FALSE
+am__EXEEXT_TRUE
+LTLIBOBJS
+LIBOBJS
+NLUNITTEST_BUILD_DOCS_FALSE
+NLUNITTEST_BUILD_DOCS_TRUE
+DOXYGEN_USE_DOT
+DOT
+DOXYGEN
+NLUNITTEST_BUILD_OPTIMIZED_FALSE
+NLUNITTEST_BUILD_OPTIMIZED_TRUE
+NLUNITTEST_BUILD_DEBUG_FALSE
+NLUNITTEST_BUILD_DEBUG_TRUE
+OTOOL64
+OTOOL
+LIPO
+NMEDIT
+DSYMUTIL
+MANIFEST_TOOL
+ac_ct_AR
+DLLTOOL
+OBJDUMP
+NM
+ac_ct_DUMPBIN
+DUMPBIN
+LD
+FGREP
+EGREP
+GREP
+SED
+LIBTOOL
+PERL
+CMP
+LN_S
+OBJCOPY
+RANLIB
+AR
+CPP
+am__fastdepCC_FALSE
+am__fastdepCC_TRUE
+CCDEPMODE
+am__nodep
+AMDEPBACKSLASH
+AMDEP_FALSE
+AMDEP_TRUE
+am__quote
+am__include
+DEPDIR
+OBJEXT
+EXEEXT
+ac_ct_CC
+CPPFLAGS
+LDFLAGS
+CFLAGS
+CC
+MAINT
+MAINTAINER_MODE_FALSE
+MAINTAINER_MODE_TRUE
+AM_BACKSLASH
+AM_DEFAULT_VERBOSITY
+AM_DEFAULT_V
+AM_V
+am__untar
+am__tar
+AMTAR
+am__leading_dot
+SET_MAKE
+AWK
+mkdir_p
+MKDIR_P
+INSTALL_STRIP_PROGRAM
+STRIP
+install_sh
+MAKEINFO
+AUTOHEADER
+AUTOMAKE
+AUTOCONF
+ACLOCAL
+VERSION
+PACKAGE
+CYGPATH_W
+am__isrc
+INSTALL_DATA
+INSTALL_SCRIPT
+INSTALL_PROGRAM
+target_os
+target_vendor
+target_cpu
+target
+host_os
+host_vendor
+host_cpu
+host
+build_os
+build_vendor
+build_cpu
+build
+LIBNLUNITTEST_VERSION_INFO
+LIBNLUNITTEST_VERSION_REVISION
+LIBNLUNITTEST_VERSION_AGE
+LIBNLUNITTEST_VERSION_CURRENT
+abs_top_nlbuild_autotools_dir
+nlbuild_autotools_stem
+target_alias
+host_alias
+build_alias
+LIBS
+ECHO_T
+ECHO_N
+ECHO_C
+DEFS
+mandir
+localedir
+libdir
+psdir
+pdfdir
+dvidir
+htmldir
+infodir
+docdir
+oldincludedir
+includedir
+localstatedir
+sharedstatedir
+sysconfdir
+datadir
+datarootdir
+libexecdir
+sbindir
+bindir
+program_transform_name
+prefix
+exec_prefix
+PACKAGE_URL
+PACKAGE_BUGREPORT
+PACKAGE_STRING
+PACKAGE_VERSION
+PACKAGE_TARNAME
+PACKAGE_NAME
+PATH_SEPARATOR
+SHELL'
+ac_subst_files=''
+ac_user_opts='
+enable_option_checking
+enable_silent_rules
+enable_maintainer_mode
+enable_dependency_tracking
+enable_shared
+enable_static
+with_pic
+enable_fast_install
+with_gnu_ld
+with_sysroot
+enable_libtool_lock
+enable_debug
+enable_optimization
+enable_docs
+'
+      ac_precious_vars='build_alias
+host_alias
+target_alias
+CC
+CFLAGS
+LDFLAGS
+LIBS
+CPPFLAGS
+CPP
+DOXYGEN
+DOT'
+
+
+# Initialize some variables set by options.
+ac_init_help=
+ac_init_version=false
+ac_unrecognized_opts=
+ac_unrecognized_sep=
+# The variables have the same names as the options, with
+# dashes changed to underlines.
+cache_file=/dev/null
+exec_prefix=NONE
+no_create=
+no_recursion=
+prefix=NONE
+program_prefix=NONE
+program_suffix=NONE
+program_transform_name=s,x,x,
+silent=
+site=
+srcdir=
+verbose=
+x_includes=NONE
+x_libraries=NONE
+
+# Installation directory options.
+# These are left unexpanded so users can "make install exec_prefix=/foo"
+# and all the variables that are supposed to be based on exec_prefix
+# by default will actually change.
+# Use braces instead of parens because sh, perl, etc. also accept them.
+# (The list follows the same order as the GNU Coding Standards.)
+bindir='${exec_prefix}/bin'
+sbindir='${exec_prefix}/sbin'
+libexecdir='${exec_prefix}/libexec'
+datarootdir='${prefix}/share'
+datadir='${datarootdir}'
+sysconfdir='${prefix}/etc'
+sharedstatedir='${prefix}/com'
+localstatedir='${prefix}/var'
+includedir='${prefix}/include'
+oldincludedir='/usr/include'
+docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
+infodir='${datarootdir}/info'
+htmldir='${docdir}'
+dvidir='${docdir}'
+pdfdir='${docdir}'
+psdir='${docdir}'
+libdir='${exec_prefix}/lib'
+localedir='${datarootdir}/locale'
+mandir='${datarootdir}/man'
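+
+# Illustrative example only (hypothetical paths, not part of the generated
+# script): because the values above stay unexpanded, the install tree can be
+# re-rooted at make time, e.g.
+#
+#   ./configure --prefix=/usr/local
+#   make install exec_prefix=/opt/alt     # bindir, libdir, etc. follow /opt/alt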
+
+ac_prev=
+ac_dashdash=
+for ac_option
+do
+  # If the previous option needs an argument, assign it.
+  if test -n "$ac_prev"; then
+    eval $ac_prev=\$ac_option
+    ac_prev=
+    continue
+  fi
+
+  case $ac_option in
+  *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
+  *=)   ac_optarg= ;;
+  *)    ac_optarg=yes ;;
+  esac
+
+  # Accept the important Cygnus configure options, so we can diagnose typos.
+
+  case $ac_dashdash$ac_option in
+  --)
+    ac_dashdash=yes ;;
+
+  -bindir | --bindir | --bindi | --bind | --bin | --bi)
+    ac_prev=bindir ;;
+  -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
+    bindir=$ac_optarg ;;
+
+  -build | --build | --buil | --bui | --bu)
+    ac_prev=build_alias ;;
+  -build=* | --build=* | --buil=* | --bui=* | --bu=*)
+    build_alias=$ac_optarg ;;
+
+  -cache-file | --cache-file | --cache-fil | --cache-fi \
+  | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
+    ac_prev=cache_file ;;
+  -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
+  | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
+    cache_file=$ac_optarg ;;
+
+  --config-cache | -C)
+    cache_file=config.cache ;;
+
+  -datadir | --datadir | --datadi | --datad)
+    ac_prev=datadir ;;
+  -datadir=* | --datadir=* | --datadi=* | --datad=*)
+    datadir=$ac_optarg ;;
+
+  -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
+  | --dataroo | --dataro | --datar)
+    ac_prev=datarootdir ;;
+  -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
+  | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
+    datarootdir=$ac_optarg ;;
+
+  -disable-* | --disable-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid feature name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"enable_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval enable_$ac_useropt=no ;;
+
+  -docdir | --docdir | --docdi | --doc | --do)
+    ac_prev=docdir ;;
+  -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
+    docdir=$ac_optarg ;;
+
+  -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
+    ac_prev=dvidir ;;
+  -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
+    dvidir=$ac_optarg ;;
+
+  -enable-* | --enable-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid feature name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"enable_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval enable_$ac_useropt=\$ac_optarg ;;
+
+  -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
+  | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
+  | --exec | --exe | --ex)
+    ac_prev=exec_prefix ;;
+  -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
+  | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
+  | --exec=* | --exe=* | --ex=*)
+    exec_prefix=$ac_optarg ;;
+
+  -gas | --gas | --ga | --g)
+    # Obsolete; use --with-gas.
+    with_gas=yes ;;
+
+  -help | --help | --hel | --he | -h)
+    ac_init_help=long ;;
+  -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
+    ac_init_help=recursive ;;
+  -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
+    ac_init_help=short ;;
+
+  -host | --host | --hos | --ho)
+    ac_prev=host_alias ;;
+  -host=* | --host=* | --hos=* | --ho=*)
+    host_alias=$ac_optarg ;;
+
+  -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
+    ac_prev=htmldir ;;
+  -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
+  | --ht=*)
+    htmldir=$ac_optarg ;;
+
+  -includedir | --includedir | --includedi | --included | --include \
+  | --includ | --inclu | --incl | --inc)
+    ac_prev=includedir ;;
+  -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
+  | --includ=* | --inclu=* | --incl=* | --inc=*)
+    includedir=$ac_optarg ;;
+
+  -infodir | --infodir | --infodi | --infod | --info | --inf)
+    ac_prev=infodir ;;
+  -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
+    infodir=$ac_optarg ;;
+
+  -libdir | --libdir | --libdi | --libd)
+    ac_prev=libdir ;;
+  -libdir=* | --libdir=* | --libdi=* | --libd=*)
+    libdir=$ac_optarg ;;
+
+  -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
+  | --libexe | --libex | --libe)
+    ac_prev=libexecdir ;;
+  -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
+  | --libexe=* | --libex=* | --libe=*)
+    libexecdir=$ac_optarg ;;
+
+  -localedir | --localedir | --localedi | --localed | --locale)
+    ac_prev=localedir ;;
+  -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
+    localedir=$ac_optarg ;;
+
+  -localstatedir | --localstatedir | --localstatedi | --localstated \
+  | --localstate | --localstat | --localsta | --localst | --locals)
+    ac_prev=localstatedir ;;
+  -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
+  | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
+    localstatedir=$ac_optarg ;;
+
+  -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
+    ac_prev=mandir ;;
+  -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
+    mandir=$ac_optarg ;;
+
+  -nfp | --nfp | --nf)
+    # Obsolete; use --without-fp.
+    with_fp=no ;;
+
+  -no-create | --no-create | --no-creat | --no-crea | --no-cre \
+  | --no-cr | --no-c | -n)
+    no_create=yes ;;
+
+  -no-recursion | --no-recursion | --no-recursio | --no-recursi \
+  | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
+    no_recursion=yes ;;
+
+  -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
+  | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
+  | --oldin | --oldi | --old | --ol | --o)
+    ac_prev=oldincludedir ;;
+  -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
+  | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
+  | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
+    oldincludedir=$ac_optarg ;;
+
+  -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
+    ac_prev=prefix ;;
+  -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
+    prefix=$ac_optarg ;;
+
+  -program-prefix | --program-prefix | --program-prefi | --program-pref \
+  | --program-pre | --program-pr | --program-p)
+    ac_prev=program_prefix ;;
+  -program-prefix=* | --program-prefix=* | --program-prefi=* \
+  | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
+    program_prefix=$ac_optarg ;;
+
+  -program-suffix | --program-suffix | --program-suffi | --program-suff \
+  | --program-suf | --program-su | --program-s)
+    ac_prev=program_suffix ;;
+  -program-suffix=* | --program-suffix=* | --program-suffi=* \
+  | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
+    program_suffix=$ac_optarg ;;
+
+  -program-transform-name | --program-transform-name \
+  | --program-transform-nam | --program-transform-na \
+  | --program-transform-n | --program-transform- \
+  | --program-transform | --program-transfor \
+  | --program-transfo | --program-transf \
+  | --program-trans | --program-tran \
+  | --progr-tra | --program-tr | --program-t)
+    ac_prev=program_transform_name ;;
+  -program-transform-name=* | --program-transform-name=* \
+  | --program-transform-nam=* | --program-transform-na=* \
+  | --program-transform-n=* | --program-transform-=* \
+  | --program-transform=* | --program-transfor=* \
+  | --program-transfo=* | --program-transf=* \
+  | --program-trans=* | --program-tran=* \
+  | --progr-tra=* | --program-tr=* | --program-t=*)
+    program_transform_name=$ac_optarg ;;
+
+  -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
+    ac_prev=pdfdir ;;
+  -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
+    pdfdir=$ac_optarg ;;
+
+  -psdir | --psdir | --psdi | --psd | --ps)
+    ac_prev=psdir ;;
+  -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
+    psdir=$ac_optarg ;;
+
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil)
+    silent=yes ;;
+
+  -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
+    ac_prev=sbindir ;;
+  -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
+  | --sbi=* | --sb=*)
+    sbindir=$ac_optarg ;;
+
+  -sharedstatedir | --sharedstatedir | --sharedstatedi \
+  | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
+  | --sharedst | --shareds | --shared | --share | --shar \
+  | --sha | --sh)
+    ac_prev=sharedstatedir ;;
+  -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
+  | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
+  | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
+  | --sha=* | --sh=*)
+    sharedstatedir=$ac_optarg ;;
+
+  -site | --site | --sit)
+    ac_prev=site ;;
+  -site=* | --site=* | --sit=*)
+    site=$ac_optarg ;;
+
+  -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
+    ac_prev=srcdir ;;
+  -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
+    srcdir=$ac_optarg ;;
+
+  -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
+  | --syscon | --sysco | --sysc | --sys | --sy)
+    ac_prev=sysconfdir ;;
+  -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
+  | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
+    sysconfdir=$ac_optarg ;;
+
+  -target | --target | --targe | --targ | --tar | --ta | --t)
+    ac_prev=target_alias ;;
+  -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
+    target_alias=$ac_optarg ;;
+
+  -v | -verbose | --verbose | --verbos | --verbo | --verb)
+    verbose=yes ;;
+
+  -version | --version | --versio | --versi | --vers | -V)
+    ac_init_version=: ;;
+
+  -with-* | --with-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid package name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"with_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval with_$ac_useropt=\$ac_optarg ;;
+
+  -without-* | --without-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid package name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"with_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval with_$ac_useropt=no ;;
+
+  --x)
+    # Obsolete; use --with-x.
+    with_x=yes ;;
+
+  -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
+  | --x-incl | --x-inc | --x-in | --x-i)
+    ac_prev=x_includes ;;
+  -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
+  | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
+    x_includes=$ac_optarg ;;
+
+  -x-libraries | --x-libraries | --x-librarie | --x-librari \
+  | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
+    ac_prev=x_libraries ;;
+  -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
+  | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
+    x_libraries=$ac_optarg ;;
+
+  -*) as_fn_error $? "unrecognized option: \`$ac_option'
+Try \`$0 --help' for more information"
+    ;;
+
+  *=*)
+    ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
+    # Reject names that are not valid shell variable names.
+    case $ac_envvar in #(
+      '' | [0-9]* | *[!_$as_cr_alnum]* )
+      as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
+    esac
+    eval $ac_envvar=\$ac_optarg
+    export $ac_envvar ;;
+
+  *)
+    # FIXME: should be removed in autoconf 3.0.
+    $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
+    expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
+      $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
+    : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
+    ;;
+
+  esac
+done
+
+if test -n "$ac_prev"; then
+  ac_option=--`echo $ac_prev | sed 's/_/-/g'`
+  as_fn_error $? "missing argument to $ac_option"
+fi
+
+if test -n "$ac_unrecognized_opts"; then
+  case $enable_option_checking in
+    no) ;;
+    fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
+    *)     $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
+  esac
+fi
+
+# Check all directory arguments for consistency.
+for ac_var in	exec_prefix prefix bindir sbindir libexecdir datarootdir \
+		datadir sysconfdir sharedstatedir localstatedir includedir \
+		oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
+		libdir localedir mandir
+do
+  eval ac_val=\$$ac_var
+  # Remove trailing slashes.
+  case $ac_val in
+    */ )
+      ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
+      eval $ac_var=\$ac_val;;
+  esac
+  # Be sure to have absolute directory names.
+  case $ac_val in
+    [\\/$]* | ?:[\\/]* )  continue;;
+    NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
+  esac
+  as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
+done
+
+# There might be people who depend on the old broken behavior: `$host'
+# used to hold the argument of --host etc.
+# FIXME: To remove some day.
+build=$build_alias
+host=$host_alias
+target=$target_alias
+
+# FIXME: To remove some day.
+if test "x$host_alias" != x; then
+  if test "x$build_alias" = x; then
+    cross_compiling=maybe
+    $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
+    If a cross compiler is detected then cross compile mode will be used" >&2
+  elif test "x$build_alias" != "x$host_alias"; then
+    cross_compiling=yes
+  fi
+fi
+
+ac_tool_prefix=
+test -n "$host_alias" && ac_tool_prefix=$host_alias-
+
+test "$silent" = yes && exec 6>/dev/null
+
+
+ac_pwd=`pwd` && test -n "$ac_pwd" &&
+ac_ls_di=`ls -di .` &&
+ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
+  as_fn_error $? "working directory cannot be determined"
+test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
+  as_fn_error $? "pwd does not report name of working directory"
+
+
+# Find the source files, if location was not specified.
+if test -z "$srcdir"; then
+  ac_srcdir_defaulted=yes
+  # Try the directory containing this script, then the parent directory.
+  ac_confdir=`$as_dirname -- "$as_myself" ||
+$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_myself" : 'X\(//\)[^/]' \| \
+	 X"$as_myself" : 'X\(//\)$' \| \
+	 X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_myself" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+  srcdir=$ac_confdir
+  if test ! -r "$srcdir/$ac_unique_file"; then
+    srcdir=..
+  fi
+else
+  ac_srcdir_defaulted=no
+fi
+if test ! -r "$srcdir/$ac_unique_file"; then
+  test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
+  as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
+fi
+ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
+ac_abs_confdir=`(
+	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
+	pwd)`
+# When building in place, set srcdir=.
+if test "$ac_abs_confdir" = "$ac_pwd"; then
+  srcdir=.
+fi
+# Remove unnecessary trailing slashes from srcdir.
+# Double slashes in file names in object file debugging info
+# mess up M-x gdb in Emacs.
+case $srcdir in
+*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
+esac
+for ac_var in $ac_precious_vars; do
+  eval ac_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_env_${ac_var}_value=\$${ac_var}
+  eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_cv_env_${ac_var}_value=\$${ac_var}
+done
+
+#
+# Report the --help message.
+#
+if test "$ac_init_help" = "long"; then
+  # Omit some internal or obsolete options to make the list less imposing.
+  # This message is too long to be a string in the A/UX 3.1 sh.
+  cat <<_ACEOF
+\`configure' configures nlunittest g899ce0d to adapt to many kinds of systems.
+
+Usage: $0 [OPTION]... [VAR=VALUE]...
+
+To assign environment variables (e.g., CC, CFLAGS...), specify them as
+VAR=VALUE.  See below for descriptions of some of the useful variables.
+
+Defaults for the options are specified in brackets.
+
+Configuration:
+  -h, --help              display this help and exit
+      --help=short        display options specific to this package
+      --help=recursive    display the short help of all the included packages
+  -V, --version           display version information and exit
+  -q, --quiet, --silent   do not print \`checking ...' messages
+      --cache-file=FILE   cache test results in FILE [disabled]
+  -C, --config-cache      alias for \`--cache-file=config.cache'
+  -n, --no-create         do not create output files
+      --srcdir=DIR        find the sources in DIR [configure dir or \`..']
+
+Installation directories:
+  --prefix=PREFIX         install architecture-independent files in PREFIX
+                          [$ac_default_prefix]
+  --exec-prefix=EPREFIX   install architecture-dependent files in EPREFIX
+                          [PREFIX]
+
+By default, \`make install' will install all the files in
+\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc.  You can specify
+an installation prefix other than \`$ac_default_prefix' using \`--prefix',
+for instance \`--prefix=\$HOME'.
+
+For better control, use the options below.
+
+Fine tuning of the installation directories:
+  --bindir=DIR            user executables [EPREFIX/bin]
+  --sbindir=DIR           system admin executables [EPREFIX/sbin]
+  --libexecdir=DIR        program executables [EPREFIX/libexec]
+  --sysconfdir=DIR        read-only single-machine data [PREFIX/etc]
+  --sharedstatedir=DIR    modifiable architecture-independent data [PREFIX/com]
+  --localstatedir=DIR     modifiable single-machine data [PREFIX/var]
+  --libdir=DIR            object code libraries [EPREFIX/lib]
+  --includedir=DIR        C header files [PREFIX/include]
+  --oldincludedir=DIR     C header files for non-gcc [/usr/include]
+  --datarootdir=DIR       read-only arch.-independent data root [PREFIX/share]
+  --datadir=DIR           read-only architecture-independent data [DATAROOTDIR]
+  --infodir=DIR           info documentation [DATAROOTDIR/info]
+  --localedir=DIR         locale-dependent data [DATAROOTDIR/locale]
+  --mandir=DIR            man documentation [DATAROOTDIR/man]
+  --docdir=DIR            documentation root [DATAROOTDIR/doc/nlunittest]
+  --htmldir=DIR           html documentation [DOCDIR]
+  --dvidir=DIR            dvi documentation [DOCDIR]
+  --pdfdir=DIR            pdf documentation [DOCDIR]
+  --psdir=DIR             ps documentation [DOCDIR]
+_ACEOF
+
+  cat <<\_ACEOF
+
+Program names:
+  --program-prefix=PREFIX            prepend PREFIX to installed program names
+  --program-suffix=SUFFIX            append SUFFIX to installed program names
+  --program-transform-name=PROGRAM   run sed PROGRAM on installed program names
+
+System types:
+  --build=BUILD     configure for building on BUILD [guessed]
+  --host=HOST       cross-compile to build programs to run on HOST [BUILD]
+  --target=TARGET   configure for building compilers for TARGET [HOST]
+_ACEOF
+fi
+
+if test -n "$ac_init_help"; then
+  case $ac_init_help in
+     short | recursive ) echo "Configuration of nlunittest g899ce0d:";;
+   esac
+  cat <<\_ACEOF
+
+Optional Features:
+  --disable-option-checking  ignore unrecognized --enable/--with options
+  --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
+  --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]
+  --enable-silent-rules   less verbose build output (undo: "make V=1")
+  --disable-silent-rules  verbose build output (undo: "make V=0")
+  --enable-maintainer-mode
+                          enable make rules and dependencies not useful (and
+                          sometimes confusing) to the casual installer
+  --enable-dependency-tracking
+                          do not reject slow dependency extractors
+  --disable-dependency-tracking
+                          speeds up one-time build
+  --enable-shared[=PKGS]  build shared libraries [default=yes]
+  --enable-static[=PKGS]  build static libraries [default=yes]
+  --enable-fast-install[=PKGS]
+                          optimize for fast installation [default=yes]
+  --disable-libtool-lock  avoid locking (might break parallel builds)
+  --enable-debug          Enable the generation of debug instances
+                          [default=no].
+  --enable-optimization   Enable the generation of code-optimized instances
+                          [default=yes].
+  --disable-docs          Disable building documentation (built by default
+                          when Doxygen is available) [default=auto].
+
+Optional Packages:
+  --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
+  --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)
+  --with-pic[=PKGS]       try to use only PIC/non-PIC objects [default=use
+                          both]
+  --with-gnu-ld           assume the C compiler uses GNU ld [default=no]
+  --with-sysroot=DIR Search for dependent libraries within DIR
+                        (or the compiler's sysroot if not specified).
+
+Some influential environment variables:
+  CC          C compiler command
+  CFLAGS      C compiler flags
+  LDFLAGS     linker flags, e.g. -L<lib dir> if you have libraries in a
+              nonstandard directory <lib dir>
+  LIBS        libraries to pass to the linker, e.g. -l<library>
+  CPPFLAGS    (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
+              you have headers in a nonstandard directory <include dir>
+  CPP         C preprocessor
+  DOXYGEN     Doxygen executable
+  DOT         GraphViz 'dot' executable, which may be used, when present, to
+              generate Doxygen class graphs
+
+Use these variables to override the choices made by `configure' or to help
+it to find libraries and programs with nonstandard names/locations.
+
+Report bugs to <developer@nest.com>.
+nlunittest home page: <https://developer.nest.com/>.
+_ACEOF
+ac_status=$?
+fi
+
+if test "$ac_init_help" = "recursive"; then
+  # If there are subdirs, report their specific --help.
+  for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
+    test -d "$ac_dir" ||
+      { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
+      continue
+    ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+  # A ".." for each directory in $ac_dir_suffix.
+  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+  case $ac_top_builddir_sub in
+  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+  esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+  .)  # We are building in place.
+    ac_srcdir=.
+    ac_top_srcdir=$ac_top_builddir_sub
+    ac_abs_top_srcdir=$ac_pwd ;;
+  [\\/]* | ?:[\\/]* )  # Absolute name.
+    ac_srcdir=$srcdir$ac_dir_suffix;
+    ac_top_srcdir=$srcdir
+    ac_abs_top_srcdir=$srcdir ;;
+  *) # Relative name.
+    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+    ac_top_srcdir=$ac_top_build_prefix$srcdir
+    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+    cd "$ac_dir" || { ac_status=$?; continue; }
+    # Check for guested configure.
+    if test -f "$ac_srcdir/configure.gnu"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure.gnu" --help=recursive
+    elif test -f "$ac_srcdir/configure"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure" --help=recursive
+    else
+      $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
+    fi || ac_status=$?
+    cd "$ac_pwd" || { ac_status=$?; break; }
+  done
+fi
+
+test -n "$ac_init_help" && exit $ac_status
+if $ac_init_version; then
+  cat <<\_ACEOF
+nlunittest configure g899ce0d
+generated by GNU Autoconf 2.68
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This configure script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it.
+_ACEOF
+  exit
+fi
+
+## ------------------------ ##
+## Autoconf initialization. ##
+## ------------------------ ##
+
+# ac_fn_c_try_compile LINENO
+# --------------------------
+# Try to compile conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_compile ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  rm -f conftest.$ac_objext
+  if { { ac_try="$ac_compile"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compile") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && {
+	 test -z "$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest.$ac_objext; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+	ac_retval=1
+fi
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_compile
+
+# ac_fn_c_try_cpp LINENO
+# ----------------------
+# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_cpp ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  if { { ac_try="$ac_cpp conftest.$ac_ext"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } > conftest.i && {
+	 test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+    ac_retval=1
+fi
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_cpp
+
+# ac_fn_c_try_link LINENO
+# -----------------------
+# Try to link conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_link ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  rm -f conftest.$ac_objext conftest$ac_exeext
+  if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && {
+	 test -z "$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest$ac_exeext && {
+	 test "$cross_compiling" = yes ||
+	 $as_test_x conftest$ac_exeext
+       }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+	ac_retval=1
+fi
+  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
+  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
+  # interfere with the next link command; also delete a directory that is
+  # left behind by Apple's compiler.  We do this before executing the actions.
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_link
+
+# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
+# -------------------------------------------------------
+# Tests whether HEADER exists and can be compiled using the include files in
+# INCLUDES, setting the cache variable VAR accordingly.
+ac_fn_c_check_header_compile ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+#include <$2>
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  eval "$3=yes"
+else
+  eval "$3=no"
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_header_compile
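+
+# Illustrative call only (not part of the generated script); the header and
+# cache variable names are examples:
+#
+#   ac_fn_c_check_header_compile "$LINENO" "stdint.h" "ac_cv_header_stdint_h" "$ac_includes_default"
+#   test "x$ac_cv_header_stdint_h" = xyes && $as_echo "stdint.h is usable"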
+
+# ac_fn_c_try_run LINENO
+# ----------------------
+# Try to link and then run conftest.$ac_ext, and return whether this
+# succeeded. Assumes that executables *can* be run.
+ac_fn_c_try_run ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && { ac_try='./conftest$ac_exeext'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: program exited with status $ac_status" >&5
+       $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+       ac_retval=$ac_status
+fi
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_run
+
+# ac_fn_c_check_func LINENO FUNC VAR
+# ----------------------------------
+# Tests whether FUNC exists, setting the cache variable VAR accordingly
+ac_fn_c_check_func ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+/* Define $2 to an innocuous variant, in case <limits.h> declares $2.
+   For example, HP-UX 11i <limits.h> declares gettimeofday.  */
+#define $2 innocuous_$2
+
+/* System header to define __stub macros and hopefully few prototypes,
+    which can conflict with char $2 (); below.
+    Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+    <limits.h> exists even on freestanding compilers.  */
+
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+
+#undef $2
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char $2 ();
+/* The GNU C library defines this for functions which it implements
+    to always fail with ENOSYS.  Some functions are actually named
+    something starting with __ and the normal name is an alias.  */
+#if defined __stub_$2 || defined __stub___$2
+choke me
+#endif
+
+int
+main ()
+{
+return $2 ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  eval "$3=yes"
+else
+  eval "$3=no"
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_func
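+
+# Illustrative call only (not part of the generated script); the function and
+# macro names are hypothetical:
+#
+#   ac_fn_c_check_func "$LINENO" "strlcat" "ac_cv_func_strlcat"
+#   if test "x$ac_cv_func_strlcat" = xyes; then
+#     $as_echo "#define HAVE_STRLCAT 1" >>confdefs.h
+#   fi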
+
+# ac_fn_c_check_type LINENO TYPE VAR INCLUDES
+# -------------------------------------------
+# Tests whether TYPE exists after having included INCLUDES, setting cache
+# variable VAR accordingly.
+ac_fn_c_check_type ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  eval "$3=no"
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+int
+main ()
+{
+if (sizeof ($2))
+	 return 0;
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+int
+main ()
+{
+if (sizeof (($2)))
+	    return 0;
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+  eval "$3=yes"
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_type
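+
+# Illustrative call only (not part of the generated script); the type, cache
+# variable, and fallback definition are hypothetical:
+#
+#   ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default"
+#   test "x$ac_cv_type_size_t" = xyes ||
+#     $as_echo "#define size_t unsigned int" >>confdefs.h
+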
+cat >config.log <<_ACEOF
+This file contains any messages produced by compilers while
+running configure, to aid debugging if configure makes a mistake.
+
+It was created by nlunittest $as_me g899ce0d, which was
+generated by GNU Autoconf 2.68.  Invocation command line was
+
+  $ $0 $@
+
+_ACEOF
+exec 5>>config.log
+{
+cat <<_ASUNAME
+## --------- ##
+## Platform. ##
+## --------- ##
+
+hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null     || echo unknown`
+
+/bin/arch              = `(/bin/arch) 2>/dev/null              || echo unknown`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null       || echo unknown`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
+/usr/bin/hostinfo      = `(/usr/bin/hostinfo) 2>/dev/null      || echo unknown`
+/bin/machine           = `(/bin/machine) 2>/dev/null           || echo unknown`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null       || echo unknown`
+/bin/universe          = `(/bin/universe) 2>/dev/null          || echo unknown`
+
+_ASUNAME
+
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    $as_echo "PATH: $as_dir"
+  done
+IFS=$as_save_IFS
+
+} >&5
+
+cat >&5 <<_ACEOF
+
+
+## ----------- ##
+## Core tests. ##
+## ----------- ##
+
+_ACEOF
+
+
+# Keep a trace of the command line.
+# Strip out --no-create and --no-recursion so they do not pile up.
+# Strip out --silent because we don't want to record it for future runs.
+# Also quote any args containing shell meta-characters.
+# Make two passes to allow for proper duplicate-argument suppression.
+ac_configure_args=
+ac_configure_args0=
+ac_configure_args1=
+ac_must_keep_next=false
+for ac_pass in 1 2
+do
+  for ac_arg
+  do
+    case $ac_arg in
+    -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
+    -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+    | -silent | --silent | --silen | --sile | --sil)
+      continue ;;
+    *\'*)
+      ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    case $ac_pass in
+    1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
+    2)
+      as_fn_append ac_configure_args1 " '$ac_arg'"
+      if test $ac_must_keep_next = true; then
+	ac_must_keep_next=false # Got value, back to normal.
+      else
+	case $ac_arg in
+	  *=* | --config-cache | -C | -disable-* | --disable-* \
+	  | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
+	  | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
+	  | -with-* | --with-* | -without-* | --without-* | --x)
+	    case "$ac_configure_args0 " in
+	      "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
+	    esac
+	    ;;
+	  -* ) ac_must_keep_next=true ;;
+	esac
+      fi
+      as_fn_append ac_configure_args " '$ac_arg'"
+      ;;
+    esac
+  done
+done
+{ ac_configure_args0=; unset ac_configure_args0;}
+{ ac_configure_args1=; unset ac_configure_args1;}
+
+# When interrupted or exited, clean up temporary files and complete
+# config.log.  We remove comments because the quotes in them would
+# otherwise cause problems or look ugly.
+# WARNING: Use '\'' to represent an apostrophe within the trap.
+# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
+trap 'exit_status=$?
+  # Save into config.log some information that might help in debugging.
+  {
+    echo
+
+    $as_echo "## ---------------- ##
+## Cache variables. ##
+## ---------------- ##"
+    echo
+    # The following way of writing the cache mishandles newlines in values,
+    # but we know of no workaround that is simple, portable, and efficient.
+(
+  for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
+    eval ac_val=\$$ac_var
+    case $ac_val in #(
+    *${as_nl}*)
+      case $ac_var in #(
+      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+      esac
+      case $ac_var in #(
+      _ | IFS | as_nl) ;; #(
+      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+      *) { eval $ac_var=; unset $ac_var;} ;;
+      esac ;;
+    esac
+  done
+  (set) 2>&1 |
+    case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
+    *${as_nl}ac_space=\ *)
+      sed -n \
+	"s/'\''/'\''\\\\'\'''\''/g;
+	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
+      ;; #(
+    *)
+      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+      ;;
+    esac |
+    sort
+)
+    echo
+
+    $as_echo "## ----------------- ##
+## Output variables. ##
+## ----------------- ##"
+    echo
+    for ac_var in $ac_subst_vars
+    do
+      eval ac_val=\$$ac_var
+      case $ac_val in
+      *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+      esac
+      $as_echo "$ac_var='\''$ac_val'\''"
+    done | sort
+    echo
+
+    if test -n "$ac_subst_files"; then
+      $as_echo "## ------------------- ##
+## File substitutions. ##
+## ------------------- ##"
+      echo
+      for ac_var in $ac_subst_files
+      do
+	eval ac_val=\$$ac_var
+	case $ac_val in
+	*\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+	esac
+	$as_echo "$ac_var='\''$ac_val'\''"
+      done | sort
+      echo
+    fi
+
+    if test -s confdefs.h; then
+      $as_echo "## ----------- ##
+## confdefs.h. ##
+## ----------- ##"
+      echo
+      cat confdefs.h
+      echo
+    fi
+    test "$ac_signal" != 0 &&
+      $as_echo "$as_me: caught signal $ac_signal"
+    $as_echo "$as_me: exit $exit_status"
+  } >&5
+  rm -f core *.core core.conftest.* &&
+    rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
+    exit $exit_status
+' 0
+for ac_signal in 1 2 13 15; do
+  trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
+done
+ac_signal=0
+
+# confdefs.h avoids OS command line length limits that DEFS can exceed.
+rm -f -r conftest* confdefs.h
+
+$as_echo "/* confdefs.h */" > confdefs.h
+
+# Predefined preprocessor variables.
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_NAME "$PACKAGE_NAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_VERSION "$PACKAGE_VERSION"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_STRING "$PACKAGE_STRING"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_URL "$PACKAGE_URL"
+_ACEOF
+
+
+# Let the site file select an alternate cache file if it wants to.
+# Prefer an explicitly selected file to automatically selected ones.
+ac_site_file1=NONE
+ac_site_file2=NONE
+if test -n "$CONFIG_SITE"; then
+  # We do not want a PATH search for config.site.
+  case $CONFIG_SITE in #((
+    -*)  ac_site_file1=./$CONFIG_SITE;;
+    */*) ac_site_file1=$CONFIG_SITE;;
+    *)   ac_site_file1=./$CONFIG_SITE;;
+  esac
+elif test "x$prefix" != xNONE; then
+  ac_site_file1=$prefix/share/config.site
+  ac_site_file2=$prefix/etc/config.site
+else
+  ac_site_file1=$ac_default_prefix/share/config.site
+  ac_site_file2=$ac_default_prefix/etc/config.site
+fi
+for ac_site_file in "$ac_site_file1" "$ac_site_file2"
+do
+  test "x$ac_site_file" = xNONE && continue
+  if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
+$as_echo "$as_me: loading site script $ac_site_file" >&6;}
+    sed 's/^/| /' "$ac_site_file" >&5
+    . "$ac_site_file" \
+      || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "failed to load site script $ac_site_file
+See \`config.log' for more details" "$LINENO" 5; }
+  fi
+done
+
+if test -r "$cache_file"; then
+  # Some versions of bash will fail to source /dev/null (special files
+  # actually), so we avoid doing that.  DJGPP emulates it as a regular file.
+  if test /dev/null != "$cache_file" && test -f "$cache_file"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
+$as_echo "$as_me: loading cache $cache_file" >&6;}
+    case $cache_file in
+      [\\/]* | ?:[\\/]* ) . "$cache_file";;
+      *)                      . "./$cache_file";;
+    esac
+  fi
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
+$as_echo "$as_me: creating cache $cache_file" >&6;}
+  >$cache_file
+fi
+
+# Check that the precious variables saved in the cache have kept the same
+# value.
+ac_cache_corrupted=false
+for ac_var in $ac_precious_vars; do
+  eval ac_old_set=\$ac_cv_env_${ac_var}_set
+  eval ac_new_set=\$ac_env_${ac_var}_set
+  eval ac_old_val=\$ac_cv_env_${ac_var}_value
+  eval ac_new_val=\$ac_env_${ac_var}_value
+  case $ac_old_set,$ac_new_set in
+    set,)
+      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
+      ac_cache_corrupted=: ;;
+    ,set)
+      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
+      ac_cache_corrupted=: ;;
+    ,);;
+    *)
+      if test "x$ac_old_val" != "x$ac_new_val"; then
+	# differences in whitespace do not lead to failure.
+	ac_old_val_w=`echo x $ac_old_val`
+	ac_new_val_w=`echo x $ac_new_val`
+	if test "$ac_old_val_w" != "$ac_new_val_w"; then
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
+$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
+	  ac_cache_corrupted=:
+	else
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
+$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
+	  eval $ac_var=\$ac_old_val
+	fi
+	{ $as_echo "$as_me:${as_lineno-$LINENO}:   former value:  \`$ac_old_val'" >&5
+$as_echo "$as_me:   former value:  \`$ac_old_val'" >&2;}
+	{ $as_echo "$as_me:${as_lineno-$LINENO}:   current value: \`$ac_new_val'" >&5
+$as_echo "$as_me:   current value: \`$ac_new_val'" >&2;}
+      fi;;
+  esac
+  # Pass precious variables to config.status.
+  if test "$ac_new_set" = set; then
+    case $ac_new_val in
+    *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
+    *) ac_arg=$ac_var=$ac_new_val ;;
+    esac
+    case " $ac_configure_args " in
+      *" '$ac_arg' "*) ;; # Avoid dups.  Use of quotes ensures accuracy.
+      *) as_fn_append ac_configure_args " '$ac_arg'" ;;
+    esac
+  fi
+done
+if $ac_cache_corrupted; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+  { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
+$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
+  as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
+fi
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+
+# Tell the rest of the build system the absolute path at which the
+# nlbuild-autotools repository is rooted.
+
+nlbuild_autotools_stem=third_party/nlbuild-autotools/repo
+
+abs_top_nlbuild_autotools_dir=\${abs_top_srcdir}/\${nlbuild_autotools_stem}
+
+
+#
+# NLUNITTEST interface current, revision, and age versions.
+#
+# NOTE: At present, NLUNITTEST makes NO ABI compatibility
+#       commitments. Consequently, these simply serve as documentation
+#       for how the interfaces have evolved.
+#
+# Maintainers: Please manage these fields as follows:
+#
+#   Interfaces removed:    CURRENT++, AGE = 0, REVISION = 0
+#   Interfaces added:      CURRENT++, AGE++,   REVISION = 0
+#   No interfaces changed:                     REVISION++
+#
+#
+LIBNLUNITTEST_VERSION_CURRENT=1
+
+LIBNLUNITTEST_VERSION_AGE=0
+
+LIBNLUNITTEST_VERSION_REVISION=0
+
+LIBNLUNITTEST_VERSION_INFO=${LIBNLUNITTEST_VERSION_CURRENT}:${LIBNLUNITTEST_VERSION_REVISION}:${LIBNLUNITTEST_VERSION_AGE}
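+
+# Illustrative worked example of the rules above (hypothetical releases, not a
+# statement about actual nlunittest history), using the same
+# CURRENT:REVISION:AGE layout as LIBNLUNITTEST_VERSION_INFO:
+#
+#   1:0:0 -> 1:1:0   a release that changes no interfaces (REVISION++)
+#   1:1:0 -> 2:0:1   a release that adds interfaces (CURRENT++, AGE++, REVISION=0)
+#   2:0:1 -> 3:0:0   a release that removes interfaces (CURRENT++, AGE=0, REVISION=0)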
+
+
+#
+# Check the sanity of the source directory by checking for the
+# presence of a key watched file.
+#
+
+
+#
+# Tell autoconf where to find auxiliary build tools (e.g. config.guess,
+# install-sh, missing, etc.)
+#
+ac_aux_dir=
+for ac_dir in third_party/nlbuild-autotools/repo/autoconf "$srcdir"/third_party/nlbuild-autotools/repo/autoconf; do
+  if test -f "$ac_dir/install-sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install-sh -c"
+    break
+  elif test -f "$ac_dir/install.sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install.sh -c"
+    break
+  elif test -f "$ac_dir/shtool"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/shtool install -c"
+    break
+  fi
+done
+if test -z "$ac_aux_dir"; then
+  as_fn_error $? "cannot find install-sh, install.sh, or shtool in third_party/nlbuild-autotools/repo/autoconf \"$srcdir\"/third_party/nlbuild-autotools/repo/autoconf" "$LINENO" 5
+fi
+
+# These three variables are undocumented and unsupported,
+# and are intended to be withdrawn in a future Autoconf release.
+# They can cause serious problems if a builder's source tree is in a directory
+# whose full name contains unusual characters.
+ac_config_guess="$SHELL $ac_aux_dir/config.guess"  # Please don't use this var.
+ac_config_sub="$SHELL $ac_aux_dir/config.sub"  # Please don't use this var.
+ac_configure="$SHELL $ac_aux_dir/configure"  # Please don't use this var.
+
+
+
+#
+# Tell autoconf where to find auxiliary M4 macros
+#
+
+
+#
+# Tell autoconf what file the package is using to aggregate C preprocessor
+# defines.
+#
+ac_config_headers="$ac_config_headers src/nlunittest-config.h"
+
+
+#
+# Figure out what the canonical build, host and target tuples are.
+#
+# Make sure we can run config.sub.
+$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
+  as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
+$as_echo_n "checking build system type... " >&6; }
+if ${ac_cv_build+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_build_alias=$build_alias
+test "x$ac_build_alias" = x &&
+  ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
+test "x$ac_build_alias" = x &&
+  as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
+ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
+  as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
+$as_echo "$ac_cv_build" >&6; }
+case $ac_cv_build in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
+esac
+build=$ac_cv_build
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_build
+shift
+build_cpu=$1
+build_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+build_os=$*
+IFS=$ac_save_IFS
+case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
+$as_echo_n "checking host system type... " >&6; }
+if ${ac_cv_host+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test "x$host_alias" = x; then
+  ac_cv_host=$ac_cv_build
+else
+  ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
+    as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
+$as_echo "$ac_cv_host" >&6; }
+case $ac_cv_host in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
+esac
+host=$ac_cv_host
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_host
+shift
+host_cpu=$1
+host_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+host_os=$*
+IFS=$ac_save_IFS
+case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking target system type" >&5
+$as_echo_n "checking target system type... " >&6; }
+if ${ac_cv_target+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test "x$target_alias" = x; then
+  ac_cv_target=$ac_cv_host
+else
+  ac_cv_target=`$SHELL "$ac_aux_dir/config.sub" $target_alias` ||
+    as_fn_error $? "$SHELL $ac_aux_dir/config.sub $target_alias failed" "$LINENO" 5
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_target" >&5
+$as_echo "$ac_cv_target" >&6; }
+case $ac_cv_target in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical target" "$LINENO" 5;;
+esac
+target=$ac_cv_target
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_target
+shift
+target_cpu=$1
+target_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+target_os=$*
+IFS=$ac_save_IFS
+case $target_os in *\ *) target_os=`echo "$target_os" | sed 's/ /-/g'`;; esac
+
+
+# The aliases save the names the user supplied, while $host etc.
+# will get canonicalized.
+test -n "$target_alias" &&
+  test "$program_prefix$program_suffix$program_transform_name" = \
+    NONENONEs,x,x, &&
+  program_prefix=${target_alias}-
+
+#
+# Mac OS X / Darwin ends up putting some versioning cruft on the end of its
+# tuple that we don't care about in this script. Create "clean" variables
+# devoid of it.
+#
+
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking filtered build system type" >&5
+$as_echo_n "checking filtered build system type... " >&6; }
+if ${nl_cv_filtered_build+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  nl_cv_filtered_build=`echo ${build} | sed -e 's/[[:digit:].]*$//g'`
+        nl_filtered_build=${nl_cv_filtered_build}
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $nl_cv_filtered_build" >&5
+$as_echo "$nl_cv_filtered_build" >&6; }
+
+
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking filtered host system type" >&5
+$as_echo_n "checking filtered host system type... " >&6; }
+if ${nl_cv_filtered_host+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  nl_cv_filtered_host=`echo ${host} | sed -e 's/[[:digit:].]*$//g'`
+        nl_filtered_host=${nl_cv_filtered_host}
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $nl_cv_filtered_host" >&5
+$as_echo "$nl_cv_filtered_host" >&6; }
+
+
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking filtered target system type" >&5
+$as_echo_n "checking filtered target system type... " >&6; }
+if ${nl_cv_filtered_target+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  nl_cv_filtered_target=`echo ${target} | sed -e 's/[[:digit:].]*$//g'`
+        nl_filtered_target=${nl_cv_filtered_target}
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $nl_cv_filtered_target" >&5
+$as_echo "$nl_cv_filtered_target" >&6; }
+
+
+
+#
+# Configure automake with the desired options, indicating that this is not
+# a native GNU package, that we want "silent" build rules, and that we want
+# objects built in the same subdirectory as their source rather than collapsed
+# together at the top-level directory.
+#
+# Disable silent build rules by either passing --disable-silent-rules to
+# configure or passing V=1 to make
+#
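+# For example (illustrative invocations):
+#
+#   ./configure --disable-silent-rules   # make verbose output the default
+#   make V=1                             # verbose output for a single build
+#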
+am__api_version='1.14'
+
+# Find a good install program.  We prefer a C program (faster),
+# so one script is as good as another.  But avoid the broken or
+# incompatible versions:
+# SysV /etc/install, /usr/sbin/install
+# SunOS /usr/etc/install
+# IRIX /sbin/install
+# AIX /bin/install
+# AmigaOS /C/install, which installs bootblocks on floppy discs
+# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
+# AFS /usr/afsws/bin/install, which mishandles nonexistent args
+# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
+# OS/2's system install, which has a completely different semantic
+# ./install, which can be erroneously created by make from ./install.sh.
+# Reject install programs that cannot install multiple files.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
+$as_echo_n "checking for a BSD-compatible install... " >&6; }
+if test -z "$INSTALL"; then
+if ${ac_cv_path_install+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    # Account for people who put trailing slashes in PATH elements.
+case $as_dir/ in #((
+  ./ | .// | /[cC]/* | \
+  /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
+  ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
+  /usr/ucb/* ) ;;
+  *)
+    # OSF1 and SCO ODT 3.0 have their own names for install.
+    # Don't use installbsd from OSF since it installs stuff as root
+    # by default.
+    for ac_prog in ginstall scoinst install; do
+      for ac_exec_ext in '' $ac_executable_extensions; do
+	if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then
+	  if test $ac_prog = install &&
+	    grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # AIX install.  It has an incompatible calling convention.
+	    :
+	  elif test $ac_prog = install &&
+	    grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # program-specific install script used by HP pwplus--don't use.
+	    :
+	  else
+	    rm -rf conftest.one conftest.two conftest.dir
+	    echo one > conftest.one
+	    echo two > conftest.two
+	    mkdir conftest.dir
+	    if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
+	      test -s conftest.one && test -s conftest.two &&
+	      test -s conftest.dir/conftest.one &&
+	      test -s conftest.dir/conftest.two
+	    then
+	      ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
+	      break 3
+	    fi
+	  fi
+	fi
+      done
+    done
+    ;;
+esac
+
+  done
+IFS=$as_save_IFS
+
+rm -rf conftest.one conftest.two conftest.dir
+
+fi
+  if test "${ac_cv_path_install+set}" = set; then
+    INSTALL=$ac_cv_path_install
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for INSTALL within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    INSTALL=$ac_install_sh
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
+$as_echo "$INSTALL" >&6; }
+
+# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
+# It thinks the first close brace ends the variable substitution.
+test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
+
+test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
+
+test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5
+$as_echo_n "checking whether build environment is sane... " >&6; }
+# Reject unsafe characters in $srcdir or the absolute working directory
+# name.  Accept space and tab only in the latter.
+am_lf='
+'
+case `pwd` in
+  *[\\\"\#\$\&\'\`$am_lf]*)
+    as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;;
+esac
+case $srcdir in
+  *[\\\"\#\$\&\'\`$am_lf\ \	]*)
+    as_fn_error $? "unsafe srcdir value: '$srcdir'" "$LINENO" 5;;
+esac
+
+# Do 'set' in a subshell so we don't clobber the current shell's
+# arguments.  Must try -L first in case configure is actually a
+# symlink; some systems play weird games with the mod time of symlinks
+# (eg FreeBSD returns the mod time of the symlink's containing
+# directory).
+if (
+   am_has_slept=no
+   for am_try in 1 2; do
+     echo "timestamp, slept: $am_has_slept" > conftest.file
+     set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
+     if test "$*" = "X"; then
+	# -L didn't work.
+	set X `ls -t "$srcdir/configure" conftest.file`
+     fi
+     if test "$*" != "X $srcdir/configure conftest.file" \
+	&& test "$*" != "X conftest.file $srcdir/configure"; then
+
+	# If neither matched, then we have a broken ls.  This can happen
+	# if, for instance, CONFIG_SHELL is bash and it inherits a
+	# broken ls alias from the environment.  This has actually
+	# happened.  Such a system could not be considered "sane".
+	as_fn_error $? "ls -t appears to fail.  Make sure there is not a broken
+  alias in your environment" "$LINENO" 5
+     fi
+     if test "$2" = conftest.file || test $am_try -eq 2; then
+       break
+     fi
+     # Just in case.
+     sleep 1
+     am_has_slept=yes
+   done
+   test "$2" = conftest.file
+   )
+then
+   # Ok.
+   :
+else
+   as_fn_error $? "newly created file is older than distributed files!
+Check your system clock" "$LINENO" 5
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+# If we didn't sleep, we still need to ensure time stamps of config.status and
+# generated files are strictly newer.
+am_sleep_pid=
+if grep 'slept: no' conftest.file >/dev/null 2>&1; then
+  ( sleep 1 ) &
+  am_sleep_pid=$!
+fi
+
+rm -f conftest.file
+
+test "$program_prefix" != NONE &&
+  program_transform_name="s&^&$program_prefix&;$program_transform_name"
+# Use a double $ so make ignores it.
+test "$program_suffix" != NONE &&
+  program_transform_name="s&\$&$program_suffix&;$program_transform_name"
+# Double any \ or $.
+# By default was `s,x,x', remove it if useless.
+ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
+program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
+
+# expand $ac_aux_dir to an absolute path
+am_aux_dir=`cd $ac_aux_dir && pwd`
+
+if test x"${MISSING+set}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+  *)
+    MISSING="\${SHELL} $am_aux_dir/missing" ;;
+  esac
+fi
+# Use eval to expand $SHELL
+if eval "$MISSING --is-lightweight"; then
+  am_missing_run="$MISSING "
+else
+  am_missing_run=
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5
+$as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;}
+fi
+
+if test x"${install_sh}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
+  *)
+    install_sh="\${SHELL} $am_aux_dir/install-sh"
+  esac
+fi
+
+# Installed binaries are usually stripped using 'strip' when the user
+# runs "make install-strip".  However 'strip' might not be the right
+# tool to use in cross-compilation environments, therefore Automake
+# will honor the 'STRIP' environment variable to overrule this program.
+if test "$cross_compiling" != no; then
+  if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+set dummy ${ac_tool_prefix}strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$STRIP"; then
+  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+STRIP=$ac_cv_prog_STRIP
+if test -n "$STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
+$as_echo "$STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_STRIP"; then
+  ac_ct_STRIP=$STRIP
+  # Extract the first word of "strip", so it can be a program name with args.
+set dummy strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_STRIP"; then
+  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_STRIP="strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
+if test -n "$ac_ct_STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
+$as_echo "$ac_ct_STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_STRIP" = x; then
+    STRIP=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    STRIP=$ac_ct_STRIP
+  fi
+else
+  STRIP="$ac_cv_prog_STRIP"
+fi
+
+fi
+INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5
+$as_echo_n "checking for a thread-safe mkdir -p... " >&6; }
+if test -z "$MKDIR_P"; then
+  if ${ac_cv_path_mkdir+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in mkdir gmkdir; do
+	 for ac_exec_ext in '' $ac_executable_extensions; do
+	   { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue
+	   case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
+	     'mkdir (GNU coreutils) '* | \
+	     'mkdir (coreutils) '* | \
+	     'mkdir (fileutils) '4.1*)
+	       ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
+	       break 3;;
+	   esac
+	 done
+       done
+  done
+IFS=$as_save_IFS
+
+fi
+
+  test -d ./--version && rmdir ./--version
+  if test "${ac_cv_path_mkdir+set}" = set; then
+    MKDIR_P="$ac_cv_path_mkdir -p"
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for MKDIR_P within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    MKDIR_P="$ac_install_sh -d"
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5
+$as_echo "$MKDIR_P" >&6; }
+
+for ac_prog in gawk mawk nawk awk
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AWK+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AWK"; then
+  ac_cv_prog_AWK="$AWK" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AWK="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AWK=$ac_cv_prog_AWK
+if test -n "$AWK"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5
+$as_echo "$AWK" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$AWK" && break
+done
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
+$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
+set x ${MAKE-make}
+ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
+if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat >conftest.make <<\_ACEOF
+SHELL = /bin/sh
+all:
+	@echo '@@@%%%=$(MAKE)=@@@%%%'
+_ACEOF
+# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
+case `${MAKE-make} -f conftest.make 2>/dev/null` in
+  *@@@%%%=?*=@@@%%%*)
+    eval ac_cv_prog_make_${ac_make}_set=yes;;
+  *)
+    eval ac_cv_prog_make_${ac_make}_set=no;;
+esac
+rm -f conftest.make
+fi
+if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+  SET_MAKE=
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+  SET_MAKE="MAKE=${MAKE-make}"
+fi
+
+rm -rf .tst 2>/dev/null
+mkdir .tst 2>/dev/null
+if test -d .tst; then
+  am__leading_dot=.
+else
+  am__leading_dot=_
+fi
+rmdir .tst 2>/dev/null
+
+# Check whether --enable-silent-rules was given.
+if test "${enable_silent_rules+set}" = set; then :
+  enableval=$enable_silent_rules;
+fi
+
+case $enable_silent_rules in # (((
+  yes) AM_DEFAULT_VERBOSITY=0;;
+   no) AM_DEFAULT_VERBOSITY=1;;
+    *) AM_DEFAULT_VERBOSITY=1;;
+esac
+am_make=${MAKE-make}
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5
+$as_echo_n "checking whether $am_make supports nested variables... " >&6; }
+if ${am_cv_make_support_nested_variables+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if $as_echo 'TRUE=$(BAR$(V))
+BAR0=false
+BAR1=true
+V=1
+am__doit:
+	@$(TRUE)
+.PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then
+  am_cv_make_support_nested_variables=yes
+else
+  am_cv_make_support_nested_variables=no
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5
+$as_echo "$am_cv_make_support_nested_variables" >&6; }
+if test $am_cv_make_support_nested_variables = yes; then
+    AM_V='$(V)'
+  AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
+else
+  AM_V=$AM_DEFAULT_VERBOSITY
+  AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY
+fi
+AM_BACKSLASH='\'
+
+if test "`cd $srcdir && pwd`" != "`pwd`"; then
+  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
+  # is not polluted with repeated "-I."
+  am__isrc=' -I$(srcdir)'
+  # test to see if srcdir already configured
+  if test -f $srcdir/config.status; then
+    as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5
+  fi
+fi
+
+# test whether we have cygpath
+if test -z "$CYGPATH_W"; then
+  if (cygpath --version) >/dev/null 2>/dev/null; then
+    CYGPATH_W='cygpath -w'
+  else
+    CYGPATH_W=echo
+  fi
+fi
+
+
+# Define the identity of the package.
+ PACKAGE='nlunittest'
+ VERSION='g899ce0d'
+
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE "$PACKAGE"
+_ACEOF
+
+
+cat >>confdefs.h <<_ACEOF
+#define VERSION "$VERSION"
+_ACEOF
+
+# Some tools Automake needs.
+
+ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"}
+
+
+AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"}
+
+
+AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"}
+
+
+AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"}
+
+
+MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
+
+# For better backward compatibility.  To be removed once Automake 1.9.x
+# dies out for good.  For more background, see:
+# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
+# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
+mkdir_p='$(MKDIR_P)'
+
+# We need awk for the "check" target.  The system "awk" is bad on
+# some platforms.
+# Always define AMTAR for backward compatibility.  Yes, it's still used
+# in the wild :-(  We should find a proper way to deprecate it ...
+AMTAR='$${TAR-tar}'
+
+
+# We'll loop over all known methods to create a tar archive until one works.
+_am_tools='gnutar  pax cpio none'
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to create a pax tar archive" >&5
+$as_echo_n "checking how to create a pax tar archive... " >&6; }
+
+  # Go ahead even if we have the value already cached.  We do so because we
+  # need to set the values for the 'am__tar' and 'am__untar' variables.
+  _am_tools=${am_cv_prog_tar_pax-$_am_tools}
+
+  for _am_tool in $_am_tools; do
+    case $_am_tool in
+    gnutar)
+      for _am_tar in tar gnutar gtar; do
+        { echo "$as_me:$LINENO: $_am_tar --version" >&5
+   ($_am_tar --version) >&5 2>&5
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   (exit $ac_status); } && break
+      done
+      am__tar="$_am_tar --format=posix -chf - "'"$$tardir"'
+      am__tar_="$_am_tar --format=posix -chf - "'"$tardir"'
+      am__untar="$_am_tar -xf -"
+      ;;
+    plaintar)
+      # Must skip GNU tar: if it does not support --format= it doesn't create
+      # ustar tarball either.
+      (tar --version) >/dev/null 2>&1 && continue
+      am__tar='tar chf - "$$tardir"'
+      am__tar_='tar chf - "$tardir"'
+      am__untar='tar xf -'
+      ;;
+    pax)
+      am__tar='pax -L -x pax -w "$$tardir"'
+      am__tar_='pax -L -x pax -w "$tardir"'
+      am__untar='pax -r'
+      ;;
+    cpio)
+      am__tar='find "$$tardir" -print | cpio -o -H pax -L'
+      am__tar_='find "$tardir" -print | cpio -o -H pax -L'
+      am__untar='cpio -i -H pax -d'
+      ;;
+    none)
+      am__tar=false
+      am__tar_=false
+      am__untar=false
+      ;;
+    esac
+
+    # If the value was cached, stop now.  We just wanted to have am__tar
+    # and am__untar set.
+    test -n "${am_cv_prog_tar_pax}" && break
+
+    # tar/untar a dummy directory, and stop if the command works.
+    rm -rf conftest.dir
+    mkdir conftest.dir
+    echo GrepMe > conftest.dir/file
+    { echo "$as_me:$LINENO: tardir=conftest.dir && eval $am__tar_ >conftest.tar" >&5
+   (tardir=conftest.dir && eval $am__tar_ >conftest.tar) >&5 2>&5
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   (exit $ac_status); }
+    rm -rf conftest.dir
+    if test -s conftest.tar; then
+      { echo "$as_me:$LINENO: $am__untar <conftest.tar" >&5
+   ($am__untar <conftest.tar) >&5 2>&5
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   (exit $ac_status); }
+      { echo "$as_me:$LINENO: cat conftest.dir/file" >&5
+   (cat conftest.dir/file) >&5 2>&5
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   (exit $ac_status); }
+      grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
+    fi
+  done
+  rm -rf conftest.dir
+
+  if ${am_cv_prog_tar_pax+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  am_cv_prog_tar_pax=$_am_tool
+fi
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_tar_pax" >&5
+$as_echo "$am_cv_prog_tar_pax" >&6; }
+
+
+
+
+
+
+# POSIX will say in a future version that running "rm -f" with no argument
+# is OK; and we want to be able to make that assumption in our Makefile
+# recipes.  So use an aggressive probe to check that the usage we want is
+# actually supported "in the wild" to an acceptable degree.
+# See automake bug#10828.
+# To make any issue more visible, cause the running configure to be aborted
+# by default if the 'rm' program in use doesn't match our expectations; the
+# user can still override this though.
+if rm -f && rm -fr && rm -rf; then : OK; else
+  cat >&2 <<'END'
+Oops!
+
+Your 'rm' program seems unable to run without file operands specified
+on the command line, even when the '-f' option is present.  This is contrary
+to the behaviour of most rm programs out there, and not conforming with
+the upcoming POSIX standard: <http://austingroupbugs.net/view.php?id=542>
+
+Please tell bug-automake@gnu.org about your system, including the value
+of your $PATH and any error possibly output before this message.  This
+can help us improve future automake versions.
+
+END
+  if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then
+    echo 'Configuration will proceed anyway, since you have set the' >&2
+    echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2
+    echo >&2
+  else
+    cat >&2 <<'END'
+Aborting the configuration process, to ensure you take notice of the issue.
+
+You can download and install GNU coreutils to get an 'rm' implementation
+that behaves properly: <http://www.gnu.org/software/coreutils/>.
+
+If you want to complete the configuration process using your problematic
+'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
+to "yes", and re-run configure.
+
+END
+    as_fn_error $? "Your 'rm' program is bad, sorry." "$LINENO" 5
+  fi
+fi
+
+#
+# Silent build rules require at least automake-1.11. Employ
+# techniques that avoid breaking earlier versions of automake.
+#
+# Check whether --enable-silent-rules was given.
+if test "${enable_silent_rules+set}" = set; then :
+  enableval=$enable_silent_rules;
+fi
+
+case $enable_silent_rules in # (((
+  yes) AM_DEFAULT_VERBOSITY=0;;
+   no) AM_DEFAULT_VERBOSITY=1;;
+    *) AM_DEFAULT_VERBOSITY=0;;
+esac
+am_make=${MAKE-make}
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5
+$as_echo_n "checking whether $am_make supports nested variables... " >&6; }
+if ${am_cv_make_support_nested_variables+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if $as_echo 'TRUE=$(BAR$(V))
+BAR0=false
+BAR1=true
+V=1
+am__doit:
+	@$(TRUE)
+.PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then
+  am_cv_make_support_nested_variables=yes
+else
+  am_cv_make_support_nested_variables=no
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5
+$as_echo "$am_cv_make_support_nested_variables" >&6; }
+if test $am_cv_make_support_nested_variables = yes; then
+    AM_V='$(V)'
+  AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
+else
+  AM_V=$AM_DEFAULT_VERBOSITY
+  AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY
+fi
+AM_BACKSLASH='\'
+
+# Check whether --enable-silent-rules was given.
+if test "${enable_silent_rules+set}" = set; then :
+  enableval=$enable_silent_rules;
+fi
+
+case $enable_silent_rules in # (((
+  yes) AM_DEFAULT_VERBOSITY=0;;
+   no) AM_DEFAULT_VERBOSITY=1;;
+    *) AM_DEFAULT_VERBOSITY=0;;
+esac
+am_make=${MAKE-make}
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5
+$as_echo_n "checking whether $am_make supports nested variables... " >&6; }
+if ${am_cv_make_support_nested_variables+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if $as_echo 'TRUE=$(BAR$(V))
+BAR0=false
+BAR1=true
+V=1
+am__doit:
+	@$(TRUE)
+.PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then
+  am_cv_make_support_nested_variables=yes
+else
+  am_cv_make_support_nested_variables=no
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5
+$as_echo "$am_cv_make_support_nested_variables" >&6; }
+if test $am_cv_make_support_nested_variables = yes; then
+    AM_V='$(V)'
+  AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
+else
+  AM_V=$AM_DEFAULT_VERBOSITY
+  AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY
+fi
+AM_BACKSLASH='\'
+
+
+#
+# Enable maintainer mode to prevent the package from constantly trying
+# to rebuild configure, Makefile.in, etc. Rebuilding such files rarely,
+# if ever, needs to be done "in the field".
+#
+# Use the included 'bootstrap' script instead when necessary.
+#
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable maintainer-specific portions of Makefiles" >&5
+$as_echo_n "checking whether to enable maintainer-specific portions of Makefiles... " >&6; }
+    # Check whether --enable-maintainer-mode was given.
+if test "${enable_maintainer_mode+set}" = set; then :
+  enableval=$enable_maintainer_mode; USE_MAINTAINER_MODE=$enableval
+else
+  USE_MAINTAINER_MODE=no
+fi
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_MAINTAINER_MODE" >&5
+$as_echo "$USE_MAINTAINER_MODE" >&6; }
+   if test $USE_MAINTAINER_MODE = yes; then
+  MAINTAINER_MODE_TRUE=
+  MAINTAINER_MODE_FALSE='#'
+else
+  MAINTAINER_MODE_TRUE='#'
+  MAINTAINER_MODE_FALSE=
+fi
+
+  MAINT=$MAINTAINER_MODE_TRUE
+
+
+
+#
+# Checks for build host programs
+#
+
+# If we are cross-compiling and we are on an embedded target that
+# doesn't support independent, standalone executables, then all
+# compiler tests that attempt to create an executable will fail. In
+# such circumstances, set AC_NO_EXECUTABLES (see http://sourceware.org/
+# ml/newlib/2006/msg00353.html).
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to disable executable checking" >&5
+$as_echo_n "checking whether to disable executable checking... " >&6; }
+if test "$cross_compiling" = yes; then
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+# Passing -Werror to GCC-based or -compatible compilers breaks some
+# autoconf tests (see
+# http://lists.gnu.org/archive/html/autoconf-patches/2008-09/msg00014.html).
+#
+# If -Werror has been passed transform it into -Wno-error. We'll
+# transform it back later with NL_RESTORE_WERROR.
+
+
+
+    if echo "${CPPFLAGS}" | grep -q '\-Werror'; then
+	CPPFLAGS="`echo ${CPPFLAGS} | sed -e 's,-Werror\([[:space:]]\),-Wno-error\1,g'`"
+	nl_had_CPPFLAGS_werror=yes
+    else
+	nl_had_CPPFLAGS_werror=no
+    fi
+
+
+    if echo "${CFLAGS}" | grep -q '\-Werror'; then
+	CFLAGS="`echo ${CFLAGS} | sed -e 's,-Werror\([[:space:]]\),-Wno-error\1,g'`"
+	nl_had_CFLAGS_werror=yes
+    else
+	nl_had_CFLAGS_werror=no
+    fi
+
+
+    if echo "${CXXFLAGS}" | grep -q '\-Werror'; then
+	CXXFLAGS="`echo ${CXXFLAGS} | sed -e 's,-Werror\([[:space:]]\),-Wno-error\1,g'`"
+	nl_had_CXXFLAGS_werror=yes
+    else
+	nl_had_CXXFLAGS_werror=no
+    fi
+
+
+    if echo "${OBJCFLAGS}" | grep -q '\-Werror'; then
+	OBJCFLAGS="`echo ${OBJCFLAGS} | sed -e 's,-Werror\([[:space:]]\),-Wno-error\1,g'`"
+	nl_had_OBJCFLAGS_werror=yes
+    else
+	nl_had_OBJCFLAGS_werror=no
+    fi
+
+
+    if echo "${OBJCXXFLAGS}" | grep -q '\-Werror'; then
+	OBJCXXFLAGS="`echo ${OBJCXXFLAGS} | sed -e 's,-Werror\([[:space:]]\),-Wno-error\1,g'`"
+	nl_had_OBJCXXFLAGS_werror=yes
+    else
+	nl_had_OBJCXXFLAGS_werror=no
+    fi
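+
+# Illustration (hypothetical flag values): with CFLAGS="-Werror -Wall", the
+# sed substitution above yields CFLAGS="-Wno-error -Wall", and
+# nl_had_CFLAGS_werror=yes records that -Werror should be restored later
+# (see the NL_RESTORE_WERROR note above).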
+
+
+
+# Check for compilers.
+#
+# These should be checked BEFORE we check for and, implicitly,
+# initialize libtool such that libtool knows what languages it has to
+# work with.
+
+DEPDIR="${am__leading_dot}deps"
+
+ac_config_commands="$ac_config_commands depfiles"
+
+
+am_make=${MAKE-make}
+cat > confinc << 'END'
+am__doit:
+	@echo this is the am__doit target
+.PHONY: am__doit
+END
+# If we don't find an include directive, just comment out the code.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
+$as_echo_n "checking for style of include used by $am_make... " >&6; }
+am__include="#"
+am__quote=
+_am_result=none
+# First try GNU make style include.
+echo "include confinc" > confmf
+# Ignore all kinds of additional output from 'make'.
+case `$am_make -s -f confmf 2> /dev/null` in #(
+*the\ am__doit\ target*)
+  am__include=include
+  am__quote=
+  _am_result=GNU
+  ;;
+esac
+# Now try BSD make style include.
+if test "$am__include" = "#"; then
+   echo '.include "confinc"' > confmf
+   case `$am_make -s -f confmf 2> /dev/null` in #(
+   *the\ am__doit\ target*)
+     am__include=.include
+     am__quote="\""
+     _am_result=BSD
+     ;;
+   esac
+fi
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
+$as_echo "$_am_result" >&6; }
+rm -f confinc confmf
+
+# Check whether --enable-dependency-tracking was given.
+if test "${enable_dependency_tracking+set}" = set; then :
+  enableval=$enable_dependency_tracking;
+fi
+
+if test "x$enable_dependency_tracking" != xno; then
+  am_depcomp="$ac_aux_dir/depcomp"
+  AMDEPBACKSLASH='\'
+  am__nodep='_no'
+fi
+ if test "x$enable_dependency_tracking" != xno; then
+  AMDEP_TRUE=
+  AMDEP_FALSE='#'
+else
+  AMDEP_TRUE='#'
+  AMDEP_FALSE=
+fi
+
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_CC"; then
+  ac_ct_CC=$CC
+  # Extract the first word of "gcc", so it can be a program name with args.
+set dummy gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+else
+  CC="$ac_cv_prog_CC"
+fi
+
+if test -z "$CC"; then
+          if test -n "$ac_tool_prefix"; then
+    # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  fi
+fi
+if test -z "$CC"; then
+  # Extract the first word of "cc", so it can be a program name with args.
+set dummy cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+  ac_prog_rejected=no
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
+       ac_prog_rejected=yes
+       continue
+     fi
+    ac_cv_prog_CC="cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+if test $ac_prog_rejected = yes; then
+  # We found a bogon in the path, so make sure we never use it.
+  set dummy $ac_cv_prog_CC
+  shift
+  if test $# != 0; then
+    # We chose a different compiler from the bogus one.
+    # However, it has the same basename, so the bogon will be chosen
+    # first if we set CC to just the basename; use the full file name.
+    shift
+    ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
+  fi
+fi
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$CC"; then
+  if test -n "$ac_tool_prefix"; then
+  for ac_prog in cl.exe
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$CC" && break
+  done
+fi
+if test -z "$CC"; then
+  ac_ct_CC=$CC
+  for ac_prog in cl.exe
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_CC" && break
+done
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+fi
+
+fi
+
+
+test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "no acceptable C compiler found in \$PATH
+See \`config.log' for more details" "$LINENO" 5; }
+
+# Provide some information about the compiler.
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
+set X $ac_compile
+ac_compiler=$2
+for ac_option in --version -v -V -qversion; do
+  { { ac_try="$ac_compiler $ac_option >&5"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    sed '10a\
+... rest of stderr output deleted ...
+         10q' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+  fi
+  rm -f conftest.er1 conftest.err
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+done
+
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+  ac_no_link=no
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
+# Try to create an executable without -o first, disregard a.out.
+# It will help us diagnose broken compilers and get an initial idea
+# of exeext.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5
+$as_echo_n "checking whether the C compiler works... " >&6; }
+ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
+
+# The possible output files:
+ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
+
+ac_rmfiles=
+for ac_file in $ac_files
+do
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+    * ) ac_rmfiles="$ac_rmfiles $ac_file";;
+  esac
+done
+rm -f $ac_rmfiles
+
+if { { ac_try="$ac_link_default"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link_default") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
+# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
+# in a Makefile.  We should not override ac_cv_exeext if it was cached,
+# so that the user can short-circuit this test for compilers unknown to
+# Autoconf.
+for ac_file in $ac_files ''
+do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
+	;;
+    [ab].out )
+	# We found the default executable, but exeext='' is most
+	# certainly right.
+	break;;
+    *.* )
+	if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
+	then :; else
+	   ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+	fi
+	# We set ac_cv_exeext here because the later test for it is not
+	# safe: cross compilers may not add the suffix if given an `-o'
+	# argument, so we may need to know it at that point already.
+	# Even if this section looks crufty: it has the advantage of
+	# actually working.
+	break;;
+    * )
+	break;;
+  esac
+done
+test "$ac_cv_exeext" = no && ac_cv_exeext=
+
+else
+  ac_file=''
+fi
+if test -z "$ac_file"; then :
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+$as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error 77 "C compiler cannot create executables
+See \`config.log' for more details" "$LINENO" 5; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5
+$as_echo_n "checking for C compiler default output file name... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
+$as_echo "$ac_file" >&6; }
+ac_exeext=$ac_cv_exeext
+
+rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
+ac_clean_files=$ac_clean_files_save
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
+$as_echo_n "checking for suffix of executables... " >&6; }
+if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  # If both `conftest.exe' and `conftest' are `present' (well, observable)
+# catch `conftest.exe'.  For instance with Cygwin, `ls conftest' will
+# work properly (i.e., refer to `conftest.exe'), while it won't with
+# `rm'.
+for ac_file in conftest.exe conftest conftest.*; do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+    *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+	  break;;
+    * ) break;;
+  esac
+done
+else
+  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of executables: cannot compile and link
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest conftest$ac_cv_exeext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
+$as_echo "$ac_cv_exeext" >&6; }
+
+rm -f conftest.$ac_ext
+EXEEXT=$ac_cv_exeext
+ac_exeext=$EXEEXT
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdio.h>
+int
+main ()
+{
+FILE *f = fopen ("conftest.out", "w");
+ return ferror (f) || fclose (f) != 0;
+
+  ;
+  return 0;
+}
+_ACEOF
+ac_clean_files="$ac_clean_files conftest.out"
+# Check that the compiler produces executables we can run.  If not, either
+# the compiler is broken, or we cross compile.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
+$as_echo_n "checking whether we are cross compiling... " >&6; }
+if test "$cross_compiling" != yes; then
+  { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+  if { ac_try='./conftest$ac_cv_exeext'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then
+    cross_compiling=no
+  else
+    if test "$cross_compiling" = maybe; then
+	cross_compiling=yes
+    else
+	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot run C compiled programs.
+If you meant to cross compile, use \`--host'.
+See \`config.log' for more details" "$LINENO" 5; }
+    fi
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
+$as_echo "$cross_compiling" >&6; }
+
+rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
+ac_clean_files=$ac_clean_files_save
+
+else
+  rm -f -r a.out a.exe b.out conftest.$ac_ext conftest.o conftest.obj conftest.dSYM
+  ac_no_link=yes
+  # Setting cross_compiling will disable run tests; it will
+  # also disable AC_CHECK_FILE but that's generally
+  # correct if we can't link.
+  cross_compiling=yes
+  EXEEXT=
+  # Check that the compiler produces executables we can run.  If not, either
+# the compiler is broken, or we cross compile.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
+$as_echo_n "checking whether we are cross compiling... " >&6; }
+if test "$cross_compiling" != yes; then
+  { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+  if { ac_try='./conftest$ac_cv_exeext'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then
+    cross_compiling=no
+  else
+    if test "$cross_compiling" = maybe; then
+	cross_compiling=yes
+    else
+	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot run C compiled programs.
+If you meant to cross compile, use \`--host'.
+See \`config.log' for more details" "$LINENO" 5; }
+    fi
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
+$as_echo "$cross_compiling" >&6; }
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
+$as_echo_n "checking for suffix of object files... " >&6; }
+if ${ac_cv_objext+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+rm -f conftest.o conftest.obj
+if { { ac_try="$ac_compile"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compile") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  for ac_file in conftest.o conftest.obj conftest.*; do
+  test -f "$ac_file" || continue;
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
+    *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
+       break;;
+  esac
+done
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of object files: cannot compile
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest.$ac_cv_objext conftest.$ac_ext
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
+$as_echo "$ac_cv_objext" >&6; }
+OBJEXT=$ac_cv_objext
+ac_objext=$OBJEXT
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
+$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
+if ${ac_cv_c_compiler_gnu+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+#ifndef __GNUC__
+       choke me
+#endif
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_compiler_gnu=yes
+else
+  ac_compiler_gnu=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ac_cv_c_compiler_gnu=$ac_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
+$as_echo "$ac_cv_c_compiler_gnu" >&6; }
+if test $ac_compiler_gnu = yes; then
+  GCC=yes
+else
+  GCC=
+fi
+ac_test_CFLAGS=${CFLAGS+set}
+ac_save_CFLAGS=$CFLAGS
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
+$as_echo_n "checking whether $CC accepts -g... " >&6; }
+if ${ac_cv_prog_cc_g+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_save_c_werror_flag=$ac_c_werror_flag
+   ac_c_werror_flag=yes
+   ac_cv_prog_cc_g=no
+   CFLAGS="-g"
+   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+else
+  CFLAGS=""
+      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+  ac_c_werror_flag=$ac_save_c_werror_flag
+	 CFLAGS="-g"
+	 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+   ac_c_werror_flag=$ac_save_c_werror_flag
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
+$as_echo "$ac_cv_prog_cc_g" >&6; }
+if test "$ac_test_CFLAGS" = set; then
+  CFLAGS=$ac_save_CFLAGS
+elif test $ac_cv_prog_cc_g = yes; then
+  if test "$GCC" = yes; then
+    CFLAGS="-g -O2"
+  else
+    CFLAGS="-g"
+  fi
+else
+  if test "$GCC" = yes; then
+    CFLAGS="-O2"
+  else
+    CFLAGS=
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
+$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
+if ${ac_cv_prog_cc_c89+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_cv_prog_cc_c89=no
+ac_save_CC=$CC
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdarg.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+     char **p;
+     int i;
+{
+  return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+  char *s;
+  va_list v;
+  va_start (v,p);
+  s = g (p, va_arg (v,int));
+  va_end (v);
+  return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
+   function prototypes and stuff, but not '\xHH' hex character constants.
+   These don't provoke an error unfortunately, instead are silently treated
+   as 'x'.  The following induces an error, until -std is added to get
+   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
+   array size at least.  It's necessary to write '\x00'==0 to get something
+   that's true only with -std.  */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+   inside strings and character constants.  */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;
+int
+main ()
+{
+return f (e, argv, 0) != argv[0]  ||  f (e, argv, 1) != argv[1];
+  ;
+  return 0;
+}
+_ACEOF
+for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
+	-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
+do
+  CC="$ac_save_CC $ac_arg"
+  if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_c89=$ac_arg
+fi
+rm -f core conftest.err conftest.$ac_objext
+  test "x$ac_cv_prog_cc_c89" != "xno" && break
+done
+rm -f conftest.$ac_ext
+CC=$ac_save_CC
+
+fi
+# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_c89" in
+  x)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
+$as_echo "none needed" >&6; } ;;
+  xno)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
+$as_echo "unsupported" >&6; } ;;
+  *)
+    CC="$CC $ac_cv_prog_cc_c89"
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
+$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
+esac
+if test "x$ac_cv_prog_cc_c89" != xno; then :
+
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5
+$as_echo_n "checking whether $CC understands -c and -o together... " >&6; }
+if ${am_cv_prog_cc_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+  # Make sure it works both with $CC and with simple cc.
+  # Following AC_PROG_CC_C_O, we do the test twice because some
+  # compilers refuse to overwrite an existing .o file with -o,
+  # though they will create one.
+  am_cv_prog_cc_c_o=yes
+  for am_i in 1 2; do
+    if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5
+   ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   (exit $ac_status); } \
+         && test -f conftest2.$ac_objext; then
+      : OK
+    else
+      am_cv_prog_cc_c_o=no
+      break
+    fi
+  done
+  rm -f core conftest*
+  unset am_i
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5
+$as_echo "$am_cv_prog_cc_c_o" >&6; }
+if test "$am_cv_prog_cc_c_o" != yes; then
+   # Losing compiler, so override with the script.
+   # FIXME: It is wrong to rewrite CC.
+   # But if we don't then we get into trouble of one sort or another.
+   # A longer-term fix would be to have automake use am__CC in this case,
+   # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
+   CC="$am_aux_dir/compile $CC"
+fi
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+depcc="$CC"   am_compiler_list=
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
+$as_echo_n "checking dependency style of $depcc... " >&6; }
+if ${am_cv_CC_dependencies_compiler_type+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named 'D' -- because '-MD' means "put the output
+  # in D".
+  rm -rf conftest.dir
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_CC_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
+  fi
+  am__universal=false
+  case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac
+
+  for depmode in $am_compiler_list; do
+    # Setup a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with
+      # Solaris 10 /bin/sh.
+      echo '/* dummy */' > sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    # We check with '-c' and '-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle '-M -o', and we need to detect this.  Also, some Intel
+    # versions had trouble with output in subdirs.
+    am__obj=sub/conftest.${OBJEXT-o}
+    am__minus_obj="-o $am__obj"
+    case $depmode in
+    gcc)
+      # This depmode causes a compiler race in universal mode.
+      test "$am__universal" = false || continue
+      ;;
+    nosideeffect)
+      # After this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested.
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    msvc7 | msvc7msys | msvisualcpp | msvcmsys)
+      # This compiler won't grok '-c -o', but also, the minuso test has
+      # not run yet.  These depmodes are late enough in the game, and
+      # so weak that their functioning should not be impacted.
+      am__obj=conftest.${OBJEXT-o}
+      am__minus_obj=
+      ;;
+    none) break ;;
+    esac
+    if depmode=$depmode \
+       source=sub/conftest.c object=$am__obj \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options, it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_CC_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_CC_dependencies_compiler_type=none
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
+$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
+CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
+
+ if
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
+  am__fastdepCC_TRUE=
+  am__fastdepCC_FALSE='#'
+else
+  am__fastdepCC_TRUE='#'
+  am__fastdepCC_FALSE=
+fi
+
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
+$as_echo_n "checking how to run the C preprocessor... " >&6; }
+# On Suns, sometimes $CPP names a directory.
+if test -n "$CPP" && test -d "$CPP"; then
+  CPP=
+fi
+if test -z "$CPP"; then
+  if ${ac_cv_prog_CPP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+      # Double quotes because CPP needs to be expanded
+    for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
+    do
+      ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+  # Use a header file that comes with gcc, so configuring glibc
+  # with a fresh cross-compiler works.
+  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+  # <limits.h> exists even on freestanding compilers.
+  # On the NeXT, cc -E runs the code through the compiler's parser,
+  # not just through cpp. "Syntax error" is here to catch this case.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+		     Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+  # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+  # OK, works on sane cases.  Now check whether nonexistent headers
+  # can be detected and how.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+  # Broken: success on invalid input.
+continue
+else
+  # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+  break
+fi
+
+    done
+    ac_cv_prog_CPP=$CPP
+
+fi
+  CPP=$ac_cv_prog_CPP
+else
+  ac_cv_prog_CPP=$CPP
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
+$as_echo "$CPP" >&6; }
+ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+  # Use a header file that comes with gcc, so configuring glibc
+  # with a fresh cross-compiler works.
+  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+  # <limits.h> exists even on freestanding compilers.
+  # On the NeXT, cc -E runs the code through the compiler's parser,
+  # not just through cpp. "Syntax error" is here to catch this case.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+		     Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+  # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+  # OK, works on sane cases.  Now check whether nonexistent headers
+  # can be detected and how.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+  # Broken: success on invalid input.
+continue
+else
+  # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+
+else
+  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+ac_c_preproc_warn_flag=yes
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_CC"; then
+  ac_ct_CC=$CC
+  # Extract the first word of "gcc", so it can be a program name with args.
+set dummy gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+else
+  CC="$ac_cv_prog_CC"
+fi
+
+if test -z "$CC"; then
+          if test -n "$ac_tool_prefix"; then
+    # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  fi
+fi
+if test -z "$CC"; then
+  # Extract the first word of "cc", so it can be a program name with args.
+set dummy cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+  ac_prog_rejected=no
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
+       ac_prog_rejected=yes
+       continue
+     fi
+    ac_cv_prog_CC="cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+if test $ac_prog_rejected = yes; then
+  # We found a bogon in the path, so make sure we never use it.
+  set dummy $ac_cv_prog_CC
+  shift
+  if test $# != 0; then
+    # We chose a different compiler from the bogus one.
+    # However, it has the same basename, so the bogon will be chosen
+    # first if we set CC to just the basename; use the full file name.
+    shift
+    ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
+  fi
+fi
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$CC"; then
+  if test -n "$ac_tool_prefix"; then
+  for ac_prog in cl.exe
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$CC" && break
+  done
+fi
+if test -z "$CC"; then
+  ac_ct_CC=$CC
+  for ac_prog in cl.exe
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_CC" && break
+done
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+fi
+
+fi
+
+
+test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "no acceptable C compiler found in \$PATH
+See \`config.log' for more details" "$LINENO" 5; }
+
+# Provide some information about the compiler.
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
+set X $ac_compile
+ac_compiler=$2
+for ac_option in --version -v -V -qversion; do
+  { { ac_try="$ac_compiler $ac_option >&5"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    sed '10a\
+... rest of stderr output deleted ...
+         10q' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+  fi
+  rm -f conftest.er1 conftest.err
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+done
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
+$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
+if ${ac_cv_c_compiler_gnu+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+#ifndef __GNUC__
+       choke me
+#endif
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_compiler_gnu=yes
+else
+  ac_compiler_gnu=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ac_cv_c_compiler_gnu=$ac_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
+$as_echo "$ac_cv_c_compiler_gnu" >&6; }
+if test $ac_compiler_gnu = yes; then
+  GCC=yes
+else
+  GCC=
+fi
+ac_test_CFLAGS=${CFLAGS+set}
+ac_save_CFLAGS=$CFLAGS
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
+$as_echo_n "checking whether $CC accepts -g... " >&6; }
+if ${ac_cv_prog_cc_g+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_save_c_werror_flag=$ac_c_werror_flag
+   ac_c_werror_flag=yes
+   ac_cv_prog_cc_g=no
+   CFLAGS="-g"
+   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+else
+  CFLAGS=""
+      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+  ac_c_werror_flag=$ac_save_c_werror_flag
+	 CFLAGS="-g"
+	 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+   ac_c_werror_flag=$ac_save_c_werror_flag
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
+$as_echo "$ac_cv_prog_cc_g" >&6; }
+if test "$ac_test_CFLAGS" = set; then
+  CFLAGS=$ac_save_CFLAGS
+elif test $ac_cv_prog_cc_g = yes; then
+  if test "$GCC" = yes; then
+    CFLAGS="-g -O2"
+  else
+    CFLAGS="-g"
+  fi
+else
+  if test "$GCC" = yes; then
+    CFLAGS="-O2"
+  else
+    CFLAGS=
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
+$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
+if ${ac_cv_prog_cc_c89+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_cv_prog_cc_c89=no
+ac_save_CC=$CC
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdarg.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+     char **p;
+     int i;
+{
+  return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+  char *s;
+  va_list v;
+  va_start (v,p);
+  s = g (p, va_arg (v,int));
+  va_end (v);
+  return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
+   function prototypes and stuff, but not '\xHH' hex character constants.
+   These don't provoke an error unfortunately, instead are silently treated
+   as 'x'.  The following induces an error, until -std is added to get
+   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
+   array size at least.  It's necessary to write '\x00'==0 to get something
+   that's true only with -std.  */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+   inside strings and character constants.  */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;
+int
+main ()
+{
+return f (e, argv, 0) != argv[0]  ||  f (e, argv, 1) != argv[1];
+  ;
+  return 0;
+}
+_ACEOF
+for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
+	-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
+do
+  CC="$ac_save_CC $ac_arg"
+  if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_c89=$ac_arg
+fi
+rm -f core conftest.err conftest.$ac_objext
+  test "x$ac_cv_prog_cc_c89" != "xno" && break
+done
+rm -f conftest.$ac_ext
+CC=$ac_save_CC
+
+fi
+# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_c89" in
+  x)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
+$as_echo "none needed" >&6; } ;;
+  xno)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
+$as_echo "unsupported" >&6; } ;;
+  *)
+    CC="$CC $ac_cv_prog_cc_c89"
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
+$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
+esac
+if test "x$ac_cv_prog_cc_c89" != xno; then :
+
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5
+$as_echo_n "checking whether $CC understands -c and -o together... " >&6; }
+if ${am_cv_prog_cc_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+  # Make sure it works both with $CC and with simple cc.
+  # Following AC_PROG_CC_C_O, we do the test twice because some
+  # compilers refuse to overwrite an existing .o file with -o,
+  # though they will create one.
+  am_cv_prog_cc_c_o=yes
+  for am_i in 1 2; do
+    if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5
+   ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   (exit $ac_status); } \
+         && test -f conftest2.$ac_objext; then
+      : OK
+    else
+      am_cv_prog_cc_c_o=no
+      break
+    fi
+  done
+  rm -f core conftest*
+  unset am_i
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5
+$as_echo "$am_cv_prog_cc_c_o" >&6; }
+if test "$am_cv_prog_cc_c_o" != yes; then
+   # Losing compiler, so override with the script.
+   # FIXME: It is wrong to rewrite CC.
+   # But if we don't then we get into trouble of one sort or another.
+   # A longer-term fix would be to have automake use am__CC in this case,
+   # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
+   CC="$am_aux_dir/compile $CC"
+fi
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+depcc="$CC"   am_compiler_list=
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
+$as_echo_n "checking dependency style of $depcc... " >&6; }
+if ${am_cv_CC_dependencies_compiler_type+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named 'D' -- because '-MD' means "put the output
+  # in D".
+  rm -rf conftest.dir
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_CC_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
+  fi
+  am__universal=false
+  case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac
+
+  for depmode in $am_compiler_list; do
+    # Setup a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with
+      # Solaris 10 /bin/sh.
+      echo '/* dummy */' > sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    # We check with '-c' and '-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle '-M -o', and we need to detect this.  Also, some Intel
+    # versions had trouble with output in subdirs.
+    am__obj=sub/conftest.${OBJEXT-o}
+    am__minus_obj="-o $am__obj"
+    case $depmode in
+    gcc)
+      # This depmode causes a compiler race in universal mode.
+      test "$am__universal" = false || continue
+      ;;
+    nosideeffect)
+      # After this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested.
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    msvc7 | msvc7msys | msvisualcpp | msvcmsys)
+      # This compiler won't grok '-c -o', but also, the minuso test has
+      # not run yet.  These depmodes are late enough in the game, and
+      # so weak that their functioning should not be impacted.
+      am__obj=conftest.${OBJEXT-o}
+      am__minus_obj=
+      ;;
+    none) break ;;
+    esac
+    if depmode=$depmode \
+       source=sub/conftest.c object=$am__obj \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options, it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_CC_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_CC_dependencies_compiler_type=none
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
+$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
+CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
+
+ if
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
+  am__fastdepCC_TRUE=
+  am__fastdepCC_FALSE='#'
+else
+  am__fastdepCC_TRUE='#'
+  am__fastdepCC_FALSE=
+fi
+
+
+if test "x$CC" != xcc; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5
+$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5
+$as_echo_n "checking whether cc understands -c and -o together... " >&6; }
+fi
+set dummy $CC; ac_cc=`$as_echo "$2" |
+		      sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'`
+if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+# Make sure it works both with $CC and with simple cc.
+# We do the test twice because some compilers refuse to overwrite an
+# existing .o file with -o, though they will create one.
+ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5'
+rm -f conftest2.*
+if { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } &&
+   test -f conftest2.$ac_objext && { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; };
+then
+  eval ac_cv_prog_cc_${ac_cc}_c_o=yes
+  if test "x$CC" != xcc; then
+    # Test first that cc exists at all.
+    if { ac_try='cc -c conftest.$ac_ext >&5'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then
+      ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5'
+      rm -f conftest2.*
+      if { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } &&
+	 test -f conftest2.$ac_objext && { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; };
+      then
+	# cc works too.
+	:
+      else
+	# cc exists but doesn't like -o.
+	eval ac_cv_prog_cc_${ac_cc}_c_o=no
+      fi
+    fi
+  fi
+else
+  eval ac_cv_prog_cc_${ac_cc}_c_o=no
+fi
+rm -f core conftest*
+
+fi
+if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+
+$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h
+
+fi
+
+
+# Check for other compiler toolchain tools.
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+set dummy ${ac_tool_prefix}ar; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AR"; then
+  ac_cv_prog_AR="$AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AR="${ac_tool_prefix}ar"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AR=$ac_cv_prog_AR
+if test -n "$AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
+$as_echo "$AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_AR"; then
+  ac_ct_AR=$AR
+  # Extract the first word of "ar", so it can be a program name with args.
+set dummy ar; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_AR"; then
+  ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_AR="ar"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_AR=$ac_cv_prog_ac_ct_AR
+if test -n "$ac_ct_AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
+$as_echo "$ac_ct_AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_AR" = x; then
+    AR=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    AR=$ac_ct_AR
+  fi
+else
+  AR="$ac_cv_prog_AR"
+fi
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args.
+set dummy ${ac_tool_prefix}ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$RANLIB"; then
+  ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+RANLIB=$ac_cv_prog_RANLIB
+if test -n "$RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5
+$as_echo "$RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_RANLIB"; then
+  ac_ct_RANLIB=$RANLIB
+  # Extract the first word of "ranlib", so it can be a program name with args.
+set dummy ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_RANLIB"; then
+  ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_RANLIB="ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB
+if test -n "$ac_ct_RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5
+$as_echo "$ac_ct_RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_RANLIB" = x; then
+    RANLIB=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    RANLIB=$ac_ct_RANLIB
+  fi
+else
+  RANLIB="$ac_cv_prog_RANLIB"
+fi
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}objcopy", so it can be a program name with args.
+set dummy ${ac_tool_prefix}objcopy; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OBJCOPY+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OBJCOPY"; then
+  ac_cv_prog_OBJCOPY="$OBJCOPY" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OBJCOPY="${ac_tool_prefix}objcopy"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OBJCOPY=$ac_cv_prog_OBJCOPY
+if test -n "$OBJCOPY"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJCOPY" >&5
+$as_echo "$OBJCOPY" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OBJCOPY"; then
+  ac_ct_OBJCOPY=$OBJCOPY
+  # Extract the first word of "objcopy", so it can be a program name with args.
+set dummy objcopy; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OBJCOPY+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OBJCOPY"; then
+  ac_cv_prog_ac_ct_OBJCOPY="$ac_ct_OBJCOPY" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OBJCOPY="objcopy"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OBJCOPY=$ac_cv_prog_ac_ct_OBJCOPY
+if test -n "$ac_ct_OBJCOPY"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJCOPY" >&5
+$as_echo "$ac_ct_OBJCOPY" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OBJCOPY" = x; then
+    OBJCOPY=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OBJCOPY=$ac_ct_OBJCOPY
+  fi
+else
+  OBJCOPY="$ac_cv_prog_OBJCOPY"
+fi
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+set dummy ${ac_tool_prefix}strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$STRIP"; then
+  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+STRIP=$ac_cv_prog_STRIP
+if test -n "$STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
+$as_echo "$STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_STRIP"; then
+  ac_ct_STRIP=$STRIP
+  # Extract the first word of "strip", so it can be a program name with args.
+set dummy strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_STRIP"; then
+  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_STRIP="strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
+if test -n "$ac_ct_STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
+$as_echo "$ac_ct_STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_STRIP" = x; then
+    STRIP=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    STRIP=$ac_ct_STRIP
+  fi
+else
+  STRIP="$ac_cv_prog_STRIP"
+fi
+
+
+# Check for other host tools.
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5
+$as_echo_n "checking whether ln -s works... " >&6; }
+LN_S=$as_ln_s
+if test "$LN_S" = "ln -s"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5
+$as_echo "no, using $LN_S" >&6; }
+fi
+
+
+# Extract the first word of "cmp", so it can be a program name with args.
+set dummy cmp; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_CMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $CMP in
+  [\\/]* | ?:[\\/]*)
+  ac_cv_path_CMP="$CMP" # Let the user override the test with a path.
+  ;;
+  *)
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_path_CMP="$as_dir/$ac_word$ac_exec_ext"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+  ;;
+esac
+fi
+CMP=$ac_cv_path_CMP
+if test -n "$CMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CMP" >&5
+$as_echo "$CMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+# Extract the first word of "perl", so it can be a program name with args.
+set dummy perl; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_PERL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $PERL in
+  [\\/]* | ?:[\\/]*)
+  ac_cv_path_PERL="$PERL" # Let the user override the test with a path.
+  ;;
+  *)
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_path_PERL="$as_dir/$ac_word$ac_exec_ext"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+  ;;
+esac
+fi
+PERL=$ac_cv_path_PERL
+if test -n "$PERL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PERL" >&5
+$as_echo "$PERL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+
+#
+# Checks for specific compiler characteristics
+#
+
+#
+# Common compiler flags we would like to have.
+#
+#   -Wall                        CC, CXX
+#
+
+PROSPECTIVE_CFLAGS="-Wall"
+
+
+
+
+            for ax_compiler_option in ${PROSPECTIVE_CFLAGS}; do
+
+    ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler understands $ax_compiler_option" >&5
+$as_echo_n "checking whether the C compiler understands $ax_compiler_option... " >&6; }
+    SAVE_CFLAGS=${CFLAGS}
+    CFLAGS=$ax_compiler_option
+    cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+;
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }; CFLAGS="${SAVE_CFLAGS} $ax_compiler_option"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }; CFLAGS=${SAVE_CFLAGS}
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext;
+    unset SAVE_CFLAGS
+    ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+            done
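+# Each prospective flag is probed by compiling an empty program with CFLAGS
+# set to that single flag; on success the flag is appended to the saved
+# CFLAGS, otherwise the original CFLAGS are restored unchanged.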
+
+
+
+# Check for and initialize libtool
+
+case `pwd` in
+  *\ * | *\	*)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5
+$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;;
+esac
+
+
+
+macro_version='2.4.2'
+macro_revision='1.3337'
+
+
+
+
+
+
+
+
+
+
+
+
+
+ltmain="$ac_aux_dir/ltmain.sh"
+
+# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+
+ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+$as_echo_n "checking how to print strings... " >&6; }
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO ""
+}
+
+case "$ECHO" in
+  printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5
+$as_echo "printf" >&6; } ;;
+  print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5
+$as_echo "print -r" >&6; } ;;
+  *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5
+$as_echo "cat" >&6; } ;;
+esac
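+# $ECHO must print an arbitrary string verbatim: the long run of backslashes
+# above is the probe string, and 'print -r --', 'printf %s\n', and a
+# here-document fallback are tried in that order until one reproduces it
+# without mangling backslashes.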
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5
+$as_echo_n "checking for a sed that does not truncate output... " >&6; }
+if ${ac_cv_path_SED+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+            ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
+     for ac_i in 1 2 3 4 5 6 7; do
+       ac_script="$ac_script$as_nl$ac_script"
+     done
+     echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed
+     { ac_script=; unset ac_script;}
+     if test -z "$SED"; then
+  ac_path_SED_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_SED="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue
+# Check for GNU ac_path_SED and select it if it is found.
+  # Check for GNU $ac_path_SED
+case `"$ac_path_SED" --version 2>&1` in
+*GNU*)
+  ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo '' >> "conftest.nl"
+    "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_SED_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_SED="$ac_path_SED"
+      ac_path_SED_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_SED_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_SED"; then
+    as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5
+  fi
+else
+  ac_cv_path_SED=$SED
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5
+$as_echo "$ac_cv_path_SED" >&6; }
+ SED="$ac_cv_path_SED"
+  rm -f conftest.sed
+
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
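+# The sed probe feeds each candidate a script repeated to several thousand
+# characters and input lines that double in length each pass; a GNU sed is
+# taken immediately, otherwise the candidate that survives the most doublings
+# without truncating its output wins.  Xsed strips a leading "X" that other
+# parts of the script prepend to keep echo from interpreting its argument.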
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
+$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
+if ${ac_cv_path_GREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$GREP"; then
+  ac_path_GREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in grep ggrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue
+# Check for GNU ac_path_GREP and select it if it is found.
+  # Check for GNU $ac_path_GREP
+case `"$ac_path_GREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'GREP' >> "conftest.nl"
+    "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_GREP_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_GREP="$ac_path_GREP"
+      ac_path_GREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_GREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_GREP"; then
+    as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_GREP=$GREP
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5
+$as_echo "$ac_cv_path_GREP" >&6; }
+ GREP="$ac_cv_path_GREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
+$as_echo_n "checking for egrep... " >&6; }
+if ${ac_cv_path_EGREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
+   then ac_cv_path_EGREP="$GREP -E"
+   else
+     if test -z "$EGREP"; then
+  ac_path_EGREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in egrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue
+# Check for GNU ac_path_EGREP and select it if it is found.
+  # Check for GNU $ac_path_EGREP
+case `"$ac_path_EGREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'EGREP' >> "conftest.nl"
+    "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_EGREP_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_EGREP="$ac_path_EGREP"
+      ac_path_EGREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_EGREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_EGREP"; then
+    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_EGREP=$EGREP
+fi
+
+   fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
+$as_echo "$ac_cv_path_EGREP" >&6; }
+ EGREP="$ac_cv_path_EGREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5
+$as_echo_n "checking for fgrep... " >&6; }
+if ${ac_cv_path_FGREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
+   then ac_cv_path_FGREP="$GREP -F"
+   else
+     if test -z "$FGREP"; then
+  ac_path_FGREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in fgrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue
+# Check for GNU ac_path_FGREP and select it if it is found.
+  # Check for GNU $ac_path_FGREP
+case `"$ac_path_FGREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'FGREP' >> "conftest.nl"
+    "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_FGREP_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_FGREP="$ac_path_FGREP"
+      ac_path_FGREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_FGREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_FGREP"; then
+    as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_FGREP=$FGREP
+fi
+
+   fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5
+$as_echo "$ac_cv_path_FGREP" >&6; }
+ FGREP="$ac_cv_path_FGREP"
+
+
+test -z "$GREP" && GREP=grep
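+# The grep, egrep, and fgrep checks mirror the sed check: prefer GNU tools,
+# otherwise pick whichever candidate handles the longest lines, and use
+# "$GREP -E" / "$GREP -F" directly when the selected grep supports those
+# options.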
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# Check whether --with-gnu-ld was given.
+if test "${with_gnu_ld+set}" = set; then :
+  withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes
+else
+  with_gnu_ld=no
+fi
+
+ac_prog=ld
+if test "$GCC" = yes; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5
+$as_echo_n "checking for ld used by $CC... " >&6; }
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [\\/]* | ?:[\\/]*)
+      re_direlt='/[^/][^/]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
+      test -z "$LD" && LD="$ac_prog"
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test "$with_gnu_ld" = yes; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5
+$as_echo_n "checking for GNU ld... " >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5
+$as_echo_n "checking for non-GNU ld... " >&6; }
+fi
+if ${lt_cv_path_LD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$LD"; then
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD="$ac_dir/$ac_prog"
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+	test "$with_gnu_ld" != no && break
+	;;
+      *)
+	test "$with_gnu_ld" != yes && break
+	;;
+      esac
+    fi
+  done
+  IFS="$lt_save_ifs"
+else
+  lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi
+fi
+
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5
+$as_echo "$LD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5
+$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; }
+if ${lt_cv_prog_gnu_ld+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  # I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5
+$as_echo "$lt_cv_prog_gnu_ld" >&6; }
+with_gnu_ld=$lt_cv_prog_gnu_ld
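+# LD is located by asking gcc for its linker with -print-prog-name=ld when
+# building with GCC, or by searching $PATH otherwise; the linker is then
+# classified as GNU or non-GNU from its -v banner, which later steers the
+# shared-library link strategy.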
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5
+$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; }
+if ${lt_cv_path_NM+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM="$NM"
+else
+  lt_nm_to_check="${ac_tool_prefix}nm"
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS="$lt_save_ifs"
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm="$ac_dir/$lt_tmp_nm"
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+	# Check to see if the nm accepts a BSD-compat flag.
+	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
+	#   nm: unknown option "B" ignored
+	# Tru64's nm complains that /dev/null is an invalid object file
+	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+	*/dev/null* | *'Invalid file or object type'*)
+	  lt_cv_path_NM="$tmp_nm -B"
+	  break
+	  ;;
+	*)
+	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+	  */dev/null*)
+	    lt_cv_path_NM="$tmp_nm -p"
+	    break
+	    ;;
+	  *)
+	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+	    continue # so that we can try to find one that supports BSD flags
+	    ;;
+	  esac
+	  ;;
+	esac
+      fi
+    done
+    IFS="$lt_save_ifs"
+  done
+  : ${lt_cv_path_NM=no}
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5
+$as_echo "$lt_cv_path_NM" >&6; }
+if test "$lt_cv_path_NM" != "no"; then
+  NM="$lt_cv_path_NM"
+else
+  # Didn't find any BSD compatible name lister, look for dumpbin.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    if test -n "$ac_tool_prefix"; then
+  for ac_prog in dumpbin "link -dump"
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DUMPBIN+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DUMPBIN"; then
+  ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DUMPBIN=$ac_cv_prog_DUMPBIN
+if test -n "$DUMPBIN"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5
+$as_echo "$DUMPBIN" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$DUMPBIN" && break
+  done
+fi
+if test -z "$DUMPBIN"; then
+  ac_ct_DUMPBIN=$DUMPBIN
+  for ac_prog in dumpbin "link -dump"
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DUMPBIN"; then
+  ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DUMPBIN="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN
+if test -n "$ac_ct_DUMPBIN"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5
+$as_echo "$ac_ct_DUMPBIN" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_DUMPBIN" && break
+done
+
+  if test "x$ac_ct_DUMPBIN" = x; then
+    DUMPBIN=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DUMPBIN=$ac_ct_DUMPBIN
+  fi
+fi
+
+    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+
+  if test "$DUMPBIN" != ":"; then
+    NM="$DUMPBIN"
+  fi
+fi
+test -z "$NM" && NM=nm
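+# NM is resolved by trying "${ac_tool_prefix}nm" and plain nm in $PATH plus a
+# few vendor directories, preferring a tool that accepts the BSD-compatible
+# -B (or -p) flag; if no BSD-style nm is found, an MS dumpbin / "link -dump"
+# tool is tried instead and, when usable, substituted for NM.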
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5
+$as_echo_n "checking the name lister ($NM) interface... " >&6; }
+if ${lt_cv_nm_interface+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&5
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&5
+  (eval echo "\"\$as_me:$LINENO: output\"" >&5)
+  cat conftest.out >&5
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -f conftest*
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5
+$as_echo "$lt_cv_nm_interface" >&6; }
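+# The interface test compiles a one-variable object and inspects $NM's output:
+# dumpbin-style listings contain "External" records, anything else is treated
+# as BSD nm output.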
+
+# find the maximum length of command line arguments
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5
+$as_echo_n "checking the maximum length of command line arguments... " >&6; }
+if ${lt_cv_sys_max_cmd_len+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+    i=0
+  teststring="ABCD"
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum linelength reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+    # This has been around since 386BSD, at least.  Likely further.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  os2*)
+    # The test takes a long time on OS/2.
+    lt_cv_sys_max_cmd_len=8192
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[	 ]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # a 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8 ; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \
+	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+	      test $i != 17 # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+
+fi
+
+if test -n "$lt_cv_sys_max_cmd_len" ; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5
+$as_echo "$lt_cv_sys_max_cmd_len" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5
+$as_echo "none" >&6; }
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
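+# lt_cv_sys_max_cmd_len is the longest command line libtool will pass to the
+# shell in one piece; -1 means "no limit".  Platforms with known limits are
+# hardcoded above, otherwise getconf ARG_MAX is used, or a test string is
+# doubled until the shell refuses it, with a safety factor applied either way.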
+
+
+
+
+
+
+: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5
+$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; }
+# Try some XSI features
+xsi_shell=no
+( _lt_dummy="a/b/c"
+  test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
+      = c,a/b,b/c, \
+    && eval 'test $(( 1 + 1 )) -eq 2 \
+    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+  && xsi_shell=yes
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5
+$as_echo "$xsi_shell" >&6; }
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5
+$as_echo_n "checking whether the shell understands \"+=\"... " >&6; }
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \
+    >/dev/null 2>&1 \
+  && lt_shell_append=yes
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5
+$as_echo "$lt_shell_append" >&6; }
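+# xsi_shell records whether the shell supports XSI parameter expansions such
+# as ${var##*/}, ${var%/*}, ${#var} and $(( )); lt_shell_append records
+# whether "var+=value" works.  Both allow the libtool script to use built-in
+# string handling instead of spawning sed or expr.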
+
+
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+
+
+
+
+
+# test EBCDIC or ASCII
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
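+# `echo X | tr X '\101'` prints "A" only on an ASCII system, so this picks
+# the right character codes for lt_SP2NL / lt_NL2SP, the tr commands used to
+# convert between space-separated and newline-separated lists.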
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
+if ${lt_cv_to_host_file_cmd+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+        ;;
+    esac
+    ;;
+  *-*-cygwin* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_noop
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+        ;;
+    esac
+    ;;
+  * ) # unhandled hosts (and "normal" native builds)
+    lt_cv_to_host_file_cmd=func_convert_file_noop
+    ;;
+esac
+
+fi
+
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
+if ${lt_cv_to_tool_file_cmd+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  # Assume ordinary cross tools, or native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+        ;;
+    esac
+    ;;
+esac
+
+fi
+
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
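+# to_host_file_cmd and to_tool_file_cmd name converter shell functions used by
+# the generated libtool script to turn $build-style file names (e.g. MSYS or
+# Cygwin paths) into the form the $host system or the toolchain expects; on
+# ordinary native builds both are the no-op converter.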
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+$as_echo_n "checking for $LD option to reload object files... " >&6; }
+if ${lt_cv_ld_reload_flag+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_reload_flag='-r'
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5
+$as_echo "$lt_cv_ld_reload_flag" >&6; }
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    if test "$GCC" != yes; then
+      reload_cmds=false
+    fi
+    ;;
+  darwin*)
+    if test "$GCC" = yes; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
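+# reload_cmds combines several objects into one relocatable object with
+# "$LD -r"; Darwin routes this through the compiler driver with -nostdlib -r,
+# and non-GCC builds on Windows-like hosts disable it.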
+
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args.
+set dummy ${ac_tool_prefix}objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OBJDUMP"; then
+  ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OBJDUMP=$ac_cv_prog_OBJDUMP
+if test -n "$OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5
+$as_echo "$OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OBJDUMP"; then
+  ac_ct_OBJDUMP=$OBJDUMP
+  # Extract the first word of "objdump", so it can be a program name with args.
+set dummy objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OBJDUMP"; then
+  ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OBJDUMP="objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP
+if test -n "$ac_ct_OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5
+$as_echo "$ac_ct_OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OBJDUMP" = x; then
+    OBJDUMP="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OBJDUMP=$ac_ct_OBJDUMP
+  fi
+else
+  OBJDUMP="$ac_cv_prog_OBJDUMP"
+fi
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5
+$as_echo_n "checking how to recognize dependent libraries... " >&6; }
+if ${lt_cv_deplibs_check_method+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# `unknown' -- same as none, but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making test program.
+# 'file_magic [[regex]]' -- check by looking for files in library path
+# which responds to the $file_magic_cmd with a given extended regex.
+# If you have `file' or equivalent on your system and you're not sure
+# whether `pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[4-9]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[45]*)
+  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[3-9]*)
+  # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
+  lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+$as_echo "$lt_cv_deplibs_check_method" >&6; }
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+  case $host_os in
+  mingw* | pw32*)
+    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+      want_nocaseglob=yes
+    else
+      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
+    fi
+    ;;
+  esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
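+# deplibs_check_method and file_magic_cmd tell libtool how to verify that a
+# candidate dependency really is a shared library for $host: pass_all skips
+# the check, file_magic greps the file/objdump output against the regex, and
+# match_pattern tests the file name itself.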
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DLLTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DLLTOOL"; then
+  ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DLLTOOL=$ac_cv_prog_DLLTOOL
+if test -n "$DLLTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
+$as_echo "$DLLTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_DLLTOOL"; then
+  ac_ct_DLLTOOL=$DLLTOOL
+  # Extract the first word of "dlltool", so it can be a program name with args.
+set dummy dlltool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DLLTOOL"; then
+  ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DLLTOOL="dlltool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
+if test -n "$ac_ct_DLLTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
+$as_echo "$ac_ct_DLLTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_DLLTOOL" = x; then
+    DLLTOOL="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DLLTOOL=$ac_ct_DLLTOOL
+  fi
+else
+  DLLTOOL="$ac_cv_prog_DLLTOOL"
+fi
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_sharedlib_from_linklib_cmd='unknown'
+
+case $host_os in
+cygwin* | mingw* | pw32* | cegcc*)
+  # two different shell functions defined in ltmain.sh
+  # decide which to use based on capabilities of $DLLTOOL
+  case `$DLLTOOL --help 2>&1` in
+  *--identify-strict*)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
+    ;;
+  *)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
+    ;;
+  esac
+  ;;
+*)
+  # fallback: assume linklib IS sharedlib
+  lt_cv_sharedlib_from_linklib_cmd="$ECHO"
+  ;;
+esac
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  for ac_prog in ar
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AR"; then
+  ac_cv_prog_AR="$AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AR=$ac_cv_prog_AR
+if test -n "$AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
+$as_echo "$AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$AR" && break
+  done
+fi
+if test -z "$AR"; then
+  ac_ct_AR=$AR
+  for ac_prog in ar
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_AR"; then
+  ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_AR="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_AR=$ac_cv_prog_ac_ct_AR
+if test -n "$ac_ct_AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
+$as_echo "$ac_ct_AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_AR" && break
+done
+
+  if test "x$ac_ct_AR" = x; then
+    AR="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    AR=$ac_ct_AR
+  fi
+fi
+
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
+$as_echo_n "checking for archiver @FILE support... " >&6; }
+if ${lt_cv_ar_at_file+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ar_at_file=no
+   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  echo conftest.$ac_objext > conftest.lst
+      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
+      { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
+  (eval $lt_ar_try) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+      if test "$ac_status" -eq 0; then
+	# Ensure the archiver fails upon bogus file names.
+	rm -f conftest.$ac_objext libconftest.a
+	{ { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
+  (eval $lt_ar_try) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+	if test "$ac_status" -ne 0; then
+          lt_cv_ar_at_file=@
+        fi
+      fi
+      rm -f conftest.* libconftest.a
+
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
+$as_echo "$lt_cv_ar_at_file" >&6; }
+
+if test "x$lt_cv_ar_at_file" = xno; then
+  archiver_list_spec=
+else
+  archiver_list_spec=$lt_cv_ar_at_file
+fi
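+# The @FILE probe builds a tiny object, lists it in conftest.lst, and runs
+# "$AR $AR_FLAGS libconftest.a @conftest.lst" twice: once with the object
+# present (must succeed) and once after deleting it (must fail), so an
+# archiver that merely ignores @file arguments is not misdetected.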
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+set dummy ${ac_tool_prefix}strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$STRIP"; then
+  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+STRIP=$ac_cv_prog_STRIP
+if test -n "$STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
+$as_echo "$STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_STRIP"; then
+  ac_ct_STRIP=$STRIP
+  # Extract the first word of "strip", so it can be a program name with args.
+set dummy strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_STRIP"; then
+  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_STRIP="strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
+if test -n "$ac_ct_STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
+$as_echo "$ac_ct_STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_STRIP" = x; then
+    STRIP=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    STRIP=$ac_ct_STRIP
+  fi
+else
+  STRIP="$ac_cv_prog_STRIP"
+fi
+
+test -z "$STRIP" && STRIP=:
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args.
+set dummy ${ac_tool_prefix}ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$RANLIB"; then
+  ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+RANLIB=$ac_cv_prog_RANLIB
+if test -n "$RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5
+$as_echo "$RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_RANLIB"; then
+  ac_ct_RANLIB=$RANLIB
+  # Extract the first word of "ranlib", so it can be a program name with args.
+set dummy ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_RANLIB"; then
+  ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_RANLIB="ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB
+if test -n "$ac_ct_RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5
+$as_echo "$ac_ct_RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_RANLIB" = x; then
+    RANLIB=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    RANLIB=$ac_ct_RANLIB
+  fi
+else
+  RANLIB="$ac_cv_prog_RANLIB"
+fi
+
+test -z "$RANLIB" && RANLIB=:
+
+
+
+
+
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
+fi
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5
+$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; }
+if ${lt_cv_sys_global_symbol_pipe+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[BCDEGRST]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([_A-Za-z][_A-Za-z0-9]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[BCDT]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[ABCDGISTW]'
+  ;;
+hpux*)
+  if test "$host_cpu" = ia64; then
+    symcode='[ABCDEGRST]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[BCDEGRST]'
+  ;;
+osf*)
+  symcode='[BCDEGQRST]'
+  ;;
+solaris*)
+  symcode='[BDRT]'
+  ;;
+sco3.2v5*)
+  symcode='[DT]'
+  ;;
+sysv4.2uw2*)
+  symcode='[DT]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[ABDT]'
+  ;;
+sysv4)
+  symcode='[DFNSTU]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[ABCDGIRSTW]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function
+    # and D for any global variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK '"\
+"     {last_section=section; section=\$ 3};"\
+"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[	 ]\($symcode$symcode*\)[	 ][	 ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5
+  (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+	mv -f "$nlist"T "$nlist"
+      else
+	rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+	  cat <<_LT_EOF > conftest.$ac_ext
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+	  # Now generate the symbol file.
+	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+	  cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+LT_DLSYM_CONST struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+	  cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker. */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+	  # Now try linking the two files.
+	  mv conftest.$ac_objext conftstm.$ac_objext
+	  lt_globsym_save_LIBS=$LIBS
+	  lt_globsym_save_CFLAGS=$CFLAGS
+	  LIBS="conftstm.$ac_objext"
+	  CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+	    pipe_works=yes
+	  fi
+	  LIBS=$lt_globsym_save_LIBS
+	  CFLAGS=$lt_globsym_save_CFLAGS
+	else
+	  echo "cannot find nm_test_func in $nlist" >&5
+	fi
+      else
+	echo "cannot find nm_test_var in $nlist" >&5
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5
+    fi
+  else
+    echo "$progname: failed program was:" >&5
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test "$pipe_works" = yes; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+
+fi
+
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5
+$as_echo "failed" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5
+$as_echo "ok" >&6; }
+fi
+
+# Response file support.
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+  nm_file_list_spec='@'
+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
+  nm_file_list_spec='@'
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
+$as_echo_n "checking for sysroot... " >&6; }
+
+# Check whether --with-sysroot was given.
+if test "${with_sysroot+set}" = set; then :
+  withval=$with_sysroot;
+else
+  with_sysroot=no
+fi
+
+
+lt_sysroot=
+case ${with_sysroot} in #(
+ yes)
+   if test "$GCC" = yes; then
+     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+   fi
+   ;; #(
+ /*)
+   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+   ;; #(
+ no|'')
+   ;; #(
+ *)
+   { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5
+$as_echo "${with_sysroot}" >&6; }
+   as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
+   ;;
+esac
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
+$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+
+
+# Check whether --enable-libtool-lock was given.
+if test "${enable_libtool_lock+set}" = set; then :
+  enableval=$enable_libtool_lock;
+fi
+
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+	HPUX_IA64_MODE="32"
+	;;
+      *ELF-64*)
+	HPUX_IA64_MODE="64"
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out which ABI we are using.
+  echo '#line '$LINENO' "configure"' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    if test "$lt_cv_prog_gnu_ld" = yes; then
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -melf32bsmip"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -melf32bmipn32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -melf64bmip"
+	;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -32"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -n32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -64"
+	  ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_i386_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_i386"
+	    ;;
+	  ppc64-*linux*|powerpc64-*linux*)
+	    LD="${LD-ld} -m elf32ppclinux"
+	    ;;
+	  s390x-*linux*)
+	    LD="${LD-ld} -m elf_s390"
+	    ;;
+	  sparc64-*linux*)
+	    LD="${LD-ld} -m elf32_sparc"
+	    ;;
+	esac
+	;;
+      *64-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_x86_64_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_x86_64"
+	    ;;
+	  ppc*-*linux*|powerpc*-*linux*)
+	    LD="${LD-ld} -m elf64ppc"
+	    ;;
+	  s390*-*linux*|s390*-*tpf*)
+	    LD="${LD-ld} -m elf64_s390"
+	    ;;
+	  sparc*-*linux*)
+	    LD="${LD-ld} -m elf64_sparc"
+	    ;;
+	esac
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS="$CFLAGS"
+  CFLAGS="$CFLAGS -belf"
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5
+$as_echo_n "checking whether the C compiler needs -belf... " >&6; }
+if ${lt_cv_cc_needs_belf+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+     if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_cc_needs_belf=yes
+else
+  lt_cv_cc_needs_belf=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+     ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
+$as_echo "$lt_cv_cc_needs_belf" >&6; }
+  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+    CFLAGS="$SAVE_CFLAGS"
+  fi
+  ;;
+*-*solaris*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*)
+        case $host in
+        i?86-*-solaris*)
+          LD="${LD-ld} -m elf_x86_64"
+          ;;
+        sparc*-*-solaris*)
+          LD="${LD-ld} -m elf64_sparc"
+          ;;
+        esac
+        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
+        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
+          LD="${LD-ld}_sol2"
+        fi
+        ;;
+      *)
+	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+	  LD="${LD-ld} -64"
+	fi
+	;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
+
+need_locks="$enable_libtool_lock"
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
+set dummy ${ac_tool_prefix}mt; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$MANIFEST_TOOL"; then
+  ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
+if test -n "$MANIFEST_TOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
+$as_echo "$MANIFEST_TOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
+  ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
+  # Extract the first word of "mt", so it can be a program name with args.
+set dummy mt; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_MANIFEST_TOOL"; then
+  ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
+if test -n "$ac_ct_MANIFEST_TOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_MANIFEST_TOOL" = x; then
+    MANIFEST_TOOL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
+  fi
+else
+  MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
+fi
+
+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
+if ${lt_cv_path_mainfest_tool+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_path_mainfest_tool=no
+  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
+  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
+  cat conftest.err >&5
+  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
+    lt_cv_path_mainfest_tool=yes
+  fi
+  rm -f conftest*
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
+if test "x$lt_cv_path_mainfest_tool" != xyes; then
+  MANIFEST_TOOL=:
+fi
+
+
+
+
+
+
+  case $host_os in
+    rhapsody* | darwin*)
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args.
+set dummy ${ac_tool_prefix}dsymutil; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DSYMUTIL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DSYMUTIL"; then
+  ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DSYMUTIL=$ac_cv_prog_DSYMUTIL
+if test -n "$DSYMUTIL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5
+$as_echo "$DSYMUTIL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_DSYMUTIL"; then
+  ac_ct_DSYMUTIL=$DSYMUTIL
+  # Extract the first word of "dsymutil", so it can be a program name with args.
+set dummy dsymutil; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DSYMUTIL"; then
+  ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DSYMUTIL="dsymutil"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL
+if test -n "$ac_ct_DSYMUTIL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5
+$as_echo "$ac_ct_DSYMUTIL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_DSYMUTIL" = x; then
+    DSYMUTIL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DSYMUTIL=$ac_ct_DSYMUTIL
+  fi
+else
+  DSYMUTIL="$ac_cv_prog_DSYMUTIL"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args.
+set dummy ${ac_tool_prefix}nmedit; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_NMEDIT+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$NMEDIT"; then
+  ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+NMEDIT=$ac_cv_prog_NMEDIT
+if test -n "$NMEDIT"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5
+$as_echo "$NMEDIT" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_NMEDIT"; then
+  ac_ct_NMEDIT=$NMEDIT
+  # Extract the first word of "nmedit", so it can be a program name with args.
+set dummy nmedit; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_NMEDIT"; then
+  ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_NMEDIT="nmedit"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT
+if test -n "$ac_ct_NMEDIT"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5
+$as_echo "$ac_ct_NMEDIT" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_NMEDIT" = x; then
+    NMEDIT=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    NMEDIT=$ac_ct_NMEDIT
+  fi
+else
+  NMEDIT="$ac_cv_prog_NMEDIT"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args.
+set dummy ${ac_tool_prefix}lipo; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_LIPO+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$LIPO"; then
+  ac_cv_prog_LIPO="$LIPO" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_LIPO="${ac_tool_prefix}lipo"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+LIPO=$ac_cv_prog_LIPO
+if test -n "$LIPO"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5
+$as_echo "$LIPO" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_LIPO"; then
+  ac_ct_LIPO=$LIPO
+  # Extract the first word of "lipo", so it can be a program name with args.
+set dummy lipo; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_LIPO+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_LIPO"; then
+  ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_LIPO="lipo"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO
+if test -n "$ac_ct_LIPO"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5
+$as_echo "$ac_ct_LIPO" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_LIPO" = x; then
+    LIPO=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    LIPO=$ac_ct_LIPO
+  fi
+else
+  LIPO="$ac_cv_prog_LIPO"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args.
+set dummy ${ac_tool_prefix}otool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OTOOL"; then
+  ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OTOOL="${ac_tool_prefix}otool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OTOOL=$ac_cv_prog_OTOOL
+if test -n "$OTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5
+$as_echo "$OTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OTOOL"; then
+  ac_ct_OTOOL=$OTOOL
+  # Extract the first word of "otool", so it can be a program name with args.
+set dummy otool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OTOOL"; then
+  ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OTOOL="otool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL
+if test -n "$ac_ct_OTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5
+$as_echo "$ac_ct_OTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OTOOL" = x; then
+    OTOOL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OTOOL=$ac_ct_OTOOL
+  fi
+else
+  OTOOL="$ac_cv_prog_OTOOL"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args.
+set dummy ${ac_tool_prefix}otool64; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OTOOL64+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OTOOL64"; then
+  ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OTOOL64=$ac_cv_prog_OTOOL64
+if test -n "$OTOOL64"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5
+$as_echo "$OTOOL64" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OTOOL64"; then
+  ac_ct_OTOOL64=$OTOOL64
+  # Extract the first word of "otool64", so it can be a program name with args.
+set dummy otool64; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OTOOL64"; then
+  ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OTOOL64="otool64"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64
+if test -n "$ac_ct_OTOOL64"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5
+$as_echo "$ac_ct_OTOOL64" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OTOOL64" = x; then
+    OTOOL64=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OTOOL64=$ac_ct_OTOOL64
+  fi
+else
+  OTOOL64="$ac_cv_prog_OTOOL64"
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5
+$as_echo_n "checking for -single_module linker flag... " >&6; }
+if ${lt_cv_apple_cc_single_mod+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_apple_cc_single_mod=no
+      if test -z "${LT_MULTI_MODULE}"; then
+	# By default we will add the -single_module flag. You can override
+	# by either setting the environment variable LT_MULTI_MODULE
+	# non-empty at configure time, or by adding -multi_module to the
+	# link flags.
+	rm -rf libconftest.dylib*
+	echo "int foo(void){return 1;}" > conftest.c
+	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&5
+	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+	# If there is a non-empty error log, and "single_module"
+	# appears in it, assume the flag caused a linker warning
+        if test -s conftest.err && $GREP single_module conftest.err; then
+	  cat conftest.err >&5
+	# Otherwise, if the output was created with a 0 exit code from
+	# the compiler, it worked.
+	elif test -f libconftest.dylib && test $_lt_result -eq 0; then
+	  lt_cv_apple_cc_single_mod=yes
+	else
+	  cat conftest.err >&5
+	fi
+	rm -rf libconftest.dylib*
+	rm -f conftest.*
+      fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5
+$as_echo "$lt_cv_apple_cc_single_mod" >&6; }
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5
+$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; }
+if ${lt_cv_ld_exported_symbols_list+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_ld_exported_symbols_list=yes
+else
+  lt_cv_ld_exported_symbols_list=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+	LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5
+$as_echo "$lt_cv_ld_exported_symbols_list" >&6; }
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5
+$as_echo_n "checking for -force_load linker flag... " >&6; }
+if ${lt_cv_ld_force_load+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+      echo "$AR cru libconftest.a conftest.o" >&5
+      $AR cru libconftest.a conftest.o 2>&5
+      echo "$RANLIB libconftest.a" >&5
+      $RANLIB libconftest.a 2>&5
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -s conftest.err && $GREP force_load conftest.err; then
+	cat conftest.err >&5
+      elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then
+	lt_cv_ld_force_load=yes
+      else
+	cat conftest.err >&5
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5
+$as_echo "$lt_cv_ld_force_load" >&6; }
+    case $host_os in
+    rhapsody* | darwin1.[012])
+      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x on
+      # If running on 10.5 or later, the deployment target defaults
+      # to the OS version; if on x86 and 10.4, the deployment
+      # target defaults to 10.4. Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+	10.0,*86*-darwin8*|10.0,*-darwin[91]*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+	10.[012]*)
+	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+	10.*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+    fi
+    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
+$as_echo_n "checking for ANSI C header files... " >&6; }
+if ${ac_cv_header_stdc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_header_stdc=yes
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+if test $ac_cv_header_stdc = yes; then
+  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <string.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "memchr" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "free" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+  if test "$cross_compiling" = yes; then :
+  :
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+		   (('a' <= (c) && (c) <= 'i') \
+		     || ('j' <= (c) && (c) <= 'r') \
+		     || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
+#endif
+
+#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
+int
+main ()
+{
+  int i;
+  for (i = 0; i < 256; i++)
+    if (XOR (islower (i), ISLOWER (i))
+	|| toupper (i) != TOUPPER (i))
+      return 2;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_run "$LINENO"; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+  conftest.$ac_objext conftest.beam conftest.$ac_ext
+fi
+
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
+$as_echo "$ac_cv_header_stdc" >&6; }
+if test $ac_cv_header_stdc = yes; then
+
+$as_echo "#define STDC_HEADERS 1" >>confdefs.h
+
+fi
+
+# On IRIX 5.3, sys/types and inttypes.h are conflicting.
+for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
+		  inttypes.h stdint.h unistd.h
+do :
+  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
+ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
+"
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
+_ACEOF
+
+fi
+
+done
+
+
+for ac_header in dlfcn.h
+do :
+  ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default
+"
+if test "x$ac_cv_header_dlfcn_h" = xyes; then :
+  cat >>confdefs.h <<_ACEOF
+#define HAVE_DLFCN_H 1
+_ACEOF
+
+fi
+
+done
+
+
+
+
+
+# Set options
+
+
+
+        enable_dlopen=no
+
+
+  enable_win32_dll=no
+
+
+            # Check whether --enable-shared was given.
+if test "${enable_shared+set}" = set; then :
+  enableval=$enable_shared; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_shared=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_shared=yes
+fi
+
+
+
+
+
+
+
+
+
+  # Check whether --enable-static was given.
+if test "${enable_static+set}" = set; then :
+  enableval=$enable_static; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+     enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_static=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_static=yes
+fi
+
+
+
+
+
+
+
+
+
+
+# Check whether --with-pic was given.
+if test "${with_pic+set}" = set; then :
+  withval=$with_pic; lt_p=${PACKAGE-default}
+    case $withval in
+    yes|no) pic_mode=$withval ;;
+    *)
+      pic_mode=default
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for lt_pkg in $withval; do
+	IFS="$lt_save_ifs"
+	if test "X$lt_pkg" = "X$lt_p"; then
+	  pic_mode=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  pic_mode=default
+fi
+
+
+test -z "$pic_mode" && pic_mode=default
+
+
+
+
+
+
+
+  # Check whether --enable-fast-install was given.
+if test "${enable_fast_install+set}" = set; then :
+  enableval=$enable_fast_install; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_fast_install=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_fast_install=yes
+fi
+
+
+
+
+
+
+
+
+
+
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+test -z "$LN_S" && LN_S="ln -s"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+if test -n "${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5
+$as_echo_n "checking for objdir... " >&6; }
+if ${lt_cv_objdir+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5
+$as_echo "$lt_cv_objdir" >&6; }
+objdir=$lt_cv_objdir
+
+
+
+
+
+cat >>confdefs.h <<_ACEOF
+#define LT_OBJDIR "$lt_cv_objdir/"
+_ACEOF
+
+
+
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test "X${COLLECT_NAMES+set}" != Xset; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+for cc_temp in $compiler""; do
+  case $cc_temp in
+    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
+    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
+    \-*) ;;
+    *) break;;
+  esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+
+
+# Only perform the check for 'file' if the check method requires it.
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5
+$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; }
+if ${lt_cv_path_MAGIC_CMD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $MAGIC_CMD in
+[\\/*] |  ?:[\\/]*)
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/${ac_tool_prefix}file; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac
+fi
+
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
+$as_echo "$MAGIC_CMD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+
+
+
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5
+$as_echo_n "checking for file... " >&6; }
+if ${lt_cv_path_MAGIC_CMD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $MAGIC_CMD in
+[\\/*] |  ?:[\\/]*)
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/file; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/file"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac
+fi
+
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
+$as_echo "$MAGIC_CMD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  else
+    MAGIC_CMD=:
+  fi
+fi
+
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+
+lt_save_CC="$CC"
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+objext=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+
+
+
+
+
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+
+
+if test -n "$compiler"; then
+
+lt_prog_compiler_no_builtin_flag=
+
+if test "$GCC" = yes; then
+  case $cc_basename in
+  nvcc*)
+    lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;;
+  *)
+    lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;;
+  esac
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5
+$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; }
+if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_rtti_exceptions=no
+   ac_outfile=conftest.$ac_objext
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="-fno-rtti -fno-exceptions"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_rtti_exceptions=yes
+     fi
+   fi
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5
+$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; }
+
+if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then
+    lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions"
+else
+    :
+fi
+
+fi
+
+
+
+
+
+
+  lt_prog_compiler_wl=
+lt_prog_compiler_pic=
+lt_prog_compiler_static=
+
+
+  if test "$GCC" = yes; then
+    lt_prog_compiler_wl='-Wl,'
+    lt_prog_compiler_static='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	lt_prog_compiler_static='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            lt_prog_compiler_pic='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, we still need it for old-style
+      # (--disable-auto-import) libraries.
+      lt_prog_compiler_pic='-DDLL_EXPORT'
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      lt_prog_compiler_pic='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      lt_prog_compiler_static=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	# +Z the default
+	;;
+      *)
+	lt_prog_compiler_pic='-fPIC'
+	;;
+      esac
+      ;;
+
+    interix[3-9]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      lt_prog_compiler_can_build_shared=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will core dump.
+      lt_prog_compiler_pic='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	lt_prog_compiler_pic=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      lt_prog_compiler_pic='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      lt_prog_compiler_wl='-Xlinker '
+      if test -n "$lt_prog_compiler_pic"; then
+        lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic"
+      fi
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      lt_prog_compiler_wl='-Wl,'
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	lt_prog_compiler_static='-Bstatic'
+      else
+	lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      lt_prog_compiler_pic='-DDLL_EXPORT'
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      lt_prog_compiler_wl='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+	# +Z the default
+	;;
+      *)
+	lt_prog_compiler_pic='+Z'
+	;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      lt_prog_compiler_static='${wl}-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      lt_prog_compiler_wl='-Wl,'
+      # PIC (with -KPIC) is the default.
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu)
+      case $cc_basename in
+      # old Intel for x86_64 which still supported -KPIC.
+      ecc*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-KPIC'
+	lt_prog_compiler_static='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-fPIC'
+	lt_prog_compiler_static='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='--shared'
+	lt_prog_compiler_static='--static'
+	;;
+      nagfor*)
+	# NAG Fortran compiler
+	lt_prog_compiler_wl='-Wl,-Wl,,'
+	lt_prog_compiler_pic='-PIC'
+	lt_prog_compiler_static='-Bstatic'
+	;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+	# which looks to be a dead project)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-fpic'
+	lt_prog_compiler_static='-Bstatic'
+        ;;
+      ccc*)
+        lt_prog_compiler_wl='-Wl,'
+        # All Alpha code is PIC.
+        lt_prog_compiler_static='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-qpic'
+	lt_prog_compiler_static='-qstaticlink'
+	;;
+      *)
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*)
+	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl=''
+	  ;;
+	*Sun\ F* | *Sun*Fortran*)
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl='-Qoption ld '
+	  ;;
+	*Sun\ C*)
+	  # Sun C 5.9
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl='-Wl,'
+	  ;;
+        *Intel*\ [CF]*Compiler*)
+	  lt_prog_compiler_wl='-Wl,'
+	  lt_prog_compiler_pic='-fPIC'
+	  lt_prog_compiler_static='-static'
+	  ;;
+	*Portland\ Group*)
+	  lt_prog_compiler_wl='-Wl,'
+	  lt_prog_compiler_pic='-fpic'
+	  lt_prog_compiler_static='-Bstatic'
+	  ;;
+	esac
+	;;
+      esac
+      ;;
+
+    newsos6)
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to pass the -shared option too;
+      # otherwise it will coredump.
+      lt_prog_compiler_pic='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      lt_prog_compiler_wl='-Wl,'
+      # All OSF/1 code is PIC.
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    rdos*)
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    solaris*)
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+	lt_prog_compiler_wl='-Qoption ld ';;
+      *)
+	lt_prog_compiler_wl='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      lt_prog_compiler_wl='-Qoption ld '
+      lt_prog_compiler_pic='-PIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec ;then
+	lt_prog_compiler_pic='-Kconform_pic'
+	lt_prog_compiler_static='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    unicos*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_can_build_shared=no
+      ;;
+
+    uts4*)
+      lt_prog_compiler_pic='-pic'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    *)
+      lt_prog_compiler_can_build_shared=no
+      ;;
+    esac
+  fi
+
+case $host_os in
+  # For platforms which do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    lt_prog_compiler_pic=
+    ;;
+  *)
+    lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+    ;;
+esac
+
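+# Report the PIC flag chosen above and cache it so later configure runs can
+# reuse the result without repeating the per-platform case statement.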
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+if ${lt_cv_prog_compiler_pic+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$lt_prog_compiler_pic"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5
+$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; }
+if ${lt_cv_prog_compiler_pic_works+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_pic_works=no
+   ac_outfile=conftest.$ac_objext
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$lt_prog_compiler_pic -DPIC"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized.
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_pic_works=yes
+     fi
+   fi
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5
+$as_echo "$lt_cv_prog_compiler_pic_works" >&6; }
+
+if test x"$lt_cv_prog_compiler_pic_works" = xyes; then
+    case $lt_prog_compiler_pic in
+     "" | " "*) ;;
+     *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;;
+     esac
+else
+    lt_prog_compiler_pic=
+     lt_prog_compiler_can_build_shared=no
+fi
+
+fi
+
+
+
+
+
+
+
+
+
+
+
+#
+# Check to make sure the static flag actually works.
+#
+wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
+$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
+if ${lt_cv_prog_compiler_static_works+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_static_works=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized.
+     # So say no if there are warnings.
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&5
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         lt_cv_prog_compiler_static_works=yes
+       fi
+     else
+       lt_cv_prog_compiler_static_works=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5
+$as_echo "$lt_cv_prog_compiler_static_works" >&6; }
+
+if test x"$lt_cv_prog_compiler_static_works" = xyes; then
+    :
+else
+    lt_prog_compiler_static=
+fi
+
+
+
+
+
+
+
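+  # Check whether the compiler accepts -c and -o together so that objects can
+  # be written to an arbitrary path; the result feeds the hard-link locking
+  # check further below.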
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
+$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
+if ${lt_cv_prog_compiler_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_c_o=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized.
+     # So say no if there are warnings.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_c_o=yes
+     fi
+   fi
+   chmod u+w . 2>&5
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
+$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
+$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
+if ${lt_cv_prog_compiler_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_c_o=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized.
+     # So say no if there are warnings.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_c_o=yes
+     fi
+   fi
+   chmod u+w . 2>&5
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
+$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
+
+
+
+
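+# If the compiler cannot use -c and -o together (and the user has not disabled
+# locking), check whether hard links can be used to lock object files;
+# otherwise warn that parallel `make -j' builds may be unsafe.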
+hard_links="nottested"
+if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then
+  # do not overwrite the value of need_locks provided by the user
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
+$as_echo_n "checking if we can lock with hard links... " >&6; }
+  hard_links=yes
+  $RM conftest*
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
+$as_echo "$hard_links" >&6; }
+  if test "$hard_links" = no; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
+$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+
+
+
+
+
+
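+  # Determine how the linker builds shared libraries on this host: reset the
+  # per-platform link characteristics, then fill them in from the GNU ld and
+  # native linker case statements below.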
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
+$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
+
+  runpath_var=
+  allow_undefined_flag=
+  always_export_symbols=no
+  archive_cmds=
+  archive_expsym_cmds=
+  compiler_needs_object=no
+  enable_shared_with_static_runtimes=no
+  export_dynamic_flag_spec=
+  export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  hardcode_automatic=no
+  hardcode_direct=no
+  hardcode_direct_absolute=no
+  hardcode_libdir_flag_spec=
+  hardcode_libdir_separator=
+  hardcode_minus_L=no
+  hardcode_shlibpath_var=unsupported
+  inherit_rpath=no
+  link_all_deplibs=unknown
+  module_cmds=
+  module_expsym_cmds=
+  old_archive_from_new_cmds=
+  old_archive_from_expsyms_cmds=
+  thread_safe_flag_spec=
+  whole_archive_flag_spec=
+  # include_expsyms should be a list of space-separated symbols to be *always*
+  # included in the symbol list
+  include_expsyms=
+  # exclude_expsyms can be an extended regexp of symbols to exclude;
+  # it will be wrapped by ` (' and `)$', so one must not match beginning or
+  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
+  # as well as any symbol that contains `d'.
+  exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test "$GCC" != yes; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd*)
+    with_gnu_ld=no
+    ;;
+  esac
+
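+  # Assume the linker can build shared libraries until a case below proves
+  # otherwise.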
+  ld_shlibs=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test "$with_gnu_ld" = yes; then
+    case $host_os in
+      aix*)
+	# The AIX port of GNU ld has always aspired to compatibility
+	# with the native linker.  However, as the warning in the GNU ld
+	# block says, versions before 2.19.5* couldn't really create working
+	# shared libraries, regardless of the interface used.
+	case `$LD -v 2>&1` in
+	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+	  *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;;
+	  *\ \(GNU\ Binutils\)\ [3-9]*) ;;
+	  *)
+	    lt_use_gnu_ld_interface=yes
+	    ;;
+	esac
+	;;
+      *)
+	lt_use_gnu_ld_interface=yes
+	;;
+    esac
+  fi
+
+  if test "$lt_use_gnu_ld_interface" = yes; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='${wl}'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+    export_dynamic_flag_spec='${wl}--export-dynamic'
+    # Ancient GNU ld didn't support --whole-archive et al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+    else
+      whole_archive_flag_spec=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[3-9]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test "$host_cpu" != ia64; then
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            archive_expsym_cmds=''
+        ;;
+      m68k)
+            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            hardcode_libdir_flag_spec='-L$libdir'
+            hardcode_minus_L=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	allow_undefined_flag=unsupported
+	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	# support --undefined.  This deserves some investigation.  FIXME
+	archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # hardcode_libdir_flag_spec is actually meaningless,
+      # as there is no search path for DLLs.
+      hardcode_libdir_flag_spec='-L$libdir'
+      export_dynamic_flag_spec='${wl}--export-all-symbols'
+      allow_undefined_flag=unsupported
+      always_export_symbols=no
+      enable_shared_with_static_runtimes=yes
+      export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
+      exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	# If the export-symbols file already is a .def file (1st line
+	# is EXPORTS), use it as is; otherwise, prepend...
+	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	  cp $export_symbols $output_objdir/$soname.def;
+	else
+	  echo EXPORTS > $output_objdir/$soname.def;
+	  cat $export_symbols >> $output_objdir/$soname.def;
+	fi~
+	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    haiku*)
+      archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      link_all_deplibs=yes
+      ;;
+
+    interix[3-9]*)
+      hardcode_direct=no
+      hardcode_shlibpath_var=no
+      hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+      export_dynamic_flag_spec='${wl}-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very memory-
+      # consuming and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+      archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test "$host_os" = linux-dietlibc; then
+	case $cc_basename in
+	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
+	esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+	 && test "$tmp_diet" = no
+      then
+	tmp_addflag=' $pic_flag'
+	tmp_sharedflag='-shared'
+	case $cc_basename,$host_cpu in
+        pgcc*)				# Portland Group C compiler
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag'
+	  ;;
+	pgf77* | pgf90* | pgf95* | pgfortran*)
+					# Portland Group f77 and f90 compilers
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag -Mnomain' ;;
+	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
+	  tmp_addflag=' -i_dynamic' ;;
+	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
+	  tmp_addflag=' -i_dynamic -nofor_main' ;;
+	ifc* | ifort*)			# Intel Fortran compiler
+	  tmp_addflag=' -nofor_main' ;;
+	lf95*)				# Lahey Fortran 8.1
+	  whole_archive_flag_spec=
+	  tmp_sharedflag='--shared' ;;
+	xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+	  tmp_sharedflag='-qmkshrobj'
+	  tmp_addflag= ;;
+	nvcc*)	# Cuda Compiler Driver 2.2
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  compiler_needs_object=yes
+	  ;;
+	esac
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ C*)			# Sun C 5.9
+	  whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  compiler_needs_object=yes
+	  tmp_sharedflag='-G' ;;
+	*Sun\ F*)			# Sun Fortran 8.3
+	  tmp_sharedflag='-G' ;;
+	esac
+	archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+        if test "x$supports_anon_versioning" = xyes; then
+          archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	    echo "local: *; };" >> $output_objdir/$libname.ver~
+	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+        fi
+
+	case $cc_basename in
+	xlf* | bgf* | bgxlf* | mpixlf*)
+	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+	  whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+	  hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+	  archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+	  if test "x$supports_anon_versioning" = xyes; then
+	    archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	      echo "local: *; };" >> $output_objdir/$libname.ver~
+	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+	  fi
+	  ;;
+	esac
+      else
+        ld_shlibs=no
+      fi
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+	wlarc=
+      else
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+	;;
+	*)
+	  # For security reasons, it is highly recommended that you always
+	  # use absolute paths for naming shared libraries, and exclude the
+	  # DT_RUNPATH tag from executables and libraries.  But doing so
+	  # requires that you compile everything twice, which is a pain.
+	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+	    archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+	  else
+	    ld_shlibs=no
+	  fi
+	;;
+      esac
+      ;;
+
+    sunos4*)
+      archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+    esac
+
+    if test "$ld_shlibs" = no; then
+      runpath_var=
+      hardcode_libdir_flag_spec=
+      export_dynamic_flag_spec=
+      whole_archive_flag_spec=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      allow_undefined_flag=unsupported
+      always_export_symbols=yes
+      archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      hardcode_minus_L=yes
+      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+	# Neither direct hardcoding nor static linking is supported with a
+	# broken collect2.
+	hardcode_direct=unsupported
+      fi
+      ;;
+
+    aix[4-9]*)
+      if test "$host_cpu" = ia64; then
+	# On IA64, the linker does run time linking by default, so we don't
+	# have to do anything special.
+	aix_use_runtimelinking=no
+	exp_sym_flag='-Bexport'
+	no_entry_flag=""
+      else
+	# If we're using GNU nm, then we don't want the "-C" option.
+	# To AIX nm, -C means demangle; to GNU nm, it means don't demangle.
+	# Also, AIX nm treats weak defined symbols like other global
+	# defined symbols, whereas GNU nm marks them as "W".
+	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+	  export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	else
+	  export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	fi
+	aix_use_runtimelinking=no
+
+	# Test if we are trying to use run time linking or normal
+	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
+	# need to do runtime linking.
+	case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
+	  for ld_flag in $LDFLAGS; do
+	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+	    aix_use_runtimelinking=yes
+	    break
+	  fi
+	  done
+	  ;;
+	esac
+
+	exp_sym_flag='-bexport'
+	no_entry_flag='-bnoentry'
+      fi
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow" add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      archive_cmds=''
+      hardcode_direct=yes
+      hardcode_direct_absolute=yes
+      hardcode_libdir_separator=':'
+      link_all_deplibs=yes
+      file_list_spec='${wl}-f,'
+
+      if test "$GCC" = yes; then
+	case $host_os in aix4.[012]|aix4.[012].*)
+	# We only want to do this on AIX 4.2 and lower; the check
+	# below for broken collect2 doesn't work under 4.3+.
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	  # We have reworked collect2
+	  :
+	  else
+	  # We have old collect2
+	  hardcode_direct=unsupported
+	  # It fails to find uninstalled libraries when the uninstalled
+	  # path is not listed in the libpath.  Setting hardcode_minus_L
+	  # to unsupported forces relinking
+	  hardcode_minus_L=yes
+	  hardcode_libdir_flag_spec='-L$libdir'
+	  hardcode_libdir_separator=
+	  fi
+	  ;;
+	esac
+	shared_flag='-shared'
+	if test "$aix_use_runtimelinking" = yes; then
+	  shared_flag="$shared_flag "'${wl}-G'
+	fi
+      else
+	# not using gcc
+	if test "$host_cpu" = ia64; then
+	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	# chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+	else
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag='${wl}-G'
+	  else
+	    shared_flag='${wl}-bM:SRE'
+	  fi
+	fi
+      fi
+
+      export_dynamic_flag_spec='${wl}-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      always_export_symbols=yes
+      if test "$aix_use_runtimelinking" = yes; then
+	# Warning - without using the other runtime loading flags (-brtl),
+	# -berok will link without error, but may produce a broken library.
+	allow_undefined_flag='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        if test "${lt_cv_aix_libpath+set}" = set; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  if ${lt_cv_aix_libpath_+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+
+  lt_aix_libpath_sed='
+      /Import File Strings/,/^$/ {
+	  /^0/ {
+	      s/^0  *\([^ ]*\) *$/\1/
+	      p
+	  }
+      }'
+  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_="/usr/lib:/lib"
+  fi
+
+fi
+
+  aix_libpath=$lt_cv_aix_libpath_
+fi
+
+        hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+        archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+      else
+	if test "$host_cpu" = ia64; then
+	  hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
+	  allow_undefined_flag="-z nodefs"
+	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+	else
+	 # Determine the default libpath from the value encoded in an
+	 # empty executable.
+	 if test "${lt_cv_aix_libpath+set}" = set; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  if ${lt_cv_aix_libpath_+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+
+  lt_aix_libpath_sed='
+      /Import File Strings/,/^$/ {
+	  /^0/ {
+	      s/^0  *\([^ ]*\) *$/\1/
+	      p
+	  }
+      }'
+  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_="/usr/lib:/lib"
+  fi
+
+fi
+
+  aix_libpath=$lt_cv_aix_libpath_
+fi
+
+	 hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+	  # Warning - without using the other run time loading flags,
+	  # -berok will link without error, but may produce a broken library.
+	  no_undefined_flag=' ${wl}-bernotok'
+	  allow_undefined_flag=' ${wl}-berok'
+	  if test "$with_gnu_ld" = yes; then
+	    # We only use this code for GNU lds that support --whole-archive.
+	    whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	  else
+	    # Exported symbols can be pulled into shared objects from archives
+	    whole_archive_flag_spec='$convenience'
+	  fi
+	  archive_cmds_need_lc=yes
+	  # This is similar to how AIX traditionally builds its shared libraries.
+	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+	fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            archive_expsym_cmds=''
+        ;;
+      m68k)
+            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            hardcode_libdir_flag_spec='-L$libdir'
+            hardcode_minus_L=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[45]*)
+      export_dynamic_flag_spec=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      case $cc_basename in
+      cl*)
+	# Native MSVC
+	hardcode_libdir_flag_spec=' '
+	allow_undefined_flag=unsupported
+	always_export_symbols=yes
+	file_list_spec='@'
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	  else
+	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	  fi~
+	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	  linknames='
+	# The linker will not automatically build a static lib if we build a DLL.
+	# _LT_TAGVAR(old_archive_from_new_cmds, )='true'
+	enable_shared_with_static_runtimes=yes
+	exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+	export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
+	# Don't use ranlib
+	old_postinstall_cmds='chmod 644 $oldlib'
+	postlink_cmds='lt_outputfile="@OUTPUT@"~
+	  lt_tool_outputfile="@TOOL_OUTPUT@"~
+	  case $lt_outputfile in
+	    *.exe|*.EXE) ;;
+	    *)
+	      lt_outputfile="$lt_outputfile.exe"
+	      lt_tool_outputfile="$lt_tool_outputfile.exe"
+	      ;;
+	  esac~
+	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	    $RM "$lt_outputfile.manifest";
+	  fi'
+	;;
+      *)
+	# Assume MSVC wrapper
+	hardcode_libdir_flag_spec=' '
+	allow_undefined_flag=unsupported
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+	# The linker will automatically build a .lib file if we build a DLL.
+	old_archive_from_new_cmds='true'
+	# FIXME: Should let the user specify the lib program.
+	old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+	enable_shared_with_static_runtimes=yes
+	;;
+      esac
+      ;;
+
+    darwin* | rhapsody*)
+
+
+  archive_cmds_need_lc=no
+  hardcode_direct=no
+  hardcode_automatic=yes
+  hardcode_shlibpath_var=unsupported
+  if test "$lt_cv_ld_force_load" = "yes"; then
+    whole_archive_flag_spec='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+
+  else
+    whole_archive_flag_spec=''
+  fi
+  link_all_deplibs=yes
+  allow_undefined_flag="$_lt_dar_allow_undefined"
+  case $cc_basename in
+     ifort*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
+  if test "$_lt_dar_can_shared" = "yes"; then
+    output_verbose_link_cmd=func_echo_all
+    archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+    module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+    archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+    module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+
+  else
+  ld_shlibs=no
+  fi
+
+      ;;
+
+    dgux*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_shlibpath_var=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2.*)
+      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_direct=yes
+      hardcode_minus_L=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    hpux9*)
+      if test "$GCC" = yes; then
+	archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      else
+	archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      fi
+      hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+      hardcode_libdir_separator=:
+      hardcode_direct=yes
+
+      # hardcode_minus_L: Not really in the search PATH,
+      # but as the default location of the library.
+      hardcode_minus_L=yes
+      export_dynamic_flag_spec='${wl}-E'
+      ;;
+
+    hpux10*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test "$with_gnu_ld" = no; then
+	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+	hardcode_libdir_separator=:
+	hardcode_direct=yes
+	hardcode_direct_absolute=yes
+	export_dynamic_flag_spec='${wl}-E'
+	# hardcode_minus_L: Not really in the search PATH,
+	# but as the default location of the library.
+	hardcode_minus_L=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	case $host_cpu in
+	hppa*64*)
+	  archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	esac
+      else
+	case $host_cpu in
+	hppa*64*)
+	  archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+
+	  # Older versions of the 11.00 compiler do not understand -b yet
+	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5
+$as_echo_n "checking if $CC understands -b... " >&6; }
+if ${lt_cv_prog_compiler__b+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler__b=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS -b"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized.
+     # So say no if there are warnings.
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&5
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         lt_cv_prog_compiler__b=yes
+       fi
+     else
+       lt_cv_prog_compiler__b=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5
+$as_echo "$lt_cv_prog_compiler__b" >&6; }
+
+if test x"$lt_cv_prog_compiler__b" = xyes; then
+    archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+else
+    archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+fi
+
+	  ;;
+	esac
+      fi
+      if test "$with_gnu_ld" = no; then
+	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+	hardcode_libdir_separator=:
+
+	case $host_cpu in
+	hppa*64*|ia64*)
+	  hardcode_direct=no
+	  hardcode_shlibpath_var=no
+	  ;;
+	*)
+	  hardcode_direct=yes
+	  hardcode_direct_absolute=yes
+	  export_dynamic_flag_spec='${wl}-E'
+
+	  # hardcode_minus_L: Not really in the search PATH,
+	  # but as the default location of the library.
+	  hardcode_minus_L=yes
+	  ;;
+	esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	# Try to use the -exported_symbol ld option; if it does not
+	# work, assume that -exports_file does not work either and
+	# implicitly export all symbols.
+	# This should be the same for all languages, so no per-tag cache variable.
+	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
+if ${lt_cv_irix_exported_symbol+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  save_LDFLAGS="$LDFLAGS"
+	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+	   if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+int foo (void) { return 0; }
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_irix_exported_symbol=yes
+else
+  lt_cv_irix_exported_symbol=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+           LDFLAGS="$save_LDFLAGS"
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
+	if test "$lt_cv_irix_exported_symbol" = yes; then
+          archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+	fi
+      else
+	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      inherit_rpath=yes
+      link_all_deplibs=yes
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+	archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    newsos6)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_direct=yes
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      hardcode_shlibpath_var=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd*)
+      if test -f /usr/libexec/ld.so; then
+	hardcode_direct=yes
+	hardcode_shlibpath_var=no
+	hardcode_direct_absolute=yes
+	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	  archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	  archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+	  hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+	  export_dynamic_flag_spec='${wl}-E'
+	else
+	  case $host_os in
+	   openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
+	     archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+	     hardcode_libdir_flag_spec='-R$libdir'
+	     ;;
+	   *)
+	     archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	     hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+	     ;;
+	  esac
+	fi
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    os2*)
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_minus_L=yes
+      allow_undefined_flag=unsupported
+      archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+      old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+      ;;
+
+    osf3*)
+      if test "$GCC" = yes; then
+	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      else
+	allow_undefined_flag=' -expect_unresolved \*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      ;;
+
+    osf4* | osf5*)	# as osf3* with the addition of -msym flag
+      if test "$GCC" = yes; then
+	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      else
+	allow_undefined_flag=' -expect_unresolved \*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+	# Both the C and C++ compilers support -rpath directly.
+	hardcode_libdir_flag_spec='-rpath $libdir'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_separator=:
+      ;;
+
+    solaris*)
+      no_undefined_flag=' -z defs'
+      if test "$GCC" = yes; then
+	wlarc='${wl}'
+	archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+	case `$CC -V 2>&1` in
+	*"Compilers 5.0"*)
+	  wlarc=''
+	  archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+	  ;;
+	*)
+	  wlarc='${wl}'
+	  archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+	  ;;
+	esac
+      fi
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_shlibpath_var=no
+      case $host_os in
+      solaris2.[0-5] | solaris2.[0-5].*) ;;
+      *)
+	# The compiler driver will combine and reorder linker options,
+	# but understands `-z linker_flag'.  GCC discards it without `$wl',
+	# but is careful enough not to reorder.
+	# Supported since Solaris 2.6 (maybe 2.5.1?)
+	if test "$GCC" = yes; then
+	  whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+	else
+	  whole_archive_flag_spec='-z allextract$convenience -z defaultextract'
+	fi
+	;;
+      esac
+      link_all_deplibs=yes
+      ;;
+
+    sunos4*)
+      if test "x$host_vendor" = xsequent; then
+	# Use $CC to link under sequent, because it throws in some extra .o
+	# files that make .init and .fini sections work.
+	archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_direct=yes
+      hardcode_minus_L=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+	sni)
+	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  hardcode_direct=yes # is this really true???
+	;;
+	siemens)
+	  ## LD is ld it makes a PLAMLIB
+	  ## CC just makes a GrossModule.
+	  archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+	  reload_cmds='$CC -r -o $output$reload_objs'
+	  hardcode_direct=no
+        ;;
+	motorola)
+	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  hardcode_direct=no #Motorola manual says yes, but my tests say they lie
+	;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      hardcode_shlibpath_var=no
+      ;;
+
+    sysv4.3*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_shlibpath_var=no
+      export_dynamic_flag_spec='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	hardcode_shlibpath_var=no
+	runpath_var=LD_RUN_PATH
+	hardcode_runpath_var=yes
+	ld_shlibs=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
+      no_undefined_flag='${wl}-z,text'
+      archive_cmds_need_lc=no
+      hardcode_shlibpath_var=no
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We can NOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
+      no_undefined_flag='${wl}-z,text'
+      allow_undefined_flag='${wl}-z,nodefs'
+      archive_cmds_need_lc=no
+      hardcode_shlibpath_var=no
+      hardcode_libdir_flag_spec='${wl}-R,$libdir'
+      hardcode_libdir_separator=':'
+      link_all_deplibs=yes
+      export_dynamic_flag_spec='${wl}-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_shlibpath_var=no
+      ;;
+
+    *)
+      ld_shlibs=no
+      ;;
+    esac
+
+    if test x$host_vendor = xsni; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+	export_dynamic_flag_spec='${wl}-Blargedynsym'
+	;;
+      esac
+    fi
+  fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5
+$as_echo "$ld_shlibs" >&6; }
+test "$ld_shlibs" = no && can_build_shared=no
+
+with_gnu_ld=$with_gnu_ld
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
+# Do we need to explicitly link libc?
+#
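+# The cached check below compiles a trivial object, then re-runs $archive_cmds
+# with verbose driver flags (compiler_flags=-v linker_flags=-v) and greps the
+# output for " -lc "; if the compiler driver already passes -lc to the linker,
+# libtool avoids adding an explicit -lc of its own.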
+case "x$archive_cmds_need_lc" in
+x|xyes)
+  # Assume -lc should be added
+  archive_cmds_need_lc=yes
+
+  if test "$enable_shared" = yes && test "$GCC" = yes; then
+    case $archive_cmds in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc since on some
+      # systems, -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
+$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
+if ${lt_cv_archive_cmds_need_lc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  $RM conftest*
+	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+	if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } 2>conftest.err; then
+	  soname=conftest
+	  lib=conftest
+	  libobjs=conftest.$ac_objext
+	  deplibs=
+	  wl=$lt_prog_compiler_wl
+	  pic_flag=$lt_prog_compiler_pic
+	  compiler_flags=-v
+	  linker_flags=-v
+	  verstring=
+	  output_objdir=.
+	  libname=conftest
+	  lt_save_allow_undefined_flag=$allow_undefined_flag
+	  allow_undefined_flag=
+	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
+  (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+	  then
+	    lt_cv_archive_cmds_need_lc=no
+	  else
+	    lt_cv_archive_cmds_need_lc=yes
+	  fi
+	  allow_undefined_flag=$lt_save_allow_undefined_flag
+	else
+	  cat conftest.err 1>&5
+	fi
+	$RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5
+$as_echo "$lt_cv_archive_cmds_need_lc" >&6; }
+      archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc
+      ;;
+    esac
+  fi
+  ;;
+esac
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
+$as_echo_n "checking dynamic linker characteristics... " >&6; }
+
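+# For GCC, the library search path is derived from `$CC -print-search-dirs`:
+# the "libraries:" line is taken, its "=" prefixes are stripped, the separator
+# (";" or $PATH_SEPARATOR) is normalized to spaces, and each directory is
+# extended with the -print-multi-os-directory suffix when that subdirectory
+# exists.  Non-GCC compilers fall back to "/lib /usr/lib /usr/local/lib".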
+if test "$GCC" = yes; then
+  case $host_os in
+    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+    *) lt_awk_arg="/^libraries:/" ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;;
+    *) lt_sed_strip_eq="s,=/,/,g" ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # if the path contains ";" then we assume it to be the separator
+    # otherwise default to the standard path separator (i.e. ":") - it is
+    # assumed that no part of a normal pathname contains ";", but that should
+    # be okay in the real world, where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # Ok, now that we have the path separated by spaces, we can step through it
+  # and add the multilib dir if necessary.
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+    else
+      test -d "$lt_sys_path" && \
+	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
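+  # The awk program below canonicalizes each candidate directory: it rebuilds
+  # the path from its components, folds out "." and ".." segments, and prints
+  # each resulting directory only once (duplicates are filtered via lt_freq).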
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+  lt_foo="";
+  lt_count=0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo="/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[lt_foo]++; }
+  if (lt_freq[lt_foo] == 1) { print lt_foo; }
+}'`
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's,/\([A-Za-z]:\),\1,g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
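+# The per-OS case below fills in the dynamic linker characteristics: the
+# shared library naming scheme (library_names_spec, soname_spec), the run-time
+# search-path variable (shlibpath_var), whether that variable overrides an
+# embedded rpath/runpath, and how newly installed libraries are registered
+# (finish_cmds), among other traits.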
+case $host_os in
+aix3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='${libname}${release}${shared_ext}$major'
+  ;;
+
+aix[4-9]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test "$host_cpu" = ia64; then
+    # AIX 5 supports IA64
+    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependence libraries.  The import file would start with
+    # the line `#! .'.  This would cause the generated library to
+    # depend on `.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[01] | aix4.[01].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+	   echo ' yes '
+	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+	:
+      else
+	can_build_shared=no
+      fi
+      ;;
+    esac
+    # AIX (on Power*) has no versioning support, so we currently cannot hardcode the
+    # correct soname into the executable. Versioning support could probably be added
+    # to collect2, so additional links may be useful in the future.
+    if test "$aix_use_runtimelinking" = yes; then
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    else
+      # We preserve .a as extension for shared libraries through AIX4.2
+      # and later when we are not doing run time linking.
+      library_names_spec='${libname}${release}.a $libname.a'
+      soname_spec='${libname}${release}${shared_ext}$major'
+    fi
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='${libname}${shared_ext}'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[45]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # the default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=".dll"
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$cc_basename in
+  yes,*)
+    # gcc
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    esac
+    dynamic_linker='Win32 ld.exe'
+    ;;
+
+  *,cl*)
+    # Native MSVC
+    libname_spec='$name'
+    soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+    library_names_spec='${libname}.dll.lib'
+
+    case $build_os in
+    mingw*)
+      sys_lib_search_path_spec=
+      lt_save_ifs=$IFS
+      IFS=';'
+      for lt_path in $LIB
+      do
+        IFS=$lt_save_ifs
+        # Let DOS variable expansion print the short 8.3 style file name.
+        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+      done
+      IFS=$lt_save_ifs
+      # Convert to MSYS style.
+      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
+      ;;
+    cygwin*)
+      # Convert to unix form, then to dos form, then back to unix form
+      # but this time dos style (no spaces!) so that the unix form looks
+      # like /cygdrive/c/PROGRA~1:/cygdr...
+      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      ;;
+    *)
+      sys_lib_search_path_spec="$LIB"
+      if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
+        # It is most probably a Windows format PATH.
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+      else
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      fi
+      # FIXME: find the short name or the path components, as spaces are
+      # common. (e.g. "Program Files" -> "PROGRA~1")
+      ;;
+    esac
+
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+    dynamic_linker='Win32 link.exe'
+    ;;
+
+  *)
+    # Assume MSVC wrapper
+    library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
+    dynamic_linker='Win32 ld.exe'
+    ;;
+  esac
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+  soname_spec='${libname}${release}${major}$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[23].*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2.*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[01]* | freebsdelf3.[01]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
+  freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+gnu*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+haiku*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    if test "X$HPUX_IA64_MODE" = X32; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+    fi
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+  postinstall_cmds='chmod 555 $lib'
+  # or fails outright, so override atomically:
+  install_override_mode=555
+  ;;
+
+interix[3-9]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+	if test "$lt_cv_prog_gnu_ld" = yes; then
+		version_type=linux # correct to gnu/linux during the next big refactor
+	else
+		version_type=irix
+	fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld are patched to set DT_RUNPATH
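+  # The cached test links a trivial program against a dummy libdir (/foo) via
+  # $hardcode_libdir_flag_spec and checks `$OBJDUMP -p` for a RUNPATH entry;
+  # DT_RUNPATH (unlike DT_RPATH) is overridden by LD_LIBRARY_PATH, so finding
+  # one means shlibpath_overrides_runpath must be yes.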
+  if ${lt_cv_shlibpath_overrides_runpath+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \
+	 LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\""
+    if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  if  ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
+  lt_cv_shlibpath_overrides_runpath=yes
+fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+
+fi
+
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Append ld.so.conf contents to the search path
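+  # The awk pass expands "include" directives, and the sed pass strips
+  # comments, hwcap lines and "=" qualifiers, leaving a plain space-separated
+  # directory list that is appended after /lib /usr/lib.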
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  Since that check broke cross compiles, most
+  # powerpc-linux boxes support dynamic linking these days, and people
+  # can always use --disable-shared, the test was removed, and we
+  # assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec="/usr/lib"
+  need_lib_prefix=no
+  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+  case $host_os in
+    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
+    *)				need_version=no  ;;
+  esac
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    case $host_os in
+      openbsd2.[89] | openbsd2.[89].*)
+	shlibpath_overrides_runpath=no
+	;;
+      *)
+	shlibpath_overrides_runpath=yes
+	;;
+      esac
+  else
+    shlibpath_overrides_runpath=yes
+  fi
+  ;;
+
+os2*)
+  libname_spec='$name'
+  shrext_cmds=".dll"
+  need_lib_prefix=no
+  library_names_spec='$libname${shared_ext} $libname.a'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=LIBPATH
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test "$with_gnu_ld" = yes; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec ;then
+    version_type=linux # correct to gnu/linux during the next big refactor
+    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+    soname_spec='$libname${shared_ext}.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=freebsd-elf
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test "$with_gnu_ld" = yes; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+	;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
+$as_echo "$dynamic_linker" >&6; }
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
+$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
+hardcode_action=
+if test -n "$hardcode_libdir_flag_spec" ||
+   test -n "$runpath_var" ||
+   test "X$hardcode_automatic" = "Xyes" ; then
+
+  # We can hardcode non-existent directories.
+  if test "$hardcode_direct" != no &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink, otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one
+     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no &&
+     test "$hardcode_minus_L" != no; then
+    # Linking always hardcodes the temporary library directory.
+    hardcode_action=relink
+  else
+    # We can link without hardcoding, and we can hardcode nonexisting dirs.
+    hardcode_action=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  hardcode_action=unsupported
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5
+$as_echo "$hardcode_action" >&6; }
+
+if test "$hardcode_action" = relink ||
+   test "$inherit_rpath" = yes; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+     test "$enable_shared" = no; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
+
+
+
+
+
+
+  if test "x$enable_dlopen" != xyes; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
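+  # Pick a dlopen mechanism for the host: load_add_on on BeOS, LoadLibrary on
+  # Win32 targets, and on Darwin -ldl with a dyld fallback; the generic branch
+  # tries shl_load (HP-UX), then dlopen from libc, -ldl, -lsvld or -ldld,
+  # taking whichever link test succeeds first.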
+  case $host_os in
+  beos*)
+    lt_cv_dlopen="load_add_on"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen="LoadLibrary"
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen="dlopen"
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+  # if libdl is installed we need to link against it
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
+$as_echo_n "checking for dlopen in -ldl... " >&6; }
+if ${ac_cv_lib_dl_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldl  $LIBS"
+if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dl_dlopen=yes
+else
+  ac_cv_lib_dl_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
+$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
+if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
+else
+
+    lt_cv_dlopen="dyld"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+
+fi
+
+    ;;
+
+  *)
+    ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load"
+if test "x$ac_cv_func_shl_load" = xyes; then :
+  lt_cv_dlopen="shl_load"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5
+$as_echo_n "checking for shl_load in -ldld... " >&6; }
+if ${ac_cv_lib_dld_shl_load+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char shl_load ();
+int
+main ()
+{
+return shl_load ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_shl_load=yes
+else
+  ac_cv_lib_dld_shl_load=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5
+$as_echo "$ac_cv_lib_dld_shl_load" >&6; }
+if test "x$ac_cv_lib_dld_shl_load" = xyes; then :
+  lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"
+else
+  ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen"
+if test "x$ac_cv_func_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
+$as_echo_n "checking for dlopen in -ldl... " >&6; }
+if ${ac_cv_lib_dl_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldl  $LIBS"
+if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dl_dlopen=yes
+else
+  ac_cv_lib_dl_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
+$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
+if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5
+$as_echo_n "checking for dlopen in -lsvld... " >&6; }
+if ${ac_cv_lib_svld_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-lsvld  $LIBS"
+if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_svld_dlopen=yes
+else
+  ac_cv_lib_svld_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5
+$as_echo "$ac_cv_lib_svld_dlopen" >&6; }
+if test "x$ac_cv_lib_svld_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5
+$as_echo_n "checking for dld_link in -ldld... " >&6; }
+if ${ac_cv_lib_dld_dld_link+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+if test x$ac_no_link = xyes; then
+  as_fn_error $? "link tests are not allowed after AC_NO_EXECUTABLES" "$LINENO" 5
+fi
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dld_link ();
+int
+main ()
+{
+return dld_link ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_dld_link=yes
+else
+  ac_cv_lib_dld_dld_link=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5
+$as_echo "$ac_cv_lib_dld_dld_link" >&6; }
+if test "x$ac_cv_lib_dld_dld_link" = xyes; then :
+  lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+    ;;
+  esac
+
+  if test "x$lt_cv_dlopen" != xno; then
+    enable_dlopen=yes
+  else
+    enable_dlopen=no
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS="$CPPFLAGS"
+    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS="$LDFLAGS"
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS="$LIBS"
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5
+$as_echo_n "checking whether a program can dlopen itself... " >&6; }
+if ${lt_cv_dlopen_self+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  	  if test "$cross_compiling" = yes; then :
+  lt_cv_dlopen_self=cross
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
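+  # The conftest program below dlopens the running program itself
+  # (dlopen (0, ...)) and exits with $lt_dlno_uscore if the symbol "fnord"
+  # resolves as-is, $lt_dlneed_uscore if it needs a leading underscore, or
+  # $lt_dlunknown if neither lookup succeeds.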
+  cat > conftest.$ac_ext <<_LT_EOF
+#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
+   find out it does not work on some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&5 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;;
+      x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;;
+      x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;;
+    esac
+  else :
+    # compilation failed
+    lt_cv_dlopen_self=no
+  fi
+fi
+rm -fr conftest*
+
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5
+$as_echo "$lt_cv_dlopen_self" >&6; }
+
+    if test "x$lt_cv_dlopen_self" = xyes; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5
+$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; }
+if ${lt_cv_dlopen_self_static+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  	  if test "$cross_compiling" = yes; then :
+  lt_cv_dlopen_self_static=cross
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
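+  # Same self-dlopen probe as above, but linked with $lt_prog_compiler_static
+  # added to LDFLAGS, to see whether a statically linked program can still
+  # dlopen itself.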
+  cat > conftest.$ac_ext <<_LT_EOF
+#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
+   find out it does not work on some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&5 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;;
+      x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;;
+      x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;;
+    esac
+  else :
+    # compilation failed
+    lt_cv_dlopen_self_static=no
+  fi
+fi
+rm -fr conftest*
+
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5
+$as_echo "$lt_cv_dlopen_self_static" >&6; }
+    fi
+
+    CPPFLAGS="$save_CPPFLAGS"
+    LDFLAGS="$save_LDFLAGS"
+    LIBS="$save_LIBS"
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+striplib=
+old_striplib=
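+# GNU strip is detected via `$STRIP -V`; it supports --strip-debug and
+# --strip-unneeded.  Apple's strip on Darwin needs -S and -x instead, so that
+# case is handled explicitly below; anything else leaves stripping disabled.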
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5
+$as_echo_n "checking whether stripping libraries is possible... " >&6; }
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP" ; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+    else
+      { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+    fi
+    ;;
+  *)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+    ;;
+  esac
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+  # Report which library types will actually be built
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
+$as_echo_n "checking if libtool supports shared libraries... " >&6; }
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
+$as_echo "$can_build_shared" >&6; }
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
+$as_echo_n "checking whether to build shared libraries... " >&6; }
+  test "$can_build_shared" = "no" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test "$enable_shared" = yes && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[4-9]*)
+    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+      test "$enable_shared" = yes && enable_static=no
+    fi
+    ;;
+  esac
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
+$as_echo "$enable_shared" >&6; }
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
+$as_echo_n "checking whether to build static libraries... " >&6; }
+  # Make sure either enable_shared or enable_static is yes.
+  test "$enable_shared" = yes || enable_static=yes
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
+$as_echo "$enable_static" >&6; }
+
+
+
+
+fi
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+CC="$lt_save_CC"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+        ac_config_commands="$ac_config_commands libtool"
+
+
+
+
+# Only expand once:
+
+
+
+#
+# Debug instances
+#
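+# --enable-debug (default: no) appends -DDEBUG to CFLAGS, CXXFLAGS, OBJCFLAGS
+# and OBJCXXFLAGS; the choice is cached in nl_cv_build_debug and exposed to
+# automake through the NLUNITTEST_BUILD_DEBUG conditional.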
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build debug instances" >&5
+$as_echo "$as_me: checking whether to build debug instances" >&6;}
+
+# Debug
+
+
+    # Check whether or not a default value has been passed in.
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build debug instances of programs and libraries" >&5
+$as_echo_n "checking whether to build debug instances of programs and libraries... " >&6; }
+if ${nl_cv_build_debug+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+
+            # Check whether --enable-debug was given.
+if test "${enable_debug+set}" = set; then :
+  enableval=$enable_debug;
+                    case "${enableval}" in
+
+                    no|yes)
+                        nl_cv_build_debug=${enableval}
+                        ;;
+
+                    *)
+                        as_fn_error $? "Invalid value ${enableval} for --enable-debug" "$LINENO" 5
+                        ;;
+
+                    esac
+
+else
+
+                    nl_cv_build_debug=no
+
+fi
+
+
+            if test "${nl_cv_build_debug}" = "yes"; then
+                CFLAGS="${CFLAGS} -DDEBUG"
+                CXXFLAGS="${CXXFLAGS} -DDEBUG"
+                OBJCFLAGS="${OBJCFLAGS} -DDEBUG"
+                OBJCXXFLAGS="${OBJCXXFLAGS} -DDEBUG"
+            fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $nl_cv_build_debug" >&5
+$as_echo "$nl_cv_build_debug" >&6; }
+
+
+ if test "${nl_cv_build_debug}" = "yes"; then
+  NLUNITTEST_BUILD_DEBUG_TRUE=
+  NLUNITTEST_BUILD_DEBUG_FALSE='#'
+else
+  NLUNITTEST_BUILD_DEBUG_TRUE='#'
+  NLUNITTEST_BUILD_DEBUG_FALSE=
+fi
+
+
+# Optimization
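+# --enable-optimization defaults to yes unless --enable-coverage was requested,
+# in which case the two are treated as mutually exclusive and optimization is
+# turned off with a warning.  When disabled, any -O<level> already present in
+# the C/C++/ObjC/ObjC++ flag sets is rewritten to -O0.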
+
+
+    # Check whether or not a default value has been passed in.
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build code-optimized instances of programs and libraries" >&5
+$as_echo_n "checking whether to build code-optimized instances of programs and libraries... " >&6; }
+if ${nl_cv_build_optimized+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+
+            # Check whether --enable-optimization was given.
+if test "${enable_optimization+set}" = set; then :
+  enableval=$enable_optimization;
+                    case "${enableval}" in
+
+                    no|yes)
+                        nl_cv_build_optimized=${enableval}
+
+                        if test "${nl_cv_build_coverage}" = "yes" && test "${nl_cv_build_optimized}" = "yes"; then
+                            as_fn_error $? "both --enable-optimization and --enable-coverage cannot be used together. Please choose one or the other to enable." "$LINENO" 5
+                        fi
+                        ;;
+
+                    *)
+                        as_fn_error $? "Invalid value ${enableval} for --enable-optimization" "$LINENO" 5
+                        ;;
+
+                    esac
+
+else
+
+                    if test "${nl_cv_build_coverage}" = "yes"; then
+                        { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: --enable-coverage was specified, optimization disabled" >&5
+$as_echo "$as_me: WARNING: --enable-coverage was specified, optimization disabled" >&2;}
+                        nl_cv_build_optimized=no
+
+                    else
+                        nl_cv_build_optimized=yes
+
+                    fi
+
+fi
+
+
+            if test "${nl_cv_build_optimized}" = "no"; then
+                CFLAGS="`echo ${CFLAGS} | sed -e 's,-O[[:alnum:]]*,-O0,g'`"
+                CXXFLAGS="`echo ${CXXFLAGS} | sed -e 's,-O[[:alnum:]]*,-O0,g'`"
+                OBJCFLAGS="`echo ${OBJCFLAGS} | sed -e 's,-O[[:alnum:]]*,-O0,g'`"
+                OBJCXXFLAGS="`echo ${OBJCXXFLAGS} | sed -e 's,-O[[:alnum:]]*,-O0,g'`"
+            fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $nl_cv_build_optimized" >&5
+$as_echo "$nl_cv_build_optimized" >&6; }
+
+
+ if test "${nl_cv_build_optimized}" = "yes"; then
+  NLUNITTEST_BUILD_OPTIMIZED_TRUE=
+  NLUNITTEST_BUILD_OPTIMIZED_FALSE='#'
+else
+  NLUNITTEST_BUILD_OPTIMIZED_TRUE='#'
+  NLUNITTEST_BUILD_OPTIMIZED_FALSE=
+fi
+
+
+#
+# Documentation
+#
+
+# Determine whether documentation (via Doxygen) should be built, with
+# 'auto' as the default, and establish a default value for GraphViz
+# 'dot' support.
+
+
+    # Check whether or not the 'default' value is sane.
+
+
+
+    # Check whether or not the 'dot_default' value is sane.
+
+
+
+    DOXYGEN_USE_DOT=NO
+
+
+
+
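+    # Locate the 'doxygen' and 'dot' executables on PATH, caching the
+    # results in ac_cv_path_DOXYGEN and ac_cv_path_DOT.  Whether they
+    # are found drives the --enable-docs 'auto' decision and the
+    # DOXYGEN_USE_DOT setting below.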
+    # Extract the first word of "doxygen", so it can be a program name with args.
+set dummy doxygen; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_DOXYGEN+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $DOXYGEN in
+  [\\/]* | ?:[\\/]*)
+  ac_cv_path_DOXYGEN="$DOXYGEN" # Let the user override the test with a path.
+  ;;
+  *)
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_path_DOXYGEN="$as_dir/$ac_word$ac_exec_ext"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+  ;;
+esac
+fi
+DOXYGEN=$ac_cv_path_DOXYGEN
+if test -n "$DOXYGEN"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DOXYGEN" >&5
+$as_echo "$DOXYGEN" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    # Extract the first word of "dot", so it can be a program name with args.
+set dummy dot; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_DOT+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $DOT in
+  [\\/]* | ?:[\\/]*)
+  ac_cv_path_DOT="$DOT" # Let the user override the test with a path.
+  ;;
+  *)
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_path_DOT="$as_dir/$ac_word$ac_exec_ext"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+  ;;
+esac
+fi
+DOT=$ac_cv_path_DOT
+if test -n "$DOT"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DOT" >&5
+$as_echo "$DOT" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build documentation" >&5
+$as_echo_n "checking whether to build documentation... " >&6; }
+if ${nl_cv_build_docs+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+
+	    # Check whether --enable-docs was given.
+if test "${enable_docs+set}" = set; then :
+  enableval=$enable_docs;
+		    case "${enableval}" in
+
+		    auto|no|yes)
+			nl_cv_build_docs=${enableval}
+			;;
+
+		    *)
+			as_fn_error $? "Invalid value ${enableval} for --disable-docs" "$LINENO" 5
+			;;
+
+		    esac
+
+else
+  nl_cv_build_docs=auto
+fi
+
+
+	    if test "x${DOXYGEN}" != "x"; then
+		nl_cv_have_doxygen=yes
+	    else
+		nl_cv_have_doxygen=no
+	    fi
+
+	    if test "${nl_cv_build_docs}" = "auto"; then
+		if test "${nl_cv_have_doxygen}" = "no"; then
+		    nl_cv_build_docs=no
+		else
+		    nl_cv_build_docs=yes
+		fi
+	    fi
+
+	    if test "${nl_cv_build_docs}" = "yes"; then
+		if test "${nl_cv_have_doxygen}" = "no"; then
+		    as_fn_error $? "Building docs was explicitly requested but Doxygen cannot be found" "$LINENO" 5
+		elif test "${nl_cv_have_doxygen}" = "yes"; then
+		    if test "x${DOT}" != "x"; then
+			DOXYGEN_USE_DOT=YES
+		    fi
+		fi
+	    fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $nl_cv_build_docs" >&5
+$as_echo "$nl_cv_build_docs" >&6; }
+
+
+
+
+ if test "${nl_cv_build_docs}" = "yes"; then
+  NLUNITTEST_BUILD_DOCS_TRUE=
+  NLUNITTEST_BUILD_DOCS_FALSE='#'
+else
+  NLUNITTEST_BUILD_DOCS_TRUE='#'
+  NLUNITTEST_BUILD_DOCS_FALSE=
+fi
+
+
+#
+# Check for headers
+#
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdbool.h that conforms to C99" >&5
+$as_echo_n "checking for stdbool.h that conforms to C99... " >&6; }
+if ${ac_cv_header_stdbool_h+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+#include <stdbool.h>
+#ifndef bool
+ "error: bool is not defined"
+#endif
+#ifndef false
+ "error: false is not defined"
+#endif
+#if false
+ "error: false is not 0"
+#endif
+#ifndef true
+ "error: true is not defined"
+#endif
+#if true != 1
+ "error: true is not 1"
+#endif
+#ifndef __bool_true_false_are_defined
+ "error: __bool_true_false_are_defined is not defined"
+#endif
+
+	struct s { _Bool s: 1; _Bool t; } s;
+
+	char a[true == 1 ? 1 : -1];
+	char b[false == 0 ? 1 : -1];
+	char c[__bool_true_false_are_defined == 1 ? 1 : -1];
+	char d[(bool) 0.5 == true ? 1 : -1];
+	/* See body of main program for 'e'.  */
+	char f[(_Bool) 0.0 == false ? 1 : -1];
+	char g[true];
+	char h[sizeof (_Bool)];
+	char i[sizeof s.t];
+	enum { j = false, k = true, l = false * true, m = true * 256 };
+	/* The following fails for
+	   HP aC++/ANSI C B3910B A.05.55 [Dec 04 2003]. */
+	_Bool n[m];
+	char o[sizeof n == m * sizeof n[0] ? 1 : -1];
+	char p[-1 - (_Bool) 0 < 0 && -1 - (bool) 0 < 0 ? 1 : -1];
+	/* Catch a bug in an HP-UX C compiler.  See
+	   http://gcc.gnu.org/ml/gcc-patches/2003-12/msg02303.html
+	   http://lists.gnu.org/archive/html/bug-coreutils/2005-11/msg00161.html
+	 */
+	_Bool q = true;
+	_Bool *pq = &q;
+
+int
+main ()
+{
+
+	bool e = &s;
+	*pq |= q;
+	*pq |= ! q;
+	/* Refer to every declared value, to avoid compiler optimizations.  */
+	return (!a + !b + !c + !d + !e + !f + !g + !h + !i + !!j + !k + !!l
+		+ !m + !n + !o + !p + !q + !pq);
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_header_stdbool_h=yes
+else
+  ac_cv_header_stdbool_h=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdbool_h" >&5
+$as_echo "$ac_cv_header_stdbool_h" >&6; }
+ac_fn_c_check_type "$LINENO" "_Bool" "ac_cv_type__Bool" "$ac_includes_default"
+if test "x$ac_cv_type__Bool" = xyes; then :
+
+cat >>confdefs.h <<_ACEOF
+#define HAVE__BOOL 1
+_ACEOF
+
+
+fi
+
+if test $ac_cv_header_stdbool_h = yes; then
+
+$as_echo "#define HAVE_STDBOOL_H 1" >>confdefs.h
+
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
+$as_echo_n "checking for ANSI C header files... " >&6; }
+if ${ac_cv_header_stdc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_header_stdc=yes
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+if test $ac_cv_header_stdc = yes; then
+  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <string.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "memchr" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "free" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+  if test "$cross_compiling" = yes; then :
+  :
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+		   (('a' <= (c) && (c) <= 'i') \
+		     || ('j' <= (c) && (c) <= 'r') \
+		     || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
+#endif
+
+#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
+int
+main ()
+{
+  int i;
+  for (i = 0; i < 256; i++)
+    if (XOR (islower (i), ISLOWER (i))
+	|| toupper (i) != TOUPPER (i))
+      return 2;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_run "$LINENO"; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+  conftest.$ac_objext conftest.beam conftest.$ac_ext
+fi
+
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
+$as_echo "$ac_cv_header_stdc" >&6; }
+if test $ac_cv_header_stdc = yes; then
+
+$as_echo "#define STDC_HEADERS 1" >>confdefs.h
+
+fi
+
+
+# At this point, we can restore the compiler flags to whatever the
+# user passed in, now that we're clear of any -Werror issues by
+# transforming -Wno-error back to -Werror.
+
+
+
+    if test "${nl_had_OBJCXXFLAGS_werror}" = "yes"; then
+	OBJCXXFLAGS="`echo ${OBJCXXFLAGS} | sed -e 's,-Wno-error\([[:space:]]\),-Werror\1,g'`"
+    fi
+
+    unset nl_had_OBJCXXFLAGS_werror
+
+
+    if test "${nl_had_OBJCFLAGS_werror}" = "yes"; then
+	OBJCFLAGS="`echo ${OBJCFLAGS} | sed -e 's,-Wno-error\([[:space:]]\),-Werror\1,g'`"
+    fi
+
+    unset nl_had_OBJCFLAGS_werror
+
+
+    if test "${nl_had_CXXFLAGS_werror}" = "yes"; then
+	CXXFLAGS="`echo ${CXXFLAGS} | sed -e 's,-Wno-error\([[:space:]]\),-Werror\1,g'`"
+    fi
+
+    unset nl_had_CXXFLAGS_werror
+
+
+    if test "${nl_had_CFLAGS_werror}" = "yes"; then
+	CFLAGS="`echo ${CFLAGS} | sed -e 's,-Wno-error\([[:space:]]\),-Werror\1,g'`"
+    fi
+
+    unset nl_had_CFLAGS_werror
+
+
+    if test "${nl_had_CPPFLAGS_werror}" = "yes"; then
+	CPPFLAGS="`echo ${CPPFLAGS} | sed -e 's,-Wno-error\([[:space:]]\),-Werror\1,g'`"
+    fi
+
+    unset nl_had_CPPFLAGS_werror
+
+
+
+#
+# Identify the various makefiles and auto-generated files for the package
+#
+ac_config_files="$ac_config_files Makefile src/Makefile doc/Makefile"
+
+
+#
+# Generate the auto-generated files for the package
+#
+cat >confcache <<\_ACEOF
+# This file is a shell script that caches the results of configure
+# tests run on this system so they can be shared between configure
+# scripts and configure runs, see configure's option --config-cache.
+# It is not useful on other systems.  If it contains results you don't
+# want to keep, you may remove or edit it.
+#
+# config.status only pays attention to the cache file if you give it
+# the --recheck option to rerun configure.
+#
+# `ac_cv_env_foo' variables (set or unset) will be overridden when
+# loading this file, other *unset* `ac_cv_foo' will be assigned the
+# following values.
+
+_ACEOF
+
+# The following way of writing the cache mishandles newlines in values,
+# but we know of no workaround that is simple, portable, and efficient.
+# So, we kill variables containing newlines.
+# Ultrix sh set writes to stderr and can't be redirected directly,
+# and sets the high bit in the cache file unless we assign to the vars.
+(
+  for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
+    eval ac_val=\$$ac_var
+    case $ac_val in #(
+    *${as_nl}*)
+      case $ac_var in #(
+      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+      esac
+      case $ac_var in #(
+      _ | IFS | as_nl) ;; #(
+      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+      *) { eval $ac_var=; unset $ac_var;} ;;
+      esac ;;
+    esac
+  done
+
+  (set) 2>&1 |
+    case $as_nl`(ac_space=' '; set) 2>&1` in #(
+    *${as_nl}ac_space=\ *)
+      # `set' does not quote correctly, so add quotes: double-quote
+      # substitution turns \\\\ into \\, and sed turns \\ into \.
+      sed -n \
+	"s/'/'\\\\''/g;
+	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
+      ;; #(
+    *)
+      # `set' quotes correctly as required by POSIX, so do not add quotes.
+      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+      ;;
+    esac |
+    sort
+) |
+  sed '
+     /^ac_cv_env_/b end
+     t clear
+     :clear
+     s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
+     t end
+     s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
+     :end' >>confcache
+if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
+  if test -w "$cache_file"; then
+    if test "x$cache_file" != "x/dev/null"; then
+      { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
+$as_echo "$as_me: updating cache $cache_file" >&6;}
+      if test ! -f "$cache_file" || test -h "$cache_file"; then
+	cat confcache >"$cache_file"
+      else
+        case $cache_file in #(
+        */* | ?:*)
+	  mv -f confcache "$cache_file"$$ &&
+	  mv -f "$cache_file"$$ "$cache_file" ;; #(
+        *)
+	  mv -f confcache "$cache_file" ;;
+	esac
+      fi
+    fi
+  else
+    { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
+$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
+  fi
+fi
+rm -f confcache
+
+test "x$prefix" = xNONE && prefix=$ac_default_prefix
+# Let make expand exec_prefix.
+test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
+
+DEFS=-DHAVE_CONFIG_H
+
+ac_libobjs=
+ac_ltlibobjs=
+U=
+for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
+  # 1. Remove the extension, and $U if already installed.
+  ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
+  ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
+  # 2. Prepend LIBOBJDIR.  When used with automake>=1.10 LIBOBJDIR
+  #    will be set to the directory where LIBOBJS objects are built.
+  as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
+  as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
+done
+LIBOBJS=$ac_libobjs
+
+LTLIBOBJS=$ac_ltlibobjs
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5
+$as_echo_n "checking that generated files are newer than configure... " >&6; }
+   if test -n "$am_sleep_pid"; then
+     # Hide warnings about reused PIDs.
+     wait $am_sleep_pid 2>/dev/null
+   fi
+   { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5
+$as_echo "done" >&6; }
+ if test -n "$EXEEXT"; then
+  am__EXEEXT_TRUE=
+  am__EXEEXT_FALSE='#'
+else
+  am__EXEEXT_TRUE='#'
+  am__EXEEXT_FALSE=
+fi
+
+if test -z "${MAINTAINER_MODE_TRUE}" && test -z "${MAINTAINER_MODE_FALSE}"; then
+  as_fn_error $? "conditional \"MAINTAINER_MODE\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then
+  as_fn_error $? "conditional \"AMDEP\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
+  as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
+  as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${NLUNITTEST_BUILD_DEBUG_TRUE}" && test -z "${NLUNITTEST_BUILD_DEBUG_FALSE}"; then
+  as_fn_error $? "conditional \"NLUNITTEST_BUILD_DEBUG\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${NLUNITTEST_BUILD_OPTIMIZED_TRUE}" && test -z "${NLUNITTEST_BUILD_OPTIMIZED_FALSE}"; then
+  as_fn_error $? "conditional \"NLUNITTEST_BUILD_OPTIMIZED\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${NLUNITTEST_BUILD_DOCS_TRUE}" && test -z "${NLUNITTEST_BUILD_DOCS_FALSE}"; then
+  as_fn_error $? "conditional \"NLUNITTEST_BUILD_DOCS\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+
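+# From here on, configure writes out config.status, the generated script
+# that actually instantiates the Makefiles, headers, and other outputs
+# selected above.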
+: "${CONFIG_STATUS=./config.status}"
+ac_write_fail=0
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files $CONFIG_STATUS"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
+$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
+as_write_fail=0
+cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+# Run this file to recreate the current configuration.
+# Compiler output produced by configure, useful for debugging
+# configure, is in config.log if it exists.
+
+debug=false
+ac_cs_recheck=false
+ac_cs_silent=false
+
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  fi
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+  eval 'as_fn_append ()
+  {
+    eval $1+=\$2
+  }'
+else
+  as_fn_append ()
+  {
+    eval $1=\$$1\$2
+  }
+fi # as_fn_append
+
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+  eval 'as_fn_arith ()
+  {
+    as_val=$(( $* ))
+  }'
+else
+  as_fn_arith ()
+  {
+    as_val=`expr "$@" || test $? -eq 1`
+  }
+fi # as_fn_arith
+
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+  as_dirname=dirname
+else
+  as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+  case `echo 'xy\c'` in
+  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
+  xy)  ECHO_C='\c';;
+  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
+       ECHO_T='	';;
+  esac;;
+*)
+  ECHO_N='-n';;
+esac
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+  rm -f conf$$.dir/conf$$.file
+else
+  rm -f conf$$.dir
+  mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+  if ln -s conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s='ln -s'
+    # ... but there are two gotchas:
+    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+    # In both cases, we have to default to `cp -p'.
+    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+      as_ln_s='cp -p'
+  elif ln conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s=ln
+  else
+    as_ln_s='cp -p'
+  fi
+else
+  as_ln_s='cp -p'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+  case $as_dir in #(
+  -*) as_dir=./$as_dir;;
+  esac
+  test -d "$as_dir" || eval $as_mkdir_p || {
+    as_dirs=
+    while :; do
+      case $as_dir in #(
+      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+      *) as_qdir=$as_dir;;
+      esac
+      as_dirs="'$as_qdir' $as_dirs"
+      as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_dir" : 'X\(//\)[^/]' \| \
+	 X"$as_dir" : 'X\(//\)$' \| \
+	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      test -d "$as_dir" && break
+    done
+    test -z "$as_dirs" || eval "mkdir $as_dirs"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
+if mkdir -p . 2>/dev/null; then
+  as_mkdir_p='mkdir -p "$as_dir"'
+else
+  test -d ./-p && rmdir ./-p
+  as_mkdir_p=false
+fi
+
+if test -x / >/dev/null 2>&1; then
+  as_test_x='test -x'
+else
+  if ls -dL / >/dev/null 2>&1; then
+    as_ls_L_option=L
+  else
+    as_ls_L_option=
+  fi
+  as_test_x='
+    eval sh -c '\''
+      if test -d "$1"; then
+	test -d "$1/.";
+      else
+	case $1 in #(
+	-*)set "./$1";;
+	esac;
+	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
+	???[sx]*):;;*)false;;esac;fi
+    '\'' sh
+  '
+fi
+as_executable_p=$as_test_x
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
+
+
+exec 6>&1
+## ----------------------------------- ##
+## Main body of $CONFIG_STATUS script. ##
+## ----------------------------------- ##
+_ASEOF
+test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# Save the log message, to keep $0 and so on meaningful, and to
+# report actual input values of CONFIG_FILES etc. instead of their
+# values after options handling.
+ac_log="
+This file was extended by nlunittest $as_me g899ce0d, which was
+generated by GNU Autoconf 2.68.  Invocation command line was
+
+  CONFIG_FILES    = $CONFIG_FILES
+  CONFIG_HEADERS  = $CONFIG_HEADERS
+  CONFIG_LINKS    = $CONFIG_LINKS
+  CONFIG_COMMANDS = $CONFIG_COMMANDS
+  $ $0 $@
+
+on `(hostname || uname -n) 2>/dev/null | sed 1q`
+"
+
+_ACEOF
+
+case $ac_config_files in *"
+"*) set x $ac_config_files; shift; ac_config_files=$*;;
+esac
+
+case $ac_config_headers in *"
+"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
+esac
+
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+# Files that config.status was made for.
+config_files="$ac_config_files"
+config_headers="$ac_config_headers"
+config_commands="$ac_config_commands"
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+ac_cs_usage="\
+\`$as_me' instantiates files and other configuration actions
+from templates according to the current configuration.  Unless the files
+and actions are specified as TAGs, all are instantiated by default.
+
+Usage: $0 [OPTION]... [TAG]...
+
+  -h, --help       print this help, then exit
+  -V, --version    print version number and configuration settings, then exit
+      --config     print configuration, then exit
+  -q, --quiet, --silent
+                   do not print progress messages
+  -d, --debug      don't remove temporary files
+      --recheck    update $as_me by reconfiguring in the same conditions
+      --file=FILE[:TEMPLATE]
+                   instantiate the configuration file FILE
+      --header=FILE[:TEMPLATE]
+                   instantiate the configuration header FILE
+
+Configuration files:
+$config_files
+
+Configuration headers:
+$config_headers
+
+Configuration commands:
+$config_commands
+
+Report bugs to <developer@nest.com>.
+nlunittest home page: <https://developer.nest.com/>."
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
+ac_cs_version="\\
+nlunittest config.status g899ce0d
+configured by $0, generated by GNU Autoconf 2.68,
+  with options \\"\$ac_cs_config\\"
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This config.status script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+ac_pwd='$ac_pwd'
+srcdir='$srcdir'
+INSTALL='$INSTALL'
+MKDIR_P='$MKDIR_P'
+AWK='$AWK'
+test -n "\$AWK" || AWK=awk
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# The default lists apply if the user does not specify any file.
+ac_need_defaults=:
+while test $# != 0
+do
+  case $1 in
+  --*=?*)
+    ac_option=`expr "X$1" : 'X\([^=]*\)='`
+    ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
+    ac_shift=:
+    ;;
+  --*=)
+    ac_option=`expr "X$1" : 'X\([^=]*\)='`
+    ac_optarg=
+    ac_shift=:
+    ;;
+  *)
+    ac_option=$1
+    ac_optarg=$2
+    ac_shift=shift
+    ;;
+  esac
+
+  case $ac_option in
+  # Handling of the options.
+  -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
+    ac_cs_recheck=: ;;
+  --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
+    $as_echo "$ac_cs_version"; exit ;;
+  --config | --confi | --conf | --con | --co | --c )
+    $as_echo "$ac_cs_config"; exit ;;
+  --debug | --debu | --deb | --de | --d | -d )
+    debug=: ;;
+  --file | --fil | --fi | --f )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    '') as_fn_error $? "missing file argument" ;;
+    esac
+    as_fn_append CONFIG_FILES " '$ac_optarg'"
+    ac_need_defaults=false;;
+  --header | --heade | --head | --hea )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    as_fn_append CONFIG_HEADERS " '$ac_optarg'"
+    ac_need_defaults=false;;
+  --he | --h)
+    # Conflict between --help and --header
+    as_fn_error $? "ambiguous option: \`$1'
+Try \`$0 --help' for more information.";;
+  --help | --hel | -h )
+    $as_echo "$ac_cs_usage"; exit ;;
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil | --si | --s)
+    ac_cs_silent=: ;;
+
+  # This is an error.
+  -*) as_fn_error $? "unrecognized option: \`$1'
+Try \`$0 --help' for more information." ;;
+
+  *) as_fn_append ac_config_targets " $1"
+     ac_need_defaults=false ;;
+
+  esac
+  shift
+done
+
+ac_configure_extra_args=
+
+if $ac_cs_silent; then
+  exec 6>/dev/null
+  ac_configure_extra_args="$ac_configure_extra_args --silent"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+if \$ac_cs_recheck; then
+  set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
+  shift
+  \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
+  CONFIG_SHELL='$SHELL'
+  export CONFIG_SHELL
+  exec "\$@"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+exec 5>>config.log
+{
+  echo
+  sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
+## Running $as_me. ##
+_ASBOX
+  $as_echo "$ac_log"
+} >&5
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+#
+# INIT-COMMANDS
+#
+AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
+
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`'
+macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`'
+enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`'
+enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`'
+pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`'
+enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`'
+SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`'
+ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`'
+PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`'
+host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`'
+host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`'
+host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`'
+build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`'
+build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`'
+build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`'
+SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`'
+Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`'
+GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`'
+EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`'
+FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`'
+LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`'
+NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`'
+LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`'
+max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`'
+ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`'
+exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
+old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`'
+lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`'
+CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`'
+CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`'
+compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`'
+GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`'
+OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`'
+libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`'
+shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`'
+extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
+archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`'
+enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`'
+export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`'
+whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`'
+compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`'
+old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`'
+old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
+archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`'
+archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`'
+module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`'
+module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`'
+with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`'
+allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`'
+no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`'
+hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`'
+hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`'
+hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`'
+hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`'
+hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`'
+hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`'
+hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`'
+version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`'
+runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`'
+shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`'
+shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`'
+libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`'
+library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`'
+soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`'
+install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`'
+postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
+finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`'
+finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`'
+hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`'
+sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`'
+sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`'
+hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`'
+enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`'
+enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`'
+enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`'
+old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`'
+striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`'
+
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in SHELL \
+ECHO \
+PATH_SEPARATOR \
+SED \
+GREP \
+EGREP \
+FGREP \
+LD \
+NM \
+LN_S \
+lt_SP2NL \
+lt_NL2SP \
+reload_flag \
+OBJDUMP \
+deplibs_check_method \
+file_magic_cmd \
+file_magic_glob \
+want_nocaseglob \
+DLLTOOL \
+sharedlib_from_linklib_cmd \
+AR \
+AR_FLAGS \
+archiver_list_spec \
+STRIP \
+RANLIB \
+CC \
+CFLAGS \
+compiler \
+lt_cv_sys_global_symbol_pipe \
+lt_cv_sys_global_symbol_to_cdecl \
+lt_cv_sys_global_symbol_to_c_name_address \
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
+nm_file_list_spec \
+lt_prog_compiler_no_builtin_flag \
+lt_prog_compiler_pic \
+lt_prog_compiler_wl \
+lt_prog_compiler_static \
+lt_cv_prog_compiler_c_o \
+need_locks \
+MANIFEST_TOOL \
+DSYMUTIL \
+NMEDIT \
+LIPO \
+OTOOL \
+OTOOL64 \
+shrext_cmds \
+export_dynamic_flag_spec \
+whole_archive_flag_spec \
+compiler_needs_object \
+with_gnu_ld \
+allow_undefined_flag \
+no_undefined_flag \
+hardcode_libdir_flag_spec \
+hardcode_libdir_separator \
+exclude_expsyms \
+include_expsyms \
+file_list_spec \
+variables_saved_for_relink \
+libname_spec \
+library_names_spec \
+soname_spec \
+install_override_mode \
+finish_eval \
+old_striplib \
+striplib; do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[\\\\\\\`\\"\\\$]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in reload_cmds \
+old_postinstall_cmds \
+old_postuninstall_cmds \
+old_archive_cmds \
+extract_expsyms_cmds \
+old_archive_from_new_cmds \
+old_archive_from_expsyms_cmds \
+archive_cmds \
+archive_expsym_cmds \
+module_cmds \
+module_expsym_cmds \
+export_symbols_cmds \
+prelink_cmds \
+postlink_cmds \
+postinstall_cmds \
+postuninstall_cmds \
+finish_cmds \
+sys_lib_search_path_spec \
+sys_lib_dlsearch_path_spec; do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[\\\\\\\`\\"\\\$]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+ac_aux_dir='$ac_aux_dir'
+xsi_shell='$xsi_shell'
+lt_shell_append='$lt_shell_append'
+
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes.
+if test -n "\${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    TIMESTAMP='$TIMESTAMP'
+    RM='$RM'
+    ofile='$ofile'
+
+
+
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+
+# Handling of arguments.
+for ac_config_target in $ac_config_targets
+do
+  case $ac_config_target in
+    "src/nlunittest-config.h") CONFIG_HEADERS="$CONFIG_HEADERS src/nlunittest-config.h" ;;
+    "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
+    "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;;
+    "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
+    "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;;
+    "doc/Makefile") CONFIG_FILES="$CONFIG_FILES doc/Makefile" ;;
+
+  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
+  esac
+done
+
+
+# If the user did not use the arguments to specify the items to instantiate,
+# then the envvar interface is used.  Set only those that are not.
+# We use the long form for the default assignment because of an extremely
+# bizarre bug on SunOS 4.1.3.
+if $ac_need_defaults; then
+  test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
+  test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
+  test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
+fi
+
+# Have a temporary directory for convenience.  Make it in the build tree
+# simply because there is no reason against having it here, and in addition,
+# creating and moving files from /tmp can sometimes cause problems.
+# Hook for its removal unless debugging.
+# Note that there is a small window in which the directory will not be cleaned:
+# after its creation but before its name has been assigned to `$tmp'.
+$debug ||
+{
+  tmp= ac_tmp=
+  trap 'exit_status=$?
+  : "${ac_tmp:=$tmp}"
+  { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
+' 0
+  trap 'as_fn_exit 1' 1 2 13 15
+}
+# Create a (secure) tmp directory for tmp files.
+
+{
+  tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
+  test -d "$tmp"
+}  ||
+{
+  tmp=./conf$$-$RANDOM
+  (umask 077 && mkdir "$tmp")
+} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
+ac_tmp=$tmp
+
+# Set up the scripts for CONFIG_FILES section.
+# No need to generate them if there are no CONFIG_FILES.
+# This happens for instance with `./config.status config.h'.
+if test -n "$CONFIG_FILES"; then
+
+
+ac_cr=`echo X | tr X '\015'`
+# On cygwin, bash can eat \r inside `` if the user requested igncr.
+# But we know of no other shell where ac_cr would be empty at this
+# point, so we can use a bashism as a fallback.
+if test "x$ac_cr" = x; then
+  eval ac_cr=\$\'\\r\'
+fi
+ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
+if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
+  ac_cs_awk_cr='\\r'
+else
+  ac_cs_awk_cr=$ac_cr
+fi
+
+echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
+_ACEOF
+
+
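+# Build conf$$subs.sh, a helper that emits each output substitution
+# variable and its value separated by the $ac_delim delimiter; the loop
+# below retries with a longer delimiter if the current one collides with
+# a substituted value.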
+{
+  echo "cat >conf$$subs.awk <<_ACEOF" &&
+  echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
+  echo "_ACEOF"
+} >conf$$subs.sh ||
+  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
+ac_delim='%!_!# '
+for ac_last_try in false false false false false :; do
+  . ./conf$$subs.sh ||
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+
+  ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
+  if test $ac_delim_n = $ac_delim_num; then
+    break
+  elif $ac_last_try; then
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+rm -f conf$$subs.sh
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
+_ACEOF
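+# Convert the variable/value pairs in conf$$subs.awk into awk assignments
+# of the form S["name"]="value", splitting any value longer than 148
+# characters across continued string literals.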
+sed -n '
+h
+s/^/S["/; s/!.*/"]=/
+p
+g
+s/^[^!]*!//
+:repl
+t repl
+s/'"$ac_delim"'$//
+t delim
+:nl
+h
+s/\(.\{148\}\)..*/\1/
+t more1
+s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
+p
+n
+b repl
+:more1
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t nl
+:delim
+h
+s/\(.\{148\}\)..*/\1/
+t more2
+s/["\\]/\\&/g; s/^/"/; s/$/"/
+p
+b
+:more2
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t delim
+' <conf$$subs.awk | sed '
+/^[^""]/{
+  N
+  s/\n//
+}
+' >>$CONFIG_STATUS || ac_write_fail=1
+rm -f conf$$subs.awk
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+_ACAWK
+cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
+  for (key in S) S_is_set[key] = 1
+  FS = ""
+
+}
+{
+  line = $ 0
+  nfields = split(line, field, "@")
+  substed = 0
+  len = length(field[1])
+  for (i = 2; i < nfields; i++) {
+    key = field[i]
+    keylen = length(key)
+    if (S_is_set[key]) {
+      value = S[key]
+      line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
+      len += length(value) + length(field[++i])
+      substed = 1
+    } else
+      len += 1 + keylen
+  }
+
+  print line
+}
+
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
+  sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
+else
+  cat
+fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
+  || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
+_ACEOF
+
+# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
+# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
+# trailing colons and then remove the whole line if VPATH becomes empty
+# (actually we leave an empty line to preserve line numbers).
+if test "x$srcdir" = x.; then
+  ac_vpsub='/^[	 ]*VPATH[	 ]*=[	 ]*/{
+h
+s///
+s/^/:/
+s/[	 ]*$/:/
+s/:\$(srcdir):/:/g
+s/:\${srcdir}:/:/g
+s/:@srcdir@:/:/g
+s/^:*//
+s/:*$//
+x
+s/\(=[	 ]*\).*/\1/
+G
+s/\n//
+s/^[^=]*=[	 ]*$//
+}'
+fi
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+fi # test -n "$CONFIG_FILES"
+
+# Set up the scripts for CONFIG_HEADERS section.
+# No need to generate them if there are no CONFIG_HEADERS.
+# This happens for instance with `./config.status Makefile'.
+if test -n "$CONFIG_HEADERS"; then
+cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
+BEGIN {
+_ACEOF
+
+# Transform confdefs.h into an awk script `defines.awk', embedded as
+# here-document in config.status, that substitutes the proper values into
+# config.h.in to produce config.h.
+
+# Create a delimiter string that does not exist in confdefs.h, to ease
+# handling of long lines.
+ac_delim='%!_!# '
+for ac_last_try in false false :; do
+  ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
+  if test -z "$ac_tt"; then
+    break
+  elif $ac_last_try; then
+    as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+
+# For the awk script, D is an array of macro values keyed by name,
+# likewise P contains macro parameters if any.  Preserve backslash
+# newline sequences.
+
+ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
+sed -n '
+s/.\{148\}/&'"$ac_delim"'/g
+t rset
+:rset
+s/^[	 ]*#[	 ]*define[	 ][	 ]*/ /
+t def
+d
+:def
+s/\\$//
+t bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3"/p
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2"/p
+d
+:bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3\\\\\\n"\\/p
+t cont
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
+t cont
+d
+:cont
+n
+s/.\{148\}/&'"$ac_delim"'/g
+t clear
+:clear
+s/\\$//
+t bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/"/p
+d
+:bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
+b cont
+' <confdefs.h | sed '
+s/'"$ac_delim"'/"\\\
+"/g' >>$CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  for (key in D) D_is_set[key] = 1
+  FS = ""
+}
+/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
+  line = \$ 0
+  split(line, arg, " ")
+  if (arg[1] == "#") {
+    defundef = arg[2]
+    mac1 = arg[3]
+  } else {
+    defundef = substr(arg[1], 2)
+    mac1 = arg[2]
+  }
+  split(mac1, mac2, "(") #)
+  macro = mac2[1]
+  prefix = substr(line, 1, index(line, defundef) - 1)
+  if (D_is_set[macro]) {
+    # Preserve the white space surrounding the "#".
+    print prefix "define", macro P[macro] D[macro]
+    next
+  } else {
+    # Replace #undef with comments.  This is necessary, for example,
+    # in the case of _POSIX_SOURCE, which is predefined and required
+    # on some systems where configure will not decide to define it.
+    if (defundef == "undef") {
+      print "/*", prefix defundef, macro, "*/"
+      next
+    }
+  }
+}
+{ print }
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+  as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
+fi # test -n "$CONFIG_HEADERS"
+
+
+eval set X "  :F $CONFIG_FILES  :H $CONFIG_HEADERS    :C $CONFIG_COMMANDS"
+shift
+for ac_tag
+do
+  case $ac_tag in
+  :[FHLC]) ac_mode=$ac_tag; continue;;
+  esac
+  case $ac_mode$ac_tag in
+  :[FHL]*:*);;
+  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
+  :[FH]-) ac_tag=-:-;;
+  :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
+  esac
+  ac_save_IFS=$IFS
+  IFS=:
+  set x $ac_tag
+  IFS=$ac_save_IFS
+  shift
+  ac_file=$1
+  shift
+
+  case $ac_mode in
+  :L) ac_source=$1;;
+  :[FH])
+    ac_file_inputs=
+    for ac_f
+    do
+      case $ac_f in
+      -) ac_f="$ac_tmp/stdin";;
+      *) # Look for the file first in the build tree, then in the source tree
+	 # (if the path is not absolute).  The absolute path cannot be DOS-style,
+	 # because $ac_f cannot contain `:'.
+	 test -f "$ac_f" ||
+	   case $ac_f in
+	   [\\/$]*) false;;
+	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
+	   esac ||
+	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
+      esac
+      case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
+      as_fn_append ac_file_inputs " '$ac_f'"
+    done
+
+    # Let's still pretend it is `configure' which instantiates (i.e., don't
+    # use $as_me), people would be surprised to read:
+    #    /* config.h.  Generated by config.status.  */
+    configure_input='Generated from '`
+	  $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
+	`' by configure.'
+    if test x"$ac_file" != x-; then
+      configure_input="$ac_file.  $configure_input"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
+$as_echo "$as_me: creating $ac_file" >&6;}
+    fi
+    # Neutralize special characters interpreted by sed in replacement strings.
+    case $configure_input in #(
+    *\&* | *\|* | *\\* )
+       ac_sed_conf_input=`$as_echo "$configure_input" |
+       sed 's/[\\\\&|]/\\\\&/g'`;; #(
+    *) ac_sed_conf_input=$configure_input;;
+    esac
+
+    case $ac_tag in
+    *:-:* | *:-) cat >"$ac_tmp/stdin" \
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
+    esac
+    ;;
+  esac
+
+  ac_dir=`$as_dirname -- "$ac_file" ||
+$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$ac_file" : 'X\(//\)[^/]' \| \
+	 X"$ac_file" : 'X\(//\)$' \| \
+	 X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$ac_file" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+  as_dir="$ac_dir"; as_fn_mkdir_p
+  ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+  # A ".." for each directory in $ac_dir_suffix.
+  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+  case $ac_top_builddir_sub in
+  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+  esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+  .)  # We are building in place.
+    ac_srcdir=.
+    ac_top_srcdir=$ac_top_builddir_sub
+    ac_abs_top_srcdir=$ac_pwd ;;
+  [\\/]* | ?:[\\/]* )  # Absolute name.
+    ac_srcdir=$srcdir$ac_dir_suffix;
+    ac_top_srcdir=$srcdir
+    ac_abs_top_srcdir=$srcdir ;;
+  *) # Relative name.
+    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+    ac_top_srcdir=$ac_top_build_prefix$srcdir
+    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+
+  case $ac_mode in
+  :F)
+  #
+  # CONFIG_FILE
+  #
+
+  case $INSTALL in
+  [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
+  *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
+  esac
+  ac_MKDIR_P=$MKDIR_P
+  case $MKDIR_P in
+  [\\/$]* | ?:[\\/]* ) ;;
+  */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
+  esac
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# If the template does not know about datarootdir, expand it.
+# FIXME: This hack should be removed a few years after 2.60.
+ac_datarootdir_hack=; ac_datarootdir_seen=
+ac_sed_dataroot='
+/datarootdir/ {
+  p
+  q
+}
+/@datadir@/p
+/@docdir@/p
+/@infodir@/p
+/@localedir@/p
+/@mandir@/p'
+case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
+*datarootdir*) ac_datarootdir_seen=yes;;
+*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
+$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  ac_datarootdir_hack='
+  s&@datadir@&$datadir&g
+  s&@docdir@&$docdir&g
+  s&@infodir@&$infodir&g
+  s&@localedir@&$localedir&g
+  s&@mandir@&$mandir&g
+  s&\\\${datarootdir}&$datarootdir&g' ;;
+esac
+_ACEOF
+
+# Neutralize VPATH when `$srcdir' = `.'.
+# Shell code in configure.ac might set extrasub.
+# FIXME: do we really want to maintain this feature?
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_sed_extra="$ac_vpsub
+$extrasub
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+:t
+/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
+s|@configure_input@|$ac_sed_conf_input|;t t
+s&@top_builddir@&$ac_top_builddir_sub&;t t
+s&@top_build_prefix@&$ac_top_build_prefix&;t t
+s&@srcdir@&$ac_srcdir&;t t
+s&@abs_srcdir@&$ac_abs_srcdir&;t t
+s&@top_srcdir@&$ac_top_srcdir&;t t
+s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
+s&@builddir@&$ac_builddir&;t t
+s&@abs_builddir@&$ac_abs_builddir&;t t
+s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
+s&@INSTALL@&$ac_INSTALL&;t t
+s&@MKDIR_P@&$ac_MKDIR_P&;t t
+$ac_datarootdir_hack
+"
+eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
+  >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+
+test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
+  { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
+  { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' \
+      "$ac_tmp/out"`; test -z "$ac_out"; } &&
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined.  Please make sure it is defined" >&5
+$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined.  Please make sure it is defined" >&2;}
+
+  rm -f "$ac_tmp/stdin"
+  case $ac_file in
+  -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
+  *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
+  esac \
+  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+ ;;
+  :H)
+  #
+  # CONFIG_HEADER
+  #
+  if test x"$ac_file" != x-; then
+    {
+      $as_echo "/* $configure_input  */" \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
+    } >"$ac_tmp/config.h" \
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+    if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
+      { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
+$as_echo "$as_me: $ac_file is unchanged" >&6;}
+    else
+      rm -f "$ac_file"
+      mv "$ac_tmp/config.h" "$ac_file" \
+	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
+    fi
+  else
+    $as_echo "/* $configure_input  */" \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
+      || as_fn_error $? "could not create -" "$LINENO" 5
+  fi
+# Compute "$ac_file"'s index in $config_headers.
+_am_arg="$ac_file"
+_am_stamp_count=1
+for _am_header in $config_headers :; do
+  case $_am_header in
+    $_am_arg | $_am_arg:* )
+      break ;;
+    * )
+      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
+  esac
+done
+echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" ||
+$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$_am_arg" : 'X\(//\)[^/]' \| \
+	 X"$_am_arg" : 'X\(//\)$' \| \
+	 X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$_am_arg" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`/stamp-h$_am_stamp_count
+ ;;
+
+  :C)  { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
+$as_echo "$as_me: executing $ac_file commands" >&6;}
+ ;;
+  esac
+
+
+  case $ac_file$ac_mode in
+    "depfiles":C) test x"$AMDEP_TRUE" != x"" || {
+  # Older Autoconf quotes --file arguments for eval, but not when files
+  # are listed without --file.  Let's play safe and only enable the eval
+  # if we detect the quoting.
+  case $CONFIG_FILES in
+  *\'*) eval set x "$CONFIG_FILES" ;;
+  *)   set x $CONFIG_FILES ;;
+  esac
+  shift
+  for mf
+  do
+    # Strip MF so we end up with the name of the file.
+    mf=`echo "$mf" | sed -e 's/:.*$//'`
+    # Check whether this is an Automake generated Makefile or not.
+    # We used to match only the files named 'Makefile.in', but
+    # some people rename them; so instead we look at the file content.
+    # Grep'ing the first line is not enough: some people post-process
+    # each Makefile.in and add a new line on top of each file to say so.
+    # Grep'ing the whole file is not good either: AIX grep has a line
+    # limit of 2048, but all seds we know of understand at least 4000.
+    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
+      dirpart=`$as_dirname -- "$mf" ||
+$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$mf" : 'X\(//\)[^/]' \| \
+	 X"$mf" : 'X\(//\)$' \| \
+	 X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$mf" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+    else
+      continue
+    fi
+    # Extract the definition of DEPDIR, am__include, and am__quote
+    # from the Makefile without running 'make'.
+    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+    test -z "$DEPDIR" && continue
+    am__include=`sed -n 's/^am__include = //p' < "$mf"`
+    test -z "$am__include" && continue
+    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+    # Find all dependency output files; they are included files with
+    # $(DEPDIR) in their names.  We invoke sed twice because it is the
+    # simplest approach to changing $(DEPDIR) to its actual value in the
+    # expansion.
+    for file in `sed -n "
+      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
+      # Make sure the directory exists.
+      test -f "$dirpart/$file" && continue
+      fdir=`$as_dirname -- "$file" ||
+$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$file" : 'X\(//\)[^/]' \| \
+	 X"$file" : 'X\(//\)$' \| \
+	 X"$file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$file" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      as_dir=$dirpart/$fdir; as_fn_mkdir_p
+      # echo "creating $dirpart/$file"
+      echo '# dummy' > "$dirpart/$file"
+    done
+  done
+}
+ ;;
+    "libtool":C)
+
+    # See if we are running on zsh, and set the options which allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}" ; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile="${ofile}T"
+    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+    $RM "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+# The names of the tagged configurations supported by this script.
+available_tags=""
+
+# ### BEGIN LIBTOOL CONFIG
+
+# Which release of libtool.m4 was used?
+macro_version=$macro_version
+macro_revision=$macro_revision
+
+# Whether or not to build shared libraries.
+build_libtool_libs=$enable_shared
+
+# Whether or not to build static libraries.
+build_old_libs=$enable_static
+
+# What type of objects to build.
+pic_mode=$pic_mode
+
+# Whether or not to optimize for fast installation.
+fast_install=$enable_fast_install
+
+# Shell to use when invoking shell scripts.
+SHELL=$lt_SHELL
+
+# An echo program that protects backslashes.
+ECHO=$lt_ECHO
+
+# The PATH separator for the build system.
+PATH_SEPARATOR=$lt_PATH_SEPARATOR
+
+# The host system.
+host_alias=$host_alias
+host=$host
+host_os=$host_os
+
+# The build system.
+build_alias=$build_alias
+build=$build
+build_os=$build_os
+
+# A sed program that does not truncate output.
+SED=$lt_SED
+
+# Sed that helps us avoid accidentally triggering echo(1) options like -n.
+Xsed="\$SED -e 1s/^X//"
+
+# A grep program that handles long lines.
+GREP=$lt_GREP
+
+# An ERE matcher.
+EGREP=$lt_EGREP
+
+# A literal string matcher.
+FGREP=$lt_FGREP
+
+# A BSD- or MS-compatible name lister.
+NM=$lt_NM
+
+# Whether we need soft or hard links.
+LN_S=$lt_LN_S
+
+# What is the maximum length of a command?
+max_cmd_len=$max_cmd_len
+
+# Object file suffix (normally "o").
+objext=$ac_objext
+
+# Executable file suffix (normally "").
+exeext=$exeext
+
+# whether the shell understands "unset".
+lt_unset=$lt_unset
+
+# turn spaces into newlines.
+SP2NL=$lt_lt_SP2NL
+
+# turn newlines into spaces.
+NL2SP=$lt_lt_NL2SP
+
+# convert \$build file names to \$host format.
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+
+# convert \$build files to toolchain format.
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+
+# An object symbol dumper.
+OBJDUMP=$lt_OBJDUMP
+
+# Method to check whether dependent libraries are shared objects.
+deplibs_check_method=$lt_deplibs_check_method
+
+# Command to use when deplibs_check_method = "file_magic".
+file_magic_cmd=$lt_file_magic_cmd
+
+# How to find potential files when deplibs_check_method = "file_magic".
+file_magic_glob=$lt_file_magic_glob
+
+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
+want_nocaseglob=$lt_want_nocaseglob
+
+# DLL creation program.
+DLLTOOL=$lt_DLLTOOL
+
+# Command to associate shared and link libraries.
+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
+
+# The archiver.
+AR=$lt_AR
+
+# Flags to create an archive.
+AR_FLAGS=$lt_AR_FLAGS
+
+# How to feed a file listing to the archiver.
+archiver_list_spec=$lt_archiver_list_spec
+
+# A symbol stripping program.
+STRIP=$lt_STRIP
+
+# Commands used to install an old-style archive.
+RANLIB=$lt_RANLIB
+old_postinstall_cmds=$lt_old_postinstall_cmds
+old_postuninstall_cmds=$lt_old_postuninstall_cmds
+
+# Whether to use a lock for old archive extraction.
+lock_old_archive_extraction=$lock_old_archive_extraction
+
+# A C compiler.
+LTCC=$lt_CC
+
+# LTCC compiler flags.
+LTCFLAGS=$lt_CFLAGS
+
+# Take the output of nm and produce a listing of raw symbols and C names.
+global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe
+
+# Transform the output of nm in a proper C declaration.
+global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl
+
+# Transform the output of nm in a C name address pair.
+global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+
+# Transform the output of nm in a C name address pair when lib prefix is needed.
+global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
+# Specify filename containing input files for \$NM.
+nm_file_list_spec=$lt_nm_file_list_spec
+
+# The root where to search for dependent libraries, and in which our libraries should be installed.
+lt_sysroot=$lt_sysroot
+
+# The name of the directory that contains temporary libtool files.
+objdir=$objdir
+
+# Used to examine libraries when file_magic_cmd begins with "file".
+MAGIC_CMD=$MAGIC_CMD
+
+# Must we lock files when doing compilation?
+need_locks=$lt_need_locks
+
+# Manifest tool.
+MANIFEST_TOOL=$lt_MANIFEST_TOOL
+
+# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+DSYMUTIL=$lt_DSYMUTIL
+
+# Tool to change global to local symbols on Mac OS X.
+NMEDIT=$lt_NMEDIT
+
+# Tool to manipulate fat objects and archives on Mac OS X.
+LIPO=$lt_LIPO
+
+# ldd/readelf like tool for Mach-O binaries on Mac OS X.
+OTOOL=$lt_OTOOL
+
+# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
+OTOOL64=$lt_OTOOL64
+
+# Old archive suffix (normally "a").
+libext=$libext
+
+# Shared library suffix (normally ".so").
+shrext_cmds=$lt_shrext_cmds
+
+# The commands to extract the exported symbol list from a shared archive.
+extract_expsyms_cmds=$lt_extract_expsyms_cmds
+
+# Variables whose values should be saved in libtool wrapper scripts and
+# restored at link time.
+variables_saved_for_relink=$lt_variables_saved_for_relink
+
+# Do we need the "lib" prefix for modules?
+need_lib_prefix=$need_lib_prefix
+
+# Do we need a version for libraries?
+need_version=$need_version
+
+# Library versioning type.
+version_type=$version_type
+
+# Shared library runtime path variable.
+runpath_var=$runpath_var
+
+# Shared library path variable.
+shlibpath_var=$shlibpath_var
+
+# Is shlibpath searched before the hard-coded library search path?
+shlibpath_overrides_runpath=$shlibpath_overrides_runpath
+
+# Format of library name prefix.
+libname_spec=$lt_libname_spec
+
+# List of archive names.  First name is the real one, the rest are links.
+# The last name is the one that the linker finds with -lNAME
+library_names_spec=$lt_library_names_spec
+
+# The coded name of the library, if different from the real name.
+soname_spec=$lt_soname_spec
+
+# Permission mode override for installation of shared libraries.
+install_override_mode=$lt_install_override_mode
+
+# Command to use after installation of a shared archive.
+postinstall_cmds=$lt_postinstall_cmds
+
+# Command to use after uninstallation of a shared archive.
+postuninstall_cmds=$lt_postuninstall_cmds
+
+# Commands used to finish a libtool library installation in a directory.
+finish_cmds=$lt_finish_cmds
+
+# As "finish_cmds", except a single script fragment to be evaled but
+# not shown.
+finish_eval=$lt_finish_eval
+
+# Whether we should hardcode library paths into libraries.
+hardcode_into_libs=$hardcode_into_libs
+
+# Compile-time system search path for libraries.
+sys_lib_search_path_spec=$lt_sys_lib_search_path_spec
+
+# Run-time system search path for libraries.
+sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec
+
+# Whether dlopen is supported.
+dlopen_support=$enable_dlopen
+
+# Whether dlopen of programs is supported.
+dlopen_self=$enable_dlopen_self
+
+# Whether dlopen of statically linked programs is supported.
+dlopen_self_static=$enable_dlopen_self_static
+
+# Commands to strip libraries.
+old_striplib=$lt_old_striplib
+striplib=$lt_striplib
+
+
+# The linker used to build libraries.
+LD=$lt_LD
+
+# How to create reloadable object files.
+reload_flag=$lt_reload_flag
+reload_cmds=$lt_reload_cmds
+
+# Commands used to build an old-style archive.
+old_archive_cmds=$lt_old_archive_cmds
+
+# A language specific compiler.
+CC=$lt_compiler
+
+# Is the compiler the GNU compiler?
+with_gcc=$GCC
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+# Additional compiler flags for building library objects.
+pic_flag=$lt_lt_prog_compiler_pic
+
+# How to pass a linker flag through the compiler.
+wl=$lt_lt_prog_compiler_wl
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=$lt_lt_prog_compiler_static
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=$lt_lt_cv_prog_compiler_c_o
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=$archive_cmds_need_lc
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=$lt_export_dynamic_flag_spec
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=$lt_whole_archive_flag_spec
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=$lt_compiler_needs_object
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=$lt_old_archive_from_new_cmds
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds
+
+# Commands used to build a shared archive.
+archive_cmds=$lt_archive_cmds
+archive_expsym_cmds=$lt_archive_expsym_cmds
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=$lt_module_cmds
+module_expsym_cmds=$lt_module_expsym_cmds
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=$lt_with_gnu_ld
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=$lt_allow_undefined_flag
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=$lt_no_undefined_flag
+
+# Flag to hardcode \$libdir into a binary during linking.
+# This must work even if \$libdir does not exist
+hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=$lt_hardcode_libdir_separator
+
+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=$hardcode_direct
+
+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting \${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=$hardcode_direct_absolute
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=$hardcode_minus_L
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=$hardcode_shlibpath_var
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=$hardcode_automatic
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=$inherit_rpath
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=$link_all_deplibs
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=$always_export_symbols
+
+# The commands to list exported symbols.
+export_symbols_cmds=$lt_export_symbols_cmds
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=$lt_exclude_expsyms
+
+# Symbols that must always be exported.
+include_expsyms=$lt_include_expsyms
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=$lt_prelink_cmds
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=$lt_postlink_cmds
+
+# Specify filename containing input files.
+file_list_spec=$lt_file_list_spec
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=$hardcode_action
+
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+
+ltmain="$ac_aux_dir/ltmain.sh"
+
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '$q' "$ltmain" >> "$cfgfile" \
+     || (rm -f "$cfgfile"; exit 1)
+
+  if test x"$xsi_shell" = xyes; then
+  sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
+func_dirname ()\
+{\
+\    case ${1} in\
+\      */*) func_dirname_result="${1%/*}${2}" ;;\
+\      *  ) func_dirname_result="${3}" ;;\
+\    esac\
+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_basename ()$/,/^} # func_basename /c\
+func_basename ()\
+{\
+\    func_basename_result="${1##*/}"\
+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
+func_dirname_and_basename ()\
+{\
+\    case ${1} in\
+\      */*) func_dirname_result="${1%/*}${2}" ;;\
+\      *  ) func_dirname_result="${3}" ;;\
+\    esac\
+\    func_basename_result="${1##*/}"\
+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
+func_stripname ()\
+{\
+\    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
+\    # positional parameters, so assign one to ordinary parameter first.\
+\    func_stripname_result=${3}\
+\    func_stripname_result=${func_stripname_result#"${1}"}\
+\    func_stripname_result=${func_stripname_result%"${2}"}\
+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
+func_split_long_opt ()\
+{\
+\    func_split_long_opt_name=${1%%=*}\
+\    func_split_long_opt_arg=${1#*=}\
+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
+func_split_short_opt ()\
+{\
+\    func_split_short_opt_arg=${1#??}\
+\    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
+func_lo2o ()\
+{\
+\    case ${1} in\
+\      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
+\      *)    func_lo2o_result=${1} ;;\
+\    esac\
+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_xform ()$/,/^} # func_xform /c\
+func_xform ()\
+{\
+    func_xform_result=${1%.*}.lo\
+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_arith ()$/,/^} # func_arith /c\
+func_arith ()\
+{\
+    func_arith_result=$(( $* ))\
+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_len ()$/,/^} # func_len /c\
+func_len ()\
+{\
+    func_len_result=${#1}\
+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+fi
+
+if test x"$lt_shell_append" = xyes; then
+  sed -e '/^func_append ()$/,/^} # func_append /c\
+func_append ()\
+{\
+    eval "${1}+=\\${2}"\
+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
+func_append_quoted ()\
+{\
+\    func_quote_for_eval "${2}"\
+\    eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  # Save a `func_append' function call where possible by direct use of '+='
+  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+else
+  # Save a `func_append' function call even when '+=' is not available
+  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+fi
+
+if test x"$_lt_function_replace_fail" = x":"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
+fi
+
+
+   mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+
+ ;;
+
+  esac
+done # for ac_tag
+
+
+as_fn_exit 0
+_ACEOF
+ac_clean_files=$ac_clean_files_save
+
+test $ac_write_fail = 0 ||
+  as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
+
+
+# configure is writing to config.log, and then calls config.status.
+# config.status does its own redirection, appending to config.log.
+# Unfortunately, on DOS this fails, as config.log is still kept open
+# by configure, so config.status won't be able to write to it; its
+# output is simply discarded.  So we exec the FD to /dev/null,
+# effectively closing config.log, so it can be properly (re)opened and
+# appended to by config.status.  When coming back to configure, we
+# need to make the FD available again.
+if test "$no_create" != yes; then
+  ac_cs_success=:
+  ac_config_status_args=
+  test "$silent" = yes &&
+    ac_config_status_args="$ac_config_status_args --quiet"
+  exec 5>/dev/null
+  $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
+  exec 5>>config.log
+  # Use ||, not &&, to avoid exiting from the if with $? = 1, which
+  # would make configure fail if this is the last instruction.
+  $ac_cs_success || as_fn_exit 1
+fi
+if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
+$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
+fi
+
+
+#
+# Summarize the package configuration
+#
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}:
+
+  Configuration Summary
+  ---------------------
+  Package                                   : ${PACKAGE_NAME}
+  Version                                   : ${PACKAGE_VERSION}
+  Interface                                 : ${LIBNLUNITTEST_VERSION_INFO//:/.}
+  Build system                              : ${build}
+  Host system                               : ${host}
+  Target system                             : ${target}
+  Target architecture                       : ${target_cpu}
+  Target OS                                 : ${target_os}
+  Cross compiling                           : ${cross_compiling}
+  Build shared libraries                    : ${enable_shared}
+  Build static libraries                    : ${enable_static}
+  Build optimized libraries                 : ${nl_cv_build_optimized}
+  Prefix                                    : ${prefix}
+  Documentation support                     : ${nl_cv_build_docs}
+  Doxygen                                   : ${DOXYGEN:--}
+  GraphViz dot                              : ${DOT:--}
+  PERL                                      : ${PERL:--}
+  C Preprocessor                            : ${CPP}
+  C Compiler                                : ${CC}
+  Archiver                                  : ${AR}
+  Archive Indexer                           : ${RANLIB}
+  Symbol Stripper                           : ${STRIP}
+  Object Copier                             : ${OBJCOPY}
+  C Preprocessor flags                      : ${CPPFLAGS:--}
+  C Compile flags                           : ${CFLAGS:--}
+  Link flags                                : ${LDFLAGS:--}
+  Link libraries                            : ${LIBS}
+
+" >&5
+$as_echo "$as_me:
+
+  Configuration Summary
+  ---------------------
+  Package                                   : ${PACKAGE_NAME}
+  Version                                   : ${PACKAGE_VERSION}
+  Interface                                 : ${LIBNLUNITTEST_VERSION_INFO//:/.}
+  Build system                              : ${build}
+  Host system                               : ${host}
+  Target system                             : ${target}
+  Target architecture                       : ${target_cpu}
+  Target OS                                 : ${target_os}
+  Cross compiling                           : ${cross_compiling}
+  Build shared libraries                    : ${enable_shared}
+  Build static libraries                    : ${enable_static}
+  Build optimized libraries                 : ${nl_cv_build_optimized}
+  Prefix                                    : ${prefix}
+  Documentation support                     : ${nl_cv_build_docs}
+  Doxygen                                   : ${DOXYGEN:--}
+  GraphViz dot                              : ${DOT:--}
+  PERL                                      : ${PERL:--}
+  C Preprocessor                            : ${CPP}
+  C Compiler                                : ${CC}
+  Archiver                                  : ${AR}
+  Archive Indexer                           : ${RANLIB}
+  Symbol Stripper                           : ${STRIP}
+  Object Copier                             : ${OBJCOPY}
+  C Preprocessor flags                      : ${CPPFLAGS:--}
+  C Compile flags                           : ${CFLAGS:--}
+  Link flags                                : ${LDFLAGS:--}
+  Link libraries                            : ${LIBS}
+
+" >&6;}
+
+
+
diff --git a/nl-unit-test/configure.ac b/nl-unit-test/configure.ac
new file mode 100644
index 0000000..cccc64e
--- /dev/null
+++ b/nl-unit-test/configure.ac
@@ -0,0 +1,298 @@
+#                                               -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU autoconf input source file for the Nest
+#      Labs Unit Test (nlunittest) package.
+#
+
+#
+# Declare autoconf version requirements
+#
+AC_PREREQ([2.68])
+
+#
+# Initialize autoconf for the package
+#
+AC_INIT([nlunittest],
+        m4_esyscmd([third_party/nlbuild-autotools/repo/scripts/mkversion -b `cat .default-version` .]),
+        [developer@nest.com],
+        [nlunittest],
+        [https://developer.nest.com/])
+
+# Tell the rest of the build system the absolute path where the
+# nlbuild-autotools repository is rooted at.
+
+AC_SUBST(nlbuild_autotools_stem,[third_party/nlbuild-autotools/repo])
+AC_SUBST(abs_top_nlbuild_autotools_dir,[\${abs_top_srcdir}/\${nlbuild_autotools_stem}])
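+
+# As a sketch of how these substitutions are typically consumed (the
+# automake fragment names below are assumptions based on the usual
+# nlbuild-autotools layout, not taken from this package), a Makefile.am
+# would include the shared build glue via:
+#
+#   include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+#   include $(abs_top_nlbuild_autotools_dir)/automake/post.am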
+
+#
+# NLUNITTEST interface current, revision, and age versions.
+#
+# NOTE: At present, NLUNITTEST makes NO ABI compatibility
+#       commitments. Consequently, these simply serve as documentation
+#       for how the interfaces have evolved.
+#
+# Maintainers: Please manage these fields as follows:
+#
+#   Interfaces removed:    CURRENT++, AGE = 0, REVISION = 0
+#   Interfaces added:      CURRENT++, AGE++,   REVISION = 0
+#   No interfaces changed:                     REVISION++
+#
+#
+AC_SUBST(LIBNLUNITTEST_VERSION_CURRENT,  [1])
+AC_SUBST(LIBNLUNITTEST_VERSION_AGE,      [0])
+AC_SUBST(LIBNLUNITTEST_VERSION_REVISION, [0])
+AC_SUBST(LIBNLUNITTEST_VERSION_INFO,     [${LIBNLUNITTEST_VERSION_CURRENT}:${LIBNLUNITTEST_VERSION_REVISION}:${LIBNLUNITTEST_VERSION_AGE}])
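+
+# For illustration only (the library target name below is an assumption,
+# not something defined here), a libtool library's Makefile.am would
+# normally hand this triple to libtool as:
+#
+#   lib_LTLIBRARIES          = libnlunittest.la
+#   libnlunittest_la_LDFLAGS = -version-info $(LIBNLUNITTEST_VERSION_INFO)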
+
+#
+# Check the sanity of the source directory by checking for the
+# presence of a key watch file
+#
+AC_CONFIG_SRCDIR([src/nltest.h])
+
+#
+# Tell autoconf where to find auxiliary build tools (e.g. config.guess,
+# install-sh, missing, etc.)
+#
+AC_CONFIG_AUX_DIR([third_party/nlbuild-autotools/repo/autoconf])
+
+#
+# Tell autoconf where to find auxiliary M4 macros
+#
+AC_CONFIG_MACRO_DIR([third_party/nlbuild-autotools/repo/autoconf/m4])
+
+#
+# Tell autoconf what file the package is using to aggregate C preprocessor
+# defines.
+#
+AC_CONFIG_HEADERS([src/nlunittest-config.h])
+
+#
+# Figure out what the canonical build, host and target tuples are.
+#
+AC_CANONICAL_BUILD
+AC_CANONICAL_HOST
+AC_CANONICAL_TARGET
+
+#
+# Mac OS X / Darwin ends up putting some versioning cruft on the end of its
+# tuple that we don't care about in this script. Create "clean" variables
+# devoid of it.
+#
+
+NL_FILTERED_CANONICAL_BUILD
+NL_FILTERED_CANONICAL_HOST
+NL_FILTERED_CANONICAL_TARGET
+
+#
+# Configure automake with the desired options, indicating that this is not
+# a native GNU package, that we want "silent" build rules, and that we want
+# objects built in the same subdirectory as their source rather than collapsed
+# together at the top-level directory.
+#
+# Disable silent build rules by either passing --disable-silent-rules to
+# configure or passing V=1 to make
+#
+AM_INIT_AUTOMAKE([1.14 foreign silent-rules subdir-objects tar-pax])
+
+#
+# Silent build rules require at least automake-1.11. Employ
+# techniques for not breaking earlier versions of automake.
+#
+m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([yes])])
+AM_SILENT_RULES([yes])
+
+#
+# Enable maintainer mode to prevent the package from constantly trying
+# to rebuild configure, Makefile.in, etc. Rebuilding such files rarely,
+# if ever, needs to be done "in the field".
+#
+# Use the included 'bootstrap' script instead when necessary.
+#
+AM_MAINTAINER_MODE
+
+#
+# Checks for build host programs
+#
+
+# If we are cross-compiling and we are on an embedded target that
+# doesn't support independent, standalone executables, then all
+# compiler tests that attempt to create an executable will fail. In
+# such circumstances, set AC_NO_EXECUTABLES (see http://sourceware.org/
+# ml/newlib/2006/msg00353.html).
+
+AC_MSG_CHECKING([whether to disable executable checking])
+if test "$cross_compiling" = yes; then
+    AC_NO_EXECUTABLES
+    AC_MSG_RESULT([yes])
+else
+    AC_MSG_RESULT([no])
+fi
+
+# Passing -Werror to GCC-based or -compatible compilers breaks some
+# autoconf tests (see
+# http://lists.gnu.org/archive/html/autoconf-patches/2008-09/msg00014.html).
+#
+# If -Werror has been passed transform it into -Wno-error. We'll
+# transform it back later with NL_RESTORE_WERROR.
+
+NL_SAVE_WERROR
+
+# Check for compilers.
+#
+# These should be checked BEFORE we check for and, implicitly,
+# initialize libtool such that libtool knows what languages it has to
+# work with.
+
+AC_PROG_CPP
+AC_PROG_CPP_WERROR
+
+AC_PROG_CC
+AC_PROG_CC_C_O
+
+# Check for other compiler toolchain tools.
+
+AC_CHECK_TOOL(AR, ar)
+AC_CHECK_TOOL(RANLIB, ranlib)
+AC_CHECK_TOOL(OBJCOPY, objcopy)
+AC_CHECK_TOOL(STRIP, strip)
+
+# Check for other host tools.
+
+AC_PROG_INSTALL
+AC_PROG_LN_S
+
+AC_PATH_PROG(CMP, cmp)
+AC_PATH_PROG(PERL, perl)
+
+#
+# Checks for specific compiler characteristics
+#
+
+#
+# Common compiler flags we would like to have.
+#
+#   -Wall                        CC, CXX
+#
+
+PROSPECTIVE_CFLAGS="-Wall"
+
+AX_CHECK_COMPILER_OPTIONS([C],   ${PROSPECTIVE_CFLAGS})
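+
+# Additional diagnostics could be probed the same way; the flags below are
+# illustrative candidates only and are not part of this package's
+# configuration. Assuming AX_CHECK_COMPILER_OPTIONS keeps only the options
+# the compiler actually accepts, unsupported flags are simply dropped:
+#
+#   PROSPECTIVE_CFLAGS="-Wall -Wextra -Wshadow"
+#
+#   AX_CHECK_COMPILER_OPTIONS([C],   ${PROSPECTIVE_CFLAGS})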
+
+# Check for and initialize libtool
+
+LT_INIT
+
+#
+# Debug instances
+#
+AC_MSG_NOTICE([checking whether to build debug instances])
+
+# Debug
+
+NL_ENABLE_DEBUG([no])
+
+AM_CONDITIONAL([NLUNITTEST_BUILD_DEBUG], [test "${nl_cv_build_debug}" = "yes"])
+
+# Optimization
+
+NL_ENABLE_OPTIMIZATION([yes])
+
+AM_CONDITIONAL([NLUNITTEST_BUILD_OPTIMIZED], [test "${nl_cv_build_optimized}" = "yes"])
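+
+# By way of example, and assuming the NL_ENABLE_* macros follow the usual
+# --enable-<feature>/--disable-<feature> option convention, the defaults
+# above can be overridden at configure time with an invocation such as:
+#
+#   ./configure --enable-debug --disable-optimization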
+
+#
+# Documentation
+#
+
+# Determine whether documentation (via Doxygen) should be built, with
+# 'auto' as the default, and establish a default value for GraphViz
+# 'dot' support.
+
+NL_ENABLE_DOCS([auto],[NO])
+
+AM_CONDITIONAL(NLUNITTEST_BUILD_DOCS, [test "${nl_cv_build_docs}" = "yes"])
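+
+# Likewise, again assuming the conventional option spelling, documentation
+# generation can be forced on or off rather than left to 'auto':
+#
+#   ./configure --enable-docs
+#   ./configure --disable-docs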
+
+#
+# Check for headers
+#
+AC_HEADER_STDBOOL
+AC_HEADER_STDC
+
+# At this point, we can restore the compiler flags to whatever the
+# user passed in, now that we're clear of any -Werror issues by
+# transforming -Wno-error back to -Werror.
+
+NL_RESTORE_WERROR
+
+#
+# Identify the various makefiles and auto-generated files for the package
+#
+AC_CONFIG_FILES([
+Makefile
+src/Makefile
+doc/Makefile
+])
+
+#
+# Generate the auto-generated files for the package
+#
+AC_OUTPUT
+
+#
+# Summarize the package configuration
+#
+
+AC_MSG_NOTICE([
+
+  Configuration Summary
+  ---------------------
+  Package                                   : ${PACKAGE_NAME}
+  Version                                   : ${PACKAGE_VERSION}
+  Interface                                 : ${LIBNLUNITTEST_VERSION_INFO//:/.}
+  Build system                              : ${build}
+  Host system                               : ${host}
+  Target system                             : ${target}
+  Target architecture                       : ${target_cpu}
+  Target OS                                 : ${target_os}
+  Cross compiling                           : ${cross_compiling}
+  Build shared libraries                    : ${enable_shared}
+  Build static libraries                    : ${enable_static}
+  Build optimized libraries                 : ${nl_cv_build_optimized}
+  Prefix                                    : ${prefix}
+  Documentation support                     : ${nl_cv_build_docs}
+  Doxygen                                   : ${DOXYGEN:--}
+  GraphViz dot                              : ${DOT:--}
+  PERL                                      : ${PERL:--}
+  C Preprocessor                            : ${CPP}
+  C Compiler                                : ${CC}
+  Archiver                                  : ${AR}
+  Archive Indexer                           : ${RANLIB}
+  Symbol Stripper                           : ${STRIP}
+  Object Copier                             : ${OBJCOPY}
+  C Preprocessor flags                      : ${CPPFLAGS:--}
+  C Compile flags                           : ${CFLAGS:--}
+  Link flags                                : ${LDFLAGS:--}
+  Link libraries                            : ${LIBS}
+
+])
+
+
+
diff --git a/nl-unit-test/doc/Doxyfile.in b/nl-unit-test/doc/Doxyfile.in
new file mode 100644
index 0000000..4475114
--- /dev/null
+++ b/nl-unit-test/doc/Doxyfile.in
@@ -0,0 +1,2371 @@
+# Doxyfile 1.8.6
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file describes the settings to be used by the
+#      documentation system doxygen (www.doxygen.org) for nlunittest.
+#
+#      This was initially autogenerated by 'doxywizard' and then hand-tuned.
+#
+#      All text after a hash (#) is considered a comment and will be
+#      ignored.
+#
+#      The format is:
+#
+#          TAG = value [value, ...]
+#
+#      For lists items can also be appended using:
+#
+#          TAG += value [value, ...]
+#
+#      Values that contain spaces should be placed between quotes (" ")
+#
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all text
+# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
+# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
+# for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING      = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME           = @PACKAGE_NAME@
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER         = @PACKAGE_VERSION@
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give the viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF          =
+
+# With the PROJECT_LOGO tag one can specify a logo or icon that is included in
+# the documentation. The maximum height of the logo should not exceed 55 pixels
+# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo
+# to the output directory.
+
+PROJECT_LOGO           =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY       = @abs_builddir@
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS         = YES
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE        = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC      = YES
+
+# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF           = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity): The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF       = "The $name class" \
+                         "The $name widget" \
+                         "The $name file" \
+                         is \
+                         provides \
+                         specifies \
+                         contains \
+                         represents \
+                         a \
+                         an \
+                         the
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC    = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB  = NO
+
+# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path
+# before files name in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used
+# The default value is: YES.
+
+FULL_PATH_NAMES        = YES
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH        = @abs_top_srcdir@ \
+                         @abs_top_builddir@
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH    = @abs_top_srcdir@
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES            = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF      = YES
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF           = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that rational rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS           = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a
+# new page for each member. If set to NO, the documentation of a member will be
+# part of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES  = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE               = 4
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines.
+
+ALIASES                =
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST              =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C  = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA   = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN   = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL   = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make
+# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C
+# (default is Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without an extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING      =
+
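+# Editorial note (illustrative only; left empty above): a mapping such as
+#
+#   EXTENSION_MAPPING = no_extension=C
+#
+# would make doxygen parse extensionless files with the C parser (see also the
+# FILE_PATTERNS note above for custom extensions).
+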
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT       = YES
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word
+# or globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT       = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match function declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT    = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT        = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT            = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will
+# make doxygen replace the get and set methods with a property in the
+# documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT   = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC   = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING            = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS  = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT   = NO
+
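+# Editorial illustration (hypothetical type): with the default NO kept above,
+# a declaration such as
+#
+#   typedef struct ExampleContextS { int failures; } ExampleContextT;
+#
+# is documented as struct ExampleContextS, with ExampleContextT appearing as a
+# typedef member of the enclosing file; setting the tag to YES would instead
+# document it as a struct named ExampleContextT.
+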
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE      = 0
+
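+# Editorial note: as a worked example of the formula above, LOOKUP_CACHE_SIZE
+# set to 2 would give a cache of 2^(16+2) = 262144 symbols; the default 0 kept
+# above corresponds to 2^16 = 65536 symbols.
+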
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE and EXTRACT_STATIC tags, respectively, are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL            = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE        = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE        = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC         = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES  = YES
+
+# This flag is only useful for Objective-C code. When set to YES local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS  = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default, anonymous
+# namespaces are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES   = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS     = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO these classes will be included in the various overviews. This option has
+# no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES     = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS  = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS      = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS          = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES       = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES       = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES     = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC  = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES   = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO            = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS       = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS        = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES       = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME     = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING  = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the
+# todo list. This list is created by putting \todo commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST      = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the
+# test list. This list is created by putting \test commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST      = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST       = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS       =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES  = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES the list
+# will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES        = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES             = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES        = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output is used
+# as the file version. For an example see the documentation.
+
+FILE_VERSION_FILTER    =
+
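+# Editorial illustration (hypothetical, not enabled here): because doxygen
+# appends the input file name to the command, a git-based filter such as
+#
+#   FILE_VERSION_FILTER = "git log -1 --format=%h --"
+#
+# would label each file with the abbreviated hash of the last commit that
+# touched it, assuming the sources live in a git checkout.
+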
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE            =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. Do not use file names with spaces, bibtex cannot handle them. See
+# also \cite for info how to create references.
+
+CITE_BIB_FILES         =
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET                  = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS               = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED   = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR      = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO doxygen will only warn about wrong or incomplete parameter
+# documentation, but not about the absence of documentation.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC       = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT            = "$file:$line: $text"
+
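+# Editorial note (illustrative only; the default format is kept above):
+# editors that expect "file(line): message" style diagnostics can be matched
+# with, for example,
+#
+#   WARN_FORMAT = "$file($line): $text"
+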
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE           =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces.
+# Note: If this tag is empty the current directory is searched.
+
+INPUT                  = @abs_top_srcdir@/src \
+                         @abs_top_srcdir@/doc \
+                         @abs_top_srcdir@/README.md
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING         = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank the
+# following patterns are tested: *.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
+# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
+# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
+# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
+# *.qsf, *.as and *.js.
+
+FILE_PATTERNS          = *.c \
+                         *.cc \
+                         *.cxx \
+                         *.cpp \
+                         *.c++ \
+                         *.d \
+                         *.java \
+                         *.ii \
+                         *.ixx \
+                         *.ipp \
+                         *.i++ \
+                         *.inl \
+                         *.h \
+                         *.hh \
+                         *.hxx \
+                         *.hpp \
+                         *.h++ \
+                         *.idl \
+                         *.odl \
+                         *.cs \
+                         *.php \
+                         *.php3 \
+                         *.inc \
+                         *.m \
+                         *.mm \
+                         *.dox \
+                         *.py \
+                         *.f90 \
+                         *.f \
+                         *.for \
+                         *.vhd \
+                         *.vhdl
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE              = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE                =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS       = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS       =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS        = _NL_TEST_ASSIGN
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH           =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS       = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE      = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH             =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+
+INPUT_FILTER           =
+
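+# Editorial illustration (hypothetical macro and filter; not enabled here):
+# a filter is any program that is handed the file name and writes the
+# transformed source to standard output, for instance
+#
+#   INPUT_FILTER = "sed -e 's/EXAMPLE_DLL_EXPORT//g'"
+#
+# which would strip a hypothetical export macro before doxygen parses the
+# sources, while keeping the line count unchanged as required above.
+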
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS        =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES    = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERNS (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project hosted on, for
+# instance, GitHub and want to reuse the introduction page for the doxygen
+# output as well.
+
+USE_MDFILE_AS_MAINPAGE = README.md
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER         = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES         = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS    = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION    = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES, then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS        = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see http://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS              = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS       = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX     = YES
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX    = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX          =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML          = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT            = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION    = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML, the header file must include any scripts and style sheets
+# that doxygen needs, which depend on the configuration options used (e.g.
+# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
+# default header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER            =
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER            =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET        =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user-
+# defined cascading style sheet that is included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet file to the output directory. For an example
+# see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET  =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES       =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the stylesheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# is purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE    = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT    = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA  = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP         = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS  = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries to 1 will produce a fully collapsed tree by default. 0 is a special
+# value representing an infinite number of entries and will result in a fully
+# expanded tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET        = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME  = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP      = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE               =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION           =
+
+# The GENERATE_CHI flag controls whether a separate .chi index file is
+# generated (YES) or included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI           = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING     =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC             = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND             = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP           = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE               =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE          = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER     = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME   =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS  =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS  =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION           =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated; together with the HTML files they form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files needs
+# to be copied into the plugins directory of eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying, Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP   = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID         = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX          = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW      = YES
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE   = 1
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH         = 250
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW    = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE       = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly by IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes have effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT    = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using prerendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output. When
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX            = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT         = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS     =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE       =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow; in that
+# case enabling SERVER_BASED_SEARCH may provide a better solution. It is
+# possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE           = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavours of web server based searching depending on the
+# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for
+# searching and an index file used by the script. When EXTERNAL_SEARCH is
+# enabled the indexing and searching needs to be provided by external tools. See
+# the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH    = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH        = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL       =
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE        = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID     =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
+# to a relative location where the documentation can be found. The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS  =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX         = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT           = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when enabling USE_PDFLATEX this option is only used for generating
+# bitmaps for formulas in the HTML output, but not in the Makefile that is
+# written to the output directory.
+# The default file is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME         = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME     = makeindex
+
+# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX          = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE             = letter
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. To get the times font for
+# instance you can specify
+# EXTRA_PACKAGES=times
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES         =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber. Doxygen will
+# replace them by respectively the title of the page, the current date and time,
+# only the current date, the version number of doxygen, the project name (see
+# PROJECT_NAME), or the project number (see PROJECT_NUMBER).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER           =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER           =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES      =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS         = YES
+
+# If the LATEX_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES to get a
+# higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX           = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE        = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES     = NO
+
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_SOURCE_CODE      = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE        = plain
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF           = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT             = rtf
+
+# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF            = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS         = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's config
+# file, i.e. a series of assignments. You only have to provide replacements;
+# missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE    =
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's config file. A template extensions file can be generated
+# using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE    =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN           = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT             = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION          = .3
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS              = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML           = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT             = xml
+
+# The XML_SCHEMA tag can be used to specify a XML schema, which can be used by a
+# validating XML parser to check the syntax of the XML files.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_SCHEMA             =
+
+# The XML_DTD tag can be used to specify a XML DTD, which can be used by a
+# validating XML parser to check the syntax of the XML files.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_DTD                =
+
+# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING     = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK       = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT         = docbook
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen
+# Definitions (see http://autogen.sf.net) file that captures the structure of
+# the code including all documentation. Note that this feature is still
+# experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF   = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD       = NO
+
+# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX          = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY         = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING   = YES
+
+# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names
+# in the source code. If set to NO only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION        = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF     = NO
+
+# If the SEARCH_INCLUDES tag is set to YES the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES        = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH           =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS  =
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+PREDEFINED             =
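+
+# For illustration only (both macro names below are hypothetical and not part
+# of this package): the first entry is treated as MY_FEATURE_FLAG=1, while the
+# second expands MY_API_DECORATION to nothing:
+#
+#   PREDEFINED = MY_FEATURE_FLAG MY_API_DECORATION=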
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED      =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have an
+# all uppercase name, and do not end with a semicolon. Such function macros are
+# typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS   = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES               =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE       =
+
+# If the ALLEXTERNALS tag is set to YES all external class will be listed in the
+# class index. If set to NO only the inherited external classes will be listed.
+# The default value is: NO.
+
+ALLEXTERNALS           = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in
+# the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS        = YES
+
+# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES         = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of 'which perl').
+# The default file (with absolute path) is: /usr/bin/perl.
+
+PERL_PATH              = @PERL@
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
+
+CLASS_DIAGRAMS         = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see:
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH            =
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH               =
+
+# If set to YES, the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS   = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO.
+# The default value is: NO.
+
+HAVE_DOT               = @DOXYGEN_USE_DOT@
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS        = 0
+
+# When you want a different-looking font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTNAME           = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTSIZE           = 10
+
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH           =
+
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CLASS_GRAPH            = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH    = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS           = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK               = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LIMIT_NUM_FIELDS   = 10
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS     = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH          = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH      = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH             = YES
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH           = YES
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
+# graphical hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY    = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH        = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot.
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif and svg.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT       = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INTERACTIVE_SVG        = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH               =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS           =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS           =
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS           =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES, then the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES    = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lie
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH    = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_TRANSPARENT        = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS      = YES
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND        = YES
+
+# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP            = YES
diff --git a/nl-unit-test/doc/Makefile.am b/nl-unit-test/doc/Makefile.am
new file mode 100644
index 0000000..eebbbf6
--- /dev/null
+++ b/nl-unit-test/doc/Makefile.am
@@ -0,0 +1,102 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the Nest Labs Unit
+#      Test, in-package, documentation.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+EXTRA_DIST                                      = \
+    $(srcdir)/Doxyfile.in                         \
+    $(NULL)
+
+#
+# Override autotool's default notion of the package version variables.
+# This ensures that when we create a doc distribution, the version is
+# always the current version, not the version at the time the package
+# was bootstrapped.
+#
+PACKAGE_VERSION                                 = $(shell cat $(top_builddir)/.local-version)
+VERSION                                         = $(PACKAGE_VERSION)
+
+
+docdistdir                                     ?= .
+
+nlunittest_docdist_alias                        = \
+    $(PACKAGE_TARNAME)-docs
+
+nlunittest_docdist_name                         = \
+    $(nlunittest_docdist_alias)-$(VERSION)
+
+nlunittest_docdist_archive                      = \
+    $(docdistdir)/$(nlunittest_docdist_name).tar.gz
+
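+# For illustration, assuming PACKAGE_TARNAME expands to "nlunittest" and
+# .local-version contains, say, 1.0.2, the variables above expand to:
+#
+#   nlunittest_docdist_name    = nlunittest-docs-1.0.2
+#   nlunittest_docdist_archive = ./nlunittest-docs-1.0.2.tar.gz
+#
+# given the docdistdir default of "." above.
+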
+CLEANFILES                                      = \
+    Doxyfile                                      \
+    $(nlunittest_docdist_archive)                 \
+    $(NULL)
+
+if NLUNITTEST_BUILD_DOCS
+
+all-local: html/index.html
+
+#
+# We choose to manually transform Doxyfile.in into Doxyfile here in
+# the makefile rather than in the configure script so that we can take
+# advantage of live, at build time (rather than at configure time),
+# updating of the package version number.
+#
+
+Doxyfile: $(srcdir)/Doxyfile.in Makefile
+	$(AM_V_GEN)$(SED)                                     \
+	    -e "s,\@DOXYGEN_USE_DOT\@,$(DOXYGEN_USE_DOT),g"   \
+	    -e "s,\@PACKAGE_NAME\@,$(PACKAGE_NAME),g"         \
+	    -e "s,\@PACKAGE_VERSION\@,$(PACKAGE_VERSION),g"   \
+	    -e "s,\@PERL\@,$(PERL),g"                         \
+	    -e "s,\@abs_builddir\@,$(abs_builddir),g"         \
+	    -e "s,\@abs_srcdir\@,$(abs_srcdir),g"             \
+	    -e "s,\@abs_top_builddir\@,$(abs_top_builddir),g" \
+	    -e "s,\@abs_top_srcdir\@,$(abs_top_srcdir),g"     \
+	    < "$(srcdir)/Doxyfile.in" > "$(@)"
+
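+# As a sketch of what the rule above produces (the values here are
+# illustrative): a Doxyfile.in line such as
+#
+#   HAVE_DOT               = @DOXYGEN_USE_DOT@
+#
+# becomes, when configure has set DOXYGEN_USE_DOT to YES,
+#
+#   HAVE_DOT               = YES
+#
+# in the generated Doxyfile, with PACKAGE_VERSION re-read from .local-version
+# on every build rather than frozen at configure time.
+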
+html/index.html: Doxyfile
+	$(AM_V_GEN)$(DOXYGEN) $(<)
+
+#
+# Additional rules and commands to create a documentation-only
+# distribution of nlunittest
+#
+
+$(nlunittest_docdist_name): html/index.html
+	$(AM_V_at)rm -f -r $(@)
+	$(call create-directory)
+	$(AM_V_at)cp -R html $(@)
+
+$(nlunittest_docdist_archive): $(nlunittest_docdist_name)
+	$(AM_V_at)echo "  TAR      $(@)"
+	$(AM_V_at)tardir="$(<)" && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c > "$(@)" && rm -rf $(<)
+
+docdist $(nlunittest_docdist_alias): $(nlunittest_docdist_archive)
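+
+# Typical usage (illustrative): when the NLUNITTEST_BUILD_DOCS conditional is
+# enabled, running
+#
+#   make docdist
+#
+# regenerates Doxyfile and html/index.html as needed and packages the HTML
+# output into $(nlunittest_docdist_archive).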
+
+clean-local:
+	$(AM_V_at)rm -f -r html
+
+endif # NLUNITTEST_BUILD_DOCS
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/doc/Makefile.in b/nl-unit-test/doc/Makefile.in
new file mode 100644
index 0000000..06b6710
--- /dev/null
+++ b/nl-unit-test/doc/Makefile.in
@@ -0,0 +1,538 @@
+# Makefile.in generated by automake 1.14.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the Nest Labs Unit
+#      Test, in-package, documentation.
+#
+VPATH = @srcdir@
+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
+am__make_running_with_option = \
+  case $${target_option-} in \
+      ?) ;; \
+      *) echo "am__make_running_with_option: internal error: invalid" \
+              "target option '$${target_option-}' specified" >&2; \
+         exit 1;; \
+  esac; \
+  has_opt=no; \
+  sane_makeflags=$$MAKEFLAGS; \
+  if $(am__is_gnu_make); then \
+    sane_makeflags=$$MFLAGS; \
+  else \
+    case $$MAKEFLAGS in \
+      *\\[\ \	]*) \
+        bs=\\; \
+        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+          | sed "s/$$bs$$bs[$$bs $$bs	]*//g"`;; \
+    esac; \
+  fi; \
+  skip_next=no; \
+  strip_trailopt () \
+  { \
+    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+  }; \
+  for flg in $$sane_makeflags; do \
+    test $$skip_next = yes && { skip_next=no; continue; }; \
+    case $$flg in \
+      *=*|--*) continue;; \
+        -*I) strip_trailopt 'I'; skip_next=yes;; \
+      -*I?*) strip_trailopt 'I';; \
+        -*O) strip_trailopt 'O'; skip_next=yes;; \
+      -*O?*) strip_trailopt 'O';; \
+        -*l) strip_trailopt 'l'; skip_next=yes;; \
+      -*l?*) strip_trailopt 'l';; \
+      -[dEDm]) skip_next=yes;; \
+      -[JT]) skip_next=yes;; \
+    esac; \
+    case $$flg in \
+      *$$target_option*) has_opt=yes; break;; \
+    esac; \
+  done; \
+  test $$has_opt = yes
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+target_triplet = @target@
+subdir = doc
+DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(SHELL) \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs
+CONFIG_HEADER = $(top_builddir)/src/nlunittest-config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+AM_V_P = $(am__v_P_@AM_V@)
+am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+am__v_P_0 = false
+am__v_P_1 = :
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo "  GEN     " $@;
+am__v_GEN_1 = 
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+am__v_at_1 = 
+SOURCES =
+DIST_SOURCES =
+am__can_run_installinfo = \
+  case $$AM_UPDATE_INFO_DIR in \
+    n|no|NO) false;; \
+    *) (install-info --version) >/dev/null 2>&1;; \
+  esac
+am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CMP = @CMP@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
+DOT = @DOT@
+DOXYGEN = @DOXYGEN@
+DOXYGEN_USE_DOT = @DOXYGEN_USE_DOT@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBNLUNITTEST_VERSION_AGE = @LIBNLUNITTEST_VERSION_AGE@
+LIBNLUNITTEST_VERSION_CURRENT = @LIBNLUNITTEST_VERSION_CURRENT@
+LIBNLUNITTEST_VERSION_INFO = @LIBNLUNITTEST_VERSION_INFO@
+LIBNLUNITTEST_VERSION_REVISION = @LIBNLUNITTEST_VERSION_REVISION@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAINT = @MAINT@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJCOPY = @OBJCOPY@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+
+#
+# Override autotool's default notion of the package version variables.
+# This ensures that when we create a doc distribution, the version is
+# always the current version, not the version at the time the package
+# was bootstrapped.
+#
+PACKAGE_VERSION = $(shell cat $(top_builddir)/.local-version)
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PERL = @PERL@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = $(PACKAGE_VERSION)
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_nlbuild_autotools_dir = @abs_top_nlbuild_autotools_dir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+nlbuild_autotools_stem = @nlbuild_autotools_stem@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target = @target@
+target_alias = @target_alias@
+target_cpu = @target_cpu@
+target_os = @target_os@
+target_vendor = @target_vendor@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+EXTRA_DIST = \
+    $(srcdir)/Doxyfile.in                         \
+    $(NULL)
+
+nlunittest_docdist_alias = \
+    $(PACKAGE_TARNAME)-docs
+
+nlunittest_docdist_name = \
+    $(nlunittest_docdist_alias)-$(VERSION)
+
+nlunittest_docdist_archive = \
+    $(docdistdir)/$(nlunittest_docdist_name).tar.gz
+
+CLEANFILES = \
+    Doxyfile                                      \
+    $(nlunittest_docdist_archive)                 \
+    $(NULL)
+
+all: all-am
+
+.SUFFIXES:
+$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+	        && { if test -f $@; then exit 0; else break; fi; }; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign doc/Makefile'; \
+	$(am__cd) $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign doc/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(am__aclocal_m4_deps):
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+tags TAGS:
+
+ctags CTAGS:
+
+cscope cscopelist:
+
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d "$(distdir)/$$file"; then \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+	  else \
+	    test -f "$(distdir)/$$file" \
+	    || cp -p $$d/$$file "$(distdir)/$$file" \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-am
+@NLUNITTEST_BUILD_DOCS_FALSE@all-local:
+all-am: Makefile all-local
+installdirs:
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	if test -z '$(STRIP)'; then \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	      install; \
+	else \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	    "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+	fi
+mostlyclean-generic:
+
+clean-generic:
+	-test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+	-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+@NLUNITTEST_BUILD_DOCS_FALSE@clean-local:
+clean: clean-am
+
+clean-am: clean-generic clean-libtool clean-local mostlyclean-am
+
+distclean: distclean-am
+	-rm -f Makefile
+distclean-am: clean-am distclean-generic
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am:
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-am
+
+install-dvi-am:
+
+install-exec-am:
+
+install-html: install-html-am
+
+install-html-am:
+
+install-info: install-info-am
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-pdf-am:
+
+install-ps: install-ps-am
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: install-am install-strip
+
+.PHONY: all all-am all-local check check-am clean clean-generic \
+	clean-libtool clean-local cscopelist-am ctags-am distclean \
+	distclean-generic distclean-libtool distdir dvi dvi-am html \
+	html-am info info-am install install-am install-data \
+	install-data-am install-dvi install-dvi-am install-exec \
+	install-exec-am install-html install-html-am install-info \
+	install-info-am install-man install-pdf install-pdf-am \
+	install-ps install-ps-am install-strip installcheck \
+	installcheck-am installdirs maintainer-clean \
+	maintainer-clean-generic mostlyclean mostlyclean-generic \
+	mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \
+	uninstall-am
+
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+docdistdir                                     ?= .
+
+@NLUNITTEST_BUILD_DOCS_TRUE@all-local: html/index.html
+
+#
+# We choose to manually transform Doxyfile.in into Doxyfile here in
+# the makefile rather than in the configure script so that we can take
+# advantage of live, at build time (rather than at configure time),
+# updating of the package version number.
+#
+
+@NLUNITTEST_BUILD_DOCS_TRUE@Doxyfile: $(srcdir)/Doxyfile.in Makefile
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(AM_V_GEN)$(SED)                                     \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@DOXYGEN_USE_DOT\@,$(DOXYGEN_USE_DOT),g"   \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@PACKAGE_NAME\@,$(PACKAGE_NAME),g"         \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@PACKAGE_VERSION\@,$(PACKAGE_VERSION),g"   \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@PERL\@,$(PERL),g"                         \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@abs_builddir\@,$(abs_builddir),g"         \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@abs_srcdir\@,$(abs_srcdir),g"             \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@abs_top_builddir\@,$(abs_top_builddir),g" \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    -e "s,\@abs_top_srcdir\@,$(abs_top_srcdir),g"     \
+@NLUNITTEST_BUILD_DOCS_TRUE@	    < "$(srcdir)/Doxyfile.in" > "$(@)"
+
+@NLUNITTEST_BUILD_DOCS_TRUE@html/index.html: Doxyfile
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(AM_V_GEN)$(DOXYGEN) $(<)
+
+#
+# Additional rules and commands to create a documentation-only
+# distribution of nlunittest
+#
+
+@NLUNITTEST_BUILD_DOCS_TRUE@$(nlunittest_docdist_name): html/index.html
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(AM_V_at)rm -f -r $(@)
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(call create-directory)
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(AM_V_at)cp -R html $(@)
+
+@NLUNITTEST_BUILD_DOCS_TRUE@$(nlunittest_docdist_archive): $(nlunittest_docdist_name)
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(AM_V_at)echo "  TAR      $(@)"
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(AM_V_at)tardir="$(<)" && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c > "$(@)" && rm -rf $(<)
+
+@NLUNITTEST_BUILD_DOCS_TRUE@docdist $(nlunittest_docdist_alias): $(nlunittest_docdist_archive)
+
+@NLUNITTEST_BUILD_DOCS_TRUE@clean-local:
+@NLUNITTEST_BUILD_DOCS_TRUE@	$(AM_V_at)rm -f -r html
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/nl-unit-test/src/Makefile.am b/nl-unit-test/src/Makefile.am
new file mode 100644
index 0000000..bb432a2
--- /dev/null
+++ b/nl-unit-test/src/Makefile.am
@@ -0,0 +1,39 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the Nest Labs Unit Test
+#      library.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+include_HEADERS             = \
+    nltest.h                  \
+    $(NULL)
+
+lib_LIBRARIES               = \
+    libnlunittest.a           \
+    $(NULL)
+
+libnlunittest_a_SOURCES     = \
+    nltest.c                  \
+    $(NULL)
+
+install-headers: install-data
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/src/Makefile.in b/nl-unit-test/src/Makefile.in
new file mode 100644
index 0000000..aae8649
--- /dev/null
+++ b/nl-unit-test/src/Makefile.in
@@ -0,0 +1,732 @@
+# Makefile.in generated by automake 1.14.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the Nest Labs Unit Test
+#      library.
+#
+
+
+VPATH = @srcdir@
+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
+am__make_running_with_option = \
+  case $${target_option-} in \
+      ?) ;; \
+      *) echo "am__make_running_with_option: internal error: invalid" \
+              "target option '$${target_option-}' specified" >&2; \
+         exit 1;; \
+  esac; \
+  has_opt=no; \
+  sane_makeflags=$$MAKEFLAGS; \
+  if $(am__is_gnu_make); then \
+    sane_makeflags=$$MFLAGS; \
+  else \
+    case $$MAKEFLAGS in \
+      *\\[\ \	]*) \
+        bs=\\; \
+        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+          | sed "s/$$bs$$bs[$$bs $$bs	]*//g"`;; \
+    esac; \
+  fi; \
+  skip_next=no; \
+  strip_trailopt () \
+  { \
+    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+  }; \
+  for flg in $$sane_makeflags; do \
+    test $$skip_next = yes && { skip_next=no; continue; }; \
+    case $$flg in \
+      *=*|--*) continue;; \
+        -*I) strip_trailopt 'I'; skip_next=yes;; \
+      -*I?*) strip_trailopt 'I';; \
+        -*O) strip_trailopt 'O'; skip_next=yes;; \
+      -*O?*) strip_trailopt 'O';; \
+        -*l) strip_trailopt 'l'; skip_next=yes;; \
+      -*l?*) strip_trailopt 'l';; \
+      -[dEDm]) skip_next=yes;; \
+      -[JT]) skip_next=yes;; \
+    esac; \
+    case $$flg in \
+      *$$target_option*) has_opt=yes; break;; \
+    esac; \
+  done; \
+  test $$has_opt = yes
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+target_triplet = @target@
+subdir = src
+DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \
+	$(srcdir)/nlunittest-config.h.in \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/depcomp \
+	$(include_HEADERS)
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(SHELL) \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs
+CONFIG_HEADER = nlunittest-config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+  for p in $$list; do echo "$$p $$p"; done | \
+  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+    if (++n[$$2] == $(am__install_max)) \
+      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+    END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+  test -z "$$files" \
+    || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+    || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+         $(am__cd) "$$dir" && rm -f $$files; }; \
+  }
+am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"
+LIBRARIES = $(lib_LIBRARIES)
+ARFLAGS = cru
+AM_V_AR = $(am__v_AR_@AM_V@)
+am__v_AR_ = $(am__v_AR_@AM_DEFAULT_V@)
+am__v_AR_0 = @echo "  AR      " $@;
+am__v_AR_1 = 
+libnlunittest_a_AR = $(AR) $(ARFLAGS)
+libnlunittest_a_LIBADD =
+am_libnlunittest_a_OBJECTS = nltest.$(OBJEXT)
+libnlunittest_a_OBJECTS = $(am_libnlunittest_a_OBJECTS)
+AM_V_P = $(am__v_P_@AM_V@)
+am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+am__v_P_0 = false
+am__v_P_1 = :
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo "  GEN     " $@;
+am__v_GEN_1 = 
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+am__v_at_1 = 
+DEFAULT_INCLUDES = -I.@am__isrc@
+depcomp = $(SHELL) \
+	$(top_srcdir)/third_party/nlbuild-autotools/repo/autoconf/depcomp
+am__depfiles_maybe = depfiles
+am__mv = mv -f
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+AM_V_lt = $(am__v_lt_@AM_V@)
+am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
+am__v_lt_0 = --silent
+am__v_lt_1 = 
+LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
+	$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+	$(AM_CFLAGS) $(CFLAGS)
+AM_V_CC = $(am__v_CC_@AM_V@)
+am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
+am__v_CC_0 = @echo "  CC      " $@;
+am__v_CC_1 = 
+CCLD = $(CC)
+LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(AM_LDFLAGS) $(LDFLAGS) -o $@
+AM_V_CCLD = $(am__v_CCLD_@AM_V@)
+am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
+am__v_CCLD_0 = @echo "  CCLD    " $@;
+am__v_CCLD_1 = 
+SOURCES = $(libnlunittest_a_SOURCES)
+DIST_SOURCES = $(libnlunittest_a_SOURCES)
+am__can_run_installinfo = \
+  case $$AM_UPDATE_INFO_DIR in \
+    n|no|NO) false;; \
+    *) (install-info --version) >/dev/null 2>&1;; \
+  esac
+HEADERS = $(include_HEADERS)
+am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) \
+	$(LISP)nlunittest-config.h.in
+# Read a list of newline-separated strings from the standard input,
+# and print each of them once, without duplicates.  Input order is
+# *not* preserved.
+am__uniquify_input = $(AWK) '\
+  BEGIN { nonempty = 0; } \
+  { items[$$0] = 1; nonempty = 1; } \
+  END { if (nonempty) { for (i in items) print i; }; } \
+'
+# Make sure the list of sources is unique.  This is necessary because,
+# e.g., the same source file might be shared among _SOURCES variables
+# for different programs/libraries.
+am__define_uniq_tagged_files = \
+  list='$(am__tagged_files)'; \
+  unique=`for i in $$list; do \
+    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+  done | $(am__uniquify_input)`
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CMP = @CMP@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
+DOT = @DOT@
+DOXYGEN = @DOXYGEN@
+DOXYGEN_USE_DOT = @DOXYGEN_USE_DOT@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBNLUNITTEST_VERSION_AGE = @LIBNLUNITTEST_VERSION_AGE@
+LIBNLUNITTEST_VERSION_CURRENT = @LIBNLUNITTEST_VERSION_CURRENT@
+LIBNLUNITTEST_VERSION_INFO = @LIBNLUNITTEST_VERSION_INFO@
+LIBNLUNITTEST_VERSION_REVISION = @LIBNLUNITTEST_VERSION_REVISION@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAINT = @MAINT@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJCOPY = @OBJCOPY@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PERL = @PERL@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_nlbuild_autotools_dir = @abs_top_nlbuild_autotools_dir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+nlbuild_autotools_stem = @nlbuild_autotools_stem@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target = @target@
+target_alias = @target_alias@
+target_cpu = @target_cpu@
+target_os = @target_os@
+target_vendor = @target_vendor@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+include_HEADERS = \
+    nltest.h                  \
+    $(NULL)
+
+lib_LIBRARIES = \
+    libnlunittest.a           \
+    $(NULL)
+
+libnlunittest_a_SOURCES = \
+    nltest.c                  \
+    $(NULL)
+
+all: nlunittest-config.h
+	$(MAKE) $(AM_MAKEFLAGS) all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+	        && { if test -f $@; then exit 0; else break; fi; }; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/Makefile'; \
+	$(am__cd) $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign src/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(am__aclocal_m4_deps):
+
+nlunittest-config.h: stamp-h1
+	@test -f $@ || rm -f stamp-h1
+	@test -f $@ || $(MAKE) $(AM_MAKEFLAGS) stamp-h1
+
+stamp-h1: $(srcdir)/nlunittest-config.h.in $(top_builddir)/config.status
+	@rm -f stamp-h1
+	cd $(top_builddir) && $(SHELL) ./config.status src/nlunittest-config.h
+$(srcdir)/nlunittest-config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) 
+	($(am__cd) $(top_srcdir) && $(AUTOHEADER))
+	rm -f stamp-h1
+	touch $@
+
+distclean-hdr:
+	-rm -f nlunittest-config.h stamp-h1
+install-libLIBRARIES: $(lib_LIBRARIES)
+	@$(NORMAL_INSTALL)
+	@list='$(lib_LIBRARIES)'; test -n "$(libdir)" || list=; \
+	list2=; for p in $$list; do \
+	  if test -f $$p; then \
+	    list2="$$list2 $$p"; \
+	  else :; fi; \
+	done; \
+	test -z "$$list2" || { \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \
+	  echo " $(INSTALL_DATA) $$list2 '$(DESTDIR)$(libdir)'"; \
+	  $(INSTALL_DATA) $$list2 "$(DESTDIR)$(libdir)" || exit $$?; }
+	@$(POST_INSTALL)
+	@list='$(lib_LIBRARIES)'; test -n "$(libdir)" || list=; \
+	for p in $$list; do \
+	  if test -f $$p; then \
+	    $(am__strip_dir) \
+	    echo " ( cd '$(DESTDIR)$(libdir)' && $(RANLIB) $$f )"; \
+	    ( cd "$(DESTDIR)$(libdir)" && $(RANLIB) $$f ) || exit $$?; \
+	  else :; fi; \
+	done
+
+uninstall-libLIBRARIES:
+	@$(NORMAL_UNINSTALL)
+	@list='$(lib_LIBRARIES)'; test -n "$(libdir)" || list=; \
+	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+	dir='$(DESTDIR)$(libdir)'; $(am__uninstall_files_from_dir)
+
+clean-libLIBRARIES:
+	-test -z "$(lib_LIBRARIES)" || rm -f $(lib_LIBRARIES)
+
+libnlunittest.a: $(libnlunittest_a_OBJECTS) $(libnlunittest_a_DEPENDENCIES) $(EXTRA_libnlunittest_a_DEPENDENCIES) 
+	$(AM_V_at)-rm -f libnlunittest.a
+	$(AM_V_AR)$(libnlunittest_a_AR) libnlunittest.a $(libnlunittest_a_OBJECTS) $(libnlunittest_a_LIBADD)
+	$(AM_V_at)$(RANLIB) libnlunittest.a
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/nltest.Po@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@	$(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
+@am__fastdepCC_TRUE@	$(am__mv) $$depbase.Tpo $$depbase.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	$(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $<
+
+.c.obj:
+@am__fastdepCC_TRUE@	$(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\
+@am__fastdepCC_TRUE@	$(am__mv) $$depbase.Tpo $$depbase.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	$(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@	$(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\
+@am__fastdepCC_TRUE@	$(LTCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
+@am__fastdepCC_TRUE@	$(am__mv) $$depbase.Tpo $$depbase.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	$(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+install-includeHEADERS: $(include_HEADERS)
+	@$(NORMAL_INSTALL)
+	@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
+	  $(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
+	done
+
+uninstall-includeHEADERS:
+	@$(NORMAL_UNINSTALL)
+	@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
+	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+	dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
+
+ID: $(am__tagged_files)
+	$(am__define_uniq_tagged_files); mkid -fID $$unique
+tags: tags-am
+TAGS: tags
+
+tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+	set x; \
+	here=`pwd`; \
+	$(am__define_uniq_tagged_files); \
+	shift; \
+	if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  if test $$# -gt 0; then \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      "$$@" $$unique; \
+	  else \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      $$unique; \
+	  fi; \
+	fi
+ctags: ctags-am
+
+CTAGS: ctags
+ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+	$(am__define_uniq_tagged_files); \
+	test -z "$(CTAGS_ARGS)$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && $(am__cd) $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) "$$here"
+cscopelist: cscopelist-am
+
+cscopelist-am: $(am__tagged_files)
+	list='$(am__tagged_files)'; \
+	case "$(srcdir)" in \
+	  [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
+	  *) sdir=$(subdir)/$(srcdir) ;; \
+	esac; \
+	for i in $$list; do \
+	  if test -f "$$i"; then \
+	    echo "$(subdir)/$$i"; \
+	  else \
+	    echo "$$sdir/$$i"; \
+	  fi; \
+	done >> $(top_builddir)/cscope.files
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d "$(distdir)/$$file"; then \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+	  else \
+	    test -f "$(distdir)/$$file" \
+	    || cp -p $$d/$$file "$(distdir)/$$file" \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-am
+all-am: Makefile $(LIBRARIES) $(HEADERS) nlunittest-config.h
+installdirs:
+	for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"; do \
+	  test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+	done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	if test -z '$(STRIP)'; then \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	      install; \
+	else \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	    "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+	fi
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+	-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libLIBRARIES clean-libtool \
+	mostlyclean-am
+
+distclean: distclean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-hdr distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am:
+
+info: info-am
+
+info-am:
+
+install-data-am: install-includeHEADERS
+
+install-dvi: install-dvi-am
+
+install-dvi-am:
+
+install-exec-am: install-libLIBRARIES
+
+install-html: install-html-am
+
+install-html-am:
+
+install-info: install-info-am
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-pdf-am:
+
+install-ps: install-ps-am
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-includeHEADERS uninstall-libLIBRARIES
+
+.MAKE: all install-am install-strip
+
+.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
+	clean-libLIBRARIES clean-libtool cscopelist-am ctags ctags-am \
+	distclean distclean-compile distclean-generic distclean-hdr \
+	distclean-libtool distclean-tags distdir dvi dvi-am html \
+	html-am info info-am install install-am install-data \
+	install-data-am install-dvi install-dvi-am install-exec \
+	install-exec-am install-html install-html-am \
+	install-includeHEADERS install-info install-info-am \
+	install-libLIBRARIES install-man install-pdf install-pdf-am \
+	install-ps install-ps-am install-strip installcheck \
+	installcheck-am installdirs maintainer-clean \
+	maintainer-clean-generic mostlyclean mostlyclean-compile \
+	mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+	tags tags-am uninstall uninstall-am uninstall-includeHEADERS \
+	uninstall-libLIBRARIES
+
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+install-headers: install-data
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/nl-unit-test/src/nltest.c b/nl-unit-test/src/nltest.c
new file mode 100644
index 0000000..0b88588
--- /dev/null
+++ b/nl-unit-test/src/nltest.c
@@ -0,0 +1,257 @@
+/**
+ *    Copyright 2012-2016 Nest Labs Inc. All Rights Reserved.
+ *
+ *    Licensed under the Apache License, Version 2.0 (the "License");
+ *    you may not use this file except in compliance with the License.
+ *    You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *    Unless required by applicable law or agreed to in writing, software
+ *    distributed under the License is distributed on an "AS IS" BASIS,
+ *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *    See the License for the specific language governing permissions and
+ *    limitations under the License.
+ */
+
+/**
+ *    @file
+ *      This file implements functions that effect a simple, portable
+ *      unit test suite framework.
+ *
+ */
+
+#include <stdio.h>
+#include <stddef.h>
+#include <string.h>
+
+#include <nltest.h>
+
+/* Function Prototypes */
+
+static void def_log_name(struct _nlTestSuite* inSuite);
+static void def_log_initialize(struct _nlTestSuite * inSuite, int inResult, int inWidth);
+static void def_log_terminate(struct _nlTestSuite * inSuite, int inResult, int inWidth);
+static void def_log_setup(struct _nlTestSuite* inSuite, int inResult, int inWidth);
+static void def_log_test(struct _nlTestSuite *inSuite, int inWidth, int inIndex);
+static void def_log_teardown(struct _nlTestSuite* inSuite, int inResult, int inWidth);
+static void def_log_statTest(struct _nlTestSuite* inSuite);
+static void def_log_statAssert(struct _nlTestSuite* inSuite);
+static void csv_log_name(struct _nlTestSuite* inSuite);
+static void csv_log_initialize(struct _nlTestSuite * inSuite, int inResult, int inWidth);
+static void csv_log_terminate(struct _nlTestSuite * inSuite, int inResult, int inWidth);
+static void csv_log_setup(struct _nlTestSuite* inSuite, int inResult, int inWidth);
+static void csv_log_test(struct _nlTestSuite *inSuite, int inWidth, int inIndex);
+static void csv_log_teardown(struct _nlTestSuite* inSuite, int inResult, int inWidth);
+static void csv_log_statTest(struct _nlTestSuite* inSuite);
+static void csv_log_statAssert(struct _nlTestSuite* inSuite);
+
+/* Global Variables */
+
+static nl_test_output_logger_t nl_test_logger_default = {
+    def_log_name,
+    def_log_initialize,
+    def_log_terminate,
+    def_log_setup,
+    def_log_test,
+    def_log_teardown,
+    def_log_statTest,
+    def_log_statAssert,
+};
+
+static nl_test_output_logger_t nl_test_logger_csv = {
+    csv_log_name,
+    csv_log_initialize,
+    csv_log_terminate,
+    csv_log_setup,
+    csv_log_test,
+    csv_log_teardown,
+    csv_log_statTest,
+    csv_log_statAssert,
+};
+
+/* Global Output Style Variable */
+
+static const nl_test_output_logger_t *logger_output = &nl_test_logger_default;
+
+static int isSentinel(const nlTest* inTests, size_t inIndex)
+{
+    if (inTests[inIndex].name == NULL
+        && inTests[inIndex].function == NULL)
+    {
+        return 1;
+    }
+
+    return 0;
+}
+
+/* Human-readable (Default) Output Functions */
+
+static void def_log_name(struct _nlTestSuite* inSuite)
+{
+    printf("[ %s ]\n", inSuite->name);
+}
+
+static void def_log_initialize(struct _nlTestSuite * inSuite, int inResult, int inWidth)
+{
+    printf("[ %s : %-*s ] : %s\n", inSuite->name, inWidth, "Initialize", inResult==FAILURE ? "FAILED" : "PASSED" );
+}
+
+static void def_log_terminate(struct _nlTestSuite * inSuite, int inResult, int inWidth)
+{
+    printf("[ %s : %-*s ] : %s\n", inSuite->name, inWidth,"Terminate", inResult==FAILURE ? "FAILED" : "PASSED" );
+}
+
+static void def_log_setup(struct _nlTestSuite* inSuite, int inResult, int inWidth)
+{
+    printf("[ %s : %-*s ] : %s\n", inSuite->name, inWidth, "Setup", inResult == FAILURE ? "FAILED" : "PASSED");
+}
+
+static void def_log_test(struct _nlTestSuite *inSuite, int inWidth, int inIndex)
+{
+    printf("[ %s : %-*s ] : %s\n", inSuite->name, inWidth, inSuite->tests[inIndex].name, inSuite->flagError ? "FAILED" : "PASSED");
+}
+
+static void def_log_teardown(struct _nlTestSuite* inSuite, int inResult, int inWidth)
+{
+    printf("[ %s : %-*s ] : %s\n", inSuite->name, inWidth, "TearDown", inResult == FAILURE ? "FAILED" : "PASSED");
+}
+
+static void def_log_statTest(struct _nlTestSuite* inSuite)
+{
+    printf("Failed Tests:   %d / %d\n", inSuite->failedTests, inSuite->runTests);
+}
+
+static void def_log_statAssert(struct _nlTestSuite* inSuite)
+{
+    printf("Failed Asserts: %d / %d\n", inSuite->failedAssertions, inSuite->performedAssertions);
+}
+
+/* CSV Output Functions */
+
+static void csv_log_name(struct _nlTestSuite* inSuite)
+{
+    printf("'#0:','%s'\n", inSuite->name);
+}
+
+static void csv_log_initialize(struct _nlTestSuite * inSuite, int inResult, int inWidth)
+{
+    printf("'#1:','%-*s','%s'\n", inWidth, "Initialize", inResult==FAILURE ? "FAILED" : "PASSED" );
+}
+
+static void csv_log_terminate(struct _nlTestSuite * inSuite, int inResult, int inWidth)
+{
+    printf("'#5:','%-*s','%s'\n", inWidth,"Terminate", inResult==FAILURE ? "FAILED" : "PASSED" );
+}
+
+static void csv_log_setup(struct _nlTestSuite* inSuite, int inResult, int inWidth)
+{
+    printf("'#2:','%-*s','%s'\n", inWidth, "Setup", inResult == FAILURE ? "FAILED" : "PASSED");
+}
+
+static void csv_log_test(struct _nlTestSuite *inSuite, int inWidth, int inIndex)
+{
+    printf("'#3:','%-*s','%s'\n", inWidth, inSuite->tests[inIndex].name, inSuite->flagError ? "FAILED" : "PASSED");
+}
+
+static void csv_log_teardown(struct _nlTestSuite* inSuite, int inResult, int inWidth)
+{
+    printf("'#4:','%-*s','%s'\n", inWidth, "Teardown", inResult == FAILURE ? "FAILED" : "PASSED");
+}
+
+static void csv_log_statTest(struct _nlTestSuite* inSuite)
+{
+    printf("'#6:','%d','%d'\n", inSuite->failedTests, inSuite->runTests);
+}
+
+static void csv_log_statAssert(struct _nlTestSuite* inSuite)
+{
+    printf("'#7:','%d','%d'\n", inSuite->failedAssertions, inSuite->performedAssertions);
+}
+
+void nlTestRunner(struct _nlTestSuite* inSuite, void* inContext)
+{
+    int i = 0;
+    size_t len, max = 0;
+
+    logger_output->PrintName(inSuite);
+
+    /* Determine the maximum test name length */
+
+    for (i = 0; i < kTestSuiteMaxTests; i++)
+    {
+        if (isSentinel(inSuite->tests, i))
+            break;
+
+        len = strlen(inSuite->tests[i].name);
+        if (len > max)
+            max = len;
+    }
+    
+    inSuite->runTests = 0;
+    inSuite->failedTests = 0;
+    inSuite->performedAssertions = 0;
+    inSuite->failedAssertions = 0;
+
+    /* Run the tests and display the test and summary result */
+    if (inSuite->setup != NULL)
+    {
+        int resSetup = inSuite->setup(inContext);
+        logger_output->PrintSetup(inSuite,resSetup, max);
+    }
+    for (i = 0; i < kTestSuiteMaxTests; i++)
+    {
+        if (isSentinel(inSuite->tests, i))
+            break;
+
+        if (inSuite->initialize != NULL)
+        {
+            int resInitialize = inSuite->initialize(inContext);
+            logger_output->PrintInitialize(inSuite,resInitialize, max);
+        }
+
+        inSuite->flagError = false;
+        inSuite->tests[i].function(inSuite, inContext);
+        inSuite->runTests += 1;
+        if (inSuite->flagError)
+            inSuite->failedTests += 1;
+
+        logger_output->PrintTest(inSuite, max, i);
+
+        if (inSuite->terminate != NULL)
+        {
+            int resTerminate = inSuite->terminate(inContext);
+            logger_output->PrintTerminate(inSuite,resTerminate, max);
+        }
+    }
+    if (inSuite->tear_down != NULL)
+    {
+        int resTeardown = inSuite->tear_down(inContext);
+        logger_output->PrintTeardown(inSuite,resTeardown, max);
+    }
+}
+
+int nlTestRunnerStats(struct _nlTestSuite* inSuite)
+{
+    logger_output->PrintStatTests(inSuite);
+    logger_output->PrintStatAsserts(inSuite);
+
+    return 0 - inSuite->failedTests;
+}
+
+void nlTestSetOutputStyle(nlTestOutputStyle inStyle)
+{
+    if (inStyle == OUTPUT_DEF)
+    {
+        logger_output = &nl_test_logger_default;
+    }
+    else if (inStyle == OUTPUT_CSV)
+    {
+        logger_output = &nl_test_logger_csv;
+    }
+}
+
+void nlTestSetLogger(const nlTestOutputLogger* inLogger)
+{
+    logger_output = inLogger;
+}
+
diff --git a/nl-unit-test/src/nltest.h b/nl-unit-test/src/nltest.h
new file mode 100644
index 0000000..b9418c2
--- /dev/null
+++ b/nl-unit-test/src/nltest.h
@@ -0,0 +1,499 @@
+/**
+ *    Copyright 2012-2016 Nest Labs Inc. All Rights Reserved.
+ *
+ *    Licensed under the Apache License, Version 2.0 (the "License");
+ *    you may not use this file except in compliance with the License.
+ *    You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *    Unless required by applicable law or agreed to in writing, software
+ *    distributed under the License is distributed on an "AS IS" BASIS,
+ *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *    See the License for the specific language governing permissions and
+ *    limitations under the License.
+ */
+
+/**
+ *    @file
+ *      This file defines macros, constants, data structures, and
+ *      functions that effect a simple, portable unit test suite
+ *      framework.
+ *
+ */
+
+#ifndef __NLTEST_H_INCLUDED__
+#define __NLTEST_H_INCLUDED__
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdio.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/**
+ * @addtogroup typedef Type Definitions
+ *
+ * @{
+ *
+ */
+
+struct _nlTestSuite;
+
+/**
+ * This defines a function entry point for a test in a test suite.
+ *
+ * @param[inout]  inSuite     A pointer to the test suite being run.
+ * @param[inout]  inContext   A pointer to test suite-specific context
+ *                            provided by the test suite driver.
+ *
+ */
+typedef void (*nlTestFunction)(struct _nlTestSuite* inSuite, void* inContext);
+
+/**
+ * This structure is used to define a single test for use in a test suite.
+ *
+ */
+typedef struct _nlTest
+{
+    const char*    name;                /**< Brief descriptive name of the test */
+    nlTestFunction function;            /**< Function entry point for the test */
+} nlTest;
+
+/**
+ * This structure is used to define the test suite, an array of tests
+ * for a given module.
+ *
+ * It has a name for the suite, the array of tests, as well as a setup function which
+ * is called before the execution of the test suite and a teardown function which is
+ * called after all tests in the suite are complete. It also contains pointers to an
+ * initialize function which is called before each individual test, and a terminate
+ * function which is called after each individual test.
+ *
+ */
+typedef struct _nlTestSuite
+{
+    /* Public Members */
+    const char*    name;                /**< Brief descriptive name of the test suite */
+    const nlTest*  tests;               /**< Array of tests in the suite */
+
+    /**
+     * This function is responsible for, if non-NULL, performing
+     * initial setup for the entire test suite, before running any
+     * tests in the suite.
+     *
+     * @param[inout]  inContext   A pointer to test suite-specific context
+     *                            provided by the test suite driver.
+     *
+     */
+    int            (*setup)(void *inContext);
+
+    /**
+     * This function is responsible for, if non-NULL, performing
+     * tear down for the entire test suite, after every test in the suite.
+     *
+     * @param[inout]  inContext   A pointer to test suite-specific context
+     *                            provided by the test suite driver.
+     *
+     */
+    int            (*tear_down)(void *inContext);
+
+    /**
+     * This function is responsible for, if non-NULL, performing
+     * initialization for the test, before running the test.
+     *
+     * @param[inout]  inContext   A pointer to test suite-specific context
+     *                            provided by the test suite driver.
+     *
+     */
+    int            (*initialize)(void *inContext);
+
+    /**
+     * This function is responsible for, if non-NULL, performing
+     * final tear down for the test, after running the test.
+     *
+     * @param[inout]  inContext   A pointer to test suite-specific context
+     *                            provided by the test suite driver.
+     *
+     */
+    int            (*terminate)(void *inContext);
+
+    int            runTests;            /**< Total number of tests performed in the suite */
+    int            failedTests;         /**< Total number of tests failed in the suite */
+    int            performedAssertions; /**< Total number of test assertions performed in the suite */
+    int            failedAssertions;    /**< Total number of test assertions failed in the suite */
+
+    /* Private Members */
+    bool           flagError;
+} nlTestSuite;
+
+/**
+ * Output style for tests and test summaries.
+ */
+typedef enum _nlTestOutputStyle
+{
+    OUTPUT_DEF = 0,   /**< Generate human-readable output (default). */
+    OUTPUT_CSV = 1,   /**< Generate machine-readable, comma-separated value (CSV) output. */
+} nlTestOutputStyle;
+
+/**
+ * This structure contains function pointers for functions that output
+ * the test suite name as well as the status of:
+ *
+ *   * Test suite setup and teardown functions.
+ *   * Tests and result functions (failed tests, failed assertions).
+ *
+ * All functions take a pointer to the test suite along with
+ * additional parameters, as needed, for the particular function.
+ *
+ * Custom instances of this structure may be instantiated and
+ * populated with custom output functions and then globally set via
+ * nlTestSetLogger.
+ *
+ */
+typedef struct _nlTestOutputLogger {
+    /**
+     * This function is responsible for rendering the name of the test
+     * suite.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the name should be rendered.
+     *
+     */ 
+    void (*PrintName)(struct _nlTestSuite*inSuite);
+
+    /**
+     * This function is responsible for rendering the status of the
+     * individual test initialization.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the individual test initialization
+     *                             status should be rendered.
+     *  @param[in]    inResult     The status of the individual test
+     *                             initialization to be rendered.
+     *  @param[in]    inWidth      The maximum width, in characters,
+     *                             allowed for rendering the setup name
+     *                             or phase.
+     *
+     */
+    void (*PrintInitialize)(struct _nlTestSuite * inSuite, int inResult, int inWidth);
+
+    /**
+     * This function is responsible for rendering the status of the
+     * individual test termination.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the individual test termination
+     *                             status should be rendered.
+     *  @param[in]    inResult     The status of the individual test
+     *                             termination to be rendered.
+     *  @param[in]    inWidth      The maximum width, in characters,
+     *                             allowed for rendering the setup name
+     *                             or phase.
+     *
+     */ 
+    void (*PrintTerminate)(struct _nlTestSuite * inSuite, int inResult, int inWidth);
+
+    /**
+     * This function is responsible for rendering the status of the test
+     * suite setup.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the test suite setup status should be
+     *                             rendered.
+     *  @param[in]    inResult     The status of the test suite setup to
+     *                             be rendered.
+     *  @param[in]    inWidth      The maximum width, in characters,
+     *                             allowed for rendering the setup name
+     *                             or phase.
+     *
+     */ 
+    void (*PrintSetup)(struct _nlTestSuite*inSuite, int inResult, int inWidth);
+
+    /**
+     * This function is responsible for rendering the summary of a test
+     * run, indicating success or failure of that test.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the test run status summary should be
+     *                             rendered.
+     *  @param[in]    inWidth      The maximum width, in characters,
+     *                             allowed for rendering the test name.
+     *  @param[in]    inIndex      The index of the test in the suite
+     *                             for which to render the summary of the
+     *                             test run.
+     *
+     */
+    void (*PrintTest)(struct _nlTestSuite*inSuite, int inWidth, int inIndex);
+
+    /**
+     * This function is responsible for rendering the status of the test
+     * suite teardown.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the test suite teardown status should be
+     *                             rendered.
+     *  @param[in]    inResult     The status of the test suite teardown to
+     *                             be rendered.
+     *  @param[in]    inWidth      The maximum width, in characters,
+     *                             allowed for rendering the setup name
+     *                             or phase.
+     *
+     */
+    void (*PrintTeardown)(struct _nlTestSuite*inSuite, int inResult, int inWidth);
+
+    /**
+     * This function is responsible for rendering the test suite run
+     * statistics, including the number of failed tests and the total
+     * number of tests run.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the test suite test statistics should be
+     *                             rendered.
+     *
+     */
+    void (*PrintStatTests)(struct _nlTestSuite*inSuite);
+
+    /**
+     * This function is responsible for rendering the test suite assertion
+     * statistics, including the number of failed assertions and the total
+     * number of assertions evaluated.
+     *
+     *  @param[in]    inSuite      A pointer to the test suite for which
+     *                             the test suite assertion statistics should
+     *                             be rendered.
+     *
+     */
+    void (*PrintStatAsserts)(struct _nlTestSuite*inSuite);
+} nlTestOutputLogger;
+
+/**
+ *  @}
+ *
+ */
+
+/**
+ *  @addtogroup cpp Preprocessor Definitions and Macros
+ *  
+ *  @{
+ */
+
+/**
+ *  @def kTestSuiteMaxTests
+ *
+ *  @brief
+ *    Defines the maximum number of tests allowed in a single test suite.
+ *
+ */    
+#define kTestSuiteMaxTests (64)
+
+/**
+ *  @def SUCCESS
+ *
+ *  @brief
+ *    Defines successful return status from test suite functions
+ *
+ */
+#define SUCCESS 0
+
+/**
+ *  @def FAILURE
+ *
+ *  @brief
+ *    Defines failed return status from test suite functions
+ *
+ */
+#define FAILURE -1
+
+#ifdef __cplusplus
+#define _NL_TEST_ASSIGN(field, value)	value
+#else
+#define _NL_TEST_ASSIGN(field, value)	.field = value
+#endif /* __cplusplus */
+
+/**
+ *  @def NL_TEST_DEF(inName, inFunction)
+ *
+ *  @brief
+ *    This macro makes a test assignment in a test suite, associating
+ *    the specified function @a inFunction with the provided string @a
+ *    inName.
+ *
+ *  @param[in]    inName       A pointer to a NULL-terminated C string
+ *                             briefly describing the test.
+ *  @param[in]    inFunction   A pointer to the function entry point for
+ *                             the test.
+ *
+ */
+#define NL_TEST_DEF(inName, inFunction)               \
+{                                                     \
+    _NL_TEST_ASSIGN(name, inName),                    \
+    _NL_TEST_ASSIGN(function, inFunction)             \
+}
+
+/**
+ *  @def NL_TEST_SENTINEL()
+ *
+ *  @brief
+ *    This macro must be used as the final entry to terminate an array
+ *    of test assignments to a test suite.
+ *
+ */
+#define NL_TEST_SENTINEL()                            NL_TEST_DEF(NULL, NULL)
+
+/**
+ *  @def NL_TEST_ASSERT(inSuite, inCondition)
+ *
+ *  @brief
+ *    This is used to assert the results of a conditional check
+ *    throughout a test in a test suite.
+ *
+ *  @param[in]    inSuite      A pointer to the test suite the assertion
+ *                             should be accounted against.
+ *  @param[in]    inCondition  Code for the logical predicate to be checked
+ *                             for truth. If the condition fails, the
+ *                             assertion fails.
+ *
+ */
+#define NL_TEST_ASSERT(inSuite, inCondition)          \
+    do {                                              \
+        (inSuite)->performedAssertions += 1;          \
+                                                      \
+        if (!(inCondition))                           \
+        {                                             \
+            printf("Failed assert: %s in %s:%u\n",    \
+                   #inCondition, __FILE__, __LINE__); \
+            (inSuite)->failedAssertions += 1;         \
+            (inSuite)->flagError = true;              \
+        }                                             \
+    } while (0)
+
+/**
+ *  @def NL_TEST_ASSERT_LOOP(inSuite, iteration, inCondition)
+ *
+ *  @brief
+ *    This is used like NL_TEST_ASSERT() within a loop; when the
+ *    assertion fails, the failure message additionally reports the
+ *    loop @a iteration being checked.
+ *
+ */
+#define NL_TEST_ASSERT_LOOP(inSuite, iteration, inCondition)                                            \
+do {                                                                                                    \
+    (inSuite)->performedAssertions += 1;                                                                \
+    if (!(inCondition))                                                                                 \
+    {                                                                                                   \
+        printf("Failed assert: %s in %s:%u, iter: %d\n", #inCondition, __FILE__, __LINE__, iteration);  \
+        (inSuite)->failedAssertions += 1;                                                               \
+        (inSuite)->flagError = true;                                                                    \
+    }                                                                                                   \
+} while (0)
+
+/**
+ * @}
+ *
+ */
+
+/**
+ * This runs all the functions for each test specified in the current
+ * suite and outputs the results for each function using the
+ * currently-set logger methods.
+ *
+ * @param[inout]  inSuite     A pointer to the test suite being run.
+ * @param[inout]  inContext   A pointer to test suite-specific context
+ *                            that will be provided to each test invoked
+ *                            within the suite.
+ *
+ */
+extern void nlTestRunner(struct _nlTestSuite* inSuite, void* inContext);
+
+/**
+ * This summarizes the number of run and failed tests as well as the
+ * number of performed and failed assertions for the suite using the
+ * currently-set logger methods.
+ *
+ * @param[inout]  inSuite     A pointer to the test suite being run.
+ *
+ * @returns SUCCESS (0) if no tests failed; otherwise, the negated count of failed tests.
+ */
+extern int  nlTestRunnerStats(struct _nlTestSuite* inSuite);
+
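+/*
+ * Illustrative usage sketch (not part of the nlunittest API): the test,
+ * suite, and symbol names below are hypothetical examples showing how
+ * the macros and functions declared in this header fit together.
+ *
+ *     #include <nltest.h>
+ *
+ *     // A test: perform one or more assertions against the suite.
+ *     static void TestAddition(nlTestSuite *inSuite, void *inContext)
+ *     {
+ *         NL_TEST_ASSERT(inSuite, 1 + 1 == 2);
+ *     }
+ *
+ *     // The test array must be terminated with NL_TEST_SENTINEL().
+ *     static const nlTest sTests[] = {
+ *         NL_TEST_DEF("addition", TestAddition),
+ *         NL_TEST_SENTINEL()
+ *     };
+ *
+ *     int main(void)
+ *     {
+ *         // Members not listed (counters, flagError) are zero-initialized
+ *         // and are managed by nlTestRunner() itself.
+ *         nlTestSuite theSuite = {
+ *             "example-suite",    // name
+ *             &sTests[0],         // tests
+ *             NULL,               // setup (whole-suite, optional)
+ *             NULL,               // tear_down (whole-suite, optional)
+ *             NULL,               // initialize (per-test, optional)
+ *             NULL                // terminate (per-test, optional)
+ *         };
+ *
+ *         // Run every test in the suite, then report the statistics;
+ *         // nlTestRunnerStats() returns zero when no test failed.
+ *         nlTestRunner(&theSuite, NULL);
+ *
+ *         return nlTestRunnerStats(&theSuite);
+ *     }
+ */
+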
+/**
+ * This globally sets the output style to be used for summarizing a
+ * suite test run.
+ *
+ * @note This supports selecting among built-in logger methods. Custom
+ *       logger methods may be set through the nlTestSetLogger() interface.
+ *
+ * @param[in]     inStyle     The style to be used for summarizing a
+ *                            suite test run.
+ *
+ */
+extern void nlTestSetOutputStyle(nlTestOutputStyle inStyle);
+
+/**
+ * This globally sets the logger methods to be used for summarizing a
+ * suite test run.
+ *
+ * @param[in]     inLogger    A pointer to the logger methods to be used
+ *                            for summarizing a suite test run.
+ *
+ */
+extern void nlTestSetLogger(const nlTestOutputLogger* inLogger);
+
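+/*
+ * Illustrative custom-logger sketch (not part of the nlunittest API):
+ * the function and variable names below are hypothetical. Each member
+ * of nlTestOutputLogger should be populated, since the runner invokes
+ * them without NULL checks; the initializers follow the order of the
+ * structure definition above.
+ *
+ *     #include <nltest.h>
+ *
+ *     static void MyName(struct _nlTestSuite *inSuite)
+ *     {
+ *         printf("suite: %s\n", inSuite->name);
+ *     }
+ *
+ *     // Shared by the initialize/terminate/setup/teardown slots, which
+ *     // all have the same signature.
+ *     static void MyPhase(struct _nlTestSuite *inSuite, int inResult, int inWidth)
+ *     {
+ *         (void)inWidth;
+ *         printf("[ %s ] phase %s\n", inSuite->name,
+ *                inResult == FAILURE ? "FAILED" : "PASSED");
+ *     }
+ *
+ *     static void MyTest(struct _nlTestSuite *inSuite, int inWidth, int inIndex)
+ *     {
+ *         // flagError holds the result of the test that just ran, as
+ *         // used by the built-in loggers.
+ *         printf("[ %s ] %-*s %s\n", inSuite->name, inWidth,
+ *                inSuite->tests[inIndex].name,
+ *                inSuite->flagError ? "FAILED" : "PASSED");
+ *     }
+ *
+ *     static void MyStatTests(struct _nlTestSuite *inSuite)
+ *     {
+ *         printf("failed tests: %d/%d\n", inSuite->failedTests, inSuite->runTests);
+ *     }
+ *
+ *     static void MyStatAsserts(struct _nlTestSuite *inSuite)
+ *     {
+ *         printf("failed asserts: %d/%d\n", inSuite->failedAssertions,
+ *                inSuite->performedAssertions);
+ *     }
+ *
+ *     static const nlTestOutputLogger sMyLogger = {
+ *         MyName,         // PrintName
+ *         MyPhase,        // PrintInitialize
+ *         MyPhase,        // PrintTerminate
+ *         MyPhase,        // PrintSetup
+ *         MyTest,         // PrintTest
+ *         MyPhase,        // PrintTeardown
+ *         MyStatTests,    // PrintStatTests
+ *         MyStatAsserts   // PrintStatAsserts
+ *     };
+ *
+ *     // In main(), before running any suites:
+ *     nlTestSetLogger(&sMyLogger);
+ */
+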
+/**
+ *  @addtogroup compat Compatibility Types and Interfaces
+ *  
+ *  Deprecated legacy types and interfaces. New usage of these types
+ *  and interfaces is discouraged.
+ *
+ *  @{
+ */
+
+/**
+ * Legacy type for output style for tests and test summaries.
+ *
+ */
+typedef nlTestOutputStyle  nl_test_outputStyle;
+
+/**
+ * Legacy type for output functions.
+ *
+ */
+typedef nlTestOutputLogger nl_test_output_logger_t;
+
+/**
+ *  @def nl_test_set_output_style(inStyle)
+ *
+ *  @note See nlTestSetOutputStyle() for the equivalent non-deprecated
+ *        interface.
+ *
+ *  @brief
+ *    This globally sets the output style to be used for summarizing a
+ *    suite test run.
+ *
+ *  @param[in]    inStyle     The style to be used for summarizing a
+ *                            suite test run.
+ *
+ */
+#define nl_test_set_output_style(inStyle) nlTestSetOutputStyle(inStyle)
+
+/**
+ *  @def nl_test_set_logger(inLogger)
+ *
+ *  @note See nlTestSetLogger() for the equivalent non-deprecated
+ *        interface.
+ *
+ *  @brief
+ *    This globally sets the logger methods to be used for summarizing a
+ *    suite test run.
+ *
+ *  @param[in]    inLogger    A pointer to the logger methods to be used
+ *                            for summarizing a suite test run.
+ *
+ */
+#define nl_test_set_logger(inLogger)      nlTestSetLogger(inLogger)
+
+/**
+ *  @}
+ *
+ */
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __NLTEST_H_INCLUDED__ */
diff --git a/nl-unit-test/src/nlunittest-config.h.in b/nl-unit-test/src/nlunittest-config.h.in
new file mode 100644
index 0000000..9962b98
--- /dev/null
+++ b/nl-unit-test/src/nlunittest-config.h.in
@@ -0,0 +1,71 @@
+/* src/nlunittest-config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#undef HAVE_DLFCN_H
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* Define to 1 if stdbool.h conforms to C99. */
+#undef HAVE_STDBOOL_H
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#undef HAVE_STDLIB_H
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#undef HAVE_STRING_H
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#undef HAVE_SYS_STAT_H
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#undef HAVE_SYS_TYPES_H
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* Define to 1 if the system has the type `_Bool'. */
+#undef HAVE__BOOL
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#undef LT_OBJDIR
+
+/* Define to 1 if your C compiler doesn't accept -c and -o together. */
+#undef NO_MINUS_C_MINUS_O
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* Define to 1 if you have the ANSI C header files. */
+#undef STDC_HEADERS
+
+/* Version number of package */
+#undef VERSION
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/CONTRIBUTING.md b/nl-unit-test/third_party/nlbuild-autotools/repo/CONTRIBUTING.md
new file mode 100644
index 0000000..2827b7d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/CONTRIBUTING.md
@@ -0,0 +1,27 @@
+Want to contribute? Great! First, read this page (including the small print at the end).
+
+### Before you contribute
+Before we can use your code, you must sign the
+[Google Individual Contributor License Agreement]
+(https://cla.developers.google.com/about/google-individual)
+(CLA), which you can do online. The CLA is necessary mainly because you own the
+copyright to your changes, even after your contribution becomes part of our
+codebase, so we need your permission to use and distribute your code. We also
+need to be sure of various other things—for instance that you'll tell us if you
+know that your code infringes on other people's patents. You don't have to sign
+the CLA until after you've submitted your code for review and a member has
+approved it, but you must do it before we can put your code into our codebase.
+Before you start working on a larger contribution, you should get in touch with
+us first through the issue tracker with your idea so that we can help out and
+possibly guide you. Coordinating up front makes it much easier to avoid
+frustration later on.
+
+### Code reviews
+All submissions, including submissions by project members, require review. We
+use Github pull requests for this purpose.
+
+### The small print
+Contributions made by corporations are covered by a different agreement than
+the one above, the
+[Software Grant and Corporate Contributor License Agreement]
+(https://cla.developers.google.com/about/google-corporate).
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/LICENSE b/nl-unit-test/third_party/nlbuild-autotools/repo/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/README.md b/nl-unit-test/third_party/nlbuild-autotools/repo/README.md
new file mode 100644
index 0000000..1800040
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/README.md
@@ -0,0 +1,105 @@
+Nest Labs Build - GNU Autotools
+===============================
+
+Introduction
+------------
+
+The Nest Labs Build - GNU Autotools (nlbuild-autotools) provides a
+customized, turnkey build system framework, based on GNU autotools, for
+standalone Nest Labs software packages that need to support not only
+building on and targeting against standalone build host systems but
+also embedded target systems using GCC-based or -compatible toolchains.
+
+Getting Started
+---------------
+
+This project is typically subtreed into a target project repository
+and serves as the seed for that project's build system.
+
+Assuming that you already have a project repository established in
+git, perform the following in your project repository:
+
+    > 1. % git remote add nlbuild-autotools ssh://<PATH_TO_REPOSITORY>/nlbuild-autotools.git
+    > 2. % git fetch nlbuild-autotools
+
+You can place the nlbuild-autotools package anywhere in your project;
+however, by convention, "third_party/nlbuild-autotools/repo" is recommended:
+
+    > 3. % mkdir third_party
+    > 4. % git subtree add --prefix=third_party/nlbuild-autotools/repo --squash --message="Add subtree mirror of repository 'ssh://<PATH_TO_REPOSITORY>/nlbuild-autotools.git' branch 'master' at commit 'HEAD'." nlbuild-autotools HEAD
+
+At this point, you now have the nlbuild-autotools package integrated
+into your project. The next step is using the
+nlbuild-autotools-provided examples as templates. To do this, a
+convenience script has been provided that will get you started. You
+can tune and customize the results, as needed, for your project. From
+the top level of your project tree:
+
+    > 5. % third_party/nlbuild-autotools/repo/scripts/mkskeleton -I third_party/nlbuild-autotools/repo --package-description "My Fantastic Package" --package-name "mfp"
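+
+Should you later need to pull upstream changes into the subtree, a command
+along the following lines should work (this assumes the remote name and
+prefix used above and that upstream development happens on the 'master'
+branch; adjust as needed for your project):
+
+    % git subtree pull --prefix=third_party/nlbuild-autotools/repo --squash nlbuild-autotools master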
+
+Supported Build Host Systems
+----------------------------
+
+The nlbuild-autotools system supports the following POSIX-based build
+host systems:
+
+  * i686-pc-cygwin
+  * i686-pc-linux-gnu
+  * x86_64-apple-darwin
+  * x86_64-unknown-linux-gnu
+
+Support for these systems includes a set of pre-built, qualified
+versions of GNU autotools along with integration and automation
+scripts to run them.
+
+If support is required for a new POSIX-compatible build host system,
+use the 'build' script in 'tools/packages' to unarchive, build, and
+install the tools for your system.
+
+Please see the FAQ section for more background on why this package
+provides these pre-built tools.
+
+Package Organization
+--------------------
+
+The nlbuild-autotools package is laid out as follows:
+
+| Directory                         | Description                                                                              |
+|-----------------------------------|------------------------------------------------------------------------------------------|
+| autoconf/                         | GNU autoconf infrastructure provided by nlbuild-autotools.                               |
+| autoconf/m4/                      | GNU m4 macros for configure.ac provided by nlbuild-autotools.                            |
+| automake/                         | GNU automake Makefile.am header and footer infrastructure provided by nlbuild-autotools. |
+| automake/post/                    | GNU automake Makefile.am footers.                                                        |
+| automake/post.am                  | GNU automake Makefile.am footer included by every makefile.                              |
+| automake/pre/                     | GNU automake Makefile.am headers.                                                        |
+| automake/pre.am                   | GNU automake Makefile.am header included by every makefile.                              |
+| examples/                         | Example template files for starting your own nlbuild-autotools-based project.            |
+| scripts/                          | Automation scripts for regenerating the build system and for managing package versions.  |
+| tools/                            | Qualified packages of and pre-built instances of GNU autotools.                          |
+| tools/host/                          | Pre-built instances of GNU autotools.                                                    |
+| tools/host/i686-pc-cygwin/           | Pre-built instances of GNU autotools for 32-bit Cygwin.                                  |
+| tools/host/i686-pc-linux-gnu/        | Pre-built instances of GNU autotools for 32-bit Linux.                                   |
+| tools/host/x86_64-apple-darwin/      | Pre-built instances of GNU autotools for 64-bit Mac OS X.                                |
+| tools/host/x86_64-unknown-linux-gnu/ | Pre-built instances of GNU autotools for 64-bit Linux.                                   |
+| tools/packages/                   | Qualified packages for GNU autotools.                                                    |
+
+FAQ
+---
+
+Q: Why does nlbuild-autotools have its own built versions of GNU
+   autotools rather than leveraging whatever versions exist on the build
+   host system?
+
+A: Some build host systems such as Mac OS X may not have GNU autotools
+   at all. Other build host systems, such as Linux, may have different
+   distributions or different versions of those distributions in which
+   the versions of GNU autotools are apt to be different.
+
+   These differences lead to different primary and secondary autotools
+   output and, as a consequence, a divergent user and support
+   experience. To avoid this, this package provides a pre-built,
+   qualified set of GNU autotools along with a comprehensive,
+   standalone set of scripts to drive them without reliance on those
+   versions of the tools on the build host system.
+
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/ar-lib b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/ar-lib
new file mode 100755
index 0000000..fe2301e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/ar-lib
@@ -0,0 +1,270 @@
+#! /bin/sh
+# Wrapper for Microsoft lib.exe
+
+me=ar-lib
+scriptversion=2012-03-01.08; # UTC
+
+# Copyright (C) 2010-2013 Free Software Foundation, Inc.
+# Written by Peter Rosin <peda@lysator.liu.se>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+
+# func_error message
+func_error ()
+{
+  echo "$me: $1" 1>&2
+  exit 1
+}
+
+file_conv=
+
+# func_file_conv build_file
+# Convert a $build file to $host form and store it in $file
+# Currently only supports Windows hosts.
+func_file_conv ()
+{
+  file=$1
+  case $file in
+    / | /[!/]*) # absolute file, and not a UNC file
+      if test -z "$file_conv"; then
+	# lazily determine how to convert abs files
+	case `uname -s` in
+	  MINGW*)
+	    file_conv=mingw
+	    ;;
+	  CYGWIN*)
+	    file_conv=cygwin
+	    ;;
+	  *)
+	    file_conv=wine
+	    ;;
+	esac
+      fi
+      case $file_conv in
+	mingw)
+	  file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
+	  ;;
+	cygwin)
+	  file=`cygpath -m "$file" || echo "$file"`
+	  ;;
+	wine)
+	  file=`winepath -w "$file" || echo "$file"`
+	  ;;
+      esac
+      ;;
+  esac
+}
+
+# func_at_file at_file operation archive
+# Iterate over all members in AT_FILE performing OPERATION on ARCHIVE
+# for each of them.
+# When interpreting the content of the @FILE, do NOT use func_file_conv,
+# since the user would need to supply preconverted file names to
+# binutils ar, at least for MinGW.
+func_at_file ()
+{
+  operation=$2
+  archive=$3
+  at_file_contents=`cat "$1"`
+  eval set x "$at_file_contents"
+  shift
+
+  for member
+  do
+    $AR -NOLOGO $operation:"$member" "$archive" || exit $?
+  done
+}
+
+case $1 in
+  '')
+     func_error "no command.  Try '$0 --help' for more information."
+     ;;
+  -h | --h*)
+    cat <<EOF
+Usage: $me [--help] [--version] PROGRAM ACTION ARCHIVE [MEMBER...]
+
+Members may be specified in a file named with @FILE.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "$me, version $scriptversion"
+    exit $?
+    ;;
+esac
+
+if test $# -lt 3; then
+  func_error "you must specify a program, an action and an archive"
+fi
+
+AR=$1
+shift
+while :
+do
+  if test $# -lt 2; then
+    func_error "you must specify a program, an action and an archive"
+  fi
+  case $1 in
+    -lib | -LIB \
+    | -ltcg | -LTCG \
+    | -machine* | -MACHINE* \
+    | -subsystem* | -SUBSYSTEM* \
+    | -verbose | -VERBOSE \
+    | -wx* | -WX* )
+      AR="$AR $1"
+      shift
+      ;;
+    *)
+      action=$1
+      shift
+      break
+      ;;
+  esac
+done
+orig_archive=$1
+shift
+func_file_conv "$orig_archive"
+archive=$file
+
+# strip leading dash in $action
+action=${action#-}
+
+delete=
+extract=
+list=
+quick=
+replace=
+index=
+create=
+
+while test -n "$action"
+do
+  case $action in
+    d*) delete=yes  ;;
+    x*) extract=yes ;;
+    t*) list=yes    ;;
+    q*) quick=yes   ;;
+    r*) replace=yes ;;
+    s*) index=yes   ;;
+    S*)             ;; # the index is always updated implicitly
+    c*) create=yes  ;;
+    u*)             ;; # TODO: don't ignore the update modifier
+    v*)             ;; # TODO: don't ignore the verbose modifier
+    *)
+      func_error "unknown action specified"
+      ;;
+  esac
+  action=${action#?}
+done
+
+case $delete$extract$list$quick$replace,$index in
+  yes,* | ,yes)
+    ;;
+  yesyes*)
+    func_error "more than one action specified"
+    ;;
+  *)
+    func_error "no action specified"
+    ;;
+esac
+
+if test -n "$delete"; then
+  if test ! -f "$orig_archive"; then
+    func_error "archive not found"
+  fi
+  for member
+  do
+    case $1 in
+      @*)
+        func_at_file "${1#@}" -REMOVE "$archive"
+        ;;
+      *)
+        func_file_conv "$1"
+        $AR -NOLOGO -REMOVE:"$file" "$archive" || exit $?
+        ;;
+    esac
+  done
+
+elif test -n "$extract"; then
+  if test ! -f "$orig_archive"; then
+    func_error "archive not found"
+  fi
+  if test $# -gt 0; then
+    for member
+    do
+      case $1 in
+        @*)
+          func_at_file "${1#@}" -EXTRACT "$archive"
+          ;;
+        *)
+          func_file_conv "$1"
+          $AR -NOLOGO -EXTRACT:"$file" "$archive" || exit $?
+          ;;
+      esac
+    done
+  else
+    $AR -NOLOGO -LIST "$archive" | sed -e 's/\\/\\\\/g' | while read member
+    do
+      $AR -NOLOGO -EXTRACT:"$member" "$archive" || exit $?
+    done
+  fi
+
+elif test -n "$quick$replace"; then
+  if test ! -f "$orig_archive"; then
+    if test -z "$create"; then
+      echo "$me: creating $orig_archive"
+    fi
+    orig_archive=
+  else
+    orig_archive=$archive
+  fi
+
+  for member
+  do
+    case $1 in
+    @*)
+      func_file_conv "${1#@}"
+      set x "$@" "@$file"
+      ;;
+    *)
+      func_file_conv "$1"
+      set x "$@" "$file"
+      ;;
+    esac
+    shift
+    shift
+  done
+
+  if test -n "$orig_archive"; then
+    $AR -NOLOGO -OUT:"$archive" "$orig_archive" "$@" || exit $?
+  else
+    $AR -NOLOGO -OUT:"$archive" "$@" || exit $?
+  fi
+
+elif test -n "$list"; then
+  if test ! -f "$orig_archive"; then
+    func_error "archive not found"
+  fi
+  $AR -NOLOGO -LIST "$archive" || exit $?
+fi
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/compile b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/compile
new file mode 100755
index 0000000..b1f4749
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/compile
@@ -0,0 +1,310 @@
+#! /bin/sh
+# Wrapper for compilers which do not understand '-c -o'.
+
+scriptversion=2012-01-04.17; # UTC
+
+# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2009, 2010, 2012 Free
+# Software Foundation, Inc.
+# Written by Tom Tromey <tromey@cygnus.com>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+nl='
+'
+
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent tools from complaining about whitespace usage.
+IFS=" ""	$nl"
+
+file_conv=
+
+# func_file_conv build_file lazy
+# Convert a $build file to $host form and store it in $file
+# Currently only supports Windows hosts. If the determined conversion
+# type is listed in (the comma separated) LAZY, no conversion will
+# take place.
+func_file_conv ()
+{
+  file=$1
+  case $file in
+    / | /[!/]*) # absolute file, and not a UNC file
+      if test -z "$file_conv"; then
+	# lazily determine how to convert abs files
+	case `uname -s` in
+	  MINGW*)
+	    file_conv=mingw
+	    ;;
+	  CYGWIN*)
+	    file_conv=cygwin
+	    ;;
+	  *)
+	    file_conv=wine
+	    ;;
+	esac
+      fi
+      case $file_conv/,$2, in
+	*,$file_conv,*)
+	  ;;
+	mingw/*)
+	  file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
+	  ;;
+	cygwin/*)
+	  file=`cygpath -m "$file" || echo "$file"`
+	  ;;
+	wine/*)
+	  file=`winepath -w "$file" || echo "$file"`
+	  ;;
+      esac
+      ;;
+  esac
+}
+
+# func_cl_wrapper cl arg...
+# Adjust compile command to suit cl
+func_cl_wrapper ()
+{
+  # Assume a capable shell
+  lib_path=
+  shared=:
+  linker_opts=
+  for arg
+  do
+    if test -n "$eat"; then
+      eat=
+    else
+      case $1 in
+	-o)
+	  # configure might choose to run compile as 'compile cc -o foo foo.c'.
+	  eat=1
+	  case $2 in
+	    *.o | *.[oO][bB][jJ])
+	      func_file_conv "$2"
+	      set x "$@" -Fo"$file"
+	      shift
+	      ;;
+	    *)
+	      func_file_conv "$2"
+	      set x "$@" -Fe"$file"
+	      shift
+	      ;;
+	  esac
+	  ;;
+	-I*)
+	  func_file_conv "${1#-I}" mingw
+	  set x "$@" -I"$file"
+	  shift
+	  ;;
+	-l*)
+	  lib=${1#-l}
+	  found=no
+	  save_IFS=$IFS
+	  IFS=';'
+	  for dir in $lib_path $LIB
+	  do
+	    IFS=$save_IFS
+	    if $shared && test -f "$dir/$lib.dll.lib"; then
+	      found=yes
+	      set x "$@" "$dir/$lib.dll.lib"
+	      break
+	    fi
+	    if test -f "$dir/$lib.lib"; then
+	      found=yes
+	      set x "$@" "$dir/$lib.lib"
+	      break
+	    fi
+	  done
+	  IFS=$save_IFS
+
+	  test "$found" != yes && set x "$@" "$lib.lib"
+	  shift
+	  ;;
+	-L*)
+	  func_file_conv "${1#-L}"
+	  if test -z "$lib_path"; then
+	    lib_path=$file
+	  else
+	    lib_path="$lib_path;$file"
+	  fi
+	  linker_opts="$linker_opts -LIBPATH:$file"
+	  ;;
+	-static)
+	  shared=false
+	  ;;
+	-Wl,*)
+	  arg=${1#-Wl,}
+	  save_ifs="$IFS"; IFS=','
+	  for flag in $arg; do
+	    IFS="$save_ifs"
+	    linker_opts="$linker_opts $flag"
+	  done
+	  IFS="$save_ifs"
+	  ;;
+	-Xlinker)
+	  eat=1
+	  linker_opts="$linker_opts $2"
+	  ;;
+	-*)
+	  set x "$@" "$1"
+	  shift
+	  ;;
+	*.cc | *.CC | *.cxx | *.CXX | *.[cC]++)
+	  func_file_conv "$1"
+	  set x "$@" -Tp"$file"
+	  shift
+	  ;;
+	*.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO])
+	  func_file_conv "$1" mingw
+	  set x "$@" "$file"
+	  shift
+	  ;;
+	*)
+	  set x "$@" "$1"
+	  shift
+	  ;;
+      esac
+    fi
+    shift
+  done
+  if test -n "$linker_opts"; then
+    linker_opts="-link$linker_opts"
+  fi
+  exec "$@" $linker_opts
+  exit 1
+}
+
+eat=
+
+case $1 in
+  '')
+     echo "$0: No command.  Try '$0 --help' for more information." 1>&2
+     exit 1;
+     ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: compile [--help] [--version] PROGRAM [ARGS]
+
+Wrapper for compilers which do not understand '-c -o'.
+Remove '-o dest.o' from ARGS, run PROGRAM with the remaining
+arguments, and rename the output as expected.
+
+If you are trying to build a whole package this is not the
+right script to run: please start by reading the file 'INSTALL'.
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "compile $scriptversion"
+    exit $?
+    ;;
+  cl | *[/\\]cl | cl.exe | *[/\\]cl.exe )
+    func_cl_wrapper "$@"      # Doesn't return...
+    ;;
+esac
+
+ofile=
+cfile=
+
+for arg
+do
+  if test -n "$eat"; then
+    eat=
+  else
+    case $1 in
+      -o)
+	# configure might choose to run compile as 'compile cc -o foo foo.c'.
+	# So we strip '-o arg' only if arg is an object.
+	eat=1
+	case $2 in
+	  *.o | *.obj)
+	    ofile=$2
+	    ;;
+	  *)
+	    set x "$@" -o "$2"
+	    shift
+	    ;;
+	esac
+	;;
+      *.c)
+	cfile=$1
+	set x "$@" "$1"
+	shift
+	;;
+      *)
+	set x "$@" "$1"
+	shift
+	;;
+    esac
+  fi
+  shift
+done
+
+if test -z "$ofile" || test -z "$cfile"; then
+  # If no '-o' option was seen then we might have been invoked from a
+  # pattern rule where we don't need one.  That is ok -- this is a
+  # normal compilation that the losing compiler can handle.  If no
+  # '.c' file was seen then we are probably linking.  That is also
+  # ok.
+  exec "$@"
+fi
+
+# Name of file we expect compiler to create.
+cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`
+
+# Create the lock directory.
+# Note: use '[/\\:.-]' here to ensure that we don't use the same name
+# that we are using for the .o file.  Also, base the name on the expected
+# object file name, since that is what matters with a parallel build.
+lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
+while true; do
+  if mkdir "$lockdir" >/dev/null 2>&1; then
+    break
+  fi
+  sleep 1
+done
+# FIXME: race condition here if user kills between mkdir and trap.
+trap "rmdir '$lockdir'; exit 1" 1 2 15
+
+# Run the compile.
+"$@"
+ret=$?
+
+if test -f "$cofile"; then
+  test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
+elif test -f "${cofile}bj"; then
+  test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
+fi
+
+rmdir "$lockdir"
+exit $ret
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/config.guess b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/config.guess
new file mode 100755
index 0000000..d622a44
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/config.guess
@@ -0,0 +1,1530 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+#   2011, 2012 Free Software Foundation, Inc.
+
+timestamp='2012-02-10'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Originally written by Per Bothner.  Please send patches (context
+# diff format) to <config-patches@gnu.org> and include a ChangeLog
+# entry.
+#
+# This script attempts to guess a canonical system name similar to
+# config.sub.  If it succeeds, it prints the system name on stdout, and
+# exits with 0.  Otherwise, it exits with 1.
+#
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
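+#
+# For example, on a 64-bit GNU/Linux build host this script typically
+# prints a triple such as "x86_64-unknown-linux-gnu".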
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
+Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+    * )
+       break ;;
+  esac
+done
+
+if test $# != 0; then
+  echo "$me: too many arguments$help" >&2
+  exit 1
+fi
+
+trap 'exit 1' 1 2 15
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+set_cc_for_build='
+trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
+: ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
+dummy=$tmp/dummy ;
+tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
+case $CC_FOR_BUILD,$HOST_CC,$CC in
+ ,,)    echo "int x;" > $dummy.c ;
+	for c in cc gcc c89 c99 ; do
+	  if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
+	     CC_FOR_BUILD="$c"; break ;
+	  fi ;
+	done ;
+	if test x"$CC_FOR_BUILD" = x ; then
+	  CC_FOR_BUILD=no_compiler_found ;
+	fi
+	;;
+ ,,*)   CC_FOR_BUILD=$CC ;;
+ ,*,*)  CC_FOR_BUILD=$HOST_CC ;;
+esac ; set_cc_for_build= ;'
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi@noc.rutgers.edu 1994-08-24)
+if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
+	PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null`  || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+# Note: order is significant - the case branches are not exclusive.
+
+case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+    *:NetBSD:*:*)
+	# NetBSD (nbsd) targets should (where applicable) match one or
+	# more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+	# *-*-netbsdecoff* and *-*-netbsd*.  For targets that recently
+	# switched to ELF, *-*-netbsd* would select the old
+	# object file format.  This provides both forward
+	# compatibility and a consistent mechanism for selecting the
+	# object file format.
+	#
+	# Note: NetBSD doesn't particularly care about the vendor
+	# portion of the name.  We always set it to "unknown".
+	sysctl="sysctl -n hw.machine_arch"
+	UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
+	    /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
+	case "${UNAME_MACHINE_ARCH}" in
+	    armeb) machine=armeb-unknown ;;
+	    arm*) machine=arm-unknown ;;
+	    sh3el) machine=shl-unknown ;;
+	    sh3eb) machine=sh-unknown ;;
+	    sh5el) machine=sh5le-unknown ;;
+	    *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
+	esac
+	# The Operating System including object format, if it has switched
+	# to ELF recently, or will in the future.
+	case "${UNAME_MACHINE_ARCH}" in
+	    arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+		eval $set_cc_for_build
+		if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+			| grep -q __ELF__
+		then
+		    # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+		    # Return netbsd for either.  FIX?
+		    os=netbsd
+		else
+		    os=netbsdelf
+		fi
+		;;
+	    *)
+		os=netbsd
+		;;
+	esac
+	# The OS release
+	# Debian GNU/NetBSD machines have a different userland, and
+	# thus, need a distinct triplet. However, they do not need
+	# kernel version information, so it can be replaced with a
+	# suitable tag, in the style of linux-gnu.
+	case "${UNAME_VERSION}" in
+	    Debian*)
+		release='-gnu'
+		;;
+	    *)
+		release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
+		;;
+	esac
+	# Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+	# contains redundant information, the shorter form:
+	# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+	echo "${machine}-${os}${release}"
+	exit ;;
+    *:OpenBSD:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+	echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
+	exit ;;
+    *:ekkoBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
+	exit ;;
+    *:SolidBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE}
+	exit ;;
+    macppc:MirBSD:*:*)
+	echo powerpc-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    *:MirBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    alpha:OSF1:*:*)
+	case $UNAME_RELEASE in
+	*4.0)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+		;;
+	*5.*)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+		;;
+	esac
+	# According to Compaq, /usr/sbin/psrinfo has been available on
+	# OSF/1 and Tru64 systems produced since 1995.  I hope that
+	# covers most systems running today.  This code pipes the CPU
+	# types through head -n 1, so we only detect the type of CPU 0.
+	ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^  The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+	case "$ALPHA_CPU_TYPE" in
+	    "EV4 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV4.5 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "LCA4 (21066/21068)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV5 (21164)")
+		UNAME_MACHINE="alphaev5" ;;
+	    "EV5.6 (21164A)")
+		UNAME_MACHINE="alphaev56" ;;
+	    "EV5.6 (21164PC)")
+		UNAME_MACHINE="alphapca56" ;;
+	    "EV5.7 (21164PC)")
+		UNAME_MACHINE="alphapca57" ;;
+	    "EV6 (21264)")
+		UNAME_MACHINE="alphaev6" ;;
+	    "EV6.7 (21264A)")
+		UNAME_MACHINE="alphaev67" ;;
+	    "EV6.8CB (21264C)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8AL (21264B)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8CX (21264D)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.9A (21264/EV69A)")
+		UNAME_MACHINE="alphaev69" ;;
+	    "EV7 (21364)")
+		UNAME_MACHINE="alphaev7" ;;
+	    "EV7.9 (21364A)")
+		UNAME_MACHINE="alphaev79" ;;
+	esac
+	# A Pn.n version is a patched version.
+	# A Vn.n version is a released version.
+	# A Tn.n version is a released field test version.
+	# A Xn.n version is an unreleased experimental baselevel.
+	# 1.2 uses "1.2" for uname -r.
+	echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	# Reset EXIT trap before exiting to avoid spurious non-zero exit code.
+	exitcode=$?
+	trap '' 0
+	exit $exitcode ;;
+    Alpha\ *:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# Should we change UNAME_MACHINE based on the output of uname instead
+	# of the specific Alpha model?
+	echo alpha-pc-interix
+	exit ;;
+    21064:Windows_NT:50:3)
+	echo alpha-dec-winnt3.5
+	exit ;;
+    Amiga*:UNIX_System_V:4.0:*)
+	echo m68k-unknown-sysv4
+	exit ;;
+    *:[Aa]miga[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-amigaos
+	exit ;;
+    *:[Mm]orph[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-morphos
+	exit ;;
+    *:OS/390:*:*)
+	echo i370-ibm-openedition
+	exit ;;
+    *:z/VM:*:*)
+	echo s390-ibm-zvmoe
+	exit ;;
+    *:OS400:*:*)
+	echo powerpc-ibm-os400
+	exit ;;
+    arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+	echo arm-acorn-riscix${UNAME_RELEASE}
+	exit ;;
+    arm:riscos:*:*|arm:RISCOS:*:*)
+	echo arm-unknown-riscos
+	exit ;;
+    SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+	echo hppa1.1-hitachi-hiuxmpp
+	exit ;;
+    Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+	# akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+	if test "`(/bin/universe) 2>/dev/null`" = att ; then
+		echo pyramid-pyramid-sysv3
+	else
+		echo pyramid-pyramid-bsd
+	fi
+	exit ;;
+    NILE*:*:*:dcosx)
+	echo pyramid-pyramid-svr4
+	exit ;;
+    DRS?6000:unix:4.0:6*)
+	echo sparc-icl-nx6
+	exit ;;
+    DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+	case `/usr/bin/uname -p` in
+	    sparc) echo sparc-icl-nx7; exit ;;
+	esac ;;
+    s390x:SunOS:*:*)
+	echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4H:SunOS:5.*:*)
+	echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+	echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
+	echo i386-pc-auroraux${UNAME_RELEASE}
+	exit ;;
+    i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+	eval $set_cc_for_build
+	SUN_ARCH="i386"
+	# If there is a compiler, see if it is configured for 64-bit objects.
+	# Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+	# This test works for both compilers.
+	if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+	    if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+		(CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+		grep IS_64BIT_ARCH >/dev/null
+	    then
+		SUN_ARCH="x86_64"
+	    fi
+	fi
+	echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:6*:*)
+	# According to config.sub, this is the proper way to canonicalize
+	# SunOS6.  Hard to guess exactly what SunOS6 will be like, but
+	# it's likely to be more like Solaris than SunOS4.
+	echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:*:*)
+	case "`/usr/bin/arch -k`" in
+	    Series*|S4*)
+		UNAME_RELEASE=`uname -v`
+		;;
+	esac
+	# Japanese Language versions have a version number like `4.1.3-JL'.
+	echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
+	exit ;;
+    sun3*:SunOS:*:*)
+	echo m68k-sun-sunos${UNAME_RELEASE}
+	exit ;;
+    sun*:*:4.2BSD:*)
+	UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+	test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
+	case "`/bin/arch`" in
+	    sun3)
+		echo m68k-sun-sunos${UNAME_RELEASE}
+		;;
+	    sun4)
+		echo sparc-sun-sunos${UNAME_RELEASE}
+		;;
+	esac
+	exit ;;
+    aushp:SunOS:*:*)
+	echo sparc-auspex-sunos${UNAME_RELEASE}
+	exit ;;
+    # The situation for MiNT is a little confusing.  The machine name
+    # can be virtually everything (everything which is not
+    # "atarist" or "atariste" at least should have a processor
+    # > m68000).  The system name ranges from "MiNT" over "FreeMiNT"
+    # to the lowercase version "mint" (or "freemint").  Finally
+    # the system name "TOS" denotes a system which is actually not
+    # MiNT.  But MiNT is downward compatible to TOS, so this should
+    # be no problem.
+    atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+	echo m68k-milan-mint${UNAME_RELEASE}
+	exit ;;
+    hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+	echo m68k-hades-mint${UNAME_RELEASE}
+	exit ;;
+    *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+	echo m68k-unknown-mint${UNAME_RELEASE}
+	exit ;;
+    m68k:machten:*:*)
+	echo m68k-apple-machten${UNAME_RELEASE}
+	exit ;;
+    powerpc:machten:*:*)
+	echo powerpc-apple-machten${UNAME_RELEASE}
+	exit ;;
+    RISC*:Mach:*:*)
+	echo mips-dec-mach_bsd4.3
+	exit ;;
+    RISC*:ULTRIX:*:*)
+	echo mips-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    VAX*:ULTRIX*:*:*)
+	echo vax-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    2020:CLIX:*:* | 2430:CLIX:*:*)
+	echo clipper-intergraph-clix${UNAME_RELEASE}
+	exit ;;
+    mips:*:*:UMIPS | mips:*:*:RISCos)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+#ifdef __cplusplus
+#include <stdio.h>  /* for printf() prototype */
+	int main (int argc, char *argv[]) {
+#else
+	int main (argc, argv) int argc; char *argv[]; {
+#endif
+	#if defined (host_mips) && defined (MIPSEB)
+	#if defined (SYSTYPE_SYSV)
+	  printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_SVR4)
+	  printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+	  printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
+	#endif
+	#endif
+	  exit (-1);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c &&
+	  dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+	  SYSTEM_NAME=`$dummy $dummyarg` &&
+	    { echo "$SYSTEM_NAME"; exit; }
+	echo mips-mips-riscos${UNAME_RELEASE}
+	exit ;;
+    Motorola:PowerMAX_OS:*:*)
+	echo powerpc-motorola-powermax
+	exit ;;
+    Motorola:*:4.3:PL8-*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:Power_UNIX:*:*)
+	echo powerpc-harris-powerunix
+	exit ;;
+    m88k:CX/UX:7*:*)
+	echo m88k-harris-cxux7
+	exit ;;
+    m88k:*:4*:R4*)
+	echo m88k-motorola-sysv4
+	exit ;;
+    m88k:*:3*:R3*)
+	echo m88k-motorola-sysv3
+	exit ;;
+    AViiON:dgux:*:*)
+	# DG/UX returns AViiON for all architectures
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+	then
+	    if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+	       [ ${TARGET_BINARY_INTERFACE}x = x ]
+	    then
+		echo m88k-dg-dgux${UNAME_RELEASE}
+	    else
+		echo m88k-dg-dguxbcs${UNAME_RELEASE}
+	    fi
+	else
+	    echo i586-dg-dgux${UNAME_RELEASE}
+	fi
+	exit ;;
+    M88*:DolphinOS:*:*)	# DolphinOS (SVR3)
+	echo m88k-dolphin-sysv3
+	exit ;;
+    M88*:*:R3*:*)
+	# Delta 88k system running SVR3
+	echo m88k-motorola-sysv3
+	exit ;;
+    XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+	echo m88k-tektronix-sysv3
+	exit ;;
+    Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+	echo m68k-tektronix-bsd
+	exit ;;
+    *:IRIX*:*:*)
+	echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
+	exit ;;
+    ????????:AIX?:[12].1:2)   # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+	echo romp-ibm-aix     # uname -m gives an 8 hex-code CPU id
+	exit ;;               # Note that: echo "'`uname -s`'" gives 'AIX '
+    i*86:AIX:*:*)
+	echo i386-ibm-aix
+	exit ;;
+    ia64:AIX:*:*)
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:2:3)
+	if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+		eval $set_cc_for_build
+		sed 's/^		//' << EOF >$dummy.c
+		#include <sys/systemcfg.h>
+
+		main()
+			{
+			if (!__power_pc())
+				exit(1);
+			puts("powerpc-ibm-aix3.2.5");
+			exit(0);
+			}
+EOF
+		if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
+		then
+			echo "$SYSTEM_NAME"
+		else
+			echo rs6000-ibm-aix3.2.5
+		fi
+	elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+		echo rs6000-ibm-aix3.2.4
+	else
+		echo rs6000-ibm-aix3.2
+	fi
+	exit ;;
+    *:AIX:*:[4567])
+	IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+	if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
+		IBM_ARCH=rs6000
+	else
+		IBM_ARCH=powerpc
+	fi
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${IBM_ARCH}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:*:*)
+	echo rs6000-ibm-aix
+	exit ;;
+    ibmrt:4.4BSD:*|romp-ibm:BSD:*)
+	echo romp-ibm-bsd4.4
+	exit ;;
+    ibmrt:*BSD:*|romp-ibm:BSD:*)            # covers RT/PC BSD and
+	echo romp-ibm-bsd${UNAME_RELEASE}   # 4.3 with uname added to
+	exit ;;                             # report: romp-ibm BSD 4.3
+    *:BOSX:*:*)
+	echo rs6000-bull-bosx
+	exit ;;
+    DPX/2?00:B.O.S.:*:*)
+	echo m68k-bull-sysv3
+	exit ;;
+    9000/[34]??:4.3bsd:1.*:*)
+	echo m68k-hp-bsd
+	exit ;;
+    hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+	echo m68k-hp-bsd4.4
+	exit ;;
+    9000/[34678]??:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	case "${UNAME_MACHINE}" in
+	    9000/31? )            HP_ARCH=m68000 ;;
+	    9000/[34]?? )         HP_ARCH=m68k ;;
+	    9000/[678][0-9][0-9])
+		if [ -x /usr/bin/getconf ]; then
+		    sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+		    sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+		    case "${sc_cpu_version}" in
+		      523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
+		      528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
+		      532)                      # CPU_PA_RISC2_0
+			case "${sc_kernel_bits}" in
+			  32) HP_ARCH="hppa2.0n" ;;
+			  64) HP_ARCH="hppa2.0w" ;;
+			  '') HP_ARCH="hppa2.0" ;;   # HP-UX 10.20
+			esac ;;
+		    esac
+		fi
+		if [ "${HP_ARCH}" = "" ]; then
+		    eval $set_cc_for_build
+		    sed 's/^		//' << EOF >$dummy.c
+
+		#define _HPUX_SOURCE
+		#include <stdlib.h>
+		#include <unistd.h>
+
+		int main ()
+		{
+		#if defined(_SC_KERNEL_BITS)
+		    long bits = sysconf(_SC_KERNEL_BITS);
+		#endif
+		    long cpu  = sysconf (_SC_CPU_VERSION);
+
+		    switch (cpu)
+			{
+			case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+			case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+			case CPU_PA_RISC2_0:
+		#if defined(_SC_KERNEL_BITS)
+			    switch (bits)
+				{
+				case 64: puts ("hppa2.0w"); break;
+				case 32: puts ("hppa2.0n"); break;
+				default: puts ("hppa2.0"); break;
+				} break;
+		#else  /* !defined(_SC_KERNEL_BITS) */
+			    puts ("hppa2.0"); break;
+		#endif
+			default: puts ("hppa1.0"); break;
+			}
+		    exit (0);
+		}
+EOF
+		    (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
+		    test -z "$HP_ARCH" && HP_ARCH=hppa
+		fi ;;
+	esac
+	if [ ${HP_ARCH} = "hppa2.0w" ]
+	then
+	    eval $set_cc_for_build
+
+	    # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+	    # 32-bit code.  hppa64-hp-hpux* has the same kernel and a compiler
+	    # generating 64-bit code.  GNU and HP use different nomenclature:
+	    #
+	    # $ CC_FOR_BUILD=cc ./config.guess
+	    # => hppa2.0w-hp-hpux11.23
+	    # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+	    # => hppa64-hp-hpux11.23
+
+	    if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
+		grep -q __LP64__
+	    then
+		HP_ARCH="hppa2.0w"
+	    else
+		HP_ARCH="hppa64"
+	    fi
+	fi
+	echo ${HP_ARCH}-hp-hpux${HPUX_REV}
+	exit ;;
+    ia64:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	echo ia64-hp-hpux${HPUX_REV}
+	exit ;;
+    3050*:HI-UX:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#include <unistd.h>
+	int
+	main ()
+	{
+	  long cpu = sysconf (_SC_CPU_VERSION);
+	  /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+	     true for CPU_PA_RISC1_0.  CPU_IS_PA_RISC returns correct
+	     results, however.  */
+	  if (CPU_IS_PA_RISC (cpu))
+	    {
+	      switch (cpu)
+		{
+		  case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+		  default: puts ("hppa-hitachi-hiuxwe2"); break;
+		}
+	    }
+	  else if (CPU_IS_HP_MC68K (cpu))
+	    puts ("m68k-hitachi-hiuxwe2");
+	  else puts ("unknown-hitachi-hiuxwe2");
+	  exit (0);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
+		{ echo "$SYSTEM_NAME"; exit; }
+	echo unknown-hitachi-hiuxwe2
+	exit ;;
+    9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
+	echo hppa1.1-hp-bsd
+	exit ;;
+    9000/8??:4.3bsd:*:*)
+	echo hppa1.0-hp-bsd
+	exit ;;
+    *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+	echo hppa1.0-hp-mpeix
+	exit ;;
+    hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
+	echo hppa1.1-hp-osf
+	exit ;;
+    hp8??:OSF1:*:*)
+	echo hppa1.0-hp-osf
+	exit ;;
+    i*86:OSF1:*:*)
+	if [ -x /usr/sbin/sysversion ] ; then
+	    echo ${UNAME_MACHINE}-unknown-osf1mk
+	else
+	    echo ${UNAME_MACHINE}-unknown-osf1
+	fi
+	exit ;;
+    parisc*:Lites*:*:*)
+	echo hppa1.1-hp-lites
+	exit ;;
+    C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+	echo c1-convex-bsd
+	exit ;;
+    C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+	echo c34-convex-bsd
+	exit ;;
+    C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+	echo c38-convex-bsd
+	exit ;;
+    C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+	echo c4-convex-bsd
+	exit ;;
+    CRAY*Y-MP:*:*:*)
+	echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*[A-Z]90:*:*:*)
+	echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
+	| sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+	      -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+	      -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*TS:*:*:*)
+	echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*T3E:*:*:*)
+	echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*SV1:*:*:*)
+	echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    *:UNICOS/mp:*:*)
+	echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+	FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+	FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
+	echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit ;;
+    5000:UNIX_System_V:4.*:*)
+	FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+	FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
+	echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit ;;
+    i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+	echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
+	exit ;;
+    sparc*:BSD/OS:*:*)
+	echo sparc-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:BSD/OS:*:*)
+	echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:FreeBSD:*:*)
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	case ${UNAME_PROCESSOR} in
+	    amd64)
+		echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	    *)
+		echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	esac
+	exit ;;
+    i*:CYGWIN*:*)
+	echo ${UNAME_MACHINE}-pc-cygwin
+	exit ;;
+    *:MINGW*:*)
+	echo ${UNAME_MACHINE}-pc-mingw32
+	exit ;;
+    i*:MSYS*:*)
+	echo ${UNAME_MACHINE}-pc-msys
+	exit ;;
+    i*:windows32*:*)
+	# uname -m includes "-pc" on this system.
+	echo ${UNAME_MACHINE}-mingw32
+	exit ;;
+    i*:PW*:*)
+	echo ${UNAME_MACHINE}-pc-pw32
+	exit ;;
+    *:Interix*:*)
+	case ${UNAME_MACHINE} in
+	    x86)
+		echo i586-pc-interix${UNAME_RELEASE}
+		exit ;;
+	    authenticamd | genuineintel | EM64T)
+		echo x86_64-unknown-interix${UNAME_RELEASE}
+		exit ;;
+	    IA64)
+		echo ia64-unknown-interix${UNAME_RELEASE}
+		exit ;;
+	esac ;;
+    [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
+	echo i${UNAME_MACHINE}-pc-mks
+	exit ;;
+    8664:Windows_NT:*)
+	echo x86_64-pc-mks
+	exit ;;
+    i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
+	# UNAME_MACHINE based on the output of uname instead of i386?
+	echo i586-pc-interix
+	exit ;;
+    i*:UWIN*:*)
+	echo ${UNAME_MACHINE}-pc-uwin
+	exit ;;
+    amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+	echo x86_64-unknown-cygwin
+	exit ;;
+    p*:CYGWIN*:*)
+	echo powerpcle-unknown-cygwin
+	exit ;;
+    prep*:SunOS:5.*:*)
+	echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    *:GNU:*:*)
+	# the GNU system
+	echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
+	exit ;;
+    *:GNU/*:*:*)
+	# other systems with GNU libc and userland
+	echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
+	exit ;;
+    i*86:Minix:*:*)
+	echo ${UNAME_MACHINE}-pc-minix
+	exit ;;
+    aarch64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    aarch64_be:Linux:*:*)
+	UNAME_MACHINE=aarch64_be
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    alpha:Linux:*:*)
+	case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+	  EV5)   UNAME_MACHINE=alphaev5 ;;
+	  EV56)  UNAME_MACHINE=alphaev56 ;;
+	  PCA56) UNAME_MACHINE=alphapca56 ;;
+	  PCA57) UNAME_MACHINE=alphapca56 ;;
+	  EV6)   UNAME_MACHINE=alphaev6 ;;
+	  EV67)  UNAME_MACHINE=alphaev67 ;;
+	  EV68*) UNAME_MACHINE=alphaev68 ;;
+	esac
+	objdump --private-headers /bin/sh | grep -q ld.so.1
+	if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
+	echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
+	exit ;;
+    arm*:Linux:*:*)
+	eval $set_cc_for_build
+	if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+	    | grep -q __ARM_EABI__
+	then
+	    echo ${UNAME_MACHINE}-unknown-linux-gnu
+	else
+	    if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+		| grep -q __ARM_PCS_VFP
+	    then
+		echo ${UNAME_MACHINE}-unknown-linux-gnueabi
+	    else
+		echo ${UNAME_MACHINE}-unknown-linux-gnueabihf
+	    fi
+	fi
+	exit ;;
+    avr32*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    cris:Linux:*:*)
+	echo ${UNAME_MACHINE}-axis-linux-gnu
+	exit ;;
+    crisv32:Linux:*:*)
+	echo ${UNAME_MACHINE}-axis-linux-gnu
+	exit ;;
+    frv:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    hexagon:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    i*86:Linux:*:*)
+	LIBC=gnu
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#ifdef __dietlibc__
+	LIBC=dietlibc
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'`
+	echo "${UNAME_MACHINE}-pc-linux-${LIBC}"
+	exit ;;
+    ia64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    m32r*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    m68*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    mips:Linux:*:* | mips64:Linux:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#undef CPU
+	#undef ${UNAME_MACHINE}
+	#undef ${UNAME_MACHINE}el
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	CPU=${UNAME_MACHINE}el
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	CPU=${UNAME_MACHINE}
+	#else
+	CPU=
+	#endif
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'`
+	test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
+	;;
+    or32:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    padre:Linux:*:*)
+	echo sparc-unknown-linux-gnu
+	exit ;;
+    parisc64:Linux:*:* | hppa64:Linux:*:*)
+	echo hppa64-unknown-linux-gnu
+	exit ;;
+    parisc:Linux:*:* | hppa:Linux:*:*)
+	# Look for CPU level
+	case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+	  PA7*) echo hppa1.1-unknown-linux-gnu ;;
+	  PA8*) echo hppa2.0-unknown-linux-gnu ;;
+	  *)    echo hppa-unknown-linux-gnu ;;
+	esac
+	exit ;;
+    ppc64:Linux:*:*)
+	echo powerpc64-unknown-linux-gnu
+	exit ;;
+    ppc:Linux:*:*)
+	echo powerpc-unknown-linux-gnu
+	exit ;;
+    s390:Linux:*:* | s390x:Linux:*:*)
+	echo ${UNAME_MACHINE}-ibm-linux
+	exit ;;
+    sh64*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    sh*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    sparc:Linux:*:* | sparc64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    tile*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    vax:Linux:*:*)
+	echo ${UNAME_MACHINE}-dec-linux-gnu
+	exit ;;
+    x86_64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    xtensa*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    i*86:DYNIX/ptx:4*:*)
+	# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+	# Earlier versions are messed up and put the nodename in both
+	# sysname and nodename.
+	echo i386-sequent-sysv4
+	exit ;;
+    i*86:UNIX_SV:4.2MP:2.*)
+	# Unixware is an offshoot of SVR4, but it has its own version
+	# number series starting with 2...
+	# I am not positive that other SVR4 systems won't match this,
+	# I just have to hope.  -- rms.
+	# Use sysv4.2uw... so that sysv4* matches it.
+	echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
+	exit ;;
+    i*86:OS/2:*:*)
+	# If we were able to find `uname', then EMX Unix compatibility
+	# is probably installed.
+	echo ${UNAME_MACHINE}-pc-os2-emx
+	exit ;;
+    i*86:XTS-300:*:STOP)
+	echo ${UNAME_MACHINE}-unknown-stop
+	exit ;;
+    i*86:atheos:*:*)
+	echo ${UNAME_MACHINE}-unknown-atheos
+	exit ;;
+    i*86:syllable:*:*)
+	echo ${UNAME_MACHINE}-pc-syllable
+	exit ;;
+    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
+	echo i386-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    i*86:*DOS:*:*)
+	echo ${UNAME_MACHINE}-pc-msdosdjgpp
+	exit ;;
+    i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
+	UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
+	if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+		echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
+	else
+		echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
+	fi
+	exit ;;
+    i*86:*:5:[678]*)
+	# UnixWare 7.x, OpenUNIX and OpenServer 6.
+	case `/bin/uname -X | grep "^Machine"` in
+	    *486*)	     UNAME_MACHINE=i486 ;;
+	    *Pentium)	     UNAME_MACHINE=i586 ;;
+	    *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+	esac
+	echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+	exit ;;
+    i*86:*:3.2:*)
+	if test -f /usr/options/cb.name; then
+		UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+		echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
+	elif /bin/uname -X 2>/dev/null >/dev/null ; then
+		UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+		(/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+		(/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+			&& UNAME_MACHINE=i586
+		(/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		(/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
+	else
+		echo ${UNAME_MACHINE}-pc-sysv32
+	fi
+	exit ;;
+    pc:*:*:*)
+	# Left here for compatibility:
+	# For DJGPP, uname -m always prints 'pc' and nothing about the
+	# processor, so we play it safe by assuming i586.
+	# Note: whatever this is, it MUST be the same as what config.sub
+	# prints for the "djgpp" host, or else GDB configury will decide that
+	# this is a cross-build.
+	echo i586-pc-msdosdjgpp
+	exit ;;
+    Intel:Mach:3*:*)
+	echo i386-pc-mach3
+	exit ;;
+    paragon:*:*:*)
+	echo i860-intel-osf1
+	exit ;;
+    i860:*:4.*:*) # i860-SVR4
+	if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+	  echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
+	else # Add other i860-SVR4 vendors below as they are discovered.
+	  echo i860-unknown-sysv${UNAME_RELEASE}  # Unknown i860-SVR4
+	fi
+	exit ;;
+    mini*:CTIX:SYS*5:*)
+	# "miniframe"
+	echo m68010-convergent-sysv
+	exit ;;
+    mc68k:UNIX:SYSTEM5:3.51m)
+	echo m68k-convergent-sysv
+	exit ;;
+    M680?0:D-NIX:5.3:*)
+	echo m68k-diab-dnix
+	exit ;;
+    M68*:*:R3V[5678]*:*)
+	test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+	OS_REL=''
+	test -r /etc/.relid \
+	&& OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	  && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4; exit; } ;;
+    NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+	OS_REL='.3'
+	test -r /etc/.relid \
+	    && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	    && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	    && { echo i586-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
+	    && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+	echo m68k-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    mc68030:UNIX_System_V:4.*:*)
+	echo m68k-atari-sysv4
+	exit ;;
+    TSUNAMI:LynxOS:2.*:*)
+	echo sparc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    rs6000:LynxOS:2.*:*)
+	echo rs6000-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
+	echo powerpc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    SM[BE]S:UNIX_SV:*:*)
+	echo mips-dde-sysv${UNAME_RELEASE}
+	exit ;;
+    RM*:ReliantUNIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    RM*:SINIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    *:SINIX-*:*:*)
+	if uname -p 2>/dev/null >/dev/null ; then
+		UNAME_MACHINE=`(uname -p) 2>/dev/null`
+		echo ${UNAME_MACHINE}-sni-sysv4
+	else
+		echo ns32k-sni-sysv
+	fi
+	exit ;;
+    PENTIUM:*:4.0*:*)	# Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+			# says <Richard.M.Bartel@ccMail.Census.GOV>
+	echo i586-unisys-sysv4
+	exit ;;
+    *:UNIX_System_V:4*:FTX*)
+	# From Gerald Hewes <hewes@openmarket.com>.
+	# How about differentiating between stratus architectures? -djm
+	echo hppa1.1-stratus-sysv4
+	exit ;;
+    *:*:*:FTX*)
+	# From seanf@swdc.stratus.com.
+	echo i860-stratus-sysv4
+	exit ;;
+    i*86:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	echo ${UNAME_MACHINE}-stratus-vos
+	exit ;;
+    *:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	echo hppa1.1-stratus-vos
+	exit ;;
+    mc68*:A/UX:*:*)
+	echo m68k-apple-aux${UNAME_RELEASE}
+	exit ;;
+    news*:NEWS-OS:6*:*)
+	echo mips-sony-newsos6
+	exit ;;
+    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+	if [ -d /usr/nec ]; then
+		echo mips-nec-sysv${UNAME_RELEASE}
+	else
+		echo mips-unknown-sysv${UNAME_RELEASE}
+	fi
+	exit ;;
+    BeBox:BeOS:*:*)	# BeOS running on hardware made by Be, PPC only.
+	echo powerpc-be-beos
+	exit ;;
+    BeMac:BeOS:*:*)	# BeOS running on Mac or Mac clone, PPC only.
+	echo powerpc-apple-beos
+	exit ;;
+    BePC:BeOS:*:*)	# BeOS running on Intel PC compatible.
+	echo i586-pc-beos
+	exit ;;
+    BePC:Haiku:*:*)	# Haiku running on Intel PC compatible.
+	echo i586-pc-haiku
+	exit ;;
+    SX-4:SUPER-UX:*:*)
+	echo sx4-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-5:SUPER-UX:*:*)
+	echo sx5-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-6:SUPER-UX:*:*)
+	echo sx6-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-7:SUPER-UX:*:*)
+	echo sx7-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-8:SUPER-UX:*:*)
+	echo sx8-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-8R:SUPER-UX:*:*)
+	echo sx8r-nec-superux${UNAME_RELEASE}
+	exit ;;
+    Power*:Rhapsody:*:*)
+	echo powerpc-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Rhapsody:*:*)
+	echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Darwin:*:*)
+	UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+	case $UNAME_PROCESSOR in
+	    i386)
+		eval $set_cc_for_build
+		if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+		  if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+		      (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+		      grep IS_64BIT_ARCH >/dev/null
+		  then
+		      UNAME_PROCESSOR="x86_64"
+		  fi
+		fi ;;
+	    unknown) UNAME_PROCESSOR=powerpc ;;
+	esac
+	echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
+	exit ;;
+    *:procnto*:*:* | *:QNX:[0123456789]*:*)
+	UNAME_PROCESSOR=`uname -p`
+	if test "$UNAME_PROCESSOR" = "x86"; then
+		UNAME_PROCESSOR=i386
+		UNAME_MACHINE=pc
+	fi
+	echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
+	exit ;;
+    *:QNX:*:4*)
+	echo i386-pc-qnx
+	exit ;;
+    NEO-?:NONSTOP_KERNEL:*:*)
+	echo neo-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    NSE-?:NONSTOP_KERNEL:*:*)
+	echo nse-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    NSR-?:NONSTOP_KERNEL:*:*)
+	echo nsr-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    *:NonStop-UX:*:*)
+	echo mips-compaq-nonstopux
+	exit ;;
+    BS2000:POSIX*:*:*)
+	echo bs2000-siemens-sysv
+	exit ;;
+    DS/*:UNIX_System_V:*:*)
+	echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
+	exit ;;
+    *:Plan9:*:*)
+	# "uname -m" is not consistent, so use $cputype instead. 386
+	# is converted to i386 for consistency with other x86
+	# operating systems.
+	if test "$cputype" = "386"; then
+	    UNAME_MACHINE=i386
+	else
+	    UNAME_MACHINE="$cputype"
+	fi
+	echo ${UNAME_MACHINE}-unknown-plan9
+	exit ;;
+    *:TOPS-10:*:*)
+	echo pdp10-unknown-tops10
+	exit ;;
+    *:TENEX:*:*)
+	echo pdp10-unknown-tenex
+	exit ;;
+    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+	echo pdp10-dec-tops20
+	exit ;;
+    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+	echo pdp10-xkl-tops20
+	exit ;;
+    *:TOPS-20:*:*)
+	echo pdp10-unknown-tops20
+	exit ;;
+    *:ITS:*:*)
+	echo pdp10-unknown-its
+	exit ;;
+    SEI:*:*:SEIUX)
+	echo mips-sei-seiux${UNAME_RELEASE}
+	exit ;;
+    *:DragonFly:*:*)
+	echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+	exit ;;
+    *:*VMS:*:*)
+	UNAME_MACHINE=`(uname -p) 2>/dev/null`
+	case "${UNAME_MACHINE}" in
+	    A*) echo alpha-dec-vms ; exit ;;
+	    I*) echo ia64-dec-vms ; exit ;;
+	    V*) echo vax-dec-vms ; exit ;;
+	esac ;;
+    *:XENIX:*:SysV)
+	echo i386-pc-xenix
+	exit ;;
+    i*86:skyos:*:*)
+	echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
+	exit ;;
+    i*86:rdos:*:*)
+	echo ${UNAME_MACHINE}-pc-rdos
+	exit ;;
+    i*86:AROS:*:*)
+	echo ${UNAME_MACHINE}-pc-aros
+	exit ;;
+    x86_64:VMkernel:*:*)
+	echo ${UNAME_MACHINE}-unknown-esx
+	exit ;;
+esac
+
+#echo '(No uname command or uname output not recognized.)' 1>&2
+#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
+
+eval $set_cc_for_build
+cat >$dummy.c <<EOF
+#ifdef _SEQUENT_
+# include <sys/types.h>
+# include <sys/utsname.h>
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
+     I don't know....  */
+  printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+  printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+	"4"
+#else
+	""
+#endif
+	); exit (0);
+#endif
+#endif
+
+#if defined (__arm) && defined (__acorn) && defined (__unix)
+  printf ("arm-acorn-riscix\n"); exit (0);
+#endif
+
+#if defined (hp300) && !defined (hpux)
+  printf ("m68k-hp-bsd\n"); exit (0);
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+  int version;
+  version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+  if (version < 4)
+    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+  else
+    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+  exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+  printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+  printf ("ns32k-encore-mach\n"); exit (0);
+#else
+  printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+  printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+  printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+  printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+    struct utsname un;
+
+    uname(&un);
+
+    if (strncmp(un.version, "V2", 2) == 0) {
+	printf ("i386-sequent-ptx2\n"); exit (0);
+    }
+    if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+	printf ("i386-sequent-ptx1\n"); exit (0);
+    }
+    printf ("i386-sequent-ptx\n"); exit (0);
+
+#endif
+
+#if defined (vax)
+# if !defined (ultrix)
+#  include <sys/param.h>
+#  if defined (BSD)
+#   if BSD == 43
+      printf ("vax-dec-bsd4.3\n"); exit (0);
+#   else
+#    if BSD == 199006
+      printf ("vax-dec-bsd4.3reno\n"); exit (0);
+#    else
+      printf ("vax-dec-bsd\n"); exit (0);
+#    endif
+#   endif
+#  else
+    printf ("vax-dec-bsd\n"); exit (0);
+#  endif
+# else
+    printf ("vax-dec-ultrix\n"); exit (0);
+# endif
+#endif
+
+#if defined (alliant) && defined (i860)
+  printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+  exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
+	{ echo "$SYSTEM_NAME"; exit; }
+
+# Apollos put the system type in the environment.
+
+test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
+
+# Convex versions that predate uname can use getsysinfo(1)
+
+if [ -x /usr/convex/getsysinfo ]
+then
+    case `getsysinfo -f cpu_type` in
+    c1*)
+	echo c1-convex-bsd
+	exit ;;
+    c2*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    c34*)
+	echo c34-convex-bsd
+	exit ;;
+    c38*)
+	echo c38-convex-bsd
+	exit ;;
+    c4*)
+	echo c4-convex-bsd
+	exit ;;
+    esac
+fi
+
+cat >&2 <<EOF
+$0: unable to guess system type
+
+This script, last modified $timestamp, has failed to recognize
+the operating system you are using. It is advised that you
+download the most up-to-date version of the config scripts from
+
+  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
+and
+  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+If the version you run ($0) is already up to date, please
+send the following data and any information you think might be
+pertinent to <config-patches@gnu.org> in order to provide the needed
+information to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo               = `(hostinfo) 2>/dev/null`
+/bin/universe          = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch              = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = ${UNAME_MACHINE}
+UNAME_RELEASE = ${UNAME_RELEASE}
+UNAME_SYSTEM  = ${UNAME_SYSTEM}
+UNAME_VERSION = ${UNAME_VERSION}
+EOF
+
+exit 1
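+
+# Illustrative sketch of how the guessed triplet is typically consumed: a
+# configure script captures the output of this script and canonicalizes it
+# with the companion config.sub script (variable names here are arbitrary):
+#
+#   build_alias=`./config.guess` || exit 1
+#   build_triplet=`./config.sub "$build_alias"`
+#   echo "configuring for build system type: $build_triplet"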
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/config.sub b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/config.sub
new file mode 100755
index 0000000..c894da4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/config.sub
@@ -0,0 +1,1773 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+#   2011, 2012 Free Software Foundation, Inc.
+
+timestamp='2012-02-10'
+
+# This file is (in principle) common to ALL GNU software.
+# The presence of a machine in this file suggests that SOME GNU software
+# can handle that machine.  It does not imply ALL GNU software can.
+#
+# This file is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Please send patches to <config-patches@gnu.org>.  Submit a context
+# diff and a properly formatted GNU ChangeLog entry.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support.  The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+#	CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+#	CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
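+#
+# As an illustrative sketch (exact results depend on the tables below),
+# common aliases are canonicalized roughly as follows:
+#
+#   ./config.sub amd64-linux   =>  x86_64-pc-linux-gnu
+#   ./config.sub sun4          =>  sparc-sun-sunos4.1.1
+#   ./config.sub decstation    =>  mips-dec-ultrix4.2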
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS
+       $0 [OPTION] ALIAS
+
+Canonicalize a configuration name.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
+Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help"
+       exit 1 ;;
+
+    *local*)
+       # First pass through any local machine types.
+       echo $1
+       exit ;;
+
+    * )
+       break ;;
+  esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+    exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+    exit 1;;
+esac
+
+# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
+# Here we must recognize all the valid KERNEL-OS combinations.
+maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
+case $maybe_os in
+  nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \
+  linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \
+  knetbsd*-gnu* | netbsd*-gnu* | \
+  kopensolaris*-gnu* | \
+  storm-chaos* | os2-emx* | rtmk-nova*)
+    os=-$maybe_os
+    basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
+    ;;
+  android-linux)
+    os=-linux-android
+    basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown
+    ;;
+  *)
+    basic_machine=`echo $1 | sed 's/-[^-]*$//'`
+    if [ $basic_machine != $1 ]
+    then os=`echo $1 | sed 's/.*-/-/'`
+    else os=; fi
+    ;;
+esac
+
+### Let's recognize common machines as not being operating systems so
+### that things like config.sub decstation-3100 work.  We also
+### recognize some manufacturers as not being operating systems, so we
+### can provide default operating systems below.
+case $os in
+	-sun*os*)
+		# Prevent following clause from handling this invalid input.
+		;;
+	-dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
+	-att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
+	-unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
+	-convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
+	-c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
+	-harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
+	-apple | -axis | -knuth | -cray | -microblaze)
+		os=
+		basic_machine=$1
+		;;
+	-bluegene*)
+		os=-cnk
+		;;
+	-sim | -cisco | -oki | -wec | -winbond)
+		os=
+		basic_machine=$1
+		;;
+	-scout)
+		;;
+	-wrs)
+		os=-vxworks
+		basic_machine=$1
+		;;
+	-chorusos*)
+		os=-chorusos
+		basic_machine=$1
+		;;
+	-chorusrdb)
+		os=-chorusrdb
+		basic_machine=$1
+		;;
+	-hiux*)
+		os=-hiuxwe2
+		;;
+	-sco6)
+		os=-sco5v6
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5)
+		os=-sco3.2v5
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco4)
+		os=-sco3.2v4
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2.[4-9]*)
+		os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2v[4-9]*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5v6*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco*)
+		os=-sco3.2v2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-udk*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-isc)
+		os=-isc2.2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-clix*)
+		basic_machine=clipper-intergraph
+		;;
+	-isc*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-lynx*)
+		os=-lynxos
+		;;
+	-ptx*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
+		;;
+	-windowsnt*)
+		os=`echo $os | sed -e 's/windowsnt/winnt/'`
+		;;
+	-psos*)
+		os=-psos
+		;;
+	-mint | -mint[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+esac
+
+# Decode aliases for certain CPU-COMPANY combinations.
+case $basic_machine in
+	# Recognize the basic CPU types without company name.
+	# Some are omitted here because they have special meanings below.
+	1750a | 580 \
+	| a29k \
+	| aarch64 | aarch64_be \
+	| alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
+	| alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
+	| am33_2.0 \
+	| arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
+        | be32 | be64 \
+	| bfin \
+	| c4x | clipper \
+	| d10v | d30v | dlx | dsp16xx \
+	| epiphany \
+	| fido | fr30 | frv \
+	| h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+	| hexagon \
+	| i370 | i860 | i960 | ia64 \
+	| ip2k | iq2000 \
+	| le32 | le64 \
+	| lm32 \
+	| m32c | m32r | m32rle | m68000 | m68k | m88k \
+	| maxq | mb | microblaze | mcore | mep | metag \
+	| mips | mipsbe | mipseb | mipsel | mipsle \
+	| mips16 \
+	| mips64 | mips64el \
+	| mips64octeon | mips64octeonel \
+	| mips64orion | mips64orionel \
+	| mips64r5900 | mips64r5900el \
+	| mips64vr | mips64vrel \
+	| mips64vr4100 | mips64vr4100el \
+	| mips64vr4300 | mips64vr4300el \
+	| mips64vr5000 | mips64vr5000el \
+	| mips64vr5900 | mips64vr5900el \
+	| mipsisa32 | mipsisa32el \
+	| mipsisa32r2 | mipsisa32r2el \
+	| mipsisa64 | mipsisa64el \
+	| mipsisa64r2 | mipsisa64r2el \
+	| mipsisa64sb1 | mipsisa64sb1el \
+	| mipsisa64sr71k | mipsisa64sr71kel \
+	| mipstx39 | mipstx39el \
+	| mn10200 | mn10300 \
+	| moxie \
+	| mt \
+	| msp430 \
+	| nds32 | nds32le | nds32be \
+	| nios | nios2 \
+	| ns16k | ns32k \
+	| open8 \
+	| or32 \
+	| pdp10 | pdp11 | pj | pjl \
+	| powerpc | powerpc64 | powerpc64le | powerpcle \
+	| pyramid \
+	| rl78 | rx \
+	| score \
+	| sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
+	| sh64 | sh64le \
+	| sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
+	| sparcv8 | sparcv9 | sparcv9b | sparcv9v \
+	| spu \
+	| tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \
+	| ubicom32 \
+	| v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \
+	| we32k \
+	| x86 | xc16x | xstormy16 | xtensa \
+	| z8k | z80)
+		basic_machine=$basic_machine-unknown
+		;;
+	c54x)
+		basic_machine=tic54x-unknown
+		;;
+	c55x)
+		basic_machine=tic55x-unknown
+		;;
+	c6x)
+		basic_machine=tic6x-unknown
+		;;
+	m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip)
+		basic_machine=$basic_machine-unknown
+		os=-none
+		;;
+	m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
+		;;
+	ms1)
+		basic_machine=mt-unknown
+		;;
+
+	strongarm | thumb | xscale)
+		basic_machine=arm-unknown
+		;;
+	xgate)
+		basic_machine=$basic_machine-unknown
+		os=-none
+		;;
+	xscaleeb)
+		basic_machine=armeb-unknown
+		;;
+
+	xscaleel)
+		basic_machine=armel-unknown
+		;;
+
+	# We use `pc' rather than `unknown'
+	# because (1) that's what they normally are, and
+	# (2) the word "unknown" tends to confuse beginning users.
+	i*86 | x86_64)
+	  basic_machine=$basic_machine-pc
+	  ;;
+	# Object if more than one company name word.
+	*-*-*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+	# Recognize the basic CPU types with company name.
+	580-* \
+	| a29k-* \
+	| aarch64-* | aarch64_be-* \
+	| alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
+	| alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
+	| alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
+	| arm-*  | armbe-* | armle-* | armeb-* | armv*-* \
+	| avr-* | avr32-* \
+	| be32-* | be64-* \
+	| bfin-* | bs2000-* \
+	| c[123]* | c30-* | [cjt]90-* | c4x-* \
+	| clipper-* | craynv-* | cydra-* \
+	| d10v-* | d30v-* | dlx-* \
+	| elxsi-* \
+	| f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \
+	| h8300-* | h8500-* \
+	| hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
+	| hexagon-* \
+	| i*86-* | i860-* | i960-* | ia64-* \
+	| ip2k-* | iq2000-* \
+	| le32-* | le64-* \
+	| lm32-* \
+	| m32c-* | m32r-* | m32rle-* \
+	| m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
+	| m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \
+	| mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
+	| mips16-* \
+	| mips64-* | mips64el-* \
+	| mips64octeon-* | mips64octeonel-* \
+	| mips64orion-* | mips64orionel-* \
+	| mips64r5900-* | mips64r5900el-* \
+	| mips64vr-* | mips64vrel-* \
+	| mips64vr4100-* | mips64vr4100el-* \
+	| mips64vr4300-* | mips64vr4300el-* \
+	| mips64vr5000-* | mips64vr5000el-* \
+	| mips64vr5900-* | mips64vr5900el-* \
+	| mipsisa32-* | mipsisa32el-* \
+	| mipsisa32r2-* | mipsisa32r2el-* \
+	| mipsisa64-* | mipsisa64el-* \
+	| mipsisa64r2-* | mipsisa64r2el-* \
+	| mipsisa64sb1-* | mipsisa64sb1el-* \
+	| mipsisa64sr71k-* | mipsisa64sr71kel-* \
+	| mipstx39-* | mipstx39el-* \
+	| mmix-* \
+	| mt-* \
+	| msp430-* \
+	| nds32-* | nds32le-* | nds32be-* \
+	| nios-* | nios2-* \
+	| none-* | np1-* | ns16k-* | ns32k-* \
+	| open8-* \
+	| orion-* \
+	| pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
+	| powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \
+	| pyramid-* \
+	| rl78-* | romp-* | rs6000-* | rx-* \
+	| sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
+	| shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
+	| sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
+	| sparclite-* \
+	| sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \
+	| tahoe-* \
+	| tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
+	| tile*-* \
+	| tron-* \
+	| ubicom32-* \
+	| v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \
+	| vax-* \
+	| we32k-* \
+	| x86-* | x86_64-* | xc16x-* | xps100-* \
+	| xstormy16-* | xtensa*-* \
+	| ymp-* \
+	| z8k-* | z80-*)
+		;;
+	# Recognize the basic CPU types without company name, with glob match.
+	xtensa*)
+		basic_machine=$basic_machine-unknown
+		;;
+	# Recognize the various machine names and aliases which stand
+	# for a CPU type and a company and sometimes even an OS.
+	386bsd)
+		basic_machine=i386-unknown
+		os=-bsd
+		;;
+	3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+		basic_machine=m68000-att
+		;;
+	3b*)
+		basic_machine=we32k-att
+		;;
+	a29khif)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	abacus)
+		basic_machine=abacus-unknown
+		;;
+	adobe68k)
+		basic_machine=m68010-adobe
+		os=-scout
+		;;
+	alliant | fx80)
+		basic_machine=fx80-alliant
+		;;
+	altos | altos3068)
+		basic_machine=m68k-altos
+		;;
+	am29k)
+		basic_machine=a29k-none
+		os=-bsd
+		;;
+	amd64)
+		basic_machine=x86_64-pc
+		;;
+	amd64-*)
+		basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	amdahl)
+		basic_machine=580-amdahl
+		os=-sysv
+		;;
+	amiga | amiga-*)
+		basic_machine=m68k-unknown
+		;;
+	amigaos | amigados)
+		basic_machine=m68k-unknown
+		os=-amigaos
+		;;
+	amigaunix | amix)
+		basic_machine=m68k-unknown
+		os=-sysv4
+		;;
+	apollo68)
+		basic_machine=m68k-apollo
+		os=-sysv
+		;;
+	apollo68bsd)
+		basic_machine=m68k-apollo
+		os=-bsd
+		;;
+	aros)
+		basic_machine=i386-pc
+		os=-aros
+		;;
+	aux)
+		basic_machine=m68k-apple
+		os=-aux
+		;;
+	balance)
+		basic_machine=ns32k-sequent
+		os=-dynix
+		;;
+	blackfin)
+		basic_machine=bfin-unknown
+		os=-linux
+		;;
+	blackfin-*)
+		basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	bluegene*)
+		basic_machine=powerpc-ibm
+		os=-cnk
+		;;
+	c54x-*)
+		basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c55x-*)
+		basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c6x-*)
+		basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c90)
+		basic_machine=c90-cray
+		os=-unicos
+		;;
+	cegcc)
+		basic_machine=arm-unknown
+		os=-cegcc
+		;;
+	convex-c1)
+		basic_machine=c1-convex
+		os=-bsd
+		;;
+	convex-c2)
+		basic_machine=c2-convex
+		os=-bsd
+		;;
+	convex-c32)
+		basic_machine=c32-convex
+		os=-bsd
+		;;
+	convex-c34)
+		basic_machine=c34-convex
+		os=-bsd
+		;;
+	convex-c38)
+		basic_machine=c38-convex
+		os=-bsd
+		;;
+	cray | j90)
+		basic_machine=j90-cray
+		os=-unicos
+		;;
+	craynv)
+		basic_machine=craynv-cray
+		os=-unicosmp
+		;;
+	cr16 | cr16-*)
+		basic_machine=cr16-unknown
+		os=-elf
+		;;
+	crds | unos)
+		basic_machine=m68k-crds
+		;;
+	crisv32 | crisv32-* | etraxfs*)
+		basic_machine=crisv32-axis
+		;;
+	cris | cris-* | etrax*)
+		basic_machine=cris-axis
+		;;
+	crx)
+		basic_machine=crx-unknown
+		os=-elf
+		;;
+	da30 | da30-*)
+		basic_machine=m68k-da30
+		;;
+	decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
+		basic_machine=mips-dec
+		;;
+	decsystem10* | dec10*)
+		basic_machine=pdp10-dec
+		os=-tops10
+		;;
+	decsystem20* | dec20*)
+		basic_machine=pdp10-dec
+		os=-tops20
+		;;
+	delta | 3300 | motorola-3300 | motorola-delta \
+	      | 3300-motorola | delta-motorola)
+		basic_machine=m68k-motorola
+		;;
+	delta88)
+		basic_machine=m88k-motorola
+		os=-sysv3
+		;;
+	dicos)
+		basic_machine=i686-pc
+		os=-dicos
+		;;
+	djgpp)
+		basic_machine=i586-pc
+		os=-msdosdjgpp
+		;;
+	dpx20 | dpx20-*)
+		basic_machine=rs6000-bull
+		os=-bosx
+		;;
+	dpx2* | dpx2*-bull)
+		basic_machine=m68k-bull
+		os=-sysv3
+		;;
+	ebmon29k)
+		basic_machine=a29k-amd
+		os=-ebmon
+		;;
+	elxsi)
+		basic_machine=elxsi-elxsi
+		os=-bsd
+		;;
+	encore | umax | mmax)
+		basic_machine=ns32k-encore
+		;;
+	es1800 | OSE68k | ose68k | ose | OSE)
+		basic_machine=m68k-ericsson
+		os=-ose
+		;;
+	fx2800)
+		basic_machine=i860-alliant
+		;;
+	genix)
+		basic_machine=ns32k-ns
+		;;
+	gmicro)
+		basic_machine=tron-gmicro
+		os=-sysv
+		;;
+	go32)
+		basic_machine=i386-pc
+		os=-go32
+		;;
+	h3050r* | hiux*)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	h8300hms)
+		basic_machine=h8300-hitachi
+		os=-hms
+		;;
+	h8300xray)
+		basic_machine=h8300-hitachi
+		os=-xray
+		;;
+	h8500hms)
+		basic_machine=h8500-hitachi
+		os=-hms
+		;;
+	harris)
+		basic_machine=m88k-harris
+		os=-sysv3
+		;;
+	hp300-*)
+		basic_machine=m68k-hp
+		;;
+	hp300bsd)
+		basic_machine=m68k-hp
+		os=-bsd
+		;;
+	hp300hpux)
+		basic_machine=m68k-hp
+		os=-hpux
+		;;
+	hp3k9[0-9][0-9] | hp9[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k2[0-9][0-9] | hp9k31[0-9])
+		basic_machine=m68000-hp
+		;;
+	hp9k3[2-9][0-9])
+		basic_machine=m68k-hp
+		;;
+	hp9k6[0-9][0-9] | hp6[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k7[0-79][0-9] | hp7[0-79][0-9])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k78[0-9] | hp78[0-9])
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][13679] | hp8[0-9][13679])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][0-9] | hp8[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hppa-next)
+		os=-nextstep3
+		;;
+	hppaosf)
+		basic_machine=hppa1.1-hp
+		os=-osf
+		;;
+	hppro)
+		basic_machine=hppa1.1-hp
+		os=-proelf
+		;;
+	i370-ibm* | ibm*)
+		basic_machine=i370-ibm
+		;;
+	i*86v32)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv32
+		;;
+	i*86v4*)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv4
+		;;
+	i*86v)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv
+		;;
+	i*86sol2)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-solaris2
+		;;
+	i386mach)
+		basic_machine=i386-mach
+		os=-mach
+		;;
+	i386-vsta | vsta)
+		basic_machine=i386-unknown
+		os=-vsta
+		;;
+	iris | iris4d)
+		basic_machine=mips-sgi
+		case $os in
+		    -irix*)
+			;;
+		    *)
+			os=-irix4
+			;;
+		esac
+		;;
+	isi68 | isi)
+		basic_machine=m68k-isi
+		os=-sysv
+		;;
+	m68knommu)
+		basic_machine=m68k-unknown
+		os=-linux
+		;;
+	m68knommu-*)
+		basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	m88k-omron*)
+		basic_machine=m88k-omron
+		;;
+	magnum | m3230)
+		basic_machine=mips-mips
+		os=-sysv
+		;;
+	merlin)
+		basic_machine=ns32k-utek
+		os=-sysv
+		;;
+	microblaze)
+		basic_machine=microblaze-xilinx
+		;;
+	mingw32)
+		basic_machine=i386-pc
+		os=-mingw32
+		;;
+	mingw32ce)
+		basic_machine=arm-unknown
+		os=-mingw32ce
+		;;
+	miniframe)
+		basic_machine=m68000-convergent
+		;;
+	*mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+	mips3*-*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
+		;;
+	mips3*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
+		;;
+	monitor)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	morphos)
+		basic_machine=powerpc-unknown
+		os=-morphos
+		;;
+	msdos)
+		basic_machine=i386-pc
+		os=-msdos
+		;;
+	ms1-*)
+		basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'`
+		;;
+	msys)
+		basic_machine=i386-pc
+		os=-msys
+		;;
+	mvs)
+		basic_machine=i370-ibm
+		os=-mvs
+		;;
+	nacl)
+		basic_machine=le32-unknown
+		os=-nacl
+		;;
+	ncr3000)
+		basic_machine=i486-ncr
+		os=-sysv4
+		;;
+	netbsd386)
+		basic_machine=i386-unknown
+		os=-netbsd
+		;;
+	netwinder)
+		basic_machine=armv4l-rebel
+		os=-linux
+		;;
+	news | news700 | news800 | news900)
+		basic_machine=m68k-sony
+		os=-newsos
+		;;
+	news1000)
+		basic_machine=m68030-sony
+		os=-newsos
+		;;
+	news-3600 | risc-news)
+		basic_machine=mips-sony
+		os=-newsos
+		;;
+	necv70)
+		basic_machine=v70-nec
+		os=-sysv
+		;;
+	next | m*-next )
+		basic_machine=m68k-next
+		case $os in
+		    -nextstep* )
+			;;
+		    -ns2*)
+		      os=-nextstep2
+			;;
+		    *)
+		      os=-nextstep3
+			;;
+		esac
+		;;
+	nh3000)
+		basic_machine=m68k-harris
+		os=-cxux
+		;;
+	nh[45]000)
+		basic_machine=m88k-harris
+		os=-cxux
+		;;
+	nindy960)
+		basic_machine=i960-intel
+		os=-nindy
+		;;
+	mon960)
+		basic_machine=i960-intel
+		os=-mon960
+		;;
+	nonstopux)
+		basic_machine=mips-compaq
+		os=-nonstopux
+		;;
+	np1)
+		basic_machine=np1-gould
+		;;
+	neo-tandem)
+		basic_machine=neo-tandem
+		;;
+	nse-tandem)
+		basic_machine=nse-tandem
+		;;
+	nsr-tandem)
+		basic_machine=nsr-tandem
+		;;
+	op50n-* | op60c-*)
+		basic_machine=hppa1.1-oki
+		os=-proelf
+		;;
+	openrisc | openrisc-*)
+		basic_machine=or32-unknown
+		;;
+	os400)
+		basic_machine=powerpc-ibm
+		os=-os400
+		;;
+	OSE68000 | ose68000)
+		basic_machine=m68000-ericsson
+		os=-ose
+		;;
+	os68k)
+		basic_machine=m68k-none
+		os=-os68k
+		;;
+	pa-hitachi)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	paragon)
+		basic_machine=i860-intel
+		os=-osf
+		;;
+	parisc)
+		basic_machine=hppa-unknown
+		os=-linux
+		;;
+	parisc-*)
+		basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	pbd)
+		basic_machine=sparc-tti
+		;;
+	pbb)
+		basic_machine=m68k-tti
+		;;
+	pc532 | pc532-*)
+		basic_machine=ns32k-pc532
+		;;
+	pc98)
+		basic_machine=i386-pc
+		;;
+	pc98-*)
+		basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium | p5 | k5 | k6 | nexgen | viac3)
+		basic_machine=i586-pc
+		;;
+	pentiumpro | p6 | 6x86 | athlon | athlon_*)
+		basic_machine=i686-pc
+		;;
+	pentiumii | pentium2 | pentiumiii | pentium3)
+		basic_machine=i686-pc
+		;;
+	pentium4)
+		basic_machine=i786-pc
+		;;
+	pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+		basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumpro-* | p6-* | 6x86-* | athlon-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium4-*)
+		basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pn)
+		basic_machine=pn-gould
+		;;
+	power)	basic_machine=power-ibm
+		;;
+	ppc | ppcbe)	basic_machine=powerpc-unknown
+		;;
+	ppc-* | ppcbe-*)
+		basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppcle | powerpclittle | ppc-le | powerpc-little)
+		basic_machine=powerpcle-unknown
+		;;
+	ppcle-* | powerpclittle-*)
+		basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64)	basic_machine=powerpc64-unknown
+		;;
+	ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64le | powerpc64little | ppc64-le | powerpc64-little)
+		basic_machine=powerpc64le-unknown
+		;;
+	ppc64le-* | powerpc64little-*)
+		basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ps2)
+		basic_machine=i386-ibm
+		;;
+	pw32)
+		basic_machine=i586-unknown
+		os=-pw32
+		;;
+	rdos)
+		basic_machine=i386-pc
+		os=-rdos
+		;;
+	rom68k)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	rm[46]00)
+		basic_machine=mips-siemens
+		;;
+	rtpc | rtpc-*)
+		basic_machine=romp-ibm
+		;;
+	s390 | s390-*)
+		basic_machine=s390-ibm
+		;;
+	s390x | s390x-*)
+		basic_machine=s390x-ibm
+		;;
+	sa29200)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	sb1)
+		basic_machine=mipsisa64sb1-unknown
+		;;
+	sb1el)
+		basic_machine=mipsisa64sb1el-unknown
+		;;
+	sde)
+		basic_machine=mipsisa32-sde
+		os=-elf
+		;;
+	sei)
+		basic_machine=mips-sei
+		os=-seiux
+		;;
+	sequent)
+		basic_machine=i386-sequent
+		;;
+	sh)
+		basic_machine=sh-hitachi
+		os=-hms
+		;;
+	sh5el)
+		basic_machine=sh5le-unknown
+		;;
+	sh64)
+		basic_machine=sh64-unknown
+		;;
+	sparclite-wrs | simso-wrs)
+		basic_machine=sparclite-wrs
+		os=-vxworks
+		;;
+	sps7)
+		basic_machine=m68k-bull
+		os=-sysv2
+		;;
+	spur)
+		basic_machine=spur-unknown
+		;;
+	st2000)
+		basic_machine=m68k-tandem
+		;;
+	stratus)
+		basic_machine=i860-stratus
+		os=-sysv4
+		;;
+	strongarm-* | thumb-*)
+		basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	sun2)
+		basic_machine=m68000-sun
+		;;
+	sun2os3)
+		basic_machine=m68000-sun
+		os=-sunos3
+		;;
+	sun2os4)
+		basic_machine=m68000-sun
+		os=-sunos4
+		;;
+	sun3os3)
+		basic_machine=m68k-sun
+		os=-sunos3
+		;;
+	sun3os4)
+		basic_machine=m68k-sun
+		os=-sunos4
+		;;
+	sun4os3)
+		basic_machine=sparc-sun
+		os=-sunos3
+		;;
+	sun4os4)
+		basic_machine=sparc-sun
+		os=-sunos4
+		;;
+	sun4sol2)
+		basic_machine=sparc-sun
+		os=-solaris2
+		;;
+	sun3 | sun3-*)
+		basic_machine=m68k-sun
+		;;
+	sun4)
+		basic_machine=sparc-sun
+		;;
+	sun386 | sun386i | roadrunner)
+		basic_machine=i386-sun
+		;;
+	sv1)
+		basic_machine=sv1-cray
+		os=-unicos
+		;;
+	symmetry)
+		basic_machine=i386-sequent
+		os=-dynix
+		;;
+	t3e)
+		basic_machine=alphaev5-cray
+		os=-unicos
+		;;
+	t90)
+		basic_machine=t90-cray
+		os=-unicos
+		;;
+	tile*)
+		basic_machine=$basic_machine-unknown
+		os=-linux-gnu
+		;;
+	tx39)
+		basic_machine=mipstx39-unknown
+		;;
+	tx39el)
+		basic_machine=mipstx39el-unknown
+		;;
+	toad1)
+		basic_machine=pdp10-xkl
+		os=-tops20
+		;;
+	tower | tower-32)
+		basic_machine=m68k-ncr
+		;;
+	tpf)
+		basic_machine=s390x-ibm
+		os=-tpf
+		;;
+	udi29k)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	ultra3)
+		basic_machine=a29k-nyu
+		os=-sym1
+		;;
+	v810 | necv810)
+		basic_machine=v810-nec
+		os=-none
+		;;
+	vaxv)
+		basic_machine=vax-dec
+		os=-sysv
+		;;
+	vms)
+		basic_machine=vax-dec
+		os=-vms
+		;;
+	vpp*|vx|vx-*)
+		basic_machine=f301-fujitsu
+		;;
+	vxworks960)
+		basic_machine=i960-wrs
+		os=-vxworks
+		;;
+	vxworks68)
+		basic_machine=m68k-wrs
+		os=-vxworks
+		;;
+	vxworks29k)
+		basic_machine=a29k-wrs
+		os=-vxworks
+		;;
+	w65*)
+		basic_machine=w65-wdc
+		os=-none
+		;;
+	w89k-*)
+		basic_machine=hppa1.1-winbond
+		os=-proelf
+		;;
+	xbox)
+		basic_machine=i686-pc
+		os=-mingw32
+		;;
+	xps | xps100)
+		basic_machine=xps100-honeywell
+		;;
+	xscale-* | xscalee[bl]-*)
+		basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'`
+		;;
+	ymp)
+		basic_machine=ymp-cray
+		os=-unicos
+		;;
+	z8k-*-coff)
+		basic_machine=z8k-unknown
+		os=-sim
+		;;
+	z80-*-coff)
+		basic_machine=z80-unknown
+		os=-sim
+		;;
+	none)
+		basic_machine=none-none
+		os=-none
+		;;
+
+# Here we handle the default manufacturer of certain CPU types.  It is in
+# some cases the only manufacturer, in others, it is the most popular.
+	w89k)
+		basic_machine=hppa1.1-winbond
+		;;
+	op50n)
+		basic_machine=hppa1.1-oki
+		;;
+	op60c)
+		basic_machine=hppa1.1-oki
+		;;
+	romp)
+		basic_machine=romp-ibm
+		;;
+	mmix)
+		basic_machine=mmix-knuth
+		;;
+	rs6000)
+		basic_machine=rs6000-ibm
+		;;
+	vax)
+		basic_machine=vax-dec
+		;;
+	pdp10)
+		# there are many clones, so DEC is not a safe bet
+		basic_machine=pdp10-unknown
+		;;
+	pdp11)
+		basic_machine=pdp11-dec
+		;;
+	we32k)
+		basic_machine=we32k-att
+		;;
+	sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele)
+		basic_machine=sh-unknown
+		;;
+	sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v)
+		basic_machine=sparc-sun
+		;;
+	cydra)
+		basic_machine=cydra-cydrome
+		;;
+	orion)
+		basic_machine=orion-highlevel
+		;;
+	orion105)
+		basic_machine=clipper-highlevel
+		;;
+	mac | mpw | mac-mpw)
+		basic_machine=m68k-apple
+		;;
+	pmac | pmac-mpw)
+		basic_machine=powerpc-apple
+		;;
+	*-unknown)
+		# Make sure to match an already-canonicalized machine name.
+		;;
+	*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $basic_machine in
+	*-digital*)
+		basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
+		;;
+	*-commodore*)
+		basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
+		;;
+	*)
+		;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if [ x"$os" != x"" ]
+then
+case $os in
+	# First match some system type aliases
+	# that might get confused with valid system types.
+	# -solaris* is a basic system type, with this one exception.
+	-auroraux)
+		os=-auroraux
+		;;
+	-solaris1 | -solaris1.*)
+		os=`echo $os | sed -e 's|solaris1|sunos4|'`
+		;;
+	-solaris)
+		os=-solaris2
+		;;
+	-svr4*)
+		os=-sysv4
+		;;
+	-unixware*)
+		os=-sysv4.2uw
+		;;
+	-gnu/linux*)
+		os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
+		;;
+	# First accept the basic system types.
+	# The portable systems come first.
+	# Each alternative MUST END IN A *, to match a version number.
+	# -sysv* is not here because it comes later, after sysvr4.
+	-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
+	      | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\
+	      | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \
+	      | -sym* | -kopensolaris* \
+	      | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
+	      | -aos* | -aros* \
+	      | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
+	      | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
+	      | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \
+	      | -openbsd* | -solidbsd* \
+	      | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
+	      | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
+	      | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
+	      | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
+	      | -chorusos* | -chorusrdb* | -cegcc* \
+	      | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
+	      | -mingw32* | -linux-gnu* | -linux-android* \
+	      | -linux-newlib* | -linux-uclibc* \
+	      | -uxpv* | -beos* | -mpeix* | -udk* \
+	      | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
+	      | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
+	      | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
+	      | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
+	      | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
+	      | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
+	      | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*)
+	# Remember, each alternative MUST END IN *, to match a version number.
+		;;
+	-qnx*)
+		case $basic_machine in
+		    x86-* | i*86-*)
+			;;
+		    *)
+			os=-nto$os
+			;;
+		esac
+		;;
+	-nto-qnx*)
+		;;
+	-nto*)
+		os=`echo $os | sed -e 's|nto|nto-qnx|'`
+		;;
+	-sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
+	      | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
+	      | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
+		;;
+	-mac*)
+		os=`echo $os | sed -e 's|mac|macos|'`
+		;;
+	-linux-dietlibc)
+		os=-linux-dietlibc
+		;;
+	-linux*)
+		os=`echo $os | sed -e 's|linux|linux-gnu|'`
+		;;
+	-sunos5*)
+		os=`echo $os | sed -e 's|sunos5|solaris2|'`
+		;;
+	-sunos6*)
+		os=`echo $os | sed -e 's|sunos6|solaris3|'`
+		;;
+	-opened*)
+		os=-openedition
+		;;
+	-os400*)
+		os=-os400
+		;;
+	-wince*)
+		os=-wince
+		;;
+	-osfrose*)
+		os=-osfrose
+		;;
+	-osf*)
+		os=-osf
+		;;
+	-utek*)
+		os=-bsd
+		;;
+	-dynix*)
+		os=-bsd
+		;;
+	-acis*)
+		os=-aos
+		;;
+	-atheos*)
+		os=-atheos
+		;;
+	-syllable*)
+		os=-syllable
+		;;
+	-386bsd)
+		os=-bsd
+		;;
+	-ctix* | -uts*)
+		os=-sysv
+		;;
+	-nova*)
+		os=-rtmk-nova
+		;;
+	-ns2 )
+		os=-nextstep2
+		;;
+	-nsk*)
+		os=-nsk
+		;;
+	# Preserve the version number of sinix5.
+	-sinix5.*)
+		os=`echo $os | sed -e 's|sinix|sysv|'`
+		;;
+	-sinix*)
+		os=-sysv4
+		;;
+	-tpf*)
+		os=-tpf
+		;;
+	-triton*)
+		os=-sysv3
+		;;
+	-oss*)
+		os=-sysv3
+		;;
+	-svr4)
+		os=-sysv4
+		;;
+	-svr3)
+		os=-sysv3
+		;;
+	-sysvr4)
+		os=-sysv4
+		;;
+	# This must come after -sysvr4.
+	-sysv*)
+		;;
+	-ose*)
+		os=-ose
+		;;
+	-es1800*)
+		os=-ose
+		;;
+	-xenix)
+		os=-xenix
+		;;
+	-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+		os=-mint
+		;;
+	-aros*)
+		os=-aros
+		;;
+	-kaos*)
+		os=-kaos
+		;;
+	-zvmoe)
+		os=-zvmoe
+		;;
+	-dicos*)
+		os=-dicos
+		;;
+	-nacl*)
+		;;
+	-none)
+		;;
+	*)
+		# Get rid of the `-' at the beginning of $os.
+		os=`echo $os | sed 's/[^-]*-//'`
+		echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
+		exit 1
+		;;
+esac
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine, or, put another way, the most popular os provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system.  Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
+
+case $basic_machine in
+	score-*)
+		os=-elf
+		;;
+	spu-*)
+		os=-elf
+		;;
+	*-acorn)
+		os=-riscix1.2
+		;;
+	arm*-rebel)
+		os=-linux
+		;;
+	arm*-semi)
+		os=-aout
+		;;
+	c4x-* | tic4x-*)
+		os=-coff
+		;;
+	tic54x-*)
+		os=-coff
+		;;
+	tic55x-*)
+		os=-coff
+		;;
+	tic6x-*)
+		os=-coff
+		;;
+	# This must come before the *-dec entry.
+	pdp10-*)
+		os=-tops20
+		;;
+	pdp11-*)
+		os=-none
+		;;
+	*-dec | vax-*)
+		os=-ultrix4.2
+		;;
+	m68*-apollo)
+		os=-domain
+		;;
+	i386-sun)
+		os=-sunos4.0.2
+		;;
+	m68000-sun)
+		os=-sunos3
+		;;
+	m68*-cisco)
+		os=-aout
+		;;
+	mep-*)
+		os=-elf
+		;;
+	mips*-cisco)
+		os=-elf
+		;;
+	mips*-*)
+		os=-elf
+		;;
+	or32-*)
+		os=-coff
+		;;
+	*-tti)	# must be before sparc entry or we get the wrong os.
+		os=-sysv3
+		;;
+	sparc-* | *-sun)
+		os=-sunos4.1.1
+		;;
+	*-be)
+		os=-beos
+		;;
+	*-haiku)
+		os=-haiku
+		;;
+	*-ibm)
+		os=-aix
+		;;
+	*-knuth)
+		os=-mmixware
+		;;
+	*-wec)
+		os=-proelf
+		;;
+	*-winbond)
+		os=-proelf
+		;;
+	*-oki)
+		os=-proelf
+		;;
+	*-hp)
+		os=-hpux
+		;;
+	*-hitachi)
+		os=-hiux
+		;;
+	i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+		os=-sysv
+		;;
+	*-cbm)
+		os=-amigaos
+		;;
+	*-dg)
+		os=-dgux
+		;;
+	*-dolphin)
+		os=-sysv3
+		;;
+	m68k-ccur)
+		os=-rtu
+		;;
+	m88k-omron*)
+		os=-luna
+		;;
+	*-next )
+		os=-nextstep
+		;;
+	*-sequent)
+		os=-ptx
+		;;
+	*-crds)
+		os=-unos
+		;;
+	*-ns)
+		os=-genix
+		;;
+	i370-*)
+		os=-mvs
+		;;
+	*-next)
+		os=-nextstep3
+		;;
+	*-gould)
+		os=-sysv
+		;;
+	*-highlevel)
+		os=-bsd
+		;;
+	*-encore)
+		os=-bsd
+		;;
+	*-sgi)
+		os=-irix
+		;;
+	*-siemens)
+		os=-sysv4
+		;;
+	*-masscomp)
+		os=-rtu
+		;;
+	f30[01]-fujitsu | f700-fujitsu)
+		os=-uxpv
+		;;
+	*-rom68k)
+		os=-coff
+		;;
+	*-*bug)
+		os=-coff
+		;;
+	*-apple)
+		os=-macos
+		;;
+	*-atari*)
+		os=-mint
+		;;
+	*)
+		os=-none
+		;;
+esac
+fi
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer.  We pick the logical manufacturer.
+vendor=unknown
+case $basic_machine in
+	*-unknown)
+		case $os in
+			-riscix*)
+				vendor=acorn
+				;;
+			-sunos*)
+				vendor=sun
+				;;
+			-cnk*|-aix*)
+				vendor=ibm
+				;;
+			-beos*)
+				vendor=be
+				;;
+			-hpux*)
+				vendor=hp
+				;;
+			-mpeix*)
+				vendor=hp
+				;;
+			-hiux*)
+				vendor=hitachi
+				;;
+			-unos*)
+				vendor=crds
+				;;
+			-dgux*)
+				vendor=dg
+				;;
+			-luna*)
+				vendor=omron
+				;;
+			-genix*)
+				vendor=ns
+				;;
+			-mvs* | -opened*)
+				vendor=ibm
+				;;
+			-os400*)
+				vendor=ibm
+				;;
+			-ptx*)
+				vendor=sequent
+				;;
+			-tpf*)
+				vendor=ibm
+				;;
+			-vxsim* | -vxworks* | -windiss*)
+				vendor=wrs
+				;;
+			-aux*)
+				vendor=apple
+				;;
+			-hms*)
+				vendor=hitachi
+				;;
+			-mpw* | -macos*)
+				vendor=apple
+				;;
+			-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+				vendor=atari
+				;;
+			-vos*)
+				vendor=stratus
+				;;
+		esac
+		basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
+		;;
+esac
+
+echo $basic_machine$os
+exit
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/depcomp b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/depcomp
new file mode 100755
index 0000000..4ebd5b3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/depcomp
@@ -0,0 +1,791 @@
+#! /bin/sh
+# depcomp - compile a program generating dependencies as side-effects
+
+scriptversion=2013-05-30.07; # UTC
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
+
+case $1 in
+  '')
+    echo "$0: No command.  Try '$0 --help' for more information." 1>&2
+    exit 1;
+    ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: depcomp [--help] [--version] PROGRAM [ARGS]
+
+Run PROGRAMS ARGS to compile a file, generating dependencies
+as side-effects.
+
+Environment variables:
+  depmode     Dependency tracking mode.
+  source      Source file read by 'PROGRAMS ARGS'.
+  object      Object file output by 'PROGRAMS ARGS'.
+  DEPDIR      directory where to store dependencies.
+  depfile     Dependency file to output.
+  tmpdepfile  Temporary file to use when outputting dependencies.
+  libtool     Whether libtool is used (yes/no).
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "depcomp $scriptversion"
+    exit $?
+    ;;
+esac
+
+# Get the directory component of the given path, and save it in the
+# global variable '$dir'.  Note that this directory component will
+# be either empty or ending with a '/' character.  This is deliberate.
+set_dir_from ()
+{
+  case $1 in
+    */*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;;
+      *) dir=;;
+  esac
+}
+
+# Get the suffix-stripped basename of the given path, and save it in the
+# global variable '$base'.
+set_base_from ()
+{
+  base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'`
+}
+
+# If no dependency file was actually created by the compiler invocation,
+# we still have to create a dummy depfile, to avoid errors with the
+# Makefile "include basename.Plo" scheme.
+make_dummy_depfile ()
+{
+  echo "#dummy" > "$depfile"
+}
+
+# Factor out some common post-processing of the generated depfile.
+# Requires the auxiliary global variable '$tmpdepfile' to be set.
+aix_post_process_depfile ()
+{
+  # If the compiler actually managed to produce a dependency file,
+  # post-process it.
+  if test -f "$tmpdepfile"; then
+    # Each line is of the form 'foo.o: dependency.h'.
+    # Do two passes, one to just change these to
+    #   $object: dependency.h
+    # and one to simply output
+    #   dependency.h:
+    # which is needed to avoid the deleted-header problem.
+    { sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile"
+      sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile"
+    } > "$depfile"
+    rm -f "$tmpdepfile"
+  else
+    make_dummy_depfile
+  fi
+}
+
+# A tabulation character.
+tab='	'
+# A newline character.
+nl='
+'
+# Character ranges might be problematic outside the C locale.
+# These definitions help.
+upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ
+lower=abcdefghijklmnopqrstuvwxyz
+digits=0123456789
+alpha=${upper}${lower}
+
+if test -z "$depmode" || test -z "$source" || test -z "$object"; then
+  echo "depcomp: Variables source, object and depmode must be set" 1>&2
+  exit 1
+fi
+
+# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
+depfile=${depfile-`echo "$object" |
+  sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
+tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
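A quick worked example of the two derivations above, with DEPDIR left at its
default of '.deps' (paths are illustrative):

    object=sub/bar.o
    echo "$object" | sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'
    # prints: sub/.deps/bar.Po -- a sub/bar.obj object maps to the same name,
    # and the temporary file derived from it becomes sub/.deps/bar.TPo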
+
+rm -f "$tmpdepfile"
+
+# Avoid interferences from the environment.
+gccflag= dashmflag=
+
+# Some modes work just like other modes, but use different flags.  We
+# parameterize here, but still list the modes in the big case below,
+# to make depend.m4 easier to write.  Note that we *cannot* use a case
+# here, because this file can only contain one case statement.
+if test "$depmode" = hp; then
+  # HP compiler uses -M and no extra arg.
+  gccflag=-M
+  depmode=gcc
+fi
+
+if test "$depmode" = dashXmstdout; then
+  # This is just like dashmstdout with a different argument.
+  dashmflag=-xM
+  depmode=dashmstdout
+fi
+
+cygpath_u="cygpath -u -f -"
+if test "$depmode" = msvcmsys; then
+  # This is just like msvisualcpp but w/o cygpath translation.
+  # Just convert the backslash-escaped backslashes to single forward
+  # slashes to satisfy depend.m4
+  cygpath_u='sed s,\\\\,/,g'
+  depmode=msvisualcpp
+fi
+
+if test "$depmode" = msvc7msys; then
+  # This is just like msvc7 but w/o cygpath translation.
+  # Just convert the backslash-escaped backslashes to single forward
+  # slashes to satisfy depend.m4
+  cygpath_u='sed s,\\\\,/,g'
+  depmode=msvc7
+fi
+
+if test "$depmode" = xlc; then
+  # IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information.
+  gccflag=-qmakedep=gcc,-MF
+  depmode=gcc
+fi
+
+case "$depmode" in
+gcc3)
+## gcc 3 implements dependency tracking that does exactly what
+## we want.  Yay!  Note: for some reason libtool 1.4 doesn't like
+## it if -MD -MP comes after the -MF stuff.  Hmm.
+## Unfortunately, FreeBSD c89 acceptance of flags depends upon
+## the command line argument order; so add the flags where they
+## appear in depend2.am.  Note that the slowdown incurred here
+## affects only configure: in makefiles, %FASTDEP% shortcuts this.
+  for arg
+  do
+    case $arg in
+    -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
+    *)  set fnord "$@" "$arg" ;;
+    esac
+    shift # fnord
+    shift # $arg
+  done
+  "$@"
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  mv "$tmpdepfile" "$depfile"
+  ;;
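Concretely, for a hypothetical invocation such as the one below (compiler and
paths are illustrative), the loop above splices the dependency options in just
before '-c':

    depmode=gcc3 source=sub/foo.c object=sub/foo.o \
      ./depcomp gcc -c -o sub/foo.o sub/foo.c
    # ...which ends up running roughly:
    #   gcc -MT sub/foo.o -MD -MP -MF sub/.deps/foo.TPo -c -o sub/foo.o sub/foo.c
    # and then renames sub/.deps/foo.TPo to sub/.deps/foo.Po on success.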
+
+gcc)
+## Note that this doesn't just cater to obsolete pre-3.x GCC compilers,
+## but also to in-use compilers like IBM xlc/xlC and the HP C compiler.
+## (see the conditional assignment to $gccflag above).
+## There are various ways to get dependency output from gcc.  Here's
+## why we pick this rather obscure method:
+## - Don't want to use -MD because we'd like the dependencies to end
+##   up in a subdir.  Having to rename by hand is ugly.
+##   (We might end up doing this anyway to support other compilers.)
+## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
+##   -MM, not -M (despite what the docs say).  Also, it might not be
+##   supported by the other compilers which use the 'gcc' depmode.
+## - Using -M directly means running the compiler twice (even worse
+##   than renaming).
+  if test -z "$gccflag"; then
+    gccflag=-MD,
+  fi
+  "$@" -Wp,"$gccflag$tmpdepfile"
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  # The second -e expression handles DOS-style file names with drive
+  # letters.
+  sed -e 's/^[^:]*: / /' \
+      -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
+## This next piece of magic avoids the "deleted header file" problem.
+## The problem is that when a header file which appears in a .P file
+## is deleted, the dependency causes make to die (because there is
+## typically no way to rebuild the header).  We avoid this by adding
+## dummy dependencies for each header file.  Too bad gcc doesn't do
+## this for us directly.
+## Some versions of gcc put a space before the ':'.  On the theory
+## that the space means something, we add a space to the output as
+## well.  hp depmode also adds that space, but also prefixes the VPATH
+## to the object.  Take care to not repeat it in the output.
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+  tr ' ' "$nl" < "$tmpdepfile" \
+    | sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
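The generated depfile therefore ends up roughly in this shape (contents are
illustrative), which is what keeps make from dying when sub/foo.h is later
removed:

    sub/foo.o : \
     sub/foo.c sub/foo.h
    sub/foo.c :
    sub/foo.h :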
+
+hp)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+sgi)
+  if test "$libtool" = yes; then
+    "$@" "-Wp,-MDupdate,$tmpdepfile"
+  else
+    "$@" -MDupdate "$tmpdepfile"
+  fi
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+
+  if test -f "$tmpdepfile"; then  # yes, the source file depends on other files
+    echo "$object : \\" > "$depfile"
+    # Clip off the initial element (the dependent).  Don't try to be
+    # clever and replace this with sed code, as IRIX sed won't handle
+    # lines with more than a fixed number of characters (4096 in
+    # IRIX 6.2 sed, 8192 in IRIX 6.5).  We also remove comment lines;
+    # the IRIX cc adds comments like '#:fec' to the end of the
+    # dependency line.
+    tr ' ' "$nl" < "$tmpdepfile" \
+      | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \
+      | tr "$nl" ' ' >> "$depfile"
+    echo >> "$depfile"
+    # The second pass generates a dummy entry for each header file.
+    tr ' ' "$nl" < "$tmpdepfile" \
+      | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
+      >> "$depfile"
+  else
+    make_dummy_depfile
+  fi
+  rm -f "$tmpdepfile"
+  ;;
+
+xlc)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+aix)
+  # The C for AIX Compiler uses -M and outputs the dependencies
+  # in a .u file.  In older versions, this file always lives in the
+  # current directory.  Also, the AIX compiler puts '$object:' at the
+  # start of each line; $object doesn't have directory information.
+  # Version 6 uses the directory in both cases.
+  set_dir_from "$object"
+  set_base_from "$object"
+  if test "$libtool" = yes; then
+    tmpdepfile1=$dir$base.u
+    tmpdepfile2=$base.u
+    tmpdepfile3=$dir.libs/$base.u
+    "$@" -Wc,-M
+  else
+    tmpdepfile1=$dir$base.u
+    tmpdepfile2=$dir$base.u
+    tmpdepfile3=$dir$base.u
+    "$@" -M
+  fi
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+    exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  aix_post_process_depfile
+  ;;
+
+tcc)
+  # tcc (Tiny C Compiler) understands '-MD -MF file' since version 0.9.26.
+  # FIXME: That version was still under development at the moment of writing.
+  #        Make sure that this statement remains true also for stable, released
+  #        versions.
+  # It will wrap lines (doesn't matter whether long or short) with a
+  # trailing '\', as in:
+  #
+  #   foo.o : \
+  #    foo.c \
+  #    foo.h \
+  #
+  # It will put a trailing '\' even on the last line, and will use leading
+  # spaces rather than leading tabs (at least since its commit 0394caf7
+  # "Emit spaces for -MD").
+  "$@" -MD -MF "$tmpdepfile"
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  # Each non-empty line is of the form 'foo.o : \' or ' dep.h \'.
+  # We have to change lines of the first kind to '$object: \'.
+  sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile"
+  # And for each line of the second kind, we have to emit a 'dep.h:'
+  # dummy dependency, to avoid the deleted-header problem.
+  sed -n -e 's|^  *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+## The order of this option in the case statement is important, since the
+## shell code in configure will try each of these formats in the order
+## listed in this file.  A plain '-MD' option would be understood by many
+## compilers, so we must ensure this comes after the gcc and icc options.
+pgcc)
+  # Portland's C compiler understands '-MD'.
+  # Will always output deps to 'file.d' where file is the root name of the
+  # source file under compilation, even if file resides in a subdirectory.
+  # The object file name does not affect the name of the '.d' file.
+  # pgcc 10.2 will output
+  #    foo.o: sub/foo.c sub/foo.h
+  # and will wrap long lines using '\' :
+  #    foo.o: sub/foo.c ... \
+  #     sub/foo.h ... \
+  #     ...
+  set_dir_from "$object"
+  # Use the source, not the object, to determine the base name, since
+  # that's sadly what pgcc will do too.
+  set_base_from "$source"
+  tmpdepfile=$base.d
+
+  # For projects that build the same source file twice into different object
+  # files, the pgcc approach of using the *source* file root name can cause
+  # problems in parallel builds.  Use a locking strategy to avoid stomping on
+  # the same $tmpdepfile.
+  lockdir=$base.d-lock
+  trap "
+    echo '$0: caught signal, cleaning up...' >&2
+    rmdir '$lockdir'
+    exit 1
+  " 1 2 13 15
+  numtries=100
+  i=$numtries
+  while test $i -gt 0; do
+    # mkdir is a portable test-and-set.
+    if mkdir "$lockdir" 2>/dev/null; then
+      # This process acquired the lock.
+      "$@" -MD
+      stat=$?
+      # Release the lock.
+      rmdir "$lockdir"
+      break
+    else
+      # If the lock is being held by a different process, wait
+      # until the winning process is done or we timeout.
+      while test -d "$lockdir" && test $i -gt 0; do
+        sleep 1
+        i=`expr $i - 1`
+      done
+    fi
+    i=`expr $i - 1`
+  done
+  trap - 1 2 13 15
+  if test $i -le 0; then
+    echo "$0: failed to acquire lock after $numtries attempts" >&2
+    echo "$0: check lockdir '$lockdir'" >&2
+    exit 1
+  fi
+
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  # Each line is of the form `foo.o: dependent.h',
+  # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
+  # Do two passes, one to just change these to
+  # `$object: dependent.h' and one to simply `dependent.h:'.
+  sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process this invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
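The locking idiom above works because mkdir is an atomic create-and-test;
stripped of the retry loop and signal handling, it amounts to this sketch
(names are placeholders):

    lockdir=foo.d-lock
    if mkdir "$lockdir" 2>/dev/null; then
      touch foo.d              # stand-in for the real compiler invocation
      rmdir "$lockdir"         # release the lock
    else
      echo "lock $lockdir is held by another process" >&2
    fi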
+
+hp2)
+  # The "hp" stanza above does not work with aCC (C++) and HP's ia64
+  # compilers, which have integrated preprocessors.  The correct option
+  # to use with these is +Maked; it writes dependencies to a file named
+  # 'foo.d', which lands next to the object file, wherever that
+  # happens to be.
+  # Much of this is similar to the tru64 case; see comments there.
+  set_dir_from  "$object"
+  set_base_from "$object"
+  if test "$libtool" = yes; then
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir.libs/$base.d
+    "$@" -Wc,+Maked
+  else
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir$base.d
+    "$@" +Maked
+  fi
+  stat=$?
+  if test $stat -ne 0; then
+     rm -f "$tmpdepfile1" "$tmpdepfile2"
+     exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  if test -f "$tmpdepfile"; then
+    sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile"
+    # Add 'dependent.h:' lines.
+    sed -ne '2,${
+               s/^ *//
+               s/ \\*$//
+               s/$/:/
+               p
+             }' "$tmpdepfile" >> "$depfile"
+  else
+    make_dummy_depfile
+  fi
+  rm -f "$tmpdepfile" "$tmpdepfile2"
+  ;;
+
+tru64)
+  # The Tru64 compiler uses -MD to generate dependencies as a side
+  # effect.  'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'.
+  # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
+  # dependencies in 'foo.d' instead, so we check for that too.
+  # Subdirectories are respected.
+  set_dir_from  "$object"
+  set_base_from "$object"
+
+  if test "$libtool" = yes; then
+    # Libtool generates 2 separate objects for the 2 libraries.  These
+    # two compilations output dependencies in $dir.libs/$base.o.d and
+    # in $dir$base.o.d.  We have to check for both files, because
+    # one of the two compilations can be disabled.  We should prefer
+    # $dir$base.o.d over $dir.libs/$base.o.d because the latter is
+    # automatically cleaned when .libs/ is deleted, while ignoring
+    # the former would cause a distcleancheck panic.
+    tmpdepfile1=$dir$base.o.d          # libtool 1.5
+    tmpdepfile2=$dir.libs/$base.o.d    # Likewise.
+    tmpdepfile3=$dir.libs/$base.d      # Compaq CCC V6.2-504
+    "$@" -Wc,-MD
+  else
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir$base.d
+    tmpdepfile3=$dir$base.d
+    "$@" -MD
+  fi
+
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+    exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  # Same post-processing that is required for AIX mode.
+  aix_post_process_depfile
+  ;;
+
+msvc7)
+  if test "$libtool" = yes; then
+    showIncludes=-Wc,-showIncludes
+  else
+    showIncludes=-showIncludes
+  fi
+  "$@" $showIncludes > "$tmpdepfile"
+  stat=$?
+  grep -v '^Note: including file: ' "$tmpdepfile"
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  # The first sed program below extracts the file names and escapes
+  # backslashes for cygpath.  The second sed program outputs the file
+  # name when reading, but also accumulates all include files in the
+  # hold buffer in order to output them again at the end.  This only
+  # works with sed implementations that can handle large buffers.
+  sed < "$tmpdepfile" -n '
+/^Note: including file:  *\(.*\)/ {
+  s//\1/
+  s/\\/\\\\/g
+  p
+}' | $cygpath_u | sort -u | sed -n '
+s/ /\\ /g
+s/\(.*\)/'"$tab"'\1 \\/p
+s/.\(.*\) \\/\1:/
+H
+$ {
+  s/.*/'"$tab"'/
+  G
+  p
+}' >> "$depfile"
+  echo >> "$depfile" # make sure the fragment doesn't end with a backslash
+  rm -f "$tmpdepfile"
+  ;;
+
+msvc7msys)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+#nosideeffect)
+  # This comment above is used by automake to tell side-effect
+  # dependency tracking mechanisms from slower ones.
+
+dashmstdout)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout, regardless of -o.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove '-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  test -z "$dashmflag" && dashmflag=-M
+  # Require at least two characters before searching for ':'
+  # in the target name.  This is to cope with DOS-style filenames:
+  # a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise.
+  "$@" $dashmflag |
+    sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile"
+  rm -f "$depfile"
+  cat < "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process this sed invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  tr ' ' "$nl" < "$tmpdepfile" \
+    | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+dashXmstdout)
+  # This case only exists to satisfy depend.m4.  It is never actually
+  # run, as this mode is specially recognized in the preamble.
+  exit 1
+  ;;
+
+makedepend)
+  "$@" || exit $?
+  # Remove any Libtool call
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+  # X makedepend
+  shift
+  cleared=no eat=no
+  for arg
+  do
+    case $cleared in
+    no)
+      set ""; shift
+      cleared=yes ;;
+    esac
+    if test $eat = yes; then
+      eat=no
+      continue
+    fi
+    case "$arg" in
+    -D*|-I*)
+      set fnord "$@" "$arg"; shift ;;
+    # Strip any option that makedepend may not understand.  Remove
+    # the object too, otherwise makedepend will parse it as a source file.
+    -arch)
+      eat=yes ;;
+    -*|$object)
+      ;;
+    *)
+      set fnord "$@" "$arg"; shift ;;
+    esac
+  done
+  obj_suffix=`echo "$object" | sed 's/^.*\././'`
+  touch "$tmpdepfile"
+  ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
+  rm -f "$depfile"
+  # makedepend may prepend the VPATH from the source file name to the object.
+  # No need to regex-escape $object, excess matching of '.' is harmless.
+  sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process the last invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  sed '1,2d' "$tmpdepfile" \
+    | tr ' ' "$nl" \
+    | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile" "$tmpdepfile".bak
+  ;;
+
+cpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove '-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  "$@" -E \
+    | sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
+             -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
+    | sed '$ s: \\$::' > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  cat < "$tmpdepfile" >> "$depfile"
+  sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+msvisualcpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  IFS=" "
+  for arg
+  do
+    case "$arg" in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
+        set fnord "$@"
+        shift
+        shift
+        ;;
+    *)
+        set fnord "$@" "$arg"
+        shift
+        shift
+        ;;
+    esac
+  done
+  "$@" -E 2>/dev/null |
+  sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile"
+  echo "$tab" >> "$depfile"
+  sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+msvcmsys)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+none)
+  exec "$@"
+  ;;
+
+*)
+  echo "Unknown depmode $depmode" 1>&2
+  exit 1
+  ;;
+esac
+
+exit 0
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/install-sh b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/install-sh
new file mode 100755
index 0000000..a9244eb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/install-sh
@@ -0,0 +1,527 @@
+#!/bin/sh
+# install - install a program, script, or datafile
+
+scriptversion=2011-01-19.21; # UTC
+
+# This originates from X11R5 (mit/util/scripts/install.sh), which was
+# later released in X11R6 (xc/config/util/install.sh) with the
+# following copyright and license.
+#
+# Copyright (C) 1994 X Consortium
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
+# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+# Except as contained in this notice, the name of the X Consortium shall not
+# be used in advertising or otherwise to promote the sale, use or other deal-
+# ings in this Software without prior written authorization from the X Consor-
+# tium.
+#
+#
+# FSF changes to this file are in the public domain.
+#
+# Calling this script install-sh is preferred over install.sh, to prevent
+# `make' implicit rules from creating a file called install from it
+# when there is no Makefile.
+#
+# This script is compatible with the BSD install script, but was written
+# from scratch.
+
+nl='
+'
+IFS=" ""	$nl"
+
+# set DOITPROG to echo to test this script
+
+# Don't use :- since 4.3BSD and earlier shells don't like it.
+doit=${DOITPROG-}
+if test -z "$doit"; then
+  doit_exec=exec
+else
+  doit_exec=$doit
+fi
+
+# Put in absolute file names if you don't have them in your path;
+# or use environment vars.
+
+chgrpprog=${CHGRPPROG-chgrp}
+chmodprog=${CHMODPROG-chmod}
+chownprog=${CHOWNPROG-chown}
+cmpprog=${CMPPROG-cmp}
+cpprog=${CPPROG-cp}
+mkdirprog=${MKDIRPROG-mkdir}
+mvprog=${MVPROG-mv}
+rmprog=${RMPROG-rm}
+stripprog=${STRIPPROG-strip}
+
+posix_glob='?'
+initialize_posix_glob='
+  test "$posix_glob" != "?" || {
+    if (set -f) 2>/dev/null; then
+      posix_glob=
+    else
+      posix_glob=:
+    fi
+  }
+'
+
+posix_mkdir=
+
+# Desired mode of installed file.
+mode=0755
+
+chgrpcmd=
+chmodcmd=$chmodprog
+chowncmd=
+mvcmd=$mvprog
+rmcmd="$rmprog -f"
+stripcmd=
+
+src=
+dst=
+dir_arg=
+dst_arg=
+
+copy_on_change=false
+no_target_directory=
+
+usage="\
+Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
+   or: $0 [OPTION]... SRCFILES... DIRECTORY
+   or: $0 [OPTION]... -t DIRECTORY SRCFILES...
+   or: $0 [OPTION]... -d DIRECTORIES...
+
+In the 1st form, copy SRCFILE to DSTFILE.
+In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
+In the 4th, create DIRECTORIES.
+
+Options:
+     --help     display this help and exit.
+     --version  display version info and exit.
+
+  -c            (ignored)
+  -C            install only if different (preserve the last data modification time)
+  -d            create directories instead of installing files.
+  -g GROUP      $chgrpprog installed files to GROUP.
+  -m MODE       $chmodprog installed files to MODE.
+  -o USER       $chownprog installed files to USER.
+  -s            $stripprog installed files.
+  -t DIRECTORY  install into DIRECTORY.
+  -T            report an error if DSTFILE is a directory.
+
+Environment variables override the default commands:
+  CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
+  RMPROG STRIPPROG
+"
+
+while test $# -ne 0; do
+  case $1 in
+    -c) ;;
+
+    -C) copy_on_change=true;;
+
+    -d) dir_arg=true;;
+
+    -g) chgrpcmd="$chgrpprog $2"
+	shift;;
+
+    --help) echo "$usage"; exit $?;;
+
+    -m) mode=$2
+	case $mode in
+	  *' '* | *'	'* | *'
+'*	  | *'*'* | *'?'* | *'['*)
+	    echo "$0: invalid mode: $mode" >&2
+	    exit 1;;
+	esac
+	shift;;
+
+    -o) chowncmd="$chownprog $2"
+	shift;;
+
+    -s) stripcmd=$stripprog;;
+
+    -t) dst_arg=$2
+	# Protect names problematic for `test' and other utilities.
+	case $dst_arg in
+	  -* | [=\(\)!]) dst_arg=./$dst_arg;;
+	esac
+	shift;;
+
+    -T) no_target_directory=true;;
+
+    --version) echo "$0 $scriptversion"; exit $?;;
+
+    --)	shift
+	break;;
+
+    -*)	echo "$0: invalid option: $1" >&2
+	exit 1;;
+
+    *)  break;;
+  esac
+  shift
+done
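Typical invocations matching the forms documented in the usage text above
(file and directory names are illustrative):

    ./install-sh -d build/stage/include
    ./install-sh -c -m 0644 nlunit-test.h build/stage/include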
+
+if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
+  # When -d is used, all remaining arguments are directories to create.
+  # When -t is used, the destination is already specified.
+  # Otherwise, the last argument is the destination.  Remove it from $@.
+  for arg
+  do
+    if test -n "$dst_arg"; then
+      # $@ is not empty: it contains at least $arg.
+      set fnord "$@" "$dst_arg"
+      shift # fnord
+    fi
+    shift # arg
+    dst_arg=$arg
+    # Protect names problematic for `test' and other utilities.
+    case $dst_arg in
+      -* | [=\(\)!]) dst_arg=./$dst_arg;;
+    esac
+  done
+fi
+
+if test $# -eq 0; then
+  if test -z "$dir_arg"; then
+    echo "$0: no input file specified." >&2
+    exit 1
+  fi
+  # It's OK to call `install-sh -d' without argument.
+  # This can happen when creating conditional directories.
+  exit 0
+fi
+
+if test -z "$dir_arg"; then
+  do_exit='(exit $ret); exit $ret'
+  trap "ret=129; $do_exit" 1
+  trap "ret=130; $do_exit" 2
+  trap "ret=141; $do_exit" 13
+  trap "ret=143; $do_exit" 15
+
+  # Set umask so as not to create temps with too-generous modes.
+  # However, 'strip' requires both read and write access to temps.
+  case $mode in
+    # Optimize common cases.
+    *644) cp_umask=133;;
+    *755) cp_umask=22;;
+
+    *[0-7])
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw='% 200'
+      fi
+      cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
+    *)
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw=,u+rw
+      fi
+      cp_umask=$mode$u_plus_rw;;
+  esac
+fi
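For the general '*[0-7]' branch above, the expr arithmetic works out like this
(mode values are illustrative):

    expr '(' 777 - 444 % 1000 ')'           # mode 444, no strip:   prints 333
    expr '(' 777 - 444 % 1000 ')' % 200     # mode 444, with strip: prints 133
    # the '% 200' keeps the umask below 200 so the owner's read/write bits
    # survive and strip can still modify the temporary copy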
+
+for src
+do
+  # Protect names problematic for `test' and other utilities.
+  case $src in
+    -* | [=\(\)!]) src=./$src;;
+  esac
+
+  if test -n "$dir_arg"; then
+    dst=$src
+    dstdir=$dst
+    test -d "$dstdir"
+    dstdir_status=$?
+  else
+
+    # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
+    # might cause directories to be created, which would be especially bad
+    # if $src (and thus $dsttmp) contains '*'.
+    if test ! -f "$src" && test ! -d "$src"; then
+      echo "$0: $src does not exist." >&2
+      exit 1
+    fi
+
+    if test -z "$dst_arg"; then
+      echo "$0: no destination specified." >&2
+      exit 1
+    fi
+    dst=$dst_arg
+
+    # If destination is a directory, append the input filename; won't work
+    # if double slashes aren't ignored.
+    if test -d "$dst"; then
+      if test -n "$no_target_directory"; then
+	echo "$0: $dst_arg: Is a directory" >&2
+	exit 1
+      fi
+      dstdir=$dst
+      dst=$dstdir/`basename "$src"`
+      dstdir_status=0
+    else
+      # Prefer dirname, but fall back on a substitute if dirname fails.
+      dstdir=`
+	(dirname "$dst") 2>/dev/null ||
+	expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	     X"$dst" : 'X\(//\)[^/]' \| \
+	     X"$dst" : 'X\(//\)$' \| \
+	     X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
+	echo X"$dst" |
+	    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)[^/].*/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\).*/{
+		   s//\1/
+		   q
+		 }
+		 s/.*/./; q'
+      `
+
+      test -d "$dstdir"
+      dstdir_status=$?
+    fi
+  fi
+
+  obsolete_mkdir_used=false
+
+  if test $dstdir_status != 0; then
+    case $posix_mkdir in
+      '')
+	# Create intermediate dirs using mode 755 as modified by the umask.
+	# This is like FreeBSD 'install' as of 1997-10-28.
+	umask=`umask`
+	case $stripcmd.$umask in
+	  # Optimize common cases.
+	  *[2367][2367]) mkdir_umask=$umask;;
+	  .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
+
+	  *[0-7])
+	    mkdir_umask=`expr $umask + 22 \
+	      - $umask % 100 % 40 + $umask % 20 \
+	      - $umask % 10 % 4 + $umask % 2
+	    `;;
+	  *) mkdir_umask=$umask,go-w;;
+	esac
+
+	# With -d, create the new directory with the user-specified mode.
+	# Otherwise, rely on $mkdir_umask.
+	if test -n "$dir_arg"; then
+	  mkdir_mode=-m$mode
+	else
+	  mkdir_mode=
+	fi
+
+	posix_mkdir=false
+	case $umask in
+	  *[123567][0-7][0-7])
+	    # POSIX mkdir -p sets u+wx bits regardless of umask, which
+	    # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
+	    ;;
+	  *)
+	    tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
+	    trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
+
+	    if (umask $mkdir_umask &&
+		exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
+	    then
+	      if test -z "$dir_arg" || {
+		   # Check for POSIX incompatibilities with -m.
+		   # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
+		   # other-writeable bit of parent directory when it shouldn't.
+		   # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
+		   ls_ld_tmpdir=`ls -ld "$tmpdir"`
+		   case $ls_ld_tmpdir in
+		     d????-?r-*) different_mode=700;;
+		     d????-?--*) different_mode=755;;
+		     *) false;;
+		   esac &&
+		   $mkdirprog -m$different_mode -p -- "$tmpdir" && {
+		     ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
+		     test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
+		   }
+		 }
+	      then posix_mkdir=:
+	      fi
+	      rmdir "$tmpdir/d" "$tmpdir"
+	    else
+	      # Remove any dirs left behind by ancient mkdir implementations.
+	      rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
+	    fi
+	    trap '' 0;;
+	esac;;
+    esac
+
+    if
+      $posix_mkdir && (
+	umask $mkdir_umask &&
+	$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
+      )
+    then :
+    else
+
+      # The umask is ridiculous, or mkdir does not conform to POSIX,
+      # or it failed possibly due to a race condition.  Create the
+      # directory the slow way, step by step, checking for races as we go.
+
+      case $dstdir in
+	/*) prefix='/';;
+	[-=\(\)!]*) prefix='./';;
+	*)  prefix='';;
+      esac
+
+      eval "$initialize_posix_glob"
+
+      oIFS=$IFS
+      IFS=/
+      $posix_glob set -f
+      set fnord $dstdir
+      shift
+      $posix_glob set +f
+      IFS=$oIFS
+
+      prefixes=
+
+      for d
+      do
+	test X"$d" = X && continue
+
+	prefix=$prefix$d
+	if test -d "$prefix"; then
+	  prefixes=
+	else
+	  if $posix_mkdir; then
+	    (umask=$mkdir_umask &&
+	     $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
+	    # Don't fail if two instances are running concurrently.
+	    test -d "$prefix" || exit 1
+	  else
+	    case $prefix in
+	      *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
+	      *) qprefix=$prefix;;
+	    esac
+	    prefixes="$prefixes '$qprefix'"
+	  fi
+	fi
+	prefix=$prefix/
+      done
+
+      if test -n "$prefixes"; then
+	# Don't fail if two instances are running concurrently.
+	(umask $mkdir_umask &&
+	 eval "\$doit_exec \$mkdirprog $prefixes") ||
+	  test -d "$dstdir" || exit 1
+	obsolete_mkdir_used=true
+      fi
+    fi
+  fi
+
+  if test -n "$dir_arg"; then
+    { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
+    { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
+    { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
+      test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
+  else
+
+    # Make a couple of temp file names in the proper directory.
+    dsttmp=$dstdir/_inst.$$_
+    rmtmp=$dstdir/_rm.$$_
+
+    # Trap to clean up those temp files at exit.
+    trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
+
+    # Copy the file name to the temp name.
+    (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
+
+    # and set any options; do chmod last to preserve setuid bits.
+    #
+    # If any of these fail, we abort the whole thing.  If we want to
+    # ignore errors from any of these, just make sure not to ignore
+    # errors from the above "$doit $cpprog $src $dsttmp" command.
+    #
+    { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
+    { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
+    { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
+    { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
+
+    # If -C, don't bother to copy if it wouldn't change the file.
+    if $copy_on_change &&
+       old=`LC_ALL=C ls -dlL "$dst"	2>/dev/null` &&
+       new=`LC_ALL=C ls -dlL "$dsttmp"	2>/dev/null` &&
+
+       eval "$initialize_posix_glob" &&
+       $posix_glob set -f &&
+       set X $old && old=:$2:$4:$5:$6 &&
+       set X $new && new=:$2:$4:$5:$6 &&
+       $posix_glob set +f &&
+
+       test "$old" = "$new" &&
+       $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
+    then
+      rm -f "$dsttmp"
+    else
+      # Rename the file to the real destination.
+      $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
+
+      # The rename failed, perhaps because mv can't rename something else
+      # to itself, or perhaps because mv is so ancient that it does not
+      # support -f.
+      {
+	# Now remove or move aside any old file at destination location.
+	# We try this two ways since rm can't unlink itself on some
+	# systems and the destination file might be busy for other
+	# reasons.  In this case, the final cleanup might fail but the new
+	# file should still install successfully.
+	{
+	  test ! -f "$dst" ||
+	  $doit $rmcmd -f "$dst" 2>/dev/null ||
+	  { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
+	    { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
+	  } ||
+	  { echo "$0: cannot unlink or rename $dst" >&2
+	    (exit 1); exit 1
+	  }
+	} &&
+
+	# Now rename the file to the real destination.
+	$doit $mvcmd "$dsttmp" "$dst"
+      }
+    fi || exit 1
+
+    trap '' 0
+  fi
+done
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/ltmain.sh b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/ltmain.sh
new file mode 100644
index 0000000..63ae69d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/ltmain.sh
@@ -0,0 +1,9655 @@
+
+# libtool (GNU libtool) 2.4.2
+# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]... [MODE-ARG]...
+#
+# Provide generalized library-building support services.
+#
+#       --config             show all configuration variables
+#       --debug              enable verbose shell tracing
+#   -n, --dry-run            display commands without modifying any files
+#       --features           display basic configuration information and exit
+#       --mode=MODE          use operation mode MODE
+#       --preserve-dup-deps  don't remove duplicate dependency libraries
+#       --quiet, --silent    don't print informational messages
+#       --no-quiet, --no-silent
+#                            print informational messages (default)
+#       --no-warn            don't display warning messages
+#       --tag=TAG            use configuration variables from tag TAG
+#   -v, --verbose            print more informational messages than default
+#       --no-verbose         don't print the extra informational messages
+#       --version            print version information
+#   -h, --help, --help-all   print short, long, or detailed help message
+#
+# MODE must be one of the following:
+#
+#         clean              remove files from the build directory
+#         compile            compile a source file into a libtool object
+#         execute            automatically set library path, then run a program
+#         finish             complete the installation of libtool libraries
+#         install            install libraries or executables
+#         link               create a library or an executable
+#         uninstall          remove libraries from an installed directory
+#
+# MODE-ARGS vary depending on the MODE.  When passed as first option,
+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#         host-triplet:	$host
+#         shell:		$SHELL
+#         compiler:		$LTCC
+#         compiler flags:		$LTCFLAGS
+#         linker:		$LD (gnu? $with_gnu_ld)
+#         $progname:	(GNU libtool) 2.4.2
+#         automake:	$automake_version
+#         autoconf:	$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.4.2
+TIMESTAMP=""
+package_revision=1.3337
+
+# Be Bourne compatible
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# NLS nuisances: We save the old values to restore during execute mode.
+lt_user_locale=
+lt_safe_locale=
+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+  eval "if test \"\${$lt_var+set}\" = set; then
+          save_$lt_var=\$$lt_var
+          $lt_var=C
+	  export $lt_var
+	  lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
+	  lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+	fi"
+done
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+$lt_unset CDPATH
+
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+} # func_dirname may be replaced by extended shell implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+} # func_basename may be replaced by extended shell implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    # Extract subdirectory from the argument.
+    func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+    func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+} # func_dirname_and_basename may be replaced by extended shell implementation
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    case ${2} in
+      .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+      *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+    esac
+} # func_stripname may be replaced by extended shell implementation
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
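A hypothetical call, assuming the function above and the helpers it uses have
been sourced and $ECHO/$SED point at working echo and sed implementations:

    func_normal_abspath "/usr//local/./lib/../share/"
    echo "$func_normal_abspath_result"    # -> /usr/local/share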
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
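And a companion sketch for func_relative_path, under the same assumptions
(directories are illustrative):

    func_relative_path /usr/local/lib /usr/local/bin
    echo "$func_relative_path_result"     # -> ../bin/
    # the trailing slash lets a file name be appended directly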
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same.  If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$'.  `\' that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost-first order.  Use a colon-delimited
+        # list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
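+# For example, `func_mkdir_p foo/bar/baz' creates foo, foo/bar and
+# foo/bar/baz in turn, skipping any component that already exists
+# (and does nothing at all in --dry-run mode).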
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
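+# For example, given the argument `hello $USER', the unquoted result is
+# `hello \$USER' and the eval-ready result is `"hello \$USER"'.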
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
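+# For example, `func_tr_sh 1.2-rc' sets func_tr_sh_result to `_1_2_rc'.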
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    my_sed_short_opt='1s/^\(..\).*$/\1/;q'
+    my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
+
+    func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
+    func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
+} # func_split_short_opt may be replaced by extended shell implementation
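+# For example, `func_split_short_opt -n5' sets the name to `-n' and the
+# argument to `5'.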
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
+    my_sed_long_arg='1s/^--[^=]*=//'
+
+    func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
+    func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
+} # func_split_long_opt may be replaced by extended shell implementation
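+# For example, `func_split_long_opt --mode=compile' sets the name to
+# `--mode' and the argument to `compile'.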
+
+exit_cmd=:
+
+
+
+
+
+magic="%%%MAGIC variable%%%"
+magic_exe="%%%MAGIC EXE variable%%%"
+
+# Global variables.
+nonopt=
+preserve_args=
+lo2o="s/\\.lo\$/.${objext}/"
+o2lo="s/\\.${objext}\$/.lo/"
+extracted_archives=
+extracted_serial=0
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end.  This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+    eval "${1}=\$${1}\${2}"
+} # func_append may be replaced by extended shell implementation
+
+# func_append_quoted var value
+# Quote VALUE and append to the end of shell variable VAR, separated
+# by a space.
+func_append_quoted ()
+{
+    func_quote_for_eval "${2}"
+    eval "${1}=\$${1}\\ \$func_quote_for_eval_result"
+} # func_append_quoted may be replaced by extended shell implementation
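+# For example, `func_append_quoted CMD "a b"' appends ` "a b"' (with the
+# quotes preserved) to the current value of $CMD.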
+
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+    func_arith_result=`expr "${@}"`
+} # func_arith may be replaced by extended shell implementation
+
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+    func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len`
+} # func_len may be replaced by extended shell implementation
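+# For example, `func_arith 2 + 3' sets func_arith_result to 5, and
+# `func_len abcde' sets func_len_result to 5.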
+
+
+# func_lo2o object
+func_lo2o ()
+{
+    func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+} # func_lo2o may be replaced by extended shell implementation
+
+
+# func_xform libobj-or-source
+func_xform ()
+{
+    func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+} # func_xform may be replaced by extended shell implementation
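+# For example, `func_lo2o foo.lo' yields `foo.$objext' (typically foo.o),
+# while `func_xform foo.c' yields `foo.lo'.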
+
+
+# func_fatal_configuration arg...
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+    func_error ${1+"$@"}
+    func_error "See the $PACKAGE documentation for more information."
+    func_fatal_error "Fatal configuration error."
+}
+
+
+# func_config
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+    re_begincf='^# ### BEGIN LIBTOOL'
+    re_endcf='^# ### END LIBTOOL'
+
+    # Default configuration.
+    $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+    # Now print the configurations for the tags.
+    for tagname in $taglist; do
+      $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+    done
+
+    exit $?
+}
+
+# func_features
+# Display the features supported by this script.
+func_features ()
+{
+    echo "host: $host"
+    if test "$build_libtool_libs" = yes; then
+      echo "enable shared libraries"
+    else
+      echo "disable shared libraries"
+    fi
+    if test "$build_old_libs" = yes; then
+      echo "enable static libraries"
+    else
+      echo "disable static libraries"
+    fi
+
+    exit $?
+}
+
+# func_enable_tag tagname
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag.  We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+  # Global variable:
+  tagname="$1"
+
+  re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+  re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+  sed_extractcf="/$re_begincf/,/$re_endcf/p"
+
+  # Validate tagname.
+  case $tagname in
+    *[!-_A-Za-z0-9,/]*)
+      func_fatal_error "invalid tag name: $tagname"
+      ;;
+  esac
+
+  # Don't test for the "default" C tag, as we know it's
+  # there but not specially marked.
+  case $tagname in
+    CC) ;;
+    *)
+      if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+	taglist="$taglist $tagname"
+
+	# Evaluate the configuration.  Be careful to quote the path
+	# and the sed script, to avoid splitting on whitespace, but
+	# also don't use non-portable quotes within backquotes within
+	# quotes; we have to do it in two steps:
+	extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+	eval "$extractedcf"
+      else
+	func_error "ignoring unknown tag $tagname"
+      fi
+      ;;
+  esac
+}
+
+# func_check_version_match
+# Ensure that we are using m4 macros, and libtool script from the same
+# release of libtool.
+func_check_version_match ()
+{
+  if test "$package_revision" != "$macro_revision"; then
+    if test "$VERSION" != "$macro_version"; then
+      if test -z "$macro_version"; then
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      else
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      fi
+    else
+      cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+    fi
+
+    exit $EXIT_MISMATCH
+  fi
+}
+
+
+# Shorthand for --mode=foo, only valid as the first argument
+case $1 in
+clean|clea|cle|cl)
+  shift; set dummy --mode clean ${1+"$@"}; shift
+  ;;
+compile|compil|compi|comp|com|co|c)
+  shift; set dummy --mode compile ${1+"$@"}; shift
+  ;;
+execute|execut|execu|exec|exe|ex|e)
+  shift; set dummy --mode execute ${1+"$@"}; shift
+  ;;
+finish|finis|fini|fin|fi|f)
+  shift; set dummy --mode finish ${1+"$@"}; shift
+  ;;
+install|instal|insta|inst|ins|in|i)
+  shift; set dummy --mode install ${1+"$@"}; shift
+  ;;
+link|lin|li|l)
+  shift; set dummy --mode link ${1+"$@"}; shift
+  ;;
+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+  shift; set dummy --mode uninstall ${1+"$@"}; shift
+  ;;
+esac
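+# For example, an invocation such as `libtool compile gcc -c foo.c' is
+# rewritten above as `libtool --mode compile gcc -c foo.c' before the
+# options are parsed.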
+
+
+
+# Option defaults:
+opt_debug=:
+opt_dry_run=false
+opt_config=false
+opt_preserve_dup_deps=false
+opt_features=false
+opt_finish=false
+opt_help=false
+opt_help_all=false
+opt_silent=:
+opt_warning=:
+opt_verbose=:
+opt_silent=false
+opt_verbose=false
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+			;;
+      --config)
+			opt_config=:
+func_config
+			;;
+      --dlopen|-dlopen)
+			optarg="$1"
+			opt_dlopen="${opt_dlopen+$opt_dlopen
+}$optarg"
+			shift
+			;;
+      --preserve-dup-deps)
+			opt_preserve_dup_deps=:
+			;;
+      --features)
+			opt_features=:
+func_features
+			;;
+      --finish)
+			opt_finish=:
+set dummy --mode finish ${1+"$@"}; shift
+			;;
+      --help)
+			opt_help=:
+			;;
+      --help-all)
+			opt_help_all=:
+opt_help=': help-all'
+			;;
+      --mode)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_mode="$optarg"
+case $optarg in
+  # Valid mode arguments:
+  clean|compile|execute|finish|install|link|relink|uninstall) ;;
+
+  # Catch anything else as an error
+  *) func_error "invalid argument for $opt"
+     exit_cmd=exit
+     break
+     ;;
+esac
+			shift
+			;;
+      --no-silent|--no-quiet)
+			opt_silent=false
+func_append preserve_args " $opt"
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+func_append preserve_args " $opt"
+			;;
+      --no-verbose)
+			opt_verbose=false
+func_append preserve_args " $opt"
+			;;
+      --silent|--quiet)
+			opt_silent=:
+func_append preserve_args " $opt"
+        opt_verbose=false
+			;;
+      --verbose|-v)
+			opt_verbose=:
+func_append preserve_args " $opt"
+opt_silent=false
+			;;
+      --tag)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_tag="$optarg"
+func_append preserve_args " $opt $optarg"
+func_enable_tag "$optarg"
+			shift
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-n*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # save first non-option argument
+  if test "$#" -gt 0; then
+    nonopt="$opt"
+    shift
+  fi
+
+  # preserve --debug
+  test "$opt_debug" = : || func_append preserve_args " --debug"
+
+  case $host in
+    *cygwin* | *mingw* | *pw32* | *cegcc*)
+      # don't eliminate duplications in $postdeps and $predeps
+      opt_duplicate_compiler_generated_deps=:
+      ;;
+    *)
+      opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+      ;;
+  esac
+
+  $opt_help || {
+    # Sanity checks first:
+    func_check_version_match
+
+    if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+      func_fatal_configuration "not configured to build any kind of library"
+    fi
+
+    # Darwin sucks
+    eval std_shrext=\"$shrext_cmds\"
+
+    # Only execute mode is allowed to have -dlopen flags.
+    if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
+      func_error "unrecognized option \`-dlopen'"
+      $ECHO "$help" 1>&2
+      exit $EXIT_FAILURE
+    fi
+
+    # Change the help message to a mode-specific one.
+    generic_help="$help"
+    help="Try \`$progname --help --mode=$opt_mode' for more information."
+  }
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+# func_lalib_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+    test -f "$1" &&
+      $SED -e 4q "$1" 2>/dev/null \
+        | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs.  To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway.  Works if `file' does not exist.
+func_lalib_unsafe_p ()
+{
+    lalib_p=no
+    if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+	for lalib_p_l in 1 2 3 4
+	do
+	    read lalib_p_line
+	    case "$lalib_p_line" in
+		\#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+	    esac
+	done
+	exec 0<&5 5<&-
+    fi
+    test "$lalib_p" = yes
+}
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+    func_lalib_p "$1"
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+    func_ltwrapper_exec_suffix=
+    case $1 in
+    *.exe) ;;
+    *) func_ltwrapper_exec_suffix=.exe ;;
+    esac
+    $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes FILE is an ltwrapper_executable; uses $file to determine the
+# appropriate filename for a temporary ltwrapper_script.
+func_ltwrapper_scriptname ()
+{
+    func_dirname_and_basename "$1" "" "."
+    func_stripname '' '.exe' "$func_basename_result"
+    func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+}
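+# For example, with objdir set to `.libs', `src/app.exe' maps to
+# `src/.libs/app_ltshwrapper'.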
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+    func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+    $opt_debug
+    save_ifs=$IFS; IFS='~'
+    for cmd in $1; do
+      IFS=$save_ifs
+      eval cmd=\"$cmd\"
+      func_show_eval "$cmd" "${2-:}"
+    done
+    IFS=$save_ifs
+}
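+# For example, `func_execute_cmds "cmd1~cmd2" "exit 1"' runs cmd1 and then
+# cmd2 via func_show_eval, evaluating `exit 1' if either command fails.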
+
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)!  Also, sourcing
+# `FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+    $opt_debug
+    case $1 in
+    */* | *\\*)	. "$1" ;;
+    *)		. "./$1" ;;
+    esac
+}
+
+
+# func_resolve_sysroot PATH
+# Replace a leading = in PATH with a sysroot.  Store the result into
+# func_resolve_sysroot_result
+func_resolve_sysroot ()
+{
+  func_resolve_sysroot_result=$1
+  case $func_resolve_sysroot_result in
+  =*)
+    func_stripname '=' '' "$func_resolve_sysroot_result"
+    func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
+    ;;
+  esac
+}
+
+# func_replace_sysroot PATH
+# If PATH begins with the sysroot, replace it with = and
+# store the result into func_replace_sysroot_result.
+func_replace_sysroot ()
+{
+  case "$lt_sysroot:$1" in
+  ?*:"$lt_sysroot"*)
+    func_stripname "$lt_sysroot" '' "$1"
+    func_replace_sysroot_result="=$func_stripname_result"
+    ;;
+  *)
+    # Including no sysroot.
+    func_replace_sysroot_result=$1
+    ;;
+  esac
+}
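+# For example, with lt_sysroot=/opt/sysroot, func_resolve_sysroot maps
+# `=/usr/lib' to `/opt/sysroot/usr/lib', and func_replace_sysroot maps
+# `/opt/sysroot/usr/lib' back to `=/usr/lib'.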
+
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+    $opt_debug
+    if test -n "$available_tags" && test -z "$tagname"; then
+      CC_quoted=
+      for arg in $CC; do
+	func_append_quoted CC_quoted "$arg"
+      done
+      CC_expanded=`func_echo_all $CC`
+      CC_quoted_expanded=`func_echo_all $CC_quoted`
+      case $@ in
+      # Blanks in the command may have been stripped by the calling shell,
+      # but not from the CC environment variable when configure was run.
+      " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+      " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+      # Blanks at the start of $base_compile will cause this to fail
+      # if we don't check for them as well.
+      *)
+	for z in $available_tags; do
+	  if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+	    # Evaluate the configuration.
+	    eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+	    CC_quoted=
+	    for arg in $CC; do
+	      # Double-quote args containing other shell metacharacters.
+	      func_append_quoted CC_quoted "$arg"
+	    done
+	    CC_expanded=`func_echo_all $CC`
+	    CC_quoted_expanded=`func_echo_all $CC_quoted`
+	    case "$@ " in
+	    " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+	    " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+	      # The compiler in the base compile command matches
+	      # the one in the tagged configuration.
+	      # Assume this is the tagged configuration we want.
+	      tagname=$z
+	      break
+	      ;;
+	    esac
+	  fi
+	done
+	# If $tagname still isn't set, then no tagged configuration
+	# was found; let the user know that the "--tag" command
+	# line option must be used.
+	if test -z "$tagname"; then
+	  func_echo "unable to infer tagged configuration"
+	  func_fatal_error "specify a tag with \`--tag'"
+#	else
+#	  func_verbose "using $tagname tagged configuration"
+	fi
+	;;
+      esac
+    fi
+}
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+    write_libobj=${1}
+    if test "$build_libtool_libs" = yes; then
+      write_lobj=\'${2}\'
+    else
+      write_lobj=none
+    fi
+
+    if test "$build_old_libs" = yes; then
+      write_oldobj=\'${3}\'
+    else
+      write_oldobj=none
+    fi
+
+    $opt_dry_run || {
+      cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+      $MV "${write_libobj}T" "${write_libobj}"
+    }
+}
+
+
+##################################################
+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
+##################################################
+
+# func_convert_core_file_wine_to_w32 ARG
+# Helper function used by file name conversion functions when $build is *nix,
+# and $host is mingw, cygwin, or some other w32 environment. Relies on a
+# correctly configured wine environment being available, with the winepath program
+# in $build's $PATH.
+#
+# ARG is the $build file name to be converted to w32 format.
+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
+# be empty on error (or when ARG is empty)
+func_convert_core_file_wine_to_w32 ()
+{
+  $opt_debug
+  func_convert_core_file_wine_to_w32_result="$1"
+  if test -n "$1"; then
+    # Unfortunately, winepath does not exit with a non-zero error code, so we
+    # are forced to check the contents of stdout. On the other hand, if the
+    # command is not found, the shell will set an exit code of 127 and print
+    # *an error message* to stdout. So we must check for both error code of
+    # zero AND non-empty stdout, which explains the odd construction:
+    func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
+    if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
+      func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
+        $SED -e "$lt_sed_naive_backslashify"`
+    else
+      func_convert_core_file_wine_to_w32_result=
+    fi
+  fi
+}
+# end: func_convert_core_file_wine_to_w32
+
+
+# func_convert_core_path_wine_to_w32 ARG
+# Helper function used by path conversion functions when $build is *nix, and
+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
+# configured wine environment being available, with the winepath program in $build's
+# $PATH. Assumes ARG has no leading or trailing path separator characters.
+#
+# ARG is path to be converted from $build format to win32.
+# Result is available in $func_convert_core_path_wine_to_w32_result.
+# Unconvertible file (directory) names in ARG are skipped; if no directory names
+# are convertible, then the result may be empty.
+func_convert_core_path_wine_to_w32 ()
+{
+  $opt_debug
+  # unfortunately, winepath doesn't convert paths, only file names
+  func_convert_core_path_wine_to_w32_result=""
+  if test -n "$1"; then
+    oldIFS=$IFS
+    IFS=:
+    for func_convert_core_path_wine_to_w32_f in $1; do
+      IFS=$oldIFS
+      func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
+      if test -n "$func_convert_core_file_wine_to_w32_result" ; then
+        if test -z "$func_convert_core_path_wine_to_w32_result"; then
+          func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
+        else
+          func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
+        fi
+      fi
+    done
+    IFS=$oldIFS
+  fi
+}
+# end: func_convert_core_path_wine_to_w32
+
+
+# func_cygpath ARGS...
+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used
+# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
+# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
+# (2), returns the Cygwin file name or path in func_cygpath_result (input
+# file name or path is assumed to be in w32 format, as previously converted
+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
+# or path in func_cygpath_result (input file name or path is assumed to be in
+# Cygwin format). Returns an empty string on error.
+#
+# ARGS are passed to cygpath, with the last one being the file name or path to
+# be converted.
+#
+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
+# environment variable; do not put it in $PATH.
+func_cygpath ()
+{
+  $opt_debug
+  if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
+    func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
+    if test "$?" -ne 0; then
+      # on failure, ensure result is empty
+      func_cygpath_result=
+    fi
+  else
+    func_cygpath_result=
+    func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
+  fi
+}
+#end: func_cygpath
+
+
+# func_convert_core_msys_to_w32 ARG
+# Convert file name or path ARG from MSYS format to w32 format.  Return
+# result in func_convert_core_msys_to_w32_result.
+func_convert_core_msys_to_w32 ()
+{
+  $opt_debug
+  # awkward: cmd appends spaces to result
+  func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
+    $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+}
+#end: func_convert_core_msys_to_w32
+
+
+# func_convert_file_check ARG1 ARG2
+# Verify that ARG1 (a file name in $build format) was converted to $host
+# format in ARG2. Otherwise, emit an error message, but continue (resetting
+# func_to_host_file_result to ARG1).
+func_convert_file_check ()
+{
+  $opt_debug
+  if test -z "$2" && test -n "$1" ; then
+    func_error "Could not determine host file name corresponding to"
+    func_error "  \`$1'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback:
+    func_to_host_file_result="$1"
+  fi
+}
+# end func_convert_file_check
+
+
+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
+# Verify that FROM_PATH (a path in $build format) was converted to $host
+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
+# func_to_host_path_result to a simplistic fallback value (see below).
+func_convert_path_check ()
+{
+  $opt_debug
+  if test -z "$4" && test -n "$3"; then
+    func_error "Could not determine the host path corresponding to"
+    func_error "  \`$3'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback.  This is a deliberately simplistic "conversion" and
+    # should not be "improved".  See libtool.info.
+    if test "x$1" != "x$2"; then
+      lt_replace_pathsep_chars="s|$1|$2|g"
+      func_to_host_path_result=`echo "$3" |
+        $SED -e "$lt_replace_pathsep_chars"`
+    else
+      func_to_host_path_result="$3"
+    fi
+  fi
+}
+# end func_convert_path_check
+
+
+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
+# and appending REPL if ORIG matches BACKPAT.
+func_convert_path_front_back_pathsep ()
+{
+  $opt_debug
+  case $4 in
+  $1 ) func_to_host_path_result="$3$func_to_host_path_result"
+    ;;
+  esac
+  case $4 in
+  $2 ) func_append func_to_host_path_result "$3"
+    ;;
+  esac
+}
+# end func_convert_path_front_back_pathsep
+
+
+##################################################
+# $build to $host FILE NAME CONVERSION FUNCTIONS #
+##################################################
+# invoked via `$to_host_file_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# Result will be available in $func_to_host_file_result.
+
+
+# func_to_host_file ARG
+# Converts the file name ARG from $build format to $host format. Return result
+# in func_to_host_file_result.
+func_to_host_file ()
+{
+  $opt_debug
+  $to_host_file_cmd "$1"
+}
+# end func_to_host_file
+
+
+# func_to_tool_file ARG LAZY
+# converts the file name ARG from $build format to toolchain format. Return
+# result in func_to_tool_file_result.  If the conversion in use is listed
+# in (the comma separated) LAZY, no conversion takes place.
+func_to_tool_file ()
+{
+  $opt_debug
+  case ,$2, in
+    *,"$to_tool_file_cmd",*)
+      func_to_tool_file_result=$1
+      ;;
+    *)
+      $to_tool_file_cmd "$1"
+      func_to_tool_file_result=$func_to_host_file_result
+      ;;
+  esac
+}
+# end func_to_tool_file
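+# For example, `func_to_tool_file "$srcfile" func_convert_file_msys_to_w32'
+# (as used in compile mode below) converts $srcfile unless the conversion in
+# use is already func_convert_file_msys_to_w32, in which case the file name
+# is passed through unchanged.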
+
+
+# func_convert_file_noop ARG
+# Copy ARG to func_to_host_file_result.
+func_convert_file_noop ()
+{
+  func_to_host_file_result="$1"
+}
+# end func_convert_file_noop
+
+
+# func_convert_file_msys_to_w32 ARG
+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_file_result.
+func_convert_file_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_msys_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_w32
+
+
+# func_convert_file_cygwin_to_w32 ARG
+# Convert file name ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_file_result.
+func_convert_file_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
+    # LT_CYGPATH in this case.
+    func_to_host_file_result=`cygpath -m "$1"`
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_cygwin_to_w32
+
+
+# func_convert_file_nix_to_w32 ARG
+# Convert file name ARG from *nix to w32 format.  Requires a wine environment
+# and a working winepath. Returns result in func_to_host_file_result.
+func_convert_file_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_file_wine_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_w32
+
+
+# func_convert_file_msys_to_cygwin ARG
+# Convert file name ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_file_result.
+func_convert_file_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_msys_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_cygwin
+
+
+# func_convert_file_nix_to_cygwin ARG
+# Convert file name ARG from *nix to Cygwin format.  Requires Cygwin installed
+# in a wine environment, working winepath, and LT_CYGPATH set.  Returns result
+# in func_to_host_file_result.
+func_convert_file_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
+    func_convert_core_file_wine_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_cygwin
+
+
+#############################################
+# $build to $host PATH CONVERSION FUNCTIONS #
+#############################################
+# invoked via `$to_host_path_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# The result will be available in $func_to_host_path_result.
+#
+# Path separators are also converted from $build format to $host format.  If
+# ARG begins or ends with a path separator character, it is preserved (but
+# converted to $host format) on output.
+#
+# All path conversion functions are named using the following convention:
+#   file name conversion function    : func_convert_file_X_to_Y ()
+#   path conversion function         : func_convert_path_X_to_Y ()
+# where, for any given $build/$host combination the 'X_to_Y' value is the
+# same.  If conversion functions are added for new $build/$host combinations,
+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
+# will break.
+
+
+# func_init_to_host_path_cmd
+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
+# appropriate value, based on the value of $to_host_file_cmd.
+to_host_path_cmd=
+func_init_to_host_path_cmd ()
+{
+  $opt_debug
+  if test -z "$to_host_path_cmd"; then
+    func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
+    to_host_path_cmd="func_convert_path_${func_stripname_result}"
+  fi
+}
+
+
+# func_to_host_path ARG
+# Converts the path ARG from $build format to $host format. Return result
+# in func_to_host_path_result.
+func_to_host_path ()
+{
+  $opt_debug
+  func_init_to_host_path_cmd
+  $to_host_path_cmd "$1"
+}
+# end func_to_host_path
+
+
+# func_convert_path_noop ARG
+# Copy ARG to func_to_host_path_result.
+func_convert_path_noop ()
+{
+  func_to_host_path_result="$1"
+}
+# end func_convert_path_noop
+
+
+# func_convert_path_msys_to_w32 ARG
+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_path_result.
+func_convert_path_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from ARG.  MSYS
+    # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
+    # and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_msys_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_msys_to_w32
+
+
+# func_convert_path_cygwin_to_w32 ARG
+# Convert path ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_path_result.
+func_convert_path_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_cygwin_to_w32
+
+
+# func_convert_path_nix_to_w32 ARG
+# Convert path ARG from *nix to w32 format.  Requires a wine environment and
+# a working winepath.  Returns result in func_to_host_path_result.
+func_convert_path_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_nix_to_w32
+
+
+# func_convert_path_msys_to_cygwin ARG
+# Convert path ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_path_result.
+func_convert_path_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_msys_to_cygwin
+
+
+# func_convert_path_nix_to_cygwin ARG
+# Convert path ARG from *nix to Cygwin format.  Requires Cygwin installed in
+# a wine environment, working winepath, and LT_CYGPATH set.  Returns result in
+# func_to_host_path_result.
+func_convert_path_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from
+    # ARG.  MSYS behavior is inconsistent here; cygpath turns them
+    # into '.;' and ';.', and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_nix_to_cygwin
+
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+    $opt_debug
+    # Get the compilation command and the source file.
+    base_compile=
+    srcfile="$nonopt"  #  always keep a non-empty value in "srcfile"
+    suppress_opt=yes
+    suppress_output=
+    arg_mode=normal
+    libobj=
+    later=
+    pie_flag=
+
+    for arg
+    do
+      case $arg_mode in
+      arg  )
+	# do not "continue".  Instead, add this to base_compile
+	lastarg="$arg"
+	arg_mode=normal
+	;;
+
+      target )
+	libobj="$arg"
+	arg_mode=normal
+	continue
+	;;
+
+      normal )
+	# Accept any command-line options.
+	case $arg in
+	-o)
+	  test -n "$libobj" && \
+	    func_fatal_error "you cannot specify \`-o' more than once"
+	  arg_mode=target
+	  continue
+	  ;;
+
+	-pie | -fpie | -fPIE)
+          func_append pie_flag " $arg"
+	  continue
+	  ;;
+
+	-shared | -static | -prefer-pic | -prefer-non-pic)
+	  func_append later " $arg"
+	  continue
+	  ;;
+
+	-no-suppress)
+	  suppress_opt=no
+	  continue
+	  ;;
+
+	-Xcompiler)
+	  arg_mode=arg  #  the next one goes into the "base_compile" arg list
+	  continue      #  The current "srcfile" will either be retained or
+	  ;;            #  replaced later.  I would guess that would be a bug.
+
+	-Wc,*)
+	  func_stripname '-Wc,' '' "$arg"
+	  args=$func_stripname_result
+	  lastarg=
+	  save_ifs="$IFS"; IFS=','
+	  for arg in $args; do
+	    IFS="$save_ifs"
+	    func_append_quoted lastarg "$arg"
+	  done
+	  IFS="$save_ifs"
+	  func_stripname ' ' '' "$lastarg"
+	  lastarg=$func_stripname_result
+
+	  # Add the arguments to base_compile.
+	  func_append base_compile " $lastarg"
+	  continue
+	  ;;
+
+	*)
+	  # Accept the current argument as the source file.
+	  # The previous "srcfile" becomes the current argument.
+	  #
+	  lastarg="$srcfile"
+	  srcfile="$arg"
+	  ;;
+	esac  #  case $arg
+	;;
+      esac    #  case $arg_mode
+
+      # Aesthetically quote the previous argument.
+      func_append_quoted base_compile "$lastarg"
+    done # for arg
+
+    case $arg_mode in
+    arg)
+      func_fatal_error "you must specify an argument for -Xcompiler"
+      ;;
+    target)
+      func_fatal_error "you must specify a target with \`-o'"
+      ;;
+    *)
+      # Get the name of the library object.
+      test -z "$libobj" && {
+	func_basename "$srcfile"
+	libobj="$func_basename_result"
+      }
+      ;;
+    esac
+
+    # Recognize several different file suffixes.
+    # If the user specifies -o file.o, it is replaced with file.lo
+    case $libobj in
+    *.[cCFSifmso] | \
+    *.ada | *.adb | *.ads | *.asm | \
+    *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+    *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
+      func_xform "$libobj"
+      libobj=$func_xform_result
+      ;;
+    esac
+
+    case $libobj in
+    *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+    *)
+      func_fatal_error "cannot determine name of library object from \`$libobj'"
+      ;;
+    esac
+
+    func_infer_tag $base_compile
+
+    for arg in $later; do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	continue
+	;;
+
+      -static)
+	build_libtool_libs=no
+	build_old_libs=yes
+	continue
+	;;
+
+      -prefer-pic)
+	pic_mode=yes
+	continue
+	;;
+
+      -prefer-non-pic)
+	pic_mode=no
+	continue
+	;;
+      esac
+    done
+
+    func_quote_for_eval "$libobj"
+    test "X$libobj" != "X$func_quote_for_eval_result" \
+      && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"'	 &()|`$[]' \
+      && func_warning "libobj name \`$libobj' may not contain shell special characters."
+    func_dirname_and_basename "$obj" "/" ""
+    objname="$func_basename_result"
+    xdir="$func_dirname_result"
+    lobj=${xdir}$objdir/$objname
+
+    test -z "$base_compile" && \
+      func_fatal_help "you must specify a compilation command"
+
+    # Delete any leftover library objects.
+    if test "$build_old_libs" = yes; then
+      removelist="$obj $lobj $libobj ${libobj}T"
+    else
+      removelist="$lobj $libobj ${libobj}T"
+    fi
+
+    # On Cygwin there's no "real" PIC flag so we must build both object types
+    case $host_os in
+    cygwin* | mingw* | pw32* | os2* | cegcc*)
+      pic_mode=default
+      ;;
+    esac
+    if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+      # non-PIC code in shared libraries is not supported
+      pic_mode=default
+    fi
+
+    # Calculate the filename of the output object if compiler does
+    # not support -o with -c
+    if test "$compiler_c_o" = no; then
+      output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext}
+      lockfile="$output_obj.lock"
+    else
+      output_obj=
+      need_locks=no
+      lockfile=
+    fi
+
+    # Lock this critical section if it is needed
+    # We use this script file to make the link; it avoids creating a new file
+    if test "$need_locks" = yes; then
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    elif test "$need_locks" = warn; then
+      if test -f "$lockfile"; then
+	$ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+      func_append removelist " $output_obj"
+      $ECHO "$srcfile" > "$lockfile"
+    fi
+
+    $opt_dry_run || $RM $removelist
+    func_append removelist " $lockfile"
+    trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+    func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+    srcfile=$func_to_tool_file_result
+    func_quote_for_eval "$srcfile"
+    qsrcfile=$func_quote_for_eval_result
+
+    # Only build a PIC object if we are building libtool libraries.
+    if test "$build_libtool_libs" = yes; then
+      # Without this assignment, base_compile gets emptied.
+      fbsd_hideous_sh_bug=$base_compile
+
+      if test "$pic_mode" != no; then
+	command="$base_compile $qsrcfile $pic_flag"
+      else
+	# Don't build PIC code
+	command="$base_compile $qsrcfile"
+      fi
+
+      func_mkdir_p "$xdir$objdir"
+
+      if test -z "$output_obj"; then
+	# Place PIC objects in $objdir
+	func_append command " -o $lobj"
+      fi
+
+      func_show_eval_locale "$command"	\
+          'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed, then go on to compile the next one
+      if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+	func_show_eval '$MV "$output_obj" "$lobj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+
+      # Allow error messages only from the first compilation.
+      if test "$suppress_opt" = yes; then
+	suppress_output=' >/dev/null 2>&1'
+      fi
+    fi
+
+    # Only build a position-dependent object if we build old libraries.
+    if test "$build_old_libs" = yes; then
+      if test "$pic_mode" != yes; then
+	# Don't build PIC code
+	command="$base_compile $qsrcfile$pie_flag"
+      else
+	command="$base_compile $qsrcfile $pic_flag"
+      fi
+      if test "$compiler_c_o" = yes; then
+	func_append command " -o $obj"
+      fi
+
+      # Suppress compiler output if we already did a PIC compilation.
+      func_append command "$suppress_output"
+      func_show_eval_locale "$command" \
+        '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed
+      if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+	func_show_eval '$MV "$output_obj" "$obj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+    fi
+
+    $opt_dry_run || {
+      func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+      # Unlock the critical section if it was locked
+      if test "$need_locks" != no; then
+	removelist=$lockfile
+        $RM "$lockfile"
+      fi
+    }
+
+    exit $EXIT_SUCCESS
+}
+
+$opt_help || {
+  test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
+}
+
+func_mode_help ()
+{
+    # We need to display help for each of the modes.
+    case $opt_mode in
+      "")
+        # Generic help is extracted from the usage comments
+        # at the start of this file.
+        func_help
+        ;;
+
+      clean)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      compile)
+      $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+  -o OUTPUT-FILE    set the output file name to OUTPUT-FILE
+  -no-suppress      do not suppress compiler output for multiple passes
+  -prefer-pic       try to build PIC objects only
+  -prefer-non-pic   try to build non-PIC objects only
+  -shared           do not build a \`.o' file suitable for static linking
+  -static           only build a \`.o' file suitable for static linking
+  -Wc,FLAG          pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a \`standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix \`.c' with the
+library object suffix, \`.lo'."
+        ;;
+
+      execute)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+  -dlopen FILE      add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to \`-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+        ;;
+
+      finish)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges.  Use
+the \`--dry-run' option if you just want to see what would be executed."
+        ;;
+
+      install)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command.  The first component should be
+either the \`install' or \`cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+  -inst-prefix-dir PREFIX-DIR  Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+        ;;
+
+      link)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+  -all-static       do not do any dynamic linking at all
+  -avoid-version    do not add a version suffix if possible
+  -bindir BINDIR    specify path to binaries directory (for systems where
+                    libraries must be found in the PATH setting at runtime)
+  -dlopen FILE      \`-dlpreopen' FILE if it cannot be dlopened at runtime
+  -dlpreopen FILE   link in FILE and add its symbols to lt_preloaded_symbols
+  -export-dynamic   allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+  -export-symbols SYMFILE
+                    try to export only the symbols listed in SYMFILE
+  -export-symbols-regex REGEX
+                    try to export only the symbols matching REGEX
+  -LLIBDIR          search LIBDIR for required installed libraries
+  -lNAME            OUTPUT-FILE requires the installed library libNAME
+  -module           build a library that can be dlopened
+  -no-fast-install  disable the fast-install mode
+  -no-install       link a not-installable executable
+  -no-undefined     declare that a library does not refer to external symbols
+  -o OUTPUT-FILE    create OUTPUT-FILE from the specified objects
+  -objectlist FILE  Use a list of object files found in FILE to specify objects
+  -precious-files-regex REGEX
+                    don't remove output files matching REGEX
+  -release RELEASE  specify package release information
+  -rpath LIBDIR     the created library will eventually be installed in LIBDIR
+  -R[ ]LIBDIR       add LIBDIR to the runtime path of programs and libraries
+  -shared           only do dynamic linking of libtool libraries
+  -shrext SUFFIX    override the standard shared library file extension
+  -static           do not do any dynamic linking of uninstalled libtool libraries
+  -static-libtool-libs
+                    do not do any dynamic linking of libtool libraries
+  -version-info CURRENT[:REVISION[:AGE]]
+                    specify library version info [each variable defaults to 0]
+  -weak LIBNAME     declare that the target provides the LIBNAME interface
+  -Wc,FLAG
+  -Xcompiler FLAG   pass linker-specific FLAG directly to the compiler
+  -Wl,FLAG
+  -Xlinker FLAG     pass linker-specific FLAG directly to the linker
+  -XCClinker FLAG   pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with \`-') are ignored.
+
+Every other argument is treated as a filename.  Files ending in \`.la' are
+treated as uninstalled libtool libraries; other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
+only library objects (\`.lo' files) may be specified, and \`-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
+using \`ar' and \`ranlib', or on Windows using \`lib'.
+
+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
+is created, otherwise an executable program is created."
+        ;;
+
+      uninstall)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      *)
+        func_fatal_help "invalid operation mode \`$opt_mode'"
+        ;;
+    esac
+
+    echo
+    $ECHO "Try \`$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+  if test "$opt_help" = :; then
+    func_mode_help
+  else
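+    # Two passes over all modes: the first builds a compact usage summary
+    # (the generic usage line plus one "  or:" line per mode); the second
+    # prints each mode's full help, rewriting its "Usage:" line as a
+    # heading and moving the bug-reporting footer to the end.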
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	func_mode_help
+      done
+    } | sed -n '1p; 2,$s/^Usage:/  or: /p'
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	echo
+	func_mode_help
+      done
+    } |
+    sed '1d
+      /^When reporting/,/^Report/{
+	H
+	d
+      }
+      $x
+      /information about other modes/d
+      /more detailed .*MODE/d
+      s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+  fi
+  exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+    $opt_debug
+    # The first argument is the command name.
+    cmd="$nonopt"
+    test -z "$cmd" && \
+      func_fatal_help "you must specify a COMMAND"
+
+    # Handle -dlopen flags immediately.
+    for file in $opt_dlopen; do
+      test -f "$file" \
+	|| func_fatal_help "\`$file' is not a file"
+
+      dir=
+      case $file in
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$lib' is not a valid libtool archive"
+
+	# Read the libtool library.
+	dlname=
+	library_names=
+	func_source "$file"
+
+	# Skip this library if it cannot be dlopened.
+	if test -z "$dlname"; then
+	  # Warn if it was a shared library.
+	  test -n "$library_names" && \
+	    func_warning "\`$file' was not linked with \`-export-dynamic'"
+	  continue
+	fi
+
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+
+	if test -f "$dir/$objdir/$dlname"; then
+	  func_append dir "/$objdir"
+	else
+	  if test ! -f "$dir/$dlname"; then
+	    func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+	  fi
+	fi
+	;;
+
+      *.lo)
+	# Just add the directory containing the .lo file.
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+	;;
+
+      *)
+	func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
+	continue
+	;;
+      esac
+
+      # Get the absolute pathname.
+      absdir=`cd "$dir" && pwd`
+      test -n "$absdir" && dir="$absdir"
+
+      # Now add the directory to shlibpath_var.
+      if eval "test -z \"\$$shlibpath_var\""; then
+	eval "$shlibpath_var=\"\$dir\""
+      else
+	eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+      fi
+    done
+
+    # This variable tells wrapper scripts just to set shlibpath_var
+    # rather than running their programs.
+    libtool_execute_magic="$magic"
+
+    # Check if any of the arguments is a wrapper script.
+    args=
+    for file
+    do
+      case $file in
+      -* | *.la | *.lo ) ;;
+      *)
+	# Do a test to see if this is really a libtool program.
+	if func_ltwrapper_script_p "$file"; then
+	  func_source "$file"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	elif func_ltwrapper_executable_p "$file"; then
+	  func_ltwrapper_scriptname "$file"
+	  func_source "$func_ltwrapper_scriptname_result"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	fi
+	;;
+      esac
+      # Quote arguments (to preserve shell metacharacters).
+      func_append_quoted args "$file"
+    done
+
+    if test "X$opt_dry_run" = Xfalse; then
+      if test -n "$shlibpath_var"; then
+	# Export the shlibpath_var.
+	eval "export $shlibpath_var"
+      fi
+
+      # Restore saved environment variables
+      for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+      do
+	eval "if test \"\${save_$lt_var+set}\" = set; then
+                $lt_var=\$save_$lt_var; export $lt_var
+	      else
+		$lt_unset $lt_var
+	      fi"
+      done
+
+      # Now prepare to actually exec the command.
+      exec_cmd="\$cmd$args"
+    else
+      # Display what would be done.
+      if test -n "$shlibpath_var"; then
+	eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+	echo "export $shlibpath_var"
+      fi
+      $ECHO "$cmd$args"
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
+
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+    $opt_debug
+    libs=
+    libdirs=
+    admincmds=
+
+    for opt in "$nonopt" ${1+"$@"}
+    do
+      if test -d "$opt"; then
+	func_append libdirs " $opt"
+
+      elif test -f "$opt"; then
+	if func_lalib_unsafe_p "$opt"; then
+	  func_append libs " $opt"
+	else
+	  func_warning "\`$opt' is not a valid libtool archive"
+	fi
+
+      else
+	func_fatal_error "invalid argument \`$opt'"
+      fi
+    done
+
+    if test -n "$libs"; then
+      if test -n "$lt_sysroot"; then
+        sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
+        sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
+      else
+        sysroot_cmd=
+      fi
+
+      # Remove sysroot references
+      if $opt_dry_run; then
+        for lib in $libs; do
+          echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
+        done
+      else
+        tmpdir=`func_mktempdir`
+        for lib in $libs; do
+	  sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
+	    > $tmpdir/tmp-la
+	  mv -f $tmpdir/tmp-la $lib
+	done
+        ${RM}r "$tmpdir"
+      fi
+    fi
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      for libdir in $libdirs; do
+	if test -n "$finish_cmds"; then
+	  # Do each command in the finish commands.
+	  func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+	fi
+	if test -n "$finish_eval"; then
+	  # Do the single finish_eval.
+	  eval cmds=\"$finish_eval\"
+	  $opt_dry_run || eval "$cmds" || func_append admincmds "
+       $cmds"
+	fi
+      done
+    fi
+
+    # Exit here if they wanted silent mode.
+    $opt_silent && exit $EXIT_SUCCESS
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      echo "----------------------------------------------------------------------"
+      echo "Libraries have been installed in:"
+      for libdir in $libdirs; do
+	$ECHO "   $libdir"
+      done
+      echo
+      echo "If you ever happen to want to link against installed libraries"
+      echo "in a given directory, LIBDIR, you must either use libtool, and"
+      echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+      echo "flag during linking and do at least one of the following:"
+      if test -n "$shlibpath_var"; then
+	echo "   - add LIBDIR to the \`$shlibpath_var' environment variable"
+	echo "     during execution"
+      fi
+      if test -n "$runpath_var"; then
+	echo "   - add LIBDIR to the \`$runpath_var' environment variable"
+	echo "     during linking"
+      fi
+      if test -n "$hardcode_libdir_flag_spec"; then
+	libdir=LIBDIR
+	eval flag=\"$hardcode_libdir_flag_spec\"
+
+	$ECHO "   - use the \`$flag' linker flag"
+      fi
+      if test -n "$admincmds"; then
+	$ECHO "   - have your system administrator run these commands:$admincmds"
+      fi
+      if test -f /etc/ld.so.conf; then
+	echo "   - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+      fi
+      echo
+
+      echo "See any operating system documentation about shared libraries for"
+      case $host in
+	solaris2.[6789]|solaris2.1[0-9])
+	  echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+	  echo "pages."
+	  ;;
+	*)
+	  echo "more information, such as the ld(1) and ld.so(8) manual pages."
+	  ;;
+      esac
+      echo "----------------------------------------------------------------------"
+    fi
+    exit $EXIT_SUCCESS
+}
+
+test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
+
+
+# func_mode_install arg...
+func_mode_install ()
+{
+    $opt_debug
+    # There may be an optional sh(1) argument at the beginning of
+    # install_prog (especially on Windows NT).
+    if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
+       # Allow the use of GNU shtool's install command.
+       case $nonopt in *shtool*) :;; *) false;; esac; then
+      # Aesthetically quote it.
+      func_quote_for_eval "$nonopt"
+      install_prog="$func_quote_for_eval_result "
+      arg=$1
+      shift
+    else
+      install_prog=
+      arg=$nonopt
+    fi
+
+    # The real first argument should be the name of the installation program.
+    # Aesthetically quote it.
+    func_quote_for_eval "$arg"
+    func_append install_prog "$func_quote_for_eval_result"
+    install_shared_prog=$install_prog
+    case " $install_prog " in
+      *[\\\ /]cp\ *) install_cp=: ;;
+      *) install_cp=false ;;
+    esac
+
+    # We need to accept at least all the BSD install flags.
+    dest=
+    files=
+    opts=
+    prev=
+    install_type=
+    isdir=no
+    stripme=
+    no_mode=:
+    for arg
+    do
+      arg2=
+      if test -n "$dest"; then
+	func_append files " $dest"
+	dest=$arg
+	continue
+      fi
+
+      case $arg in
+      -d) isdir=yes ;;
+      -f)
+	if $install_cp; then :; else
+	  prev=$arg
+	fi
+	;;
+      -g | -m | -o)
+	prev=$arg
+	;;
+      -s)
+	stripme=" -s"
+	continue
+	;;
+      -*)
+	;;
+      *)
+	# If the previous option needed an argument, then skip it.
+	if test -n "$prev"; then
+	  if test "x$prev" = x-m && test -n "$install_override_mode"; then
+	    arg2=$install_override_mode
+	    no_mode=false
+	  fi
+	  prev=
+	else
+	  dest=$arg
+	  continue
+	fi
+	;;
+      esac
+
+      # Aesthetically quote the argument.
+      func_quote_for_eval "$arg"
+      func_append install_prog " $func_quote_for_eval_result"
+      if test -n "$arg2"; then
+	func_quote_for_eval "$arg2"
+      fi
+      func_append install_shared_prog " $func_quote_for_eval_result"
+    done
+
+    test -z "$install_prog" && \
+      func_fatal_help "you must specify an install program"
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prev' option requires an argument"
+
+    if test -n "$install_override_mode" && $no_mode; then
+      if $install_cp; then :; else
+	func_quote_for_eval "$install_override_mode"
+	func_append install_shared_prog " -m $func_quote_for_eval_result"
+      fi
+    fi
+
+    if test -z "$files"; then
+      if test -z "$dest"; then
+	func_fatal_help "no file or destination specified"
+      else
+	func_fatal_help "you must specify a destination"
+      fi
+    fi
+
+    # Strip any trailing slash from the destination.
+    func_stripname '' '/' "$dest"
+    dest=$func_stripname_result
+
+    # Check to see that the destination is a directory.
+    test -d "$dest" && isdir=yes
+    if test "$isdir" = yes; then
+      destdir="$dest"
+      destname=
+    else
+      func_dirname_and_basename "$dest" "" "."
+      destdir="$func_dirname_result"
+      destname="$func_basename_result"
+
+      # Not a directory, so check to see that there is only one file specified.
+      set dummy $files; shift
+      test "$#" -gt 1 && \
+	func_fatal_help "\`$dest' is not a directory"
+    fi
+    case $destdir in
+    [\\/]* | [A-Za-z]:[\\/]*) ;;
+    *)
+      for file in $files; do
+	case $file in
+	*.lo) ;;
+	*)
+	  func_fatal_help "\`$destdir' must be an absolute directory name"
+	  ;;
+	esac
+      done
+      ;;
+    esac
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    staticlibs=
+    future_libdirs=
+    current_libdirs=
+    for file in $files; do
+
+      # Do each installation.
+      case $file in
+      *.$libext)
+	# Do the static libraries later.
+	func_append staticlibs " $file"
+	;;
+
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$file' is not a valid libtool archive"
+
+	library_names=
+	old_library=
+	relink_command=
+	func_source "$file"
+
+	# Add the libdir to current_libdirs if it is the destination.
+	if test "X$destdir" = "X$libdir"; then
+	  case "$current_libdirs " in
+	  *" $libdir "*) ;;
+	  *) func_append current_libdirs " $libdir" ;;
+	  esac
+	else
+	  # Note the libdir as a future libdir.
+	  case "$future_libdirs " in
+	  *" $libdir "*) ;;
+	  *) func_append future_libdirs " $libdir" ;;
+	  esac
+	fi
+
+	func_dirname "$file" "/" ""
+	dir="$func_dirname_result"
+	func_append dir "$objdir"
+
+	if test -n "$relink_command"; then
+	  # Determine the prefix the user has applied to our future dir.
+	  inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+	  # Don't allow the user to place us outside of our expected
+	  # location, because this prevents finding dependent libraries that
+	  # are installed to the same prefix.
+	  # At present, this check doesn't affect Windows .dlls that
+	  # are installed into $libdir/../bin (currently, that works fine)
+	  # but it's something to keep an eye on.
+	  test "$inst_prefix_dir" = "$destdir" && \
+	    func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
+
+	  if test -n "$inst_prefix_dir"; then
+	    # Stick the inst_prefix_dir data into the link command.
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+	  else
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+	  fi
+
+	  func_warning "relinking \`$file'"
+	  func_show_eval "$relink_command" \
+	    'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
+	fi
+
+	# See the names of the shared library.
+	set dummy $library_names; shift
+	if test -n "$1"; then
+	  realname="$1"
+	  shift
+
+	  srcname="$realname"
+	  test -n "$relink_command" && srcname="$realname"T
+
+	  # Install the shared library and build the symlinks.
+	  func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+	      'exit $?'
+	  tstripme="$stripme"
+	  case $host_os in
+	  cygwin* | mingw* | pw32* | cegcc*)
+	    case $realname in
+	    *.dll.a)
+	      tstripme=""
+	      ;;
+	    esac
+	    ;;
+	  esac
+	  if test -n "$tstripme" && test -n "$striplib"; then
+	    func_show_eval "$striplib $destdir/$realname" 'exit $?'
+	  fi
+
+	  if test "$#" -gt 0; then
+	    # Delete the old symlinks, and create new ones.
+	    # Try `ln -sf' first, because the `ln' binary might depend on
+	    # the symlink we replace!  Solaris /bin/ln does not understand -f,
+	    # so we also need to try rm && ln -s.
+	    for linkname
+	    do
+	      test "$linkname" != "$realname" \
+		&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+	    done
+	  fi
+
+	  # Do each command in the postinstall commands.
+	  lib="$destdir/$realname"
+	  func_execute_cmds "$postinstall_cmds" 'exit $?'
+	fi
+
+	# Install the pseudo-library for information purposes.
+	func_basename "$file"
+	name="$func_basename_result"
+	instname="$dir/$name"i
+	func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+	# Maybe install the static library, too.
+	test -n "$old_library" && func_append staticlibs " $dir/$old_library"
+	;;
+
+      *.lo)
+	# Install (i.e. copy) a libtool object.
+
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# Deduce the name of the destination old-style object file.
+	case $destfile in
+	*.lo)
+	  func_lo2o "$destfile"
+	  staticdest=$func_lo2o_result
+	  ;;
+	*.$objext)
+	  staticdest="$destfile"
+	  destfile=
+	  ;;
+	*)
+	  func_fatal_help "cannot copy a libtool object to \`$destfile'"
+	  ;;
+	esac
+
+	# Install the libtool object if requested.
+	test -n "$destfile" && \
+	  func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+	# Install the old object if enabled.
+	if test "$build_old_libs" = yes; then
+	  # Deduce the name of the old-style object file.
+	  func_lo2o "$file"
+	  staticobj=$func_lo2o_result
+	  func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+	fi
+	exit $EXIT_SUCCESS
+	;;
+
+      *)
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# If the file is missing, and there is a .exe on the end, strip it
+	# because it is most likely a libtool script we actually want to
+	# install
+	stripped_ext=""
+	case $file in
+	  *.exe)
+	    if test ! -f "$file"; then
+	      func_stripname '' '.exe' "$file"
+	      file=$func_stripname_result
+	      stripped_ext=".exe"
+	    fi
+	    ;;
+	esac
+
+	# Do a test to see if this is really a libtool program.
+	case $host in
+	*cygwin* | *mingw*)
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      wrapper=$func_ltwrapper_scriptname_result
+	    else
+	      func_stripname '' '.exe' "$file"
+	      wrapper=$func_stripname_result
+	    fi
+	    ;;
+	*)
+	    wrapper=$file
+	    ;;
+	esac
+	if func_ltwrapper_script_p "$wrapper"; then
+	  notinst_deplibs=
+	  relink_command=
+
+	  func_source "$wrapper"
+
+	  # Check the variables that should have been set.
+	  test -z "$generated_by_libtool_version" && \
+	    func_fatal_error "invalid libtool wrapper script \`$wrapper'"
+
+	  finalize=yes
+	  for lib in $notinst_deplibs; do
+	    # Check to see that each library is installed.
+	    libdir=
+	    if test -f "$lib"; then
+	      func_source "$lib"
+	    fi
+	    libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test
+	    if test -n "$libdir" && test ! -f "$libfile"; then
+	      func_warning "\`$lib' has not been installed in \`$libdir'"
+	      finalize=no
+	    fi
+	  done
+
+	  relink_command=
+	  func_source "$wrapper"
+
+	  outputname=
+	  if test "$fast_install" = no && test -n "$relink_command"; then
+	    $opt_dry_run || {
+	      if test "$finalize" = yes; then
+	        tmpdir=`func_mktempdir`
+		func_basename "$file$stripped_ext"
+		file="$func_basename_result"
+	        outputname="$tmpdir/$file"
+	        # Replace the output file specification.
+	        relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+	        $opt_silent || {
+	          func_quote_for_expand "$relink_command"
+		  eval "func_echo $func_quote_for_expand_result"
+	        }
+	        if eval "$relink_command"; then :
+	          else
+		  func_error "error: relink \`$file' with the above command before installing it"
+		  $opt_dry_run || ${RM}r "$tmpdir"
+		  continue
+	        fi
+	        file="$outputname"
+	      else
+	        func_warning "cannot relink \`$file'"
+	      fi
+	    }
+	  else
+	    # Install the binary that we compiled earlier.
+	    file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+	  fi
+	fi
+
+	# remove .exe since cygwin /usr/bin/install will append another
+	# one anyway
+	case $install_prog,$host in
+	*/usr/bin/install*,*cygwin*)
+	  case $file:$destfile in
+	  *.exe:*.exe)
+	    # this is ok
+	    ;;
+	  *.exe:*)
+	    destfile=$destfile.exe
+	    ;;
+	  *:*.exe)
+	    func_stripname '' '.exe' "$destfile"
+	    destfile=$func_stripname_result
+	    ;;
+	  esac
+	  ;;
+	esac
+	func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+	$opt_dry_run || if test -n "$outputname"; then
+	  ${RM}r "$tmpdir"
+	fi
+	;;
+      esac
+    done
+
+    for file in $staticlibs; do
+      func_basename "$file"
+      name="$func_basename_result"
+
+      # Set up the ranlib parameters.
+      oldlib="$destdir/$name"
+      func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+      tool_oldlib=$func_to_tool_file_result
+
+      func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+      if test -n "$stripme" && test -n "$old_striplib"; then
+	func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
+      fi
+
+      # Do each command in the postinstall commands.
+      func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+    done
+
+    test -n "$future_libdirs" && \
+      func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+    if test -n "$current_libdirs"; then
+      # Maybe just do a dry run.
+      $opt_dry_run && current_libdirs=" -n$current_libdirs"
+      exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+    else
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = install && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+    $opt_debug
+    my_outputname="$1"
+    my_originator="$2"
+    my_pic_p="${3-no}"
+    my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
+    my_dlsyms=
+
+    if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+      if test -n "$NM" && test -n "$global_symbol_pipe"; then
+	my_dlsyms="${my_outputname}S.c"
+      else
+	func_error "not configured to extract global symbols from dlpreopened files"
+      fi
+    fi
+
+    if test -n "$my_dlsyms"; then
+      case $my_dlsyms in
+      "") ;;
+      *.c)
+	# Discover the nlist of each of the dlfiles.
+	nlist="$output_objdir/${my_outputname}.nm"
+
+	func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+	# Parse the name list into a source file.
+	func_verbose "creating $output_objdir/$my_dlsyms"
+
+	$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* External symbol declarations for the compiler. */\
+"
+
+	if test "$dlself" = yes; then
+	  func_verbose "generating symbol list for \`$output'"
+
+	  $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
+
+	  # Add our own program objects to the symbol list.
+	  progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	  for progfile in $progfiles; do
+	    func_to_tool_file "$progfile" func_convert_file_msys_to_w32
+	    func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
+	    $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+	  done
+
+	  if test -n "$exclude_expsyms"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  if test -n "$export_symbols_regex"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  # Prepare the list of exported symbols
+	  if test -z "$export_symbols"; then
+	    export_symbols="$output_objdir/$outputname.exp"
+	    $opt_dry_run || {
+	      $RM $export_symbols
+	      eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+	      case $host in
+	      *cygwin* | *mingw* | *cegcc* )
+                eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+                eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+	        ;;
+	      esac
+	    }
+	  else
+	    $opt_dry_run || {
+	      eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+	      eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	      case $host in
+	        *cygwin* | *mingw* | *cegcc* )
+	          eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+	          eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+	          ;;
+	      esac
+	    }
+	  fi
+	fi
+
+	for dlprefile in $dlprefiles; do
+	  func_verbose "extracting global C symbols from \`$dlprefile'"
+	  func_basename "$dlprefile"
+	  name="$func_basename_result"
+          case $host in
+	    *cygwin* | *mingw* | *cegcc* )
+	      # if an import library, we need to obtain dlname
+	      if func_win32_import_lib_p "$dlprefile"; then
+	        func_tr_sh "$dlprefile"
+	        eval "curr_lafile=\$libfile_$func_tr_sh_result"
+	        dlprefile_dlbasename=""
+	        if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
+	          # Use subshell, to avoid clobbering current variable values
+	          dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
+	          if test -n "$dlprefile_dlname" ; then
+	            func_basename "$dlprefile_dlname"
+	            dlprefile_dlbasename="$func_basename_result"
+	          else
+	            # no lafile. user explicitly requested -dlpreopen <import library>.
+	            $sharedlib_from_linklib_cmd "$dlprefile"
+	            dlprefile_dlbasename=$sharedlib_from_linklib_result
+	          fi
+	        fi
+	        $opt_dry_run || {
+	          if test -n "$dlprefile_dlbasename" ; then
+	            eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
+	          else
+	            func_warning "Could not compute DLL name from $name"
+	            eval '$ECHO ": $name " >> "$nlist"'
+	          fi
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
+	            $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
+	        }
+	      else # not an import lib
+	        $opt_dry_run || {
+	          eval '$ECHO ": $name " >> "$nlist"'
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	        }
+	      fi
+	    ;;
+	    *)
+	      $opt_dry_run || {
+	        eval '$ECHO ": $name " >> "$nlist"'
+	        func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	        eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	      }
+	    ;;
+          esac
+	done
+
+	$opt_dry_run || {
+	  # Make sure we have at least an empty file.
+	  test -f "$nlist" || : > "$nlist"
+
+	  if test -n "$exclude_expsyms"; then
+	    $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+	    $MV "$nlist"T "$nlist"
+	  fi
+
+	  # Try sorting and uniquifying the output.
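+	  # (Probe whether this sort understands the POSIX "-k 3" syntax;
+	  # fall back to the obsolete "+2" field notation otherwise.)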
+	  if $GREP -v "^: " < "$nlist" |
+	      if sort -k 3 </dev/null >/dev/null 2>&1; then
+		sort -k 3
+	      else
+		sort +2
+	      fi |
+	      uniq > "$nlist"S; then
+	    :
+	  else
+	    $GREP -v "^: " < "$nlist" > "$nlist"S
+	  fi
+
+	  if test -f "$nlist"S; then
+	    eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+	  else
+	    echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+	  fi
+
+	  echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols.  */
+typedef struct {
+  const char *name;
+  void *address;
+} lt_dlsymlist;
+extern LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];
+LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{\
+  { \"$my_originator\", (void *) 0 },"
+
+	  case $need_lib_prefix in
+	  no)
+	    eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  *)
+	    eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  esac
+	  echo >> "$output_objdir/$my_dlsyms" "\
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+	} # !$opt_dry_run
+
+	pic_flag_for_symtable=
+	case "$compile_command " in
+	*" -static "*) ;;
+	*)
+	  case $host in
+	  # compiling the symbol table file with pic_flag works around
+	  # a FreeBSD bug that causes programs to crash when -lm is
+	  # linked before any other PIC object.  But we must not use
+	  # pic_flag when linking with -static.  The problem exists in
+	  # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+	  *-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+	    pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+	  *-*-hpux*)
+	    pic_flag_for_symtable=" $pic_flag"  ;;
+	  *)
+	    if test "X$my_pic_p" != Xno; then
+	      pic_flag_for_symtable=" $pic_flag"
+	    fi
+	    ;;
+	  esac
+	  ;;
+	esac
+	symtab_cflags=
+	for arg in $LTCFLAGS; do
+	  case $arg in
+	  -pie | -fpie | -fPIE) ;;
+	  *) func_append symtab_cflags " $arg" ;;
+	  esac
+	done
+
+	# Now compile the dynamic symbol file.
+	func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+	# Clean up the generated files.
+	func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
+
+	# Transform the symbol file into the correct name.
+	symfileobj="$output_objdir/${my_outputname}S.$objext"
+	case $host in
+	*cygwin* | *mingw* | *cegcc* )
+	  if test -f "$output_objdir/$my_outputname.def"; then
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	  else
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  fi
+	  ;;
+	*)
+	  compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  ;;
+	esac
+	;;
+      *)
+	func_fatal_error "unknown suffix for \`$my_dlsyms'"
+	;;
+      esac
+    else
+      # We keep going just in case the user didn't refer to
+      # lt_preloaded_symbols.  The linker will fail if global_symbol_pipe
+      # really was required.
+
+      # Nullify the symbol file.
+      compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+      finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+    fi
+}
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, it also deals with 64-bit binaries.
+func_win32_libid ()
+{
+  $opt_debug
+  win32_libid_type="unknown"
+  win32_fileres=`file -L $1 2>/dev/null`
+  case $win32_fileres in
+  *ar\ archive\ import\ library*) # definitely import
+    win32_libid_type="x86 archive import"
+    ;;
+  *ar\ archive*) # could be an import, or static
+    # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+    if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+       $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+      func_to_tool_file "$1" func_convert_file_msys_to_w32
+      win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+	$SED -n -e '
+	    1,100{
+		/ I /{
+		    s,.*,import,
+		    p
+		    q
+		}
+	    }'`
+      case $win32_nmres in
+      import*)  win32_libid_type="x86 archive import";;
+      *)        win32_libid_type="x86 archive static";;
+      esac
+    fi
+    ;;
+  *DLL*)
+    win32_libid_type="x86 DLL"
+    ;;
+  *executable*) # but shell scripts are "executable" too...
+    case $win32_fileres in
+    *MS\ Windows\ PE\ Intel*)
+      win32_libid_type="x86 DLL"
+      ;;
+    esac
+    ;;
+  esac
+  $ECHO "$win32_libid_type"
+}
+
+# func_cygming_dll_for_implib ARG
+#
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib ()
+{
+  $opt_debug
+  sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
+}
+
+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
+#
+# This is the core of a fallback implementation of a
+# platform-specific function to extract the name of the
+# DLL associated with the specified import library LIBNAME.
+#
+# SECTION_NAME is either .idata$6 or .idata$7, depending
+# on the platform and compiler that created the implib.
+#
+# Echoes the name of the DLL associated with the
+# specified import library.
+func_cygming_dll_for_implib_fallback_core ()
+{
+  $opt_debug
+  match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
+  $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
+    $SED '/^Contents of section '"$match_literal"':/{
+      # Place marker at beginning of archive member dllname section
+      s/.*/====MARK====/
+      p
+      d
+    }
+    # These lines can sometimes be longer than 43 characters, but
+    # are always uninteresting
+    /:[	 ]*file format pe[i]\{,1\}-/d
+    /^In archive [^:]*:/d
+    # Ensure marker is printed
+    /^====MARK====/p
+    # Remove all lines with less than 43 characters
+    /^.\{43\}/!d
+    # From remaining lines, remove first 43 characters
+    s/^.\{43\}//' |
+    $SED -n '
+      # Join marker and all lines until next marker into a single line
+      /^====MARK====/ b para
+      H
+      $ b para
+      b
+      :para
+      x
+      s/\n//g
+      # Remove the marker
+      s/^====MARK====//
+      # Remove trailing dots and whitespace
+      s/[\. \t]*$//
+      # Print
+      /./p' |
+    # we now have a list, one entry per line, of the stringified
+    # contents of the appropriate section of all members of the
+    # archive which possess that section. Heuristic: eliminate
+    # all those which have a first or second character that is
+    # a '.' (that is, objdump's representation of an unprintable
+    # character.) This should work for all archives with less than
+    # 0x302f exports -- but will fail for DLLs whose name actually
+    # begins with a literal '.' or a single character followed by
+    # a '.'.
+    #
+    # Of those that remain, print the first one.
+    $SED -e '/^\./d;/^.\./d;q'
+}
+
+# func_cygming_gnu_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is a GNU/binutils-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_gnu_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
+  test -n "$func_cygming_gnu_implib_tmp"
+}
+
+# func_cygming_ms_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is an MS-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_ms_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
+  test -n "$func_cygming_ms_implib_tmp"
+}
+
+# func_cygming_dll_for_implib_fallback ARG
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+#
+# This fallback implementation is for use when $DLLTOOL
+# does not support the --identify-strict option.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib_fallback ()
+{
+  $opt_debug
+  if func_cygming_gnu_implib_p "$1" ; then
+    # binutils import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
+  elif func_cygming_ms_implib_p "$1" ; then
+    # ms-generated import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
+  else
+    # unknown
+    sharedlib_from_linklib_result=""
+  fi
+}
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+    $opt_debug
+    f_ex_an_ar_dir="$1"; shift
+    f_ex_an_ar_oldlib="$1"
+    if test "$lock_old_archive_extraction" = yes; then
+      lockfile=$f_ex_an_ar_oldlib.lock
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    fi
+    func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+		   'stat=$?; rm -f "$lockfile"; exit $stat'
+    if test "$lock_old_archive_extraction" = yes; then
+      $opt_dry_run || rm -f "$lockfile"
+    fi
+    if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+     :
+    else
+      func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+    fi
+}
+
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+    $opt_debug
+    my_gentop="$1"; shift
+    my_oldlibs=${1+"$@"}
+    my_oldobjs=""
+    my_xlib=""
+    my_xabs=""
+    my_xdir=""
+
+    for my_xlib in $my_oldlibs; do
+      # Extract the objects.
+      case $my_xlib in
+	[\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
+	*) my_xabs=`pwd`"/$my_xlib" ;;
+      esac
+      func_basename "$my_xlib"
+      my_xlib="$func_basename_result"
+      my_xlib_u=$my_xlib
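+      # If an archive with this basename was already extracted, pick a
+      # unique name (lt<serial>-<basename>) so it gets its own directory.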
+      while :; do
+        case " $extracted_archives " in
+	*" $my_xlib_u "*)
+	  func_arith $extracted_serial + 1
+	  extracted_serial=$func_arith_result
+	  my_xlib_u=lt$extracted_serial-$my_xlib ;;
+	*) break ;;
+	esac
+      done
+      extracted_archives="$extracted_archives $my_xlib_u"
+      my_xdir="$my_gentop/$my_xlib_u"
+
+      func_mkdir_p "$my_xdir"
+
+      case $host in
+      *-darwin*)
+	func_verbose "Extracting $my_xabs"
+	# Do not bother doing anything if just a dry run
+	$opt_dry_run || {
+	  darwin_orig_dir=`pwd`
+	  cd $my_xdir || exit $?
+	  darwin_archive=$my_xabs
+	  darwin_curdir=`pwd`
+	  darwin_base_archive=`basename "$darwin_archive"`
+	  darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+	  if test -n "$darwin_arches"; then
+	    darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+	    darwin_arch=
+	    func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+	    for darwin_arch in  $darwin_arches ; do
+	      func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
+	      cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      func_extract_an_archive "`pwd`" "${darwin_base_archive}"
+	      cd "$darwin_curdir"
+	      $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
+	    done # $darwin_arches
+            ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+	    darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
+	    darwin_file=
+	    darwin_files=
+	    for darwin_file in $darwin_filelist; do
+	      darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+	      $LIPO -create -output "$darwin_file" $darwin_files
+	    done # $darwin_filelist
+	    $RM -rf unfat-$$
+	    cd "$darwin_orig_dir"
+	  else
+	    cd $darwin_orig_dir
+	    func_extract_an_archive "$my_xdir" "$my_xabs"
+	  fi # $darwin_arches
+	} # !$opt_dry_run
+	;;
+      *)
+        func_extract_an_archive "$my_xdir" "$my_xabs"
+	;;
+      esac
+      my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+    done
+
+    func_extract_archives_result="$my_oldobjs"
+}
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw
+# wrapper executable.  Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take.  If 'yes', then the emitted script
+# will assume that the directory in which it is stored is
+# the $objdir directory.  This is a cygwin/mingw-specific
+# behavior.
+func_emit_wrapper ()
+{
+	func_emit_wrapper_arg1=${1-no}
+
+	$ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+  # install mode needs the following variables:
+  generated_by_libtool_version='$macro_version'
+  notinst_deplibs='$notinst_deplibs'
+else
+  # When we are sourced in execute mode, \$file and \$ECHO are already set.
+  if test \"\$libtool_execute_magic\" != \"$magic\"; then
+    file=\"\$0\""
+
+    qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
+    $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+    ECHO=\"$qECHO\"
+  fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) identical between the wrapper
+# /script/ and the wrapper /executable/ (which is used only on
+# Windows platforms), and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options which match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
+lt_option_debug=
+func_parse_lt_options ()
+{
+  lt_script_arg0=\$0
+  shift
+  for lt_opt
+  do
+    case \"\$lt_opt\" in
+    --lt-debug) lt_option_debug=1 ;;
+    --lt-dump-script)
+        lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+        test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+        lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+        cat \"\$lt_dump_D/\$lt_dump_F\"
+        exit 0
+      ;;
+    --lt-*)
+        \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+        exit 1
+      ;;
+    esac
+  done
+
+  # Print the debug banner immediately:
+  if test -n \"\$lt_option_debug\"; then
+    echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
+  fi
+}
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+  lt_dump_args_N=1;
+  for lt_arg
+  do
+    \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
+    lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+  done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+  case $host in
+  # Backslashes separate directories on plain windows
+  *-*-mingw | *-*-os2* | *-cegcc*)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+    ;;
+
+  *)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+    ;;
+  esac
+  $ECHO "\
+      \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+      exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+  case \" \$* \" in
+  *\\ --lt-*)
+    for lt_wr_arg
+    do
+      case \$lt_wr_arg in
+      --lt-*) ;;
+      *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+      esac
+      shift
+    done ;;
+  esac
+  func_exec_program_core \${1+\"\$@\"}
+}
+
+  # Parse options
+  func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+  # Find the directory that this script lives in.
+  thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+  test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+  # Follow symbolic links until we get to the real thisdir.
+  file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+  while test -n \"\$file\"; do
+    destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+    # If there was a directory component, then change thisdir.
+    if test \"x\$destdir\" != \"x\$file\"; then
+      case \"\$destdir\" in
+      [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+      *) thisdir=\"\$thisdir/\$destdir\" ;;
+      esac
+    fi
+
+    file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+    file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+  done
+
+  # Usually 'no', except on cygwin/mingw when embedded into
+  # the cwrapper.
+  WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+  if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+    # special case for '.'
+    if test \"\$thisdir\" = \".\"; then
+      thisdir=\`pwd\`
+    fi
+    # remove .libs from thisdir
+    case \"\$thisdir\" in
+    *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+    $objdir )   thisdir=. ;;
+    esac
+  fi
+
+  # Try to get the absolute directory name.
+  absdir=\`cd \"\$thisdir\" && pwd\`
+  test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+	if test "$fast_install" = yes; then
+	  $ECHO "\
+  program=lt-'$outputname'$exeext
+  progdir=\"\$thisdir/$objdir\"
+
+  if test ! -f \"\$progdir/\$program\" ||
+     { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
+       test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+    file=\"\$\$-\$program\"
+
+    if test ! -d \"\$progdir\"; then
+      $MKDIR \"\$progdir\"
+    else
+      $RM \"\$progdir/\$file\"
+    fi"
+
+	  $ECHO "\
+
+    # relink executable if necessary
+    if test -n \"\$relink_command\"; then
+      if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+      else
+	$ECHO \"\$relink_command_output\" >&2
+	$RM \"\$progdir/\$file\"
+	exit 1
+      fi
+    fi
+
+    $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+    { $RM \"\$progdir/\$program\";
+      $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+    $RM \"\$progdir/\$file\"
+  fi"
+	else
+	  $ECHO "\
+  program='$outputname'
+  progdir=\"\$thisdir/$objdir\"
+"
+	fi
+
+	$ECHO "\
+
+  if test -f \"\$progdir/\$program\"; then"
+
+	# Fix the DLL searchpath if we need to.  Do this before prepending
+	# to shlibpath, because on Windows, both are PATH and uninstalled
+	# libraries must come first.
+	if test -n "$dllsearchpath"; then
+	  $ECHO "\
+    # Add the dll search path components to the executable PATH
+    PATH=$dllsearchpath:\$PATH
+"
+	fi
+
+	# Export our shlibpath_var if we have one.
+	if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+	  $ECHO "\
+    # Add our own library path to $shlibpath_var
+    $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+    # Some systems cannot cope with colon-terminated $shlibpath_var
+    # The second colon is a workaround for a bug in BeOS R4 sed
+    $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+    export $shlibpath_var
+"
+	fi
+
+	$ECHO "\
+    if test \"\$libtool_execute_magic\" != \"$magic\"; then
+      # Run the actual program with our arguments.
+      func_exec_program \${1+\"\$@\"}
+    fi
+  else
+    # The program doesn't exist.
+    \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
+    \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+    \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+    exit 1
+  fi
+fi\
+"
+}
+
+
+# func_emit_cwrapperexe_src
+# emit the source code for a wrapper executable on stdout
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+	cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+   Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+
+   The $output program cannot be directly executed until all the libtool
+   libraries that it depends on are installed.
+
+   This wrapper executable should never be moved out of the build directory.
+   If it is, it will not operate correctly.
+*/
+EOF
+	    cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _MSC_VER
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+#  include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+/* declarations of non-ANSI functions */
+#if defined(__MINGW32__)
+# ifdef __STRICT_ANSI__
+int _putenv (const char *);
+# endif
+#elif defined(__CYGWIN__)
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined (other platforms) ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined(_MSC_VER)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+# define S_IXUSR _S_IEXEC
+# ifndef _INTPTR_T_DEFINED
+#  define _INTPTR_T_DEFINED
+#  define intptr_t int
+# endif
+#elif defined(__MINGW32__)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+#elif defined(__CYGWIN__)
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined (other platforms) ... */
+#endif
+
+#if defined(PATH_MAX)
+# define LT_PATHMAX PATH_MAX
+#elif defined(MAXPATHLEN)
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
+  defined (__OS2__)
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+#  define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+#  define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+	(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num)      ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+  if (stale) { free ((void *) stale); stale = 0; } \
+} while (0)
+
+#if defined(LT_DEBUGWRAPPER)
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+	    cat <<EOF
+volatile const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+	    if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+              func_to_host_path "$temp_rpath"
+	      cat <<EOF
+const char * LIB_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * LIB_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test -n "$dllsearchpath"; then
+              func_to_host_path "$dllsearchpath:"
+	      cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test "$fast_install" = yes; then
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+	    else
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+	    fi
+
+
+	    cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX         "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt       = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt            = LTWRAPPER_OPTION_PREFIX "debug";
+
+int
+main (int argc, char *argv[])
+{
+  char **newargz;
+  int  newargc;
+  char *tmp_pathspec;
+  char *actual_cwrapper_path;
+  char *actual_cwrapper_name;
+  char *target_name;
+  char *lt_argv_zero;
+  intptr_t rval = 127;
+
+  int i;
+
+  program_name = (char *) xstrdup (base_name (argv[0]));
+  newargz = XMALLOC (char *, argc + 1);
+
+  /* very simple arg parsing; don't want to rely on getopt
+   * also, copy all non cwrapper options to newargz, except
+   * argz[0], which is handled differently
+   */
+  newargc=0;
+  for (i = 1; i < argc; i++)
+    {
+      if (strcmp (argv[i], dumpscript_opt) == 0)
+	{
+EOF
+	    case "$host" in
+	      *mingw* | *cygwin* )
+		# make stdout use "unix" line endings
+		echo "          setmode(1,_O_BINARY);"
+		;;
+	      esac
+
+	    cat <<"EOF"
+	  lt_dump_script (stdout);
+	  return 0;
+	}
+      if (strcmp (argv[i], debug_opt) == 0)
+	{
+          lt_debug = 1;
+          continue;
+	}
+      if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
+        {
+          /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+             namespace, but it is not one of the ones we know about and
+             have already dealt with, above (including dump-script), then
+             report an error. Otherwise, targets might begin to believe
+             they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+             namespace. The first time any user complains about this, we'll
+             need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+             or a configure.ac-settable value.
+           */
+          lt_fatal (__FILE__, __LINE__,
+		    "unrecognized %s option: '%s'",
+                    ltwrapper_option_prefix, argv[i]);
+        }
+      /* otherwise ... */
+      newargz[++newargc] = xstrdup (argv[i]);
+    }
+  newargz[++newargc] = NULL;
+
+EOF
+	    cat <<EOF
+  /* The GNU banner must be the first non-error debug message */
+  lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
+EOF
+	    cat <<"EOF"
+  lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+  tmp_pathspec = find_executable (argv[0]);
+  if (tmp_pathspec == NULL)
+    lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (before symlink chase) at: %s\n",
+		  tmp_pathspec);
+
+  actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (after symlink chase) at: %s\n",
+		  actual_cwrapper_path);
+  XFREE (tmp_pathspec);
+
+  actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+  strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+  /* wrapper name transforms */
+  strendzap (actual_cwrapper_name, ".exe");
+  tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+  XFREE (actual_cwrapper_name);
+  actual_cwrapper_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  /* target_name transforms -- use actual target program name; might have lt- prefix */
+  target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+  strendzap (target_name, ".exe");
+  tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+  XFREE (target_name);
+  target_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(main) libtool target name: %s\n",
+		  target_name);
+EOF
+
+	    cat <<EOF
+  newargz[0] =
+    XMALLOC (char, (strlen (actual_cwrapper_path) +
+		    strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+  strcpy (newargz[0], actual_cwrapper_path);
+  strcat (newargz[0], "$objdir");
+  strcat (newargz[0], "/");
+EOF
+
+	    cat <<"EOF"
+  /* stop here, and copy so we don't have to do this twice */
+  tmp_pathspec = xstrdup (newargz[0]);
+
+  /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+  strcat (newargz[0], actual_cwrapper_name);
+
+  /* DO want the lt- prefix here if it exists, so use target_name */
+  lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+  XFREE (tmp_pathspec);
+  tmp_pathspec = NULL;
+EOF
+
+	    case $host_os in
+	      mingw*)
+	    cat <<"EOF"
+  {
+    char* p;
+    while ((p = strchr (newargz[0], '\\')) != NULL)
+      {
+	*p = '/';
+      }
+    while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+      {
+	*p = '/';
+      }
+  }
+EOF
+	    ;;
+	    esac
+
+	    cat <<"EOF"
+  XFREE (target_name);
+  XFREE (actual_cwrapper_path);
+  XFREE (actual_cwrapper_name);
+
+  lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+  lt_setenv ("DUALCASE", "1");  /* for MSK sh */
+  /* Update the DLL searchpath.  EXE_PATH_VALUE ($dllsearchpath) must
+     be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
+     because on Windows, both *_VARNAMEs are PATH but uninstalled
+     libraries must come first. */
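+  /* Editorial illustration, not part of the upstream wrapper source, using
+     hypothetical values: with LIB_PATH_VALUE "c:/proj/.libs;" and
+     EXE_PATH_VALUE "c:/deps/bin;", the two calls below leave PATH starting
+     with "c:/proj/.libs;c:/deps/bin;<previous PATH>", so the uninstalled
+     library directory is searched first.  */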
+  lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+  lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+  lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+		  nonnull (lt_argv_zero));
+  for (i = 0; i < newargc; i++)
+    {
+      lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+		      i, nonnull (newargz[i]));
+    }
+
+EOF
+
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+  /* execv doesn't actually work on mingw as expected on unix */
+  newargz = prepare_spawn (newargz);
+  rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+  if (rval == -1)
+    {
+      /* failed to start process */
+      lt_debugprintf (__FILE__, __LINE__,
+		      "(main) failed to launch target \"%s\": %s\n",
+		      lt_argv_zero, nonnull (strerror (errno)));
+      return 127;
+    }
+  return rval;
+EOF
+		;;
+	      *)
+		cat <<"EOF"
+  execv (lt_argv_zero, newargz);
+  return rval; /* =127, but avoids unused variable warning */
+EOF
+		;;
+	    esac
+
+	    cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+  void *p = (void *) malloc (num);
+  if (!p)
+    lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+  return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+  return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+			  string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+  const char *base;
+
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  /* Skip over the disk name in MSDOS pathnames. */
+  if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+    name += 2;
+#endif
+
+  for (base = name; *name; name++)
+    if (IS_DIR_SEPARATOR (*name))
+      base = name + 1;
+  return base;
+}
+
+int
+check_executable (const char *path)
+{
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if ((stat (path, &st) >= 0)
+      && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+    return 1;
+  else
+    return 0;
+}
+
+int
+make_executable (const char *path)
+{
+  int rval = 0;
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if (stat (path, &st) >= 0)
+    {
+      rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+    }
+  return rval;
+}
+
+/* Searches for the full path of the wrapper.  Returns
+   newly allocated full path name if found, NULL otherwise
+   Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+  int has_slash = 0;
+  const char *p;
+  const char *p_next;
+  /* static buffer for getcwd */
+  char tmp[LT_PATHMAX + 1];
+  int tmp_len;
+  char *concat_name;
+
+  lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+                  nonempty (wrapper));
+
+  if ((wrapper == NULL) || (*wrapper == '\0'))
+    return NULL;
+
+  /* Absolute path? */
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+    {
+      concat_name = xstrdup (wrapper);
+      if (check_executable (concat_name))
+	return concat_name;
+      XFREE (concat_name);
+    }
+  else
+    {
+#endif
+      if (IS_DIR_SEPARATOR (wrapper[0]))
+	{
+	  concat_name = xstrdup (wrapper);
+	  if (check_executable (concat_name))
+	    return concat_name;
+	  XFREE (concat_name);
+	}
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+    }
+#endif
+
+  for (p = wrapper; *p; p++)
+    if (*p == '/')
+      {
+	has_slash = 1;
+	break;
+      }
+  if (!has_slash)
+    {
+      /* no slashes; search PATH */
+      const char *path = getenv ("PATH");
+      if (path != NULL)
+	{
+	  for (p = path; *p; p = p_next)
+	    {
+	      const char *q;
+	      size_t p_len;
+	      for (q = p; *q; q++)
+		if (IS_PATH_SEPARATOR (*q))
+		  break;
+	      p_len = q - p;
+	      p_next = (*q == '\0' ? q : q + 1);
+	      if (p_len == 0)
+		{
+		  /* empty path: current directory */
+		  if (getcwd (tmp, LT_PATHMAX) == NULL)
+		    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+                              nonnull (strerror (errno)));
+		  tmp_len = strlen (tmp);
+		  concat_name =
+		    XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, tmp, tmp_len);
+		  concat_name[tmp_len] = '/';
+		  strcpy (concat_name + tmp_len + 1, wrapper);
+		}
+	      else
+		{
+		  concat_name =
+		    XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, p, p_len);
+		  concat_name[p_len] = '/';
+		  strcpy (concat_name + p_len + 1, wrapper);
+		}
+	      if (check_executable (concat_name))
+		return concat_name;
+	      XFREE (concat_name);
+	    }
+	}
+      /* not found in PATH; assume curdir */
+    }
+  /* Relative path | not found in path: prepend cwd */
+  if (getcwd (tmp, LT_PATHMAX) == NULL)
+    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+              nonnull (strerror (errno)));
+  tmp_len = strlen (tmp);
+  concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+  memcpy (concat_name, tmp, tmp_len);
+  concat_name[tmp_len] = '/';
+  strcpy (concat_name + tmp_len + 1, wrapper);
+
+  if (check_executable (concat_name))
+    return concat_name;
+  XFREE (concat_name);
+  return NULL;
+}
+
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+  return xstrdup (pathspec);
+#else
+  char buf[LT_PATHMAX];
+  struct stat s;
+  char *tmp_pathspec = xstrdup (pathspec);
+  char *p;
+  int has_symlinks = 0;
+  while (strlen (tmp_pathspec) && !has_symlinks)
+    {
+      lt_debugprintf (__FILE__, __LINE__,
+		      "checking path component for symlinks: %s\n",
+		      tmp_pathspec);
+      if (lstat (tmp_pathspec, &s) == 0)
+	{
+	  if (S_ISLNK (s.st_mode) != 0)
+	    {
+	      has_symlinks = 1;
+	      break;
+	    }
+
+	  /* search backwards for last DIR_SEPARATOR */
+	  p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+	  while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    p--;
+	  if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    {
+	      /* no more DIR_SEPARATORS left */
+	      break;
+	    }
+	  *p = '\0';
+	}
+      else
+	{
+	  lt_fatal (__FILE__, __LINE__,
+		    "error accessing file \"%s\": %s",
+		    tmp_pathspec, nonnull (strerror (errno)));
+	}
+    }
+  XFREE (tmp_pathspec);
+
+  if (!has_symlinks)
+    {
+      return xstrdup (pathspec);
+    }
+
+  tmp_pathspec = realpath (pathspec, buf);
+  if (tmp_pathspec == 0)
+    {
+      lt_fatal (__FILE__, __LINE__,
+		"could not follow symlinks for %s", pathspec);
+    }
+  return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+  size_t len, patlen;
+
+  assert (str != NULL);
+  assert (pat != NULL);
+
+  len = strlen (str);
+  patlen = strlen (pat);
+
+  if (patlen <= len)
+    {
+      str += len - patlen;
+      if (strcmp (str, pat) == 0)
+	*str = '\0';
+    }
+  return str;
+}
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+  va_list args;
+  if (lt_debug)
+    {
+      (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+      va_start (args, fmt);
+      (void) vfprintf (stderr, fmt, args);
+      va_end (args);
+    }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+	       int line, const char *mode,
+	       const char *message, va_list ap)
+{
+  fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+  vfprintf (stderr, message, ap);
+  fprintf (stderr, ".\n");
+
+  if (exit_status >= 0)
+    exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+  va_list ap;
+  va_start (ap, message);
+  lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+  va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+  return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+  return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_setenv) setting '%s' to '%s'\n",
+                  nonnull (name), nonnull (value));
+  {
+#ifdef HAVE_SETENV
+    /* always make a copy, for consistency with !HAVE_SETENV */
+    char *str = xstrdup (value);
+    setenv (name, str, 1);
+#else
+    int len = strlen (name) + 1 + strlen (value) + 1;
+    char *str = XMALLOC (char, len);
+    sprintf (str, "%s=%s", name, value);
+    if (putenv (str) != EXIT_SUCCESS)
+      {
+        XFREE (str);
+      }
+#endif
+  }
+}
+
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+  char *new_value;
+  if (orig_value && *orig_value)
+    {
+      int orig_value_len = strlen (orig_value);
+      int add_len = strlen (add);
+      new_value = XMALLOC (char, add_len + orig_value_len + 1);
+      if (to_end)
+        {
+          strcpy (new_value, orig_value);
+          strcpy (new_value + orig_value_len, add);
+        }
+      else
+        {
+          strcpy (new_value, add);
+          strcpy (new_value + add_len, orig_value);
+        }
+    }
+  else
+    {
+      new_value = xstrdup (add);
+    }
+  return new_value;
+}
+
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      /* some systems can't cope with a ':'-terminated path #' */
+      int len = strlen (new_value);
+      while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+        {
+          new_value[len-1] = '\0';
+        }
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+EOF
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+   Note that spawn() does not by itself call the command interpreter
+     (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+      ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+         GetVersionEx(&v);
+         v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+      }) ? "cmd.exe" : "command.com").
+   Instead it simply concatenates the arguments, separated by ' ', and calls
+   CreateProcess().  We must quote the arguments since Win32 CreateProcess()
+   interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+   special way:
+   - Space and tab are interpreted as delimiters. They are not treated as
+     delimiters if they are surrounded by double quotes: "...".
+   - Unescaped double quotes are removed from the input. Their only effect is
+     that within double quotes, space and tab are treated like normal
+     characters.
+   - Backslashes not followed by double quotes are not special.
+   - But 2*n+1 backslashes followed by a double quote become
+     n backslashes followed by a double quote (n >= 0):
+       \" -> "
+       \\\" -> \"
+       \\\\\" -> \\"
+ */
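+/* Editorial illustration of the quoting below (not part of the upstream
+   wrapper source; the arguments are hypothetical):
+     say "hi"             ->  "say \"hi\""
+     C:\Program Files\x   ->  "C:\Program Files\x"
+     dir with space\      ->  "dir with space\\"
+   A trailing backslash is doubled so that the closing quote survives
+   CreateProcess() parsing.  */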
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+  size_t argc;
+  char **new_argv;
+  size_t i;
+
+  /* Count number of arguments.  */
+  for (argc = 0; argv[argc] != NULL; argc++)
+    ;
+
+  /* Allocate new argument vector.  */
+  new_argv = XMALLOC (char *, argc + 1);
+
+  /* Put quoted arguments into the new argument vector.  */
+  for (i = 0; i < argc; i++)
+    {
+      const char *string = argv[i];
+
+      if (string[0] == '\0')
+	new_argv[i] = xstrdup ("\"\"");
+      else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+	{
+	  int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+	  size_t length;
+	  unsigned int backslashes;
+	  const char *s;
+	  char *quoted_string;
+	  char *p;
+
+	  length = 0;
+	  backslashes = 0;
+	  if (quote_around)
+	    length++;
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		length += backslashes + 1;
+	      length++;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    length += backslashes + 1;
+
+	  quoted_string = XMALLOC (char, length + 1);
+
+	  p = quoted_string;
+	  backslashes = 0;
+	  if (quote_around)
+	    *p++ = '"';
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		{
+		  unsigned int j;
+		  for (j = backslashes + 1; j > 0; j--)
+		    *p++ = '\\';
+		}
+	      *p++ = c;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    {
+	      unsigned int j;
+	      for (j = backslashes; j > 0; j--)
+		*p++ = '\\';
+	      *p++ = '"';
+	    }
+	  *p = '\0';
+
+	  new_argv[i] = quoted_string;
+	}
+      else
+	new_argv[i] = (char *) string;
+    }
+  new_argv[argc] = NULL;
+
+  return new_argv;
+}
+EOF
+		;;
+	    esac
+
+            cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
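+	    # Editorial note, not in upstream ltmain.sh: the $SED program below
+	    # splits lines longer than 80 columns, escapes backslashes and double
+	    # quotes, appends a literal \n, and wraps each chunk in an fputs()
+	    # call, so the wrapper script produced by func_emit_wrapper can be
+	    # embedded verbatim in the generated lt_dump_script() body.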
+	    func_emit_wrapper yes |
+	      $SED -n -e '
+s/^\(.\{79\}\)\(..*\)/\1\
+\2/
+h
+s/\([\\"]\)/\\\1/g
+s/$/\\n/
+s/\([^\n]*\).*/  fputs ("\1", f);/p
+g
+D'
+            cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+    $opt_debug
+    case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+    *import*) : ;;
+    *) false ;;
+    esac
+}
+
+# func_mode_link arg...
+func_mode_link ()
+{
+    $opt_debug
+    case $host in
+    *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+      # It is impossible to link a dll without this setting, and
+      # we shouldn't force the makefile maintainer to figure out
+      # which system we are compiling for in order to pass an extra
+      # flag for every libtool invocation.
+      # allow_undefined=no
+
+      # FIXME: Unfortunately, there are problems with the above when trying
+      # to make a dll which has undefined symbols, in which case not
+      # even a static library is built.  For now, we need to specify
+      # -no-undefined on the libtool link line when we can be certain
+      # that all symbols are satisfied, otherwise we get a static library.
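+      # Illustrative (hypothetical) link line for the situation above; it is
+      # not part of upstream ltmain.sh and the names are made up:
+      #   libtool --mode=link $CC -no-undefined -o libfoo.la foo.lo -rpath /usr/lib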
+      allow_undefined=yes
+      ;;
+    *)
+      allow_undefined=yes
+      ;;
+    esac
+    libtool_args=$nonopt
+    base_compile="$nonopt $@"
+    compile_command=$nonopt
+    finalize_command=$nonopt
+
+    compile_rpath=
+    finalize_rpath=
+    compile_shlibpath=
+    finalize_shlibpath=
+    convenience=
+    old_convenience=
+    deplibs=
+    old_deplibs=
+    compiler_flags=
+    linker_flags=
+    dllsearchpath=
+    lib_search_path=`pwd`
+    inst_prefix_dir=
+    new_inherited_linker_flags=
+
+    avoid_version=no
+    bindir=
+    dlfiles=
+    dlprefiles=
+    dlself=no
+    export_dynamic=no
+    export_symbols=
+    export_symbols_regex=
+    generated=
+    libobjs=
+    ltlibs=
+    module=no
+    no_install=no
+    objs=
+    non_pic_objects=
+    precious_files_regex=
+    prefer_static_libs=no
+    preload=no
+    prev=
+    prevarg=
+    release=
+    rpath=
+    xrpath=
+    perm_rpath=
+    temp_rpath=
+    thread_safe=no
+    vinfo=
+    vinfo_number=no
+    weak_libs=
+    single_module="${wl}-single_module"
+    func_infer_tag $base_compile
+
+    # We need to know -static, to get the right output filenames.
+    for arg
+    do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	break
+	;;
+      -all-static | -static | -static-libtool-libs)
+	case $arg in
+	-all-static)
+	  if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
+	    func_warning "complete static linking is impossible in this configuration"
+	  fi
+	  if test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	-static)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=built
+	  ;;
+	-static-libtool-libs)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	esac
+	build_libtool_libs=no
+	build_old_libs=yes
+	break
+	;;
+      esac
+    done
+
+    # See if our shared archives depend on static archives.
+    test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+    # Go through the arguments, transforming them on the way.
+    while test "$#" -gt 0; do
+      arg="$1"
+      shift
+      func_quote_for_eval "$arg"
+      qarg=$func_quote_for_eval_unquoted_result
+      func_append libtool_args " $func_quote_for_eval_result"
+
+      # If the previous option needs an argument, assign it.
+      if test -n "$prev"; then
+	case $prev in
+	output)
+	  func_append compile_command " @OUTPUT@"
+	  func_append finalize_command " @OUTPUT@"
+	  ;;
+	esac
+
+	case $prev in
+	bindir)
+	  bindir="$arg"
+	  prev=
+	  continue
+	  ;;
+	dlfiles|dlprefiles)
+	  if test "$preload" = no; then
+	    # Add the symbol object into the linking commands.
+	    func_append compile_command " @SYMFILE@"
+	    func_append finalize_command " @SYMFILE@"
+	    preload=yes
+	  fi
+	  case $arg in
+	  *.la | *.lo) ;;  # We handle these cases below.
+	  force)
+	    if test "$dlself" = no; then
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  self)
+	    if test "$prev" = dlprefiles; then
+	      dlself=yes
+	    elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
+	      dlself=yes
+	    else
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  *)
+	    if test "$prev" = dlfiles; then
+	      func_append dlfiles " $arg"
+	    else
+	      func_append dlprefiles " $arg"
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  esac
+	  ;;
+	expsyms)
+	  export_symbols="$arg"
+	  test -f "$arg" \
+	    || func_fatal_error "symbol file \`$arg' does not exist"
+	  prev=
+	  continue
+	  ;;
+	expsyms_regex)
+	  export_symbols_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	framework)
+	  case $host in
+	    *-*-darwin*)
+	      case "$deplibs " in
+		*" $qarg.ltframework "*) ;;
+		*) func_append deplibs " $qarg.ltframework" # this is fixed later
+		   ;;
+	      esac
+	      ;;
+	  esac
+	  prev=
+	  continue
+	  ;;
+	inst_prefix)
+	  inst_prefix_dir="$arg"
+	  prev=
+	  continue
+	  ;;
+	objectlist)
+	  if test -f "$arg"; then
+	    save_arg=$arg
+	    moreargs=
+	    for fil in `cat "$save_arg"`
+	    do
+#	      func_append moreargs " $fil"
+	      arg=$fil
+	      # A libtool-controlled object.
+
+	      # Check to see that this really is a libtool object.
+	      if func_lalib_unsafe_p "$arg"; then
+		pic_object=
+		non_pic_object=
+
+		# Read the .lo file
+		func_source "$arg"
+
+		if test -z "$pic_object" ||
+		   test -z "$non_pic_object" ||
+		   test "$pic_object" = none &&
+		   test "$non_pic_object" = none; then
+		  func_fatal_error "cannot find name of object for \`$arg'"
+		fi
+
+		# Extract subdirectory from the argument.
+		func_dirname "$arg" "/" ""
+		xdir="$func_dirname_result"
+
+		if test "$pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  pic_object="$xdir$pic_object"
+
+		  if test "$prev" = dlfiles; then
+		    if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		      func_append dlfiles " $pic_object"
+		      prev=
+		      continue
+		    else
+		      # If libtool objects are unsupported, then we need to preload.
+		      prev=dlprefiles
+		    fi
+		  fi
+
+		  # CHECK ME:  I think I busted this.  -Ossama
+		  if test "$prev" = dlprefiles; then
+		    # Preload the old-style object.
+		    func_append dlprefiles " $pic_object"
+		    prev=
+		  fi
+
+		  # A PIC object.
+		  func_append libobjs " $pic_object"
+		  arg="$pic_object"
+		fi
+
+		# Non-PIC object.
+		if test "$non_pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  non_pic_object="$xdir$non_pic_object"
+
+		  # A standard non-PIC object
+		  func_append non_pic_objects " $non_pic_object"
+		  if test -z "$pic_object" || test "$pic_object" = none ; then
+		    arg="$non_pic_object"
+		  fi
+		else
+		  # If the PIC object exists, use it instead.
+		  # $xdir was prepended to $pic_object above.
+		  non_pic_object="$pic_object"
+		  func_append non_pic_objects " $non_pic_object"
+		fi
+	      else
+		# Only an error if not doing a dry-run.
+		if $opt_dry_run; then
+		  # Extract subdirectory from the argument.
+		  func_dirname "$arg" "/" ""
+		  xdir="$func_dirname_result"
+
+		  func_lo2o "$arg"
+		  pic_object=$xdir$objdir/$func_lo2o_result
+		  non_pic_object=$xdir$func_lo2o_result
+		  func_append libobjs " $pic_object"
+		  func_append non_pic_objects " $non_pic_object"
+	        else
+		  func_fatal_error "\`$arg' is not a valid libtool object"
+		fi
+	      fi
+	    done
+	  else
+	    func_fatal_error "link input file \`$arg' does not exist"
+	  fi
+	  arg=$save_arg
+	  prev=
+	  continue
+	  ;;
+	precious_regex)
+	  precious_files_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	release)
+	  release="-$arg"
+	  prev=
+	  continue
+	  ;;
+	rpath | xrpath)
+	  # We need an absolute path.
+	  case $arg in
+	  [\\/]* | [A-Za-z]:[\\/]*) ;;
+	  *)
+	    func_fatal_error "only absolute run-paths are allowed"
+	    ;;
+	  esac
+	  if test "$prev" = rpath; then
+	    case "$rpath " in
+	    *" $arg "*) ;;
+	    *) func_append rpath " $arg" ;;
+	    esac
+	  else
+	    case "$xrpath " in
+	    *" $arg "*) ;;
+	    *) func_append xrpath " $arg" ;;
+	    esac
+	  fi
+	  prev=
+	  continue
+	  ;;
+	shrext)
+	  shrext_cmds="$arg"
+	  prev=
+	  continue
+	  ;;
+	weak)
+	  func_append weak_libs " $arg"
+	  prev=
+	  continue
+	  ;;
+	xcclinker)
+	  func_append linker_flags " $qarg"
+	  func_append compiler_flags " $qarg"
+	  prev=
+	  func_append compile_command " $qarg"
+	  func_append finalize_command " $qarg"
+	  continue
+	  ;;
+	xcompiler)
+	  func_append compiler_flags " $qarg"
+	  prev=
+	  func_append compile_command " $qarg"
+	  func_append finalize_command " $qarg"
+	  continue
+	  ;;
+	xlinker)
+	  func_append linker_flags " $qarg"
+	  func_append compiler_flags " $wl$qarg"
+	  prev=
+	  func_append compile_command " $wl$qarg"
+	  func_append finalize_command " $wl$qarg"
+	  continue
+	  ;;
+	*)
+	  eval "$prev=\"\$arg\""
+	  prev=
+	  continue
+	  ;;
+	esac
+      fi # test -n "$prev"
+
+      prevarg="$arg"
+
+      case $arg in
+      -all-static)
+	if test -n "$link_static_flag"; then
+	  # See comment for -static flag below, for more details.
+	  func_append compile_command " $link_static_flag"
+	  func_append finalize_command " $link_static_flag"
+	fi
+	continue
+	;;
+
+      -allow-undefined)
+	# FIXME: remove this flag sometime in the future.
+	func_fatal_error "\`-allow-undefined' must not be used because it is the default"
+	;;
+
+      -avoid-version)
+	avoid_version=yes
+	continue
+	;;
+
+      -bindir)
+	prev=bindir
+	continue
+	;;
+
+      -dlopen)
+	prev=dlfiles
+	continue
+	;;
+
+      -dlpreopen)
+	prev=dlprefiles
+	continue
+	;;
+
+      -export-dynamic)
+	export_dynamic=yes
+	continue
+	;;
+
+      -export-symbols | -export-symbols-regex)
+	if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+	  func_fatal_error "more than one -export-symbols argument is not allowed"
+	fi
+	if test "X$arg" = "X-export-symbols"; then
+	  prev=expsyms
+	else
+	  prev=expsyms_regex
+	fi
+	continue
+	;;
+
+      -framework)
+	prev=framework
+	continue
+	;;
+
+      -inst-prefix-dir)
+	prev=inst_prefix
+	continue
+	;;
+
+      # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+      # so, if we see these flags, be careful not to treat them like -L
+      -L[A-Z][A-Z]*:*)
+	case $with_gcc/$host in
+	no/*-*-irix* | /*-*-irix*)
+	  func_append compile_command " $arg"
+	  func_append finalize_command " $arg"
+	  ;;
+	esac
+	continue
+	;;
+
+      -L*)
+	func_stripname "-L" '' "$arg"
+	if test -z "$func_stripname_result"; then
+	  if test "$#" -gt 0; then
+	    func_fatal_error "require no space between \`-L' and \`$1'"
+	  else
+	    func_fatal_error "need path for \`-L' option"
+	  fi
+	fi
+	func_resolve_sysroot "$func_stripname_result"
+	dir=$func_resolve_sysroot_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	*)
+	  absdir=`cd "$dir" && pwd`
+	  test -z "$absdir" && \
+	    func_fatal_error "cannot determine absolute directory name of \`$dir'"
+	  dir="$absdir"
+	  ;;
+	esac
+	case "$deplibs " in
+	*" -L$dir "* | *" $arg "*)
+	  # Will only happen for absolute or sysroot arguments
+	  ;;
+	*)
+	  # Preserve sysroot, but never include relative directories
+	  case $dir in
+	    [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
+	    *) func_append deplibs " -L$dir" ;;
+	  esac
+	  func_append lib_search_path " $dir"
+	  ;;
+	esac
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$dir:"*) ;;
+	  ::) dllsearchpath=$dir;;
+	  *) func_append dllsearchpath ":$dir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) func_append dllsearchpath ":$testbindir";;
+	  esac
+	  ;;
+	esac
+	continue
+	;;
+
+      -l*)
+	if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # These systems don't actually have a C or math library (as such)
+	    continue
+	    ;;
+	  *-*-os2*)
+	    # These systems don't actually have a C library (as such)
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C and math libraries are in the System framework
+	    func_append deplibs " System.ltframework"
+	    continue
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  esac
+	elif test "X$arg" = "X-lc_r"; then
+	 case $host in
+	 *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	   # Do not include libc_r directly, use -pthread flag.
+	   continue
+	   ;;
+	 esac
+	fi
+	func_append deplibs " $arg"
+	continue
+	;;
+
+      -module)
+	module=yes
+	continue
+	;;
+
+      # Tru64 UNIX uses -model [arg] to determine the layout of C++
+      # classes, name mangling, and exception handling.
+      # Darwin uses the -arch flag to determine output architecture.
+      -model|-arch|-isysroot|--sysroot)
+	func_append compiler_flags " $arg"
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+	prev=xcompiler
+	continue
+	;;
+
+      -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+      |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	func_append compiler_flags " $arg"
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+	case "$new_inherited_linker_flags " in
+	    *" $arg "*) ;;
+	    * ) func_append new_inherited_linker_flags " $arg" ;;
+	esac
+	continue
+	;;
+
+      -multi_module)
+	single_module="${wl}-multi_module"
+	continue
+	;;
+
+      -no-fast-install)
+	fast_install=no
+	continue
+	;;
+
+      -no-install)
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+	  # The PATH hackery in wrapper scripts is required on Windows
+	  # and Darwin in order for the loader to find any dlls it needs.
+	  func_warning "\`-no-install' is ignored for $host"
+	  func_warning "assuming \`-no-fast-install' instead"
+	  fast_install=no
+	  ;;
+	*) no_install=yes ;;
+	esac
+	continue
+	;;
+
+      -no-undefined)
+	allow_undefined=no
+	continue
+	;;
+
+      -objectlist)
+	prev=objectlist
+	continue
+	;;
+
+      -o) prev=output ;;
+
+      -precious-files-regex)
+	prev=precious_regex
+	continue
+	;;
+
+      -release)
+	prev=release
+	continue
+	;;
+
+      -rpath)
+	prev=rpath
+	continue
+	;;
+
+      -R)
+	prev=xrpath
+	continue
+	;;
+
+      -R*)
+	func_stripname '-R' '' "$arg"
+	dir=$func_stripname_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	=*)
+	  func_stripname '=' '' "$dir"
+	  dir=$lt_sysroot$func_stripname_result
+	  ;;
+	*)
+	  func_fatal_error "only absolute run-paths are allowed"
+	  ;;
+	esac
+	case "$xrpath " in
+	*" $dir "*) ;;
+	*) func_append xrpath " $dir" ;;
+	esac
+	continue
+	;;
+
+      -shared)
+	# The effects of -shared are defined in a previous loop.
+	continue
+	;;
+
+      -shrext)
+	prev=shrext
+	continue
+	;;
+
+      -static | -static-libtool-libs)
+	# The effects of -static are defined in a previous loop.
+	# We used to do the same as -all-static on platforms that
+	# didn't have a PIC flag, but the assumption that the effects
+	# would be equivalent was wrong.  It would break on at least
+	# Digital Unix and AIX.
+	continue
+	;;
+
+      -thread-safe)
+	thread_safe=yes
+	continue
+	;;
+
+      -version-info)
+	prev=vinfo
+	continue
+	;;
+
+      -version-number)
+	prev=vinfo
+	vinfo_number=yes
+	continue
+	;;
+
+      -weak)
+        prev=weak
+	continue
+	;;
+
+      -Wc,*)
+	func_stripname '-Wc,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  func_append arg " $func_quote_for_eval_result"
+	  func_append compiler_flags " $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Wl,*)
+	func_stripname '-Wl,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  func_append arg " $wl$func_quote_for_eval_result"
+	  func_append compiler_flags " $wl$func_quote_for_eval_result"
+	  func_append linker_flags " $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Xcompiler)
+	prev=xcompiler
+	continue
+	;;
+
+      -Xlinker)
+	prev=xlinker
+	continue
+	;;
+
+      -XCClinker)
+	prev=xcclinker
+	continue
+	;;
+
+      # -msg_* for osf cc
+      -msg_*)
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      # Flags to be passed through unchanged, with rationale:
+      # -64, -mips[0-9]      enable 64-bit mode for the SGI compiler
+      # -r[0-9][0-9]*        specify processor for the SGI compiler
+      # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+      # +DA*, +DD*           enable 64-bit mode for the HP compiler
+      # -q*                  compiler args for the IBM compiler
+      # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+      # -F/path              path to uninstalled frameworks, gcc on darwin
+      # -p, -pg, --coverage, -fprofile-*  profiling flags for GCC
+      # @file                GCC response files
+      # -tp=*                Portland pgcc target processor selection
+      # --sysroot=*          for sysroot support
+      # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+      -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+      -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
+      -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+        func_append compile_command " $arg"
+        func_append finalize_command " $arg"
+        func_append compiler_flags " $arg"
+        continue
+        ;;
+
+      # Some other compiler flag.
+      -* | +*)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      *.$objext)
+	# A standard object.
+	func_append objs " $arg"
+	;;
+
+      *.lo)
+	# A libtool-controlled object.
+
+	# Check to see that this really is a libtool object.
+	if func_lalib_unsafe_p "$arg"; then
+	  pic_object=
+	  non_pic_object=
+
+	  # Read the .lo file
+	  func_source "$arg"
+
+	  if test -z "$pic_object" ||
+	     test -z "$non_pic_object" ||
+	     test "$pic_object" = none &&
+	     test "$non_pic_object" = none; then
+	    func_fatal_error "cannot find name of object for \`$arg'"
+	  fi
+
+	  # Extract subdirectory from the argument.
+	  func_dirname "$arg" "/" ""
+	  xdir="$func_dirname_result"
+
+	  if test "$pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    pic_object="$xdir$pic_object"
+
+	    if test "$prev" = dlfiles; then
+	      if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		func_append dlfiles " $pic_object"
+		prev=
+		continue
+	      else
+		# If libtool objects are unsupported, then we need to preload.
+		prev=dlprefiles
+	      fi
+	    fi
+
+	    # CHECK ME:  I think I busted this.  -Ossama
+	    if test "$prev" = dlprefiles; then
+	      # Preload the old-style object.
+	      func_append dlprefiles " $pic_object"
+	      prev=
+	    fi
+
+	    # A PIC object.
+	    func_append libobjs " $pic_object"
+	    arg="$pic_object"
+	  fi
+
+	  # Non-PIC object.
+	  if test "$non_pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    non_pic_object="$xdir$non_pic_object"
+
+	    # A standard non-PIC object
+	    func_append non_pic_objects " $non_pic_object"
+	    if test -z "$pic_object" || test "$pic_object" = none ; then
+	      arg="$non_pic_object"
+	    fi
+	  else
+	    # If the PIC object exists, use it instead.
+	    # $xdir was prepended to $pic_object above.
+	    non_pic_object="$pic_object"
+	    func_append non_pic_objects " $non_pic_object"
+	  fi
+	else
+	  # Only an error if not doing a dry-run.
+	  if $opt_dry_run; then
+	    # Extract subdirectory from the argument.
+	    func_dirname "$arg" "/" ""
+	    xdir="$func_dirname_result"
+
+	    func_lo2o "$arg"
+	    pic_object=$xdir$objdir/$func_lo2o_result
+	    non_pic_object=$xdir$func_lo2o_result
+	    func_append libobjs " $pic_object"
+	    func_append non_pic_objects " $non_pic_object"
+	  else
+	    func_fatal_error "\`$arg' is not a valid libtool object"
+	  fi
+	fi
+	;;
+
+      *.$libext)
+	# An archive.
+	func_append deplibs " $arg"
+	func_append old_deplibs " $arg"
+	continue
+	;;
+
+      *.la)
+	# A libtool-controlled library.
+
+	func_resolve_sysroot "$arg"
+	if test "$prev" = dlfiles; then
+	  # This library was specified with -dlopen.
+	  func_append dlfiles " $func_resolve_sysroot_result"
+	  prev=
+	elif test "$prev" = dlprefiles; then
+	  # The library was specified with -dlpreopen.
+	  func_append dlprefiles " $func_resolve_sysroot_result"
+	  prev=
+	else
+	  func_append deplibs " $func_resolve_sysroot_result"
+	fi
+	continue
+	;;
+
+      # Some other compiler argument.
+      *)
+	# Unknown arguments in both finalize_command and compile_command need
+	# to be aesthetically quoted because they are evaled later.
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+      esac # arg
+
+      # Now actually substitute the argument into the commands.
+      if test -n "$arg"; then
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+      fi
+    done # argument parsing loop
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prevarg' option requires an argument"
+
+    if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+      eval arg=\"$export_dynamic_flag_spec\"
+      func_append compile_command " $arg"
+      func_append finalize_command " $arg"
+    fi
+
+    oldlibs=
+    # calculate the name of the file, without its directory
+    func_basename "$output"
+    outputname="$func_basename_result"
+    libobjs_save="$libobjs"
+
+    if test -n "$shlibpath_var"; then
+      # get the directories listed in $shlibpath_var
+      eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\`
+    else
+      shlib_search_path=
+    fi
+    eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+    eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+    func_dirname "$output" "/" ""
+    output_objdir="$func_dirname_result$objdir"
+    func_to_tool_file "$output_objdir/"
+    tool_output_objdir=$func_to_tool_file_result
+    # Create the object directory.
+    func_mkdir_p "$output_objdir"
+
+    # Determine the type of output
+    case $output in
+    "")
+      func_fatal_help "you must specify an output file"
+      ;;
+    *.$libext) linkmode=oldlib ;;
+    *.lo | *.$objext) linkmode=obj ;;
+    *.la) linkmode=lib ;;
+    *) linkmode=prog ;; # Anything else should be a program.
+    esac
+
+    specialdeplibs=
+
+    libs=
+    # Find all interdependent deplibs by searching for libraries
+    # that are linked more than once (e.g. -la -lb -la)
+    for deplib in $deplibs; do
+      if $opt_preserve_dup_deps ; then
+	case "$libs " in
+	*" $deplib "*) func_append specialdeplibs " $deplib" ;;
+	esac
+      fi
+      func_append libs " $deplib"
+    done
+
+    if test "$linkmode" = lib; then
+      libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+      # Compute libraries that are listed more than once in $predeps
+      # $postdeps and mark them as special (i.e., whose duplicates are
+      # not to be eliminated).
+      pre_post_deps=
+      if $opt_duplicate_compiler_generated_deps; then
+	for pre_post_dep in $predeps $postdeps; do
+	  case "$pre_post_deps " in
+	  *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;;
+	  esac
+	  func_append pre_post_deps " $pre_post_dep"
+	done
+      fi
+      pre_post_deps=
+    fi
+
+    deplibs=
+    newdependency_libs=
+    newlib_search_path=
+    need_relink=no # whether we're linking any uninstalled libtool libraries
+    notinst_deplibs= # not-installed libtool libraries
+    notinst_path= # paths that contain not-installed libtool libraries
+
+    case $linkmode in
+    lib)
+	passes="conv dlpreopen link"
+	for file in $dlfiles $dlprefiles; do
+	  case $file in
+	  *.la) ;;
+	  *)
+	    func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
+	    ;;
+	  esac
+	done
+	;;
+    prog)
+	compile_deplibs=
+	finalize_deplibs=
+	alldeplibs=no
+	newdlfiles=
+	newdlprefiles=
+	passes="conv scan dlopen dlpreopen link"
+	;;
+    *)  passes="conv"
+	;;
+    esac
+
+    for pass in $passes; do
+      # The preopen pass in lib mode reverses $deplibs; put it back here
+      # so that -L comes before libs that need it for instance...
+      if test "$linkmode,$pass" = "lib,link"; then
+	## FIXME: Find the place where the list is rebuilt in the wrong
+	##        order, and fix it there properly
+        tmp_deplibs=
+	for deplib in $deplibs; do
+	  tmp_deplibs="$deplib $tmp_deplibs"
+	done
+	deplibs="$tmp_deplibs"
+      fi
+
+      if test "$linkmode,$pass" = "lib,link" ||
+	 test "$linkmode,$pass" = "prog,scan"; then
+	libs="$deplibs"
+	deplibs=
+      fi
+      if test "$linkmode" = prog; then
+	case $pass in
+	dlopen) libs="$dlfiles" ;;
+	dlpreopen) libs="$dlprefiles" ;;
+	link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+	esac
+      fi
+      if test "$linkmode,$pass" = "lib,dlpreopen"; then
+	# Collect and forward deplibs of preopened libtool libs
+	for lib in $dlprefiles; do
+	  # Ignore non-libtool-libs
+	  dependency_libs=
+	  func_resolve_sysroot "$lib"
+	  case $lib in
+	  *.la)	func_source "$func_resolve_sysroot_result" ;;
+	  esac
+
+	  # Collect preopened libtool deplibs, except any this library
+	  # has declared as weak libs
+	  for deplib in $dependency_libs; do
+	    func_basename "$deplib"
+            deplib_base=$func_basename_result
+	    case " $weak_libs " in
+	    *" $deplib_base "*) ;;
+	    *) func_append deplibs " $deplib" ;;
+	    esac
+	  done
+	done
+	libs="$dlprefiles"
+      fi
+      if test "$pass" = dlopen; then
+	# Collect dlpreopened libraries
+	save_deplibs="$deplibs"
+	deplibs=
+      fi
+
+      for deplib in $libs; do
+	lib=
+	found=no
+	case $deplib in
+	-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+        |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    func_append compiler_flags " $deplib"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) func_append new_inherited_linker_flags " $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-l*)
+	  if test "$linkmode" != lib && test "$linkmode" != prog; then
+	    func_warning "\`-l' is ignored for archives/objects"
+	    continue
+	  fi
+	  func_stripname '-l' '' "$deplib"
+	  name=$func_stripname_result
+	  if test "$linkmode" = lib; then
+	    searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+	  else
+	    searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+	  fi
+	  for searchdir in $searchdirs; do
+	    for search_ext in .la $std_shrext .so .a; do
+	      # Search the libtool library
+	      lib="$searchdir/lib${name}${search_ext}"
+	      if test -f "$lib"; then
+		if test "$search_ext" = ".la"; then
+		  found=yes
+		else
+		  found=no
+		fi
+		break 2
+	      fi
+	    done
+	  done
+	  if test "$found" != yes; then
+	    # deplib doesn't seem to be a libtool library
+	    if test "$linkmode,$pass" = "prog,link"; then
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      deplibs="$deplib $deplibs"
+	      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    continue
+	  else # deplib is a libtool library
+	    # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
+	    # We need to do some special things here, and not later.
+	    if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	      case " $predeps $postdeps " in
+	      *" $deplib "*)
+		if func_lalib_p "$lib"; then
+		  library_names=
+		  old_library=
+		  func_source "$lib"
+		  for l in $old_library $library_names; do
+		    ll="$l"
+		  done
+		  if test "X$ll" = "X$old_library" ; then # only static version available
+		    found=no
+		    func_dirname "$lib" "" "."
+		    ladir="$func_dirname_result"
+		    lib=$ladir/$old_library
+		    if test "$linkmode,$pass" = "prog,link"; then
+		      compile_deplibs="$deplib $compile_deplibs"
+		      finalize_deplibs="$deplib $finalize_deplibs"
+		    else
+		      deplibs="$deplib $deplibs"
+		      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+		    fi
+		    continue
+		  fi
+		fi
+		;;
+	      *) ;;
+	      esac
+	    fi
+	  fi
+	  ;; # -l
+	*.ltframework)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    deplibs="$deplib $deplibs"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) func_append new_inherited_linker_flags " $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-L*)
+	  case $linkmode in
+	  lib)
+	    deplibs="$deplib $deplibs"
+	    test "$pass" = conv && continue
+	    newdependency_libs="$deplib $newdependency_libs"
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    func_append newlib_search_path " $func_resolve_sysroot_result"
+	    ;;
+	  prog)
+	    if test "$pass" = conv; then
+	      deplibs="$deplib $deplibs"
+	      continue
+	    fi
+	    if test "$pass" = scan; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    func_append newlib_search_path " $func_resolve_sysroot_result"
+	    ;;
+	  *)
+	    func_warning "\`-L' is ignored for archives/objects"
+	    ;;
+	  esac # linkmode
+	  continue
+	  ;; # -L
+	-R*)
+	  if test "$pass" = link; then
+	    func_stripname '-R' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    dir=$func_resolve_sysroot_result
+	    # Make sure the xrpath contains only unique directories.
+	    case "$xrpath " in
+	    *" $dir "*) ;;
+	    *) func_append xrpath " $dir" ;;
+	    esac
+	  fi
+	  deplibs="$deplib $deplibs"
+	  continue
+	  ;;
+	*.la)
+	  func_resolve_sysroot "$deplib"
+	  lib=$func_resolve_sysroot_result
+	  ;;
+	*.$libext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	    continue
+	  fi
+	  case $linkmode in
+	  lib)
+	    # Linking convenience modules into shared libraries is allowed,
+	    # but linking other static libraries is non-portable.
+	    case " $dlpreconveniencelibs " in
+	    *" $deplib "*) ;;
+	    *)
+	      valid_a_lib=no
+	      case $deplibs_check_method in
+		match_pattern*)
+		  set dummy $deplibs_check_method; shift
+		  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+		  if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+		    | $EGREP "$match_pattern_regex" > /dev/null; then
+		    valid_a_lib=yes
+		  fi
+		;;
+		pass_all)
+		  valid_a_lib=yes
+		;;
+	      esac
+	      if test "$valid_a_lib" != yes; then
+		echo
+		$ECHO "*** Warning: Trying to link with static lib archive $deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because the file extension .$libext of this argument makes me believe"
+		echo "*** that it is just a static archive that I should not use here."
+	      else
+		echo
+		$ECHO "*** Warning: Linking the shared library $output against the"
+		$ECHO "*** static library $deplib is not portable!"
+		deplibs="$deplib $deplibs"
+	      fi
+	      ;;
+	    esac
+	    continue
+	    ;;
+	  prog)
+	    if test "$pass" != link; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    continue
+	    ;;
+	  esac # linkmode
+	  ;; # *.$libext
+	*.lo | *.$objext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	  elif test "$linkmode" = prog; then
+	    if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+	      # If there is no dlopen support or we're linking statically,
+	      # we need to preload.
+	      func_append newdlprefiles " $deplib"
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      func_append newdlfiles " $deplib"
+	    fi
+	  fi
+	  continue
+	  ;;
+	%DEPLIBS%)
+	  alldeplibs=yes
+	  continue
+	  ;;
+	esac # case $deplib
+
+	if test "$found" = yes || test -f "$lib"; then :
+	else
+	  func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
+	fi
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$lib" \
+	  || func_fatal_error "\`$lib' is not a valid libtool archive"
+
+	func_dirname "$lib" "" "."
+	ladir="$func_dirname_result"
+
+	dlname=
+	dlopen=
+	dlpreopen=
+	libdir=
+	library_names=
+	old_library=
+	inherited_linker_flags=
+	# If the library was installed with an old release of libtool,
+	# it will not redefine variables installed, or shouldnotlink
+	installed=yes
+	shouldnotlink=no
+	avoidtemprpath=
+
+
+	# Read the .la file
+	func_source "$lib"
+
+	# Convert "-framework foo" to "foo.ltframework"
+	if test -n "$inherited_linker_flags"; then
+	  tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+	  for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+	    case " $new_inherited_linker_flags " in
+	      *" $tmp_inherited_linker_flag "*) ;;
+	      *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
+	    esac
+	  done
+	fi
+	dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	if test "$linkmode,$pass" = "lib,link" ||
+	   test "$linkmode,$pass" = "prog,scan" ||
+	   { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+	  test -n "$dlopen" && func_append dlfiles " $dlopen"
+	  test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
+	fi
+
+	if test "$pass" = conv; then
+	  # Only check for convenience libraries
+	  deplibs="$lib $deplibs"
+	  if test -z "$libdir"; then
+	    if test -z "$old_library"; then
+	      func_fatal_error "cannot find name of link library for \`$lib'"
+	    fi
+	    # It is a libtool convenience library, so add in its objects.
+	    func_append convenience " $ladir/$objdir/$old_library"
+	    func_append old_convenience " $ladir/$objdir/$old_library"
+	  elif test "$linkmode" != prog && test "$linkmode" != lib; then
+	    func_fatal_error "\`$lib' is not a convenience library"
+	  fi
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    deplibs="$deplib $deplibs"
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+	      esac
+	    fi
+	    func_append tmp_libs " $deplib"
+	  done
+	  continue
+	fi # $pass = conv
+
+
+	# Get the name of the library we link against.
+	linklib=
+	if test -n "$old_library" &&
+	   { test "$prefer_static_libs" = yes ||
+	     test "$prefer_static_libs,$installed" = "built,no"; }; then
+	  linklib=$old_library
+	else
+	  for l in $old_library $library_names; do
+	    linklib="$l"
+	  done
+	fi
+	if test -z "$linklib"; then
+	  func_fatal_error "cannot find name of link library for \`$lib'"
+	fi
+
+	# This library was specified with -dlopen.
+	if test "$pass" = dlopen; then
+	  if test -z "$libdir"; then
+	    func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
+	  fi
+	  if test -z "$dlname" ||
+	     test "$dlopen_support" != yes ||
+	     test "$build_libtool_libs" = no; then
+	    # If there is no dlname, no dlopen support or we're linking
+	    # statically, we need to preload.  We also need to preload any
+	    # dependent libraries so libltdl's deplib preloader doesn't
+	    # bomb out in the load deplibs phase.
+	    func_append dlprefiles " $lib $dependency_libs"
+	  else
+	    func_append newdlfiles " $lib"
+	  fi
+	  continue
+	fi # $pass = dlopen
+
+	# We need an absolute path.
+	case $ladir in
+	[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
+	*)
+	  abs_ladir=`cd "$ladir" && pwd`
+	  if test -z "$abs_ladir"; then
+	    func_warning "cannot determine absolute directory name of \`$ladir'"
+	    func_warning "passing it literally to the linker, although it might fail"
+	    abs_ladir="$ladir"
+	  fi
+	  ;;
+	esac
+	func_basename "$lib"
+	laname="$func_basename_result"
+
+	# Find the relevant object directory and library name.
+	if test "X$installed" = Xyes; then
+	  if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    func_warning "library \`$lib' was moved."
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    libdir="$abs_ladir"
+	  else
+	    dir="$lt_sysroot$libdir"
+	    absdir="$lt_sysroot$libdir"
+	  fi
+	  test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+	else
+	  if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    # Remove this search path later
+	    func_append notinst_path " $abs_ladir"
+	  else
+	    dir="$ladir/$objdir"
+	    absdir="$abs_ladir/$objdir"
+	    # Remove this search path later
+	    func_append notinst_path " $abs_ladir"
+	  fi
+	fi # $installed = yes
+	func_stripname 'lib' '.la' "$laname"
+	name=$func_stripname_result
+
+	# This library was specified with -dlpreopen.
+	if test "$pass" = dlpreopen; then
+	  if test -z "$libdir" && test "$linkmode" = prog; then
+	    func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+	  fi
+	  case "$host" in
+	    # special handling for platforms with PE-DLLs.
+	    *cygwin* | *mingw* | *cegcc* )
+	      # Linker will automatically link against shared library if both
+	      # static and shared are present.  Therefore, ensure we extract
+	      # symbols from the import library if a shared library is present
+	      # (otherwise, the dlopen module name will be incorrect).  We do
+	      # this by putting the import library name into $newdlprefiles.
+	      # We recover the dlopen module name by 'saving' the la file
+	      # name in a special purpose variable, and (later) extracting the
+	      # dlname from the la file.
+	      if test -n "$dlname"; then
+	        func_tr_sh "$dir/$linklib"
+	        eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
+	        func_append newdlprefiles " $dir/$linklib"
+	      else
+	        func_append newdlprefiles " $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          func_append dlpreconveniencelibs " $dir/$old_library"
+	      fi
+	    ;;
+	    * )
+	      # Prefer using a static library (so that no silly _DYNAMIC symbols
+	      # are required to link).
+	      if test -n "$old_library"; then
+	        func_append newdlprefiles " $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          func_append dlpreconveniencelibs " $dir/$old_library"
+	      # Otherwise, use the dlname, so that lt_dlopen finds it.
+	      elif test -n "$dlname"; then
+	        func_append newdlprefiles " $dir/$dlname"
+	      else
+	        func_append newdlprefiles " $dir/$linklib"
+	      fi
+	    ;;
+	  esac
+	fi # $pass = dlpreopen
+
+	if test -z "$libdir"; then
+	  # Link the convenience library
+	  if test "$linkmode" = lib; then
+	    deplibs="$dir/$old_library $deplibs"
+	  elif test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$dir/$old_library $compile_deplibs"
+	    finalize_deplibs="$dir/$old_library $finalize_deplibs"
+	  else
+	    deplibs="$lib $deplibs" # used for prog,scan pass
+	  fi
+	  continue
+	fi
+
+
+	if test "$linkmode" = prog && test "$pass" != link; then
+	  func_append newlib_search_path " $ladir"
+	  deplibs="$lib $deplibs"
+
+	  linkalldeplibs=no
+	  if test "$link_all_deplibs" != no || test -z "$library_names" ||
+	     test "$build_libtool_libs" = no; then
+	    linkalldeplibs=yes
+	  fi
+
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    case $deplib in
+	    -L*) func_stripname '-L' '' "$deplib"
+	         func_resolve_sysroot "$func_stripname_result"
+	         func_append newlib_search_path " $func_resolve_sysroot_result"
+		 ;;
+	    esac
+	    # Need to link against all dependency_libs?
+	    if test "$linkalldeplibs" = yes; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      # Need to hardcode shared library paths
+	      # and/or link against static libraries
+	      newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+	      esac
+	    fi
+	    func_append tmp_libs " $deplib"
+	  done # for deplib
+	  continue
+	fi # $linkmode = prog...
+
+	if test "$linkmode,$pass" = "prog,link"; then
+	  if test -n "$library_names" &&
+	     { { test "$prefer_static_libs" = no ||
+	         test "$prefer_static_libs,$installed" = "built,yes"; } ||
+	       test -z "$old_library"; }; then
+	    # We need to hardcode the library path
+	    if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
+	      # Make sure the rpath contains only unique directories.
+	      case "$temp_rpath:" in
+	      *"$absdir:"*) ;;
+	      *) func_append temp_rpath "$absdir:" ;;
+	      esac
+	    fi
+
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) func_append compile_rpath " $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) func_append finalize_rpath " $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi # $linkmode,$pass = prog,link...
+
+	  if test "$alldeplibs" = yes &&
+	     { test "$deplibs_check_method" = pass_all ||
+	       { test "$build_libtool_libs" = yes &&
+		 test -n "$library_names"; }; }; then
+	    # We only need to search for static libraries
+	    continue
+	  fi
+	fi
+
+	link_static=no # Whether the deplib will be linked statically
+	use_static_libs=$prefer_static_libs
+	if test "$use_static_libs" = built && test "$installed" = yes; then
+	  use_static_libs=no
+	fi
+	if test -n "$library_names" &&
+	   { test "$use_static_libs" = no || test -z "$old_library"; }; then
+	  case $host in
+	  *cygwin* | *mingw* | *cegcc*)
+	      # No point in relinking DLLs because paths are not encoded
+	      func_append notinst_deplibs " $lib"
+	      need_relink=no
+	    ;;
+	  *)
+	    if test "$installed" = no; then
+	      func_append notinst_deplibs " $lib"
+	      need_relink=yes
+	    fi
+	    ;;
+	  esac
+	  # This is a shared library
+
+	  # Warn about portability: we can't link against -module libraries on
+	  # some systems (darwin).  Don't bleat about dlopened modules, though!
+	  dlopenmodule=""
+	  for dlpremoduletest in $dlprefiles; do
+	    if test "X$dlpremoduletest" = "X$lib"; then
+	      dlopenmodule="$dlpremoduletest"
+	      break
+	    fi
+	  done
+	  if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
+	    echo
+	    if test "$linkmode" = prog; then
+	      $ECHO "*** Warning: Linking the executable $output against the loadable module"
+	    else
+	      $ECHO "*** Warning: Linking the shared library $output against the loadable module"
+	    fi
+	    $ECHO "*** $linklib is not portable!"
+	  fi
+	  if test "$linkmode" = lib &&
+	     test "$hardcode_into_libs" = yes; then
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) func_append compile_rpath " $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) func_append finalize_rpath " $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi
+
+	  if test -n "$old_archive_from_expsyms_cmds"; then
+	    # figure out the soname
+	    set dummy $library_names
+	    shift
+	    realname="$1"
+	    shift
+	    libname=`eval "\\$ECHO \"$libname_spec\""`
+	    # use dlname if we got it. it's perfectly good, no?
+	    if test -n "$dlname"; then
+	      soname="$dlname"
+	    elif test -n "$soname_spec"; then
+	      # bleh windows
+	      case $host in
+	      *cygwin* | mingw* | *cegcc*)
+	        func_arith $current - $age
+		major=$func_arith_result
+		versuffix="-$major"
+		;;
+	      esac
+	      eval soname=\"$soname_spec\"
+	    else
+	      soname="$realname"
+	    fi
+
+	    # Make a new name for the extract_expsyms_cmds to use
+	    soroot="$soname"
+	    func_basename "$soroot"
+	    soname="$func_basename_result"
+	    func_stripname 'lib' '.dll' "$soname"
+	    newlib=libimp-$func_stripname_result.a
+
+	    # If the library has no export list, then create one now
+	    if test -f "$output_objdir/$soname-def"; then :
+	    else
+	      func_verbose "extracting exported symbol list from \`$soname'"
+	      func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+	    fi
+
+	    # Create $newlib
+	    if test -f "$output_objdir/$newlib"; then :; else
+	      func_verbose "generating import library for \`$soname'"
+	      func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+	    fi
+	    # make sure the library variables are pointing to the new library
+	    dir=$output_objdir
+	    linklib=$newlib
+	  fi # test -n "$old_archive_from_expsyms_cmds"
+
+	  if test "$linkmode" = prog || test "$opt_mode" != relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    lib_linked=yes
+	    case $hardcode_action in
+	    immediate | unsupported)
+	      if test "$hardcode_direct" = no; then
+		add="$dir/$linklib"
+		case $host in
+		  *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
+		  *-*-sysv4*uw2*) add_dir="-L$dir" ;;
+		  *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+		    *-*-unixware7*) add_dir="-L$dir" ;;
+		  *-*-darwin* )
+		    # If the lib is a (non-dlopened) module then we cannot link
+		    # against it; someone is ignoring the earlier warnings.
+		    if /usr/bin/file -L $add 2> /dev/null |
+			 $GREP ": [^:]* bundle" >/dev/null ; then
+		      if test "X$dlopenmodule" != "X$lib"; then
+			$ECHO "*** Warning: lib $linklib is a module, not a shared library"
+			if test -z "$old_library" ; then
+			  echo
+			  echo "*** And there doesn't seem to be a static archive available"
+			  echo "*** The link will probably fail, sorry"
+			else
+			  add="$dir/$old_library"
+			fi
+		      elif test -n "$old_library"; then
+			add="$dir/$old_library"
+		      fi
+		    fi
+		esac
+	      elif test "$hardcode_minus_L" = no; then
+		case $host in
+		*-*-sunos*) add_shlibpath="$dir" ;;
+		esac
+		add_dir="-L$dir"
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = no; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    relink)
+	      if test "$hardcode_direct" = yes &&
+	         test "$hardcode_direct_absolute" = no; then
+		add="$dir/$linklib"
+	      elif test "$hardcode_minus_L" = yes; then
+		add_dir="-L$absdir"
+		# Try looking first in the location we're being installed to.
+		if test -n "$inst_prefix_dir"; then
+		  case $libdir in
+		    [\\/]*)
+		      func_append add_dir " -L$inst_prefix_dir$libdir"
+		      ;;
+		  esac
+		fi
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = yes; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    *) lib_linked=no ;;
+	    esac
+
+	    if test "$lib_linked" != yes; then
+	      func_fatal_configuration "unsupported hardcode properties"
+	    fi
+
+	    if test -n "$add_shlibpath"; then
+	      case :$compile_shlibpath: in
+	      *":$add_shlibpath:"*) ;;
+	      *) func_append compile_shlibpath "$add_shlibpath:" ;;
+	      esac
+	    fi
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+	      test -n "$add" && compile_deplibs="$add $compile_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	      if test "$hardcode_direct" != yes &&
+		 test "$hardcode_minus_L" != yes &&
+		 test "$hardcode_shlibpath_var" = yes; then
+		case :$finalize_shlibpath: in
+		*":$libdir:"*) ;;
+		*) func_append finalize_shlibpath "$libdir:" ;;
+		esac
+	      fi
+	    fi
+	  fi
+
+	  if test "$linkmode" = prog || test "$opt_mode" = relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    # Finalize command for both is simple: just hardcode it.
+	    if test "$hardcode_direct" = yes &&
+	       test "$hardcode_direct_absolute" = no; then
+	      add="$libdir/$linklib"
+	    elif test "$hardcode_minus_L" = yes; then
+	      add_dir="-L$libdir"
+	      add="-l$name"
+	    elif test "$hardcode_shlibpath_var" = yes; then
+	      case :$finalize_shlibpath: in
+	      *":$libdir:"*) ;;
+	      *) func_append finalize_shlibpath "$libdir:" ;;
+	      esac
+	      add="-l$name"
+	    elif test "$hardcode_automatic" = yes; then
+	      if test -n "$inst_prefix_dir" &&
+		 test -f "$inst_prefix_dir$libdir/$linklib" ; then
+		add="$inst_prefix_dir$libdir/$linklib"
+	      else
+		add="$libdir/$linklib"
+	      fi
+	    else
+	      # We cannot seem to hardcode it, guess we'll fake it.
+	      add_dir="-L$libdir"
+	      # Try looking first in the location we're being installed to.
+	      if test -n "$inst_prefix_dir"; then
+		case $libdir in
+		  [\\/]*)
+		    func_append add_dir " -L$inst_prefix_dir$libdir"
+		    ;;
+		esac
+	      fi
+	      add="-l$name"
+	    fi
+
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+	      test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	    fi
+	  fi
+	elif test "$linkmode" = prog; then
+	  # Here we assume that one of hardcode_direct or hardcode_minus_L
+	  # is not unsupported.  This is valid on all known static and
+	  # shared platforms.
+	  if test "$hardcode_direct" != unsupported; then
+	    test -n "$old_library" && linklib="$old_library"
+	    compile_deplibs="$dir/$linklib $compile_deplibs"
+	    finalize_deplibs="$dir/$linklib $finalize_deplibs"
+	  else
+	    compile_deplibs="-l$name -L$dir $compile_deplibs"
+	    finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+	  fi
+	elif test "$build_libtool_libs" = yes; then
+	  # Not a shared library
+	  if test "$deplibs_check_method" != pass_all; then
+	    # We're trying to link a shared library against a static one
+	    # but the system doesn't support it.
+
+	    # Just print a warning and add the library to dependency_libs so
+	    # that the program can be linked against the static library.
+	    echo
+	    $ECHO "*** Warning: This system can not link to static lib archive $lib."
+	    echo "*** I have the capability to make that library automatically link in when"
+	    echo "*** you link to this library.  But I can only do this if you have a"
+	    echo "*** shared version of the library, which you do not appear to have."
+	    if test "$module" = yes; then
+	      echo "*** But as you try to build a module library, libtool will still create "
+	      echo "*** a static module that should work as long as the dlopening application"
+	      echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
+	      if test -z "$global_symbol_pipe"; then
+		echo
+		echo "*** However, this would only work if libtool was able to extract symbol"
+		echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+		echo "*** not find such a program.  So, this module is probably useless."
+		echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	      fi
+	      if test "$build_old_libs" = no; then
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  else
+	    deplibs="$dir/$old_library $deplibs"
+	    link_static=yes
+	  fi
+	fi # link shared/static library?
+
+	if test "$linkmode" = lib; then
+	  if test -n "$dependency_libs" &&
+	     { test "$hardcode_into_libs" != yes ||
+	       test "$build_old_libs" = yes ||
+	       test "$link_static" = yes; }; then
+	    # Extract -R from dependency_libs
+	    temp_deplibs=
+	    for libdir in $dependency_libs; do
+	      case $libdir in
+	      -R*) func_stripname '-R' '' "$libdir"
+	           temp_xrpath=$func_stripname_result
+		   case " $xrpath " in
+		   *" $temp_xrpath "*) ;;
+		   *) func_append xrpath " $temp_xrpath";;
+		   esac;;
+	      *) func_append temp_deplibs " $libdir";;
+	      esac
+	    done
+	    dependency_libs="$temp_deplibs"
+	  fi
+
+	  func_append newlib_search_path " $absdir"
+	  # Link against this library
+	  test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+	  # ... and its dependency_libs
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    newdependency_libs="$deplib $newdependency_libs"
+	    case $deplib in
+              -L*) func_stripname '-L' '' "$deplib"
+                   func_resolve_sysroot "$func_stripname_result";;
+              *) func_resolve_sysroot "$deplib" ;;
+            esac
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $func_resolve_sysroot_result "*)
+                func_append specialdeplibs " $func_resolve_sysroot_result" ;;
+	      esac
+	    fi
+	    func_append tmp_libs " $func_resolve_sysroot_result"
+	  done
+
+	  if test "$link_all_deplibs" != no; then
+	    # Add the search paths of all dependency libraries
+	    for deplib in $dependency_libs; do
+	      path=
+	      case $deplib in
+	      -L*) path="$deplib" ;;
+	      *.la)
+	        func_resolve_sysroot "$deplib"
+	        deplib=$func_resolve_sysroot_result
+	        func_dirname "$deplib" "" "."
+		dir=$func_dirname_result
+		# We need an absolute path.
+		case $dir in
+		[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+		*)
+		  absdir=`cd "$dir" && pwd`
+		  if test -z "$absdir"; then
+		    func_warning "cannot determine absolute directory name of \`$dir'"
+		    absdir="$dir"
+		  fi
+		  ;;
+		esac
+		if $GREP "^installed=no" $deplib > /dev/null; then
+		case $host in
+		*-*-darwin*)
+		  depdepl=
+		  eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+		  if test -n "$deplibrary_names" ; then
+		    for tmp in $deplibrary_names ; do
+		      depdepl=$tmp
+		    done
+		    if test -f "$absdir/$objdir/$depdepl" ; then
+		      depdepl="$absdir/$objdir/$depdepl"
+		      darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+                      if test -z "$darwin_install_name"; then
+                          darwin_install_name=`${OTOOL64} -L $depdepl  | awk '{if (NR == 2) {print $1;exit}}'`
+                      fi
+		      func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+		      func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}"
+		      path=
+		    fi
+		  fi
+		  ;;
+		*)
+		  path="-L$absdir/$objdir"
+		  ;;
+		esac
+		else
+		  eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+		  test -z "$libdir" && \
+		    func_fatal_error "\`$deplib' is not a valid libtool archive"
+		  test "$absdir" != "$libdir" && \
+		    func_warning "\`$deplib' seems to be moved"
+
+		  path="-L$absdir"
+		fi
+		;;
+	      esac
+	      case " $deplibs " in
+	      *" $path "*) ;;
+	      *) deplibs="$path $deplibs" ;;
+	      esac
+	    done
+	  fi # link_all_deplibs != no
+	fi # linkmode = lib
+      done # for deplib in $libs
+      if test "$pass" = link; then
+	if test "$linkmode" = "prog"; then
+	  compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+	  finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+	else
+	  compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	fi
+      fi
+      dependency_libs="$newdependency_libs"
+      if test "$pass" = dlpreopen; then
+	# Link the dlpreopened libraries before other libraries
+	for deplib in $save_deplibs; do
+	  deplibs="$deplib $deplibs"
+	done
+      fi
+      if test "$pass" != dlopen; then
+	if test "$pass" != conv; then
+	  # Make sure lib_search_path contains only unique directories.
+	  lib_search_path=
+	  for dir in $newlib_search_path; do
+	    case "$lib_search_path " in
+	    *" $dir "*) ;;
+	    *) func_append lib_search_path " $dir" ;;
+	    esac
+	  done
+	  newlib_search_path=
+	fi
+
+	if test "$linkmode,$pass" != "prog,link"; then
+	  vars="deplibs"
+	else
+	  vars="compile_deplibs finalize_deplibs"
+	fi
+	for var in $vars dependency_libs; do
+	  # Add libraries to $var in reverse order
+	  eval tmp_libs=\"\$$var\"
+	  new_libs=
+	  for deplib in $tmp_libs; do
+	    # FIXME: Pedantically, this is the right thing to do, so
+	    #        that some nasty dependency loop isn't accidentally
+	    #        broken:
+	    #new_libs="$deplib $new_libs"
+	    # Pragmatically, this seems to cause very few problems in
+	    # practice:
+	    case $deplib in
+	    -L*) new_libs="$deplib $new_libs" ;;
+	    -R*) ;;
+	    *)
+	      # And here is the reason: when a library appears more
+	      # than once as an explicit dependence of a library, or
+	      # is implicitly linked in more than once by the
+	      # compiler, it is considered special, and multiple
+	      # occurrences thereof are not removed.  Compare this
+	      # with having the same library being listed as a
+	      # dependency of multiple other libraries: in this case,
+	      # we know (pedantically, we assume) the library does not
+	      # need to be listed more than once, so we keep only the
+	      # last copy.  This is not always right, but it is rare
+	      # enough that we require users that really mean to play
+	      # such unportable linking tricks to link the library
+	      # using -Wl,-lname, so that libtool does not consider it
+	      # for duplicate removal.
+	      case " $specialdeplibs " in
+	      *" $deplib "*) new_libs="$deplib $new_libs" ;;
+	      *)
+		case " $new_libs " in
+		*" $deplib "*) ;;
+		*) new_libs="$deplib $new_libs" ;;
+		esac
+		;;
+	      esac
+	      ;;
+	    esac
+	  done
+	  tmp_libs=
+	  for deplib in $new_libs; do
+	    case $deplib in
+	    -L*)
+	      case " $tmp_libs " in
+	      *" $deplib "*) ;;
+	      *) func_append tmp_libs " $deplib" ;;
+	      esac
+	      ;;
+	    *) func_append tmp_libs " $deplib" ;;
+	    esac
+	  done
+	  eval $var=\"$tmp_libs\"
+	done # for var
+      fi
+      # Last step: remove runtime libs from dependency_libs
+      # (they stay in deplibs)
+      tmp_libs=
+      for i in $dependency_libs ; do
+	case " $predeps $postdeps $compiler_lib_search_path " in
+	*" $i "*)
+	  i=""
+	  ;;
+	esac
+	if test -n "$i" ; then
+	  func_append tmp_libs " $i"
+	fi
+      done
+      dependency_libs=$tmp_libs
+    done # for pass
+    if test "$linkmode" = prog; then
+      dlfiles="$newdlfiles"
+    fi
+    if test "$linkmode" = prog || test "$linkmode" = lib; then
+      dlprefiles="$newdlprefiles"
+    fi
+
+    case $linkmode in
+    oldlib)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for archives"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for archives" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for archives"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for archives"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info/-version-number' is ignored for archives"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for archives"
+
+      test -n "$export_symbols$export_symbols_regex" && \
+	func_warning "\`-export-symbols' is ignored for archives"
+
+      # Now set the variables for building old libraries.
+      build_libtool_libs=no
+      oldlibs="$output"
+      func_append objs "$old_deplibs"
+      ;;
+
+    lib)
+      # Make sure we only generate libraries of the form `libNAME.la'.
+      case $outputname in
+      lib*)
+	func_stripname 'lib' '.la' "$outputname"
+	name=$func_stripname_result
+	eval shared_ext=\"$shrext_cmds\"
+	eval libname=\"$libname_spec\"
+	;;
+      *)
+	test "$module" = no && \
+	  func_fatal_help "libtool library \`$output' must begin with \`lib'"
+
+	if test "$need_lib_prefix" != no; then
+	  # Add the "lib" prefix for modules if required
+	  func_stripname '' '.la' "$outputname"
+	  name=$func_stripname_result
+	  eval shared_ext=\"$shrext_cmds\"
+	  eval libname=\"$libname_spec\"
+	else
+	  func_stripname '' '.la' "$outputname"
+	  libname=$func_stripname_result
+	fi
+	;;
+      esac
+
+      if test -n "$objs"; then
+	if test "$deplibs_check_method" != pass_all; then
+	  func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
+	else
+	  echo
+	  $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+	  $ECHO "*** objects $objs is not portable!"
+	  func_append libobjs " $objs"
+	fi
+      fi
+
+      test "$dlself" != no && \
+	func_warning "\`-dlopen self' is ignored for libtool libraries"
+
+      set dummy $rpath
+      shift
+      test "$#" -gt 1 && \
+	func_warning "ignoring multiple \`-rpath's for a libtool library"
+
+      install_libdir="$1"
+
+      oldlibs=
+      if test -z "$rpath"; then
+	if test "$build_libtool_libs" = yes; then
+	  # Building a libtool convenience library.
+	  # Some compilers have problems with a `.al' extension, so
+	  # convenience libraries should have the same extension that an
+	  # archive normally would.
+	  oldlibs="$output_objdir/$libname.$libext $oldlibs"
+	  build_libtool_libs=convenience
+	  build_old_libs=yes
+	fi
+
+	test -n "$vinfo" && \
+	  func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
+
+	test -n "$release" && \
+	  func_warning "\`-release' is ignored for convenience libraries"
+      else
+
+	# Parse the version information argument.
+	save_ifs="$IFS"; IFS=':'
+	set dummy $vinfo 0 0 0
+	shift
+	IFS="$save_ifs"
+
+	test -n "$7" && \
+	  func_fatal_help "too many parameters to \`-version-info'"
+
+	# convert absolute version numbers to libtool ages
+	# this retains compatibility with .la files and attempts
+	# to make the code below a bit more comprehensible
+
+	case $vinfo_number in
+	yes)
+	  number_major="$1"
+	  number_minor="$2"
+	  number_revision="$3"
+	  #
+	  # There are really only two kinds -- those that
+	  # use the current revision as the major version
+	  # and those that subtract age and use age as
+	  # a minor version.  But, then there is irix
+	  # which has an extra 1 added just for fun
+	  #
+	  case $version_type in
+	  # correct linux to gnu/linux during the next big refactor
+	  darwin|linux|osf|windows|none)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_revision"
+	    ;;
+	  freebsd-aout|freebsd-elf|qnx|sunos)
+	    current="$number_major"
+	    revision="$number_minor"
+	    age="0"
+	    ;;
+	  irix|nonstopux)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_minor"
+	    lt_irix_increment=no
+	    ;;
+	  esac
+	  ;;
+	no)
+	  current="$1"
+	  revision="$2"
+	  age="$3"
+	  ;;
+	esac
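+
+	# For example, `-version-number 3:2:1' becomes current=5 (3+2), age=2,
+	# revision=1 on darwin/linux/osf/windows-style hosts, while
+	# freebsd/qnx/sunos-style hosts keep current=3, revision=2 and force age=0.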
+
+	# Check that each of the things are valid numbers.
+	case $current in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "CURRENT \`$current' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $revision in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "REVISION \`$revision' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $age in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "AGE \`$age' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	if test "$age" -gt "$current"; then
+	  func_error "AGE \`$age' is greater than the current interface number \`$current'"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	fi
+
+	# Calculate the version variables.
+	major=
+	versuffix=
+	verstring=
+	case $version_type in
+	none) ;;
+
+	darwin)
+	  # Like Linux, but with the current version available in
+	  # verstring for coding it into the library header
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  # Darwin ld doesn't like 0 for these options...
+	  func_arith $current + 1
+	  minor_current=$func_arith_result
+	  xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
+	  verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+	  ;;
+
+	freebsd-aout)
+	  major=".$current"
+	  versuffix=".$current.$revision";
+	  ;;
+
+	freebsd-elf)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	irix | nonstopux)
+	  if test "X$lt_irix_increment" = "Xno"; then
+	    func_arith $current - $age
+	  else
+	    func_arith $current - $age + 1
+	  fi
+	  major=$func_arith_result
+
+	  case $version_type in
+	    nonstopux) verstring_prefix=nonstopux ;;
+	    *)         verstring_prefix=sgi ;;
+	  esac
+	  verstring="$verstring_prefix$major.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$revision
+	  while test "$loop" -ne 0; do
+	    func_arith $revision - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring_prefix$major.$iface:$verstring"
+	  done
+
+	  # Before this point, $major must not contain `.'.
+	  major=.$major
+	  versuffix="$major.$revision"
+	  ;;
+
+	linux) # correct to gnu/linux during the next big refactor
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  ;;
+
+	osf)
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix=".$current.$age.$revision"
+	  verstring="$current.$age.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$age
+	  while test "$loop" -ne 0; do
+	    func_arith $current - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring:${iface}.0"
+	  done
+
+	  # Make executables depend on our current version.
+	  func_append verstring ":${current}.0"
+	  ;;
+
+	qnx)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	sunos)
+	  major=".$current"
+	  versuffix=".$current.$revision"
+	  ;;
+
+	windows)
+	  # Use '-' rather than '.', since we only want one
+	  # extension on DOS 8.3 filesystems.
+	  func_arith $current - $age
+	  major=$func_arith_result
+	  versuffix="-$major"
+	  ;;
+
+	*)
+	  func_fatal_configuration "unknown library version type \`$version_type'"
+	  ;;
+	esac
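+
+	# For example, on a linux-style host, current=5, age=2, revision=1
+	# gives major=.3 (5-2) and versuffix=.3.2.1, which $library_names_spec
+	# typically expands to libNAME.so.3.2.1 with a libNAME.so.3 soname.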
+
+	# Clear the version info if we defaulted, and they specified a release.
+	if test -z "$vinfo" && test -n "$release"; then
+	  major=
+	  case $version_type in
+	  darwin)
+	    # we can't check for "0.0" in archive_cmds due to quoting
+	    # problems, so we reset it completely
+	    verstring=
+	    ;;
+	  *)
+	    verstring="0.0"
+	    ;;
+	  esac
+	  if test "$need_version" = no; then
+	    versuffix=
+	  else
+	    versuffix=".0.0"
+	  fi
+	fi
+
+	# Remove version info from name if versioning should be avoided
+	if test "$avoid_version" = yes && test "$need_version" = no; then
+	  major=
+	  versuffix=
+	  verstring=""
+	fi
+
+	# Check to see if the archive will have undefined symbols.
+	if test "$allow_undefined" = yes; then
+	  if test "$allow_undefined_flag" = unsupported; then
+	    func_warning "undefined symbols not allowed in $host shared libraries"
+	    build_libtool_libs=no
+	    build_old_libs=yes
+	  fi
+	else
+	  # Don't allow undefined symbols.
+	  allow_undefined_flag="$no_undefined_flag"
+	fi
+
+      fi
+
+      func_generate_dlsyms "$libname" "$libname" "yes"
+      func_append libobjs " $symfileobj"
+      test "X$libobjs" = "X " && libobjs=
+
+      if test "$opt_mode" != relink; then
+	# Remove our outputs, but don't remove object files since they
+	# may have been created when compiling PIC objects.
+	removelist=
+	tempremovelist=`$ECHO "$output_objdir/*"`
+	for p in $tempremovelist; do
+	  case $p in
+	    *.$objext | *.gcno)
+	       ;;
+	    $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
+	       if test "X$precious_files_regex" != "X"; then
+		 if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+		 then
+		   continue
+		 fi
+	       fi
+	       func_append removelist " $p"
+	       ;;
+	    *) ;;
+	  esac
+	done
+	test -n "$removelist" && \
+	  func_show_eval "${RM}r \$removelist"
+      fi
+
+      # Now set the variables for building old libraries.
+      if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+	func_append oldlibs " $output_objdir/$libname.$libext"
+
+	# Transform .lo files to .o files.
+	oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+      fi
+
+      # Eliminate all temporary directories.
+      #for path in $notinst_path; do
+      #	lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+      #	deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+      #	dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+      #done
+
+      if test -n "$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	temp_xrpath=
+	for libdir in $xrpath; do
+	  func_replace_sysroot "$libdir"
+	  func_append temp_xrpath " -R$func_replace_sysroot_result"
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append finalize_rpath " $libdir" ;;
+	  esac
+	done
+	if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+	  dependency_libs="$temp_xrpath $dependency_libs"
+	fi
+      fi
+
+      # Make sure dlfiles contains only unique files that won't be dlpreopened
+      old_dlfiles="$dlfiles"
+      dlfiles=
+      for lib in $old_dlfiles; do
+	case " $dlprefiles $dlfiles " in
+	*" $lib "*) ;;
+	*) func_append dlfiles " $lib" ;;
+	esac
+      done
+
+      # Make sure dlprefiles contains only unique files
+      old_dlprefiles="$dlprefiles"
+      dlprefiles=
+      for lib in $old_dlprefiles; do
+	case "$dlprefiles " in
+	*" $lib "*) ;;
+	*) func_append dlprefiles " $lib" ;;
+	esac
+      done
+
+      if test "$build_libtool_libs" = yes; then
+	if test -n "$rpath"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # these systems don't actually have a c library (as such)!
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C library is in the System framework
+	    func_append deplibs " System.ltframework"
+	    ;;
+	  *-*-netbsd*)
+	    # Don't link with libc until the a.out ld.so is fixed.
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc because we have libc/libc_r.
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    ;;
+	  *)
+	    # Add libc to deplibs on all other systems if necessary.
+	    if test "$build_libtool_need_lc" = "yes"; then
+	      func_append deplibs " -lc"
+	    fi
+	    ;;
+	  esac
+	fi
+
+	# Transform deplibs into only deplibs that can be linked in shared.
+	name_save=$name
+	libname_save=$libname
+	release_save=$release
+	versuffix_save=$versuffix
+	major_save=$major
+	# I'm not sure if I'm treating the release correctly.  I think
+	# release should show up in the -l (i.e. -lgmp5), so we don't want to
+	# add it in twice.  Is that correct?
+	release=""
+	versuffix=""
+	major=""
+	newdeplibs=
+	droppeddeps=no
+	case $deplibs_check_method in
+	pass_all)
+	  # Don't check for shared/static.  Everything works.
+	  # This might be a little naive.  We might want to check
+	  # whether the library exists or not.  But this is on
+	  # osf3 & osf4 and I'm not really sure... Just
+	  # implementing what was already the behavior.
+	  newdeplibs=$deplibs
+	  ;;
+	test_compile)
+	  # This code stresses the "libraries are programs" paradigm to its
+	  # limits. Maybe even breaks it.  We compile a program, linking it
+	  # against the deplibs as a proxy for the library.  Then we can check
+	  # whether they linked in statically or dynamically with ldd.
+	  $opt_dry_run || $RM conftest.c
+	  cat > conftest.c <<EOF
+	  int main() { return 0; }
+EOF
+	  $opt_dry_run || $RM conftest
+	  if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
+	    ldd_output=`ldd conftest`
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		  case " $predeps $postdeps " in
+		  *" $i "*)
+		    func_append newdeplibs " $i"
+		    i=""
+		    ;;
+		  esac
+		fi
+		if test -n "$i" ; then
+		  libname=`eval "\\$ECHO \"$libname_spec\""`
+		  deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		  set dummy $deplib_matches; shift
+		  deplib_match=$1
+		  if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		    func_append newdeplibs " $i"
+		  else
+		    droppeddeps=yes
+		    echo
+		    $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		    echo "*** I have the capability to make that library automatically link in when"
+		    echo "*** you link to this library.  But I can only do this if you have a"
+		    echo "*** shared version of the library, which I believe you do not have"
+		    echo "*** because a test_compile did reveal that the linker did not use it for"
+		    echo "*** its dynamic dependency list that programs get resolved with at runtime."
+		  fi
+		fi
+		;;
+	      *)
+		func_append newdeplibs " $i"
+		;;
+	      esac
+	    done
+	  else
+	    # Error occurred in the first compile.  Let's try to salvage
+	    # the situation: Compile a separate program for each library.
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		$opt_dry_run || $RM conftest
+		if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
+		  ldd_output=`ldd conftest`
+		  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		    case " $predeps $postdeps " in
+		    *" $i "*)
+		      func_append newdeplibs " $i"
+		      i=""
+		      ;;
+		    esac
+		  fi
+		  if test -n "$i" ; then
+		    libname=`eval "\\$ECHO \"$libname_spec\""`
+		    deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		    set dummy $deplib_matches; shift
+		    deplib_match=$1
+		    if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		      func_append newdeplibs " $i"
+		    else
+		      droppeddeps=yes
+		      echo
+		      $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		      echo "*** I have the capability to make that library automatically link in when"
+		      echo "*** you link to this library.  But I can only do this if you have a"
+		      echo "*** shared version of the library, which you do not appear to have"
+		      echo "*** because a test_compile did reveal that the linker did not use this one"
+		      echo "*** as a dynamic dependency that programs can get resolved with at runtime."
+		    fi
+		  fi
+		else
+		  droppeddeps=yes
+		  echo
+		  $ECHO "*** Warning!  Library $i is needed by this library but I was not able to"
+		  echo "*** make it link in!  You will probably need to install it or some"
+		  echo "*** library that it depends on before this library will be fully"
+		  echo "*** functional.  Installing it before continuing would be even better."
+		fi
+		;;
+	      *)
+		func_append newdeplibs " $i"
+		;;
+	      esac
+	    done
+	  fi
+	  ;;
+	file_magic*)
+	  set dummy $deplibs_check_method; shift
+	  file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  func_append newdeplibs " $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		if test -n "$file_magic_glob"; then
+		  libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
+		else
+		  libnameglob=$libname
+		fi
+		test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  if test "$want_nocaseglob" = yes; then
+		    shopt -s nocaseglob
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		    $nocaseglob
+		  else
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		  fi
+		  for potent_lib in $potential_libs; do
+		      # Follow soft links.
+		      if ls -lLd "$potent_lib" 2>/dev/null |
+			 $GREP " -> " >/dev/null; then
+			continue
+		      fi
+		      # The statement above tries to avoid entering an
+		      # endless loop below, in case of cyclic links.
+		      # We might still enter an endless loop, since a link
+		      # loop can be closed while we follow links,
+		      # but so what?
+		      potlib="$potent_lib"
+		      while test -h "$potlib" 2>/dev/null; do
+			potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
+			case $potliblink in
+			[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
+			*) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+			esac
+		      done
+		      if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+			 $SED -e 10q |
+			 $EGREP "$file_magic_regex" > /dev/null; then
+			func_append newdeplibs " $a_deplib"
+			a_deplib=""
+			break 2
+		      fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a file magic. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      func_append newdeplibs " $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	match_pattern*)
+	  set dummy $deplibs_check_method; shift
+	  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  func_append newdeplibs " $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+		  for potent_lib in $potential_libs; do
+		    potlib="$potent_lib" # see symlink-check above in file_magic test
+		    if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+		       $EGREP "$match_pattern_regex" > /dev/null; then
+		      func_append newdeplibs " $a_deplib"
+		      a_deplib=""
+		      break 2
+		    fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a regex pattern. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      func_append newdeplibs " $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	none | unknown | *)
+	  newdeplibs=""
+	  tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+	  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	    for i in $predeps $postdeps ; do
+	      # can't use Xsed below, because $i might contain '/'
+	      tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"`
+	    done
+	  fi
+	  case $tmp_deplibs in
+	  *[!\	\ ]*)
+	    echo
+	    if test "X$deplibs_check_method" = "Xnone"; then
+	      echo "*** Warning: inter-library dependencies are not supported on this platform."
+	    else
+	      echo "*** Warning: inter-library dependencies are not known to be supported."
+	    fi
+	    echo "*** All declared inter-library dependencies are being dropped."
+	    droppeddeps=yes
+	    ;;
+	  esac
+	  ;;
+	esac
+	versuffix=$versuffix_save
+	major=$major_save
+	release=$release_save
+	libname=$libname_save
+	name=$name_save
+
+	case $host in
+	*-*-rhapsody* | *-*-darwin1.[012])
+	  # On Rhapsody replace the C library with the System framework
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+	  ;;
+	esac
+
+	if test "$droppeddeps" = yes; then
+	  if test "$module" = yes; then
+	    echo
+	    echo "*** Warning: libtool could not satisfy all declared inter-library"
+	    $ECHO "*** dependencies of module $libname.  Therefore, libtool will create"
+	    echo "*** a static module that should work as long as the dlopening"
+	    echo "*** application is linked with the -dlopen flag."
+	    if test -z "$global_symbol_pipe"; then
+	      echo
+	      echo "*** However, this would only work if libtool was able to extract symbol"
+	      echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+	      echo "*** not find such a program.  So, this module is probably useless."
+	      echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	    fi
+	    if test "$build_old_libs" = no; then
+	      oldlibs="$output_objdir/$libname.$libext"
+	      build_libtool_libs=module
+	      build_old_libs=yes
+	    else
+	      build_libtool_libs=no
+	    fi
+	  else
+	    echo "*** The inter-library dependencies that have been dropped here will be"
+	    echo "*** automatically added whenever a program is linked with this library"
+	    echo "*** or is declared to -dlopen it."
+
+	    if test "$allow_undefined" = no; then
+	      echo
+	      echo "*** Since this library must not contain undefined symbols,"
+	      echo "*** because either the platform does not support them or"
+	      echo "*** it was explicitly requested with -no-undefined,"
+	      echo "*** libtool will only create a static version of it."
+	      if test "$build_old_libs" = no; then
+		oldlibs="$output_objdir/$libname.$libext"
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  fi
+	fi
+	# Done checking deplibs!
+	deplibs=$newdeplibs
+      fi
+      # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+      case $host in
+	*-*-darwin*)
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  ;;
+      esac
+
+      # Move library search paths that coincide with paths to not-yet-installed
+      # libraries to the beginning of the library search list.
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $deplibs " in
+	  *" -L$path/$objdir "*)
+	    func_append new_libs " -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) func_append new_libs " $deplib" ;;
+	  esac
+	  ;;
+	*) func_append new_libs " $deplib" ;;
+	esac
+      done
+      deplibs="$new_libs"
+
+      # All the library-specific variables (install_libdir is set above).
+      library_names=
+      old_library=
+      dlname=
+
+      # Test again, we may have decided not to build it any more
+      if test "$build_libtool_libs" = yes; then
+	# Remove ${wl} instances when linking with ld.
+	# FIXME: should test the right _cmds variable.
+	case $archive_cmds in
+	  *\$LD\ *) wl= ;;
+        esac
+	if test "$hardcode_into_libs" = yes; then
+	  # Hardcode the library paths
+	  hardcode_libdirs=
+	  dep_rpath=
+	  rpath="$finalize_rpath"
+	  test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+	  for libdir in $rpath; do
+	    if test -n "$hardcode_libdir_flag_spec"; then
+	      if test -n "$hardcode_libdir_separator"; then
+		func_replace_sysroot "$libdir"
+		libdir=$func_replace_sysroot_result
+		if test -z "$hardcode_libdirs"; then
+		  hardcode_libdirs="$libdir"
+		else
+		  # Just accumulate the unique libdirs.
+		  case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+		  *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		    ;;
+		  *)
+		    func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+		    ;;
+		  esac
+		fi
+	      else
+		eval flag=\"$hardcode_libdir_flag_spec\"
+		func_append dep_rpath " $flag"
+	      fi
+	    elif test -n "$runpath_var"; then
+	      case "$perm_rpath " in
+	      *" $libdir "*) ;;
+	      *) func_append perm_rpath " $libdir" ;;
+	      esac
+	    fi
+	  done
+	  # Substitute the hardcoded libdirs into the rpath.
+	  if test -n "$hardcode_libdir_separator" &&
+	     test -n "$hardcode_libdirs"; then
+	    libdir="$hardcode_libdirs"
+	    eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
+	  fi
+	  if test -n "$runpath_var" && test -n "$perm_rpath"; then
+	    # We should set the runpath_var.
+	    rpath=
+	    for dir in $perm_rpath; do
+	      func_append rpath "$dir:"
+	    done
+	    eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+	  fi
+	  test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+	fi
+
+	shlibpath="$finalize_shlibpath"
+	test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+	if test -n "$shlibpath"; then
+	  eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+	fi
+
+	# Get the real and link names of the library.
+	eval shared_ext=\"$shrext_cmds\"
+	eval library_names=\"$library_names_spec\"
+	set dummy $library_names
+	shift
+	realname="$1"
+	shift
+
+	if test -n "$soname_spec"; then
+	  eval soname=\"$soname_spec\"
+	else
+	  soname="$realname"
+	fi
+	if test -z "$dlname"; then
+	  dlname=$soname
+	fi
+
+	lib="$output_objdir/$realname"
+	linknames=
+	for link
+	do
+	  func_append linknames " $link"
+	done
+
+	# Use standard objects if they are pic
+	test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	test "X$libobjs" = "X " && libobjs=
+
+	delfiles=
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+	  export_symbols="$output_objdir/$libname.uexp"
+	  func_append delfiles " $export_symbols"
+	fi
+
+	orig_export_symbols=
+	case $host_os in
+	cygwin* | mingw* | cegcc*)
+	  if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+	    # exporting using user supplied symfile
+	    if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
+	      # and it's NOT already a .def file. Must figure out
+	      # which of the given symbols are data symbols and tag
+	      # them as such. So, trigger use of export_symbols_cmds.
+	      # export_symbols gets reassigned inside the "prepare
+	      # the list of exported symbols" if statement, so the
+	      # include_expsyms logic still works.
+	      orig_export_symbols="$export_symbols"
+	      export_symbols=
+	      always_export_symbols=yes
+	    fi
+	  fi
+	  ;;
+	esac
+
+	# Prepare the list of exported symbols
+	if test -z "$export_symbols"; then
+	  if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
+	    func_verbose "generating symbol list for \`$libname.la'"
+	    export_symbols="$output_objdir/$libname.exp"
+	    $opt_dry_run || $RM $export_symbols
+	    cmds=$export_symbols_cmds
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd1 in $cmds; do
+	      IFS="$save_ifs"
+	      # Take the normal branch if the nm_file_list_spec branch
+	      # doesn't work or if tool conversion is not needed.
+	      case $nm_file_list_spec~$to_tool_file_cmd in
+		*~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
+		  try_normal_branch=yes
+		  eval cmd=\"$cmd1\"
+		  func_len " $cmd"
+		  len=$func_len_result
+		  ;;
+		*)
+		  try_normal_branch=no
+		  ;;
+	      esac
+	      if test "$try_normal_branch" = yes \
+		 && { test "$len" -lt "$max_cmd_len" \
+		      || test "$max_cmd_len" -le -1; }
+	      then
+		func_show_eval "$cmd" 'exit $?'
+		skipped_export=false
+	      elif test -n "$nm_file_list_spec"; then
+		func_basename "$output"
+		output_la=$func_basename_result
+		save_libobjs=$libobjs
+		save_output=$output
+		output=${output_objdir}/${output_la}.nm
+		func_to_tool_file "$output"
+		libobjs=$nm_file_list_spec$func_to_tool_file_result
+		func_append delfiles " $output"
+		func_verbose "creating $NM input file list: $output"
+		for obj in $save_libobjs; do
+		  func_to_tool_file "$obj"
+		  $ECHO "$func_to_tool_file_result"
+		done > "$output"
+		eval cmd=\"$cmd1\"
+		func_show_eval "$cmd" 'exit $?'
+		output=$save_output
+		libobjs=$save_libobjs
+		skipped_export=false
+	      else
+		# The command line is too long to execute in one step.
+		func_verbose "using reloadable object file for export list..."
+		skipped_export=:
+		# Break out early, otherwise skipped_export may be
+		# set to false by a later but shorter cmd.
+		break
+	      fi
+	    done
+	    IFS="$save_ifs"
+	    if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+	fi
+
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  tmp_export_symbols="$export_symbols"
+	  test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	  $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	fi
+
+	if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+	  # The given exports_symbols file has to be filtered, so filter it.
+	  func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	  # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	  # 's' commands which not all seds can handle. GNU sed should be fine
+	  # though. Also, the filter scales superlinearly with the number of
+	  # global variables. join(1) would be nice here, but unfortunately
+	  # isn't a blessed tool.
+	  $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	  func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+	  export_symbols=$output_objdir/$libname.def
+	  $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	fi
+
+	tmp_deplibs=
+	for test_deplib in $deplibs; do
+	  case " $convenience " in
+	  *" $test_deplib "*) ;;
+	  *)
+	    func_append tmp_deplibs " $test_deplib"
+	    ;;
+	  esac
+	done
+	deplibs="$tmp_deplibs"
+
+	if test -n "$convenience"; then
+	  if test -n "$whole_archive_flag_spec" &&
+	    test "$compiler_needs_object" = yes &&
+	    test -z "$libobjs"; then
+	    # extract the archives, so we have objects to list.
+	    # TODO: could optimize this to just extract one archive.
+	    whole_archive_flag_spec=
+	  fi
+	  if test -n "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  else
+	    gentop="$output_objdir/${outputname}x"
+	    func_append generated " $gentop"
+
+	    func_extract_archives $gentop $convenience
+	    func_append libobjs " $func_extract_archives_result"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	fi
+
+	if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+	  eval flag=\"$thread_safe_flag_spec\"
+	  func_append linker_flags " $flag"
+	fi
+
+	# Make a backup of the uninstalled library when relinking
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+	fi
+
+	# Do each of the archive commands.
+	if test "$module" = yes && test -n "$module_cmds" ; then
+	  if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	    eval test_cmds=\"$module_expsym_cmds\"
+	    cmds=$module_expsym_cmds
+	  else
+	    eval test_cmds=\"$module_cmds\"
+	    cmds=$module_cmds
+	  fi
+	else
+	  if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	    eval test_cmds=\"$archive_expsym_cmds\"
+	    cmds=$archive_expsym_cmds
+	  else
+	    eval test_cmds=\"$archive_cmds\"
+	    cmds=$archive_cmds
+	  fi
+	fi
+
+	if test "X$skipped_export" != "X:" &&
+	   func_len " $test_cmds" &&
+	   len=$func_len_result &&
+	   test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  :
+	else
+	  # The command line is too long to link in one step, link piecewise
+	  # or, if using GNU ld and skipped_export is not :, use a linker
+	  # script.
+
+	  # Save the value of $output and $libobjs because we want to
+	  # use them later.  If we have whole_archive_flag_spec, we
+	  # want to use save_libobjs as it was before
+	  # whole_archive_flag_spec was expanded, because we can't
+	  # assume the linker understands whole_archive_flag_spec.
+	  # This may have to be revisited, in case too many
+	  # convenience libraries get linked in and end up exceeding
+	  # the spec.
+	  if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	  fi
+	  save_output=$output
+	  func_basename "$output"
+	  output_la=$func_basename_result
+
+	  # Clear the reloadable object creation command queue and
+	  # initialize k to one.
+	  test_cmds=
+	  concat_cmds=
+	  objlist=
+	  last_robj=
+	  k=1
+
+	  if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
+	    output=${output_objdir}/${output_la}.lnkscript
+	    func_verbose "creating GNU ld script: $output"
+	    echo 'INPUT (' > $output
+	    for obj in $save_libobjs
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    echo ')' >> $output
+	    func_append delfiles " $output"
+	    func_to_tool_file "$output"
+	    output=$func_to_tool_file_result
+	  elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+	    output=${output_objdir}/${output_la}.lnk
+	    func_verbose "creating linker input file list: $output"
+	    : > $output
+	    set x $save_libobjs
+	    shift
+	    firstobj=
+	    if test "$compiler_needs_object" = yes; then
+	      firstobj="$1 "
+	      shift
+	    fi
+	    for obj
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    func_append delfiles " $output"
+	    func_to_tool_file "$output"
+	    output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+	  else
+	    if test -n "$save_libobjs"; then
+	      func_verbose "creating reloadable object files..."
+	      output=$output_objdir/$output_la-${k}.$objext
+	      eval test_cmds=\"$reload_cmds\"
+	      func_len " $test_cmds"
+	      len0=$func_len_result
+	      len=$len0
+
+	      # Loop over the list of objects to be linked.
+	      for obj in $save_libobjs
+	      do
+		func_len " $obj"
+		func_arith $len + $func_len_result
+		len=$func_arith_result
+		if test "X$objlist" = X ||
+		   test "$len" -lt "$max_cmd_len"; then
+		  func_append objlist " $obj"
+		else
+		  # The command $test_cmds is almost too long, add a
+		  # command to the queue.
+		  if test "$k" -eq 1 ; then
+		    # The first file doesn't have a previous command to add.
+		    reload_objs=$objlist
+		    eval concat_cmds=\"$reload_cmds\"
+		  else
+		    # All subsequent reloadable object files will link in
+		    # the last one created.
+		    reload_objs="$objlist $last_robj"
+		    eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+		  fi
+		  last_robj=$output_objdir/$output_la-${k}.$objext
+		  func_arith $k + 1
+		  k=$func_arith_result
+		  output=$output_objdir/$output_la-${k}.$objext
+		  objlist=" $obj"
+		  func_len " $last_robj"
+		  func_arith $len0 + $func_len_result
+		  len=$func_arith_result
+		fi
+	      done
+	      # Handle the remaining objects by creating one last
+	      # reloadable object file.  All subsequent reloadable object
+	      # files will link in the last one created.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      reload_objs="$objlist $last_robj"
+	      eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+	      if test -n "$last_robj"; then
+	        eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+	      fi
+	      func_append delfiles " $output"
+
+	    else
+	      output=
+	    fi
+
+	    if ${skipped_export-false}; then
+	      func_verbose "generating symbol list for \`$libname.la'"
+	      export_symbols="$output_objdir/$libname.exp"
+	      $opt_dry_run || $RM $export_symbols
+	      libobjs=$output
+	      # Append the command to create the export file.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+	      if test -n "$last_robj"; then
+		eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+	      fi
+	    fi
+
+	    test -n "$save_libobjs" &&
+	      func_verbose "creating a temporary reloadable object file: $output"
+
+	    # Loop through the commands generated above and execute them.
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd in $concat_cmds; do
+	      IFS="$save_ifs"
+	      $opt_silent || {
+		  func_quote_for_expand "$cmd"
+		  eval "func_echo $func_quote_for_expand_result"
+	      }
+	      $opt_dry_run || eval "$cmd" || {
+		lt_exit=$?
+
+		# Restore the uninstalled library and exit
+		if test "$opt_mode" = relink; then
+		  ( cd "$output_objdir" && \
+		    $RM "${realname}T" && \
+		    $MV "${realname}U" "$realname" )
+		fi
+
+		exit $lt_exit
+	      }
+	    done
+	    IFS="$save_ifs"
+
+	    if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+
+          if ${skipped_export-false}; then
+	    if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	      tmp_export_symbols="$export_symbols"
+	      test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	      $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	    fi
+
+	    if test -n "$orig_export_symbols"; then
+	      # The given exports_symbols file has to be filtered, so filter it.
+	      func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	      # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	      # 's' commands which not all seds can handle. GNU sed should be fine
+	      # though. Also, the filter scales superlinearly with the number of
+	      # global variables. join(1) would be nice here, but unfortunately
+	      # isn't a blessed tool.
+	      $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	      func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+	      export_symbols=$output_objdir/$libname.def
+	      $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	    fi
+	  fi
+
+	  libobjs=$output
+	  # Restore the value of output.
+	  output=$save_output
+
+	  if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	  # Expand the library linking commands again to reset the
+	  # value of $libobjs for piecewise linking.
+
+	  # Do each of the archive commands.
+	  if test "$module" = yes && test -n "$module_cmds" ; then
+	    if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	      cmds=$module_expsym_cmds
+	    else
+	      cmds=$module_cmds
+	    fi
+	  else
+	    if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	      cmds=$archive_expsym_cmds
+	    else
+	      cmds=$archive_cmds
+	    fi
+	  fi
+	fi
+
+	if test -n "$delfiles"; then
+	  # Append the command to remove temporary files to $cmds.
+	  eval cmds=\"\$cmds~\$RM $delfiles\"
+	fi
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  func_append generated " $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  func_append libobjs " $func_extract_archives_result"
+	  test "X$libobjs" = "X " && libobjs=
+	fi
+
+	save_ifs="$IFS"; IFS='~'
+	for cmd in $cmds; do
+	  IFS="$save_ifs"
+	  eval cmd=\"$cmd\"
+	  $opt_silent || {
+	    func_quote_for_expand "$cmd"
+	    eval "func_echo $func_quote_for_expand_result"
+	  }
+	  $opt_dry_run || eval "$cmd" || {
+	    lt_exit=$?
+
+	    # Restore the uninstalled library and exit
+	    if test "$opt_mode" = relink; then
+	      ( cd "$output_objdir" && \
+	        $RM "${realname}T" && \
+		$MV "${realname}U" "$realname" )
+	    fi
+
+	    exit $lt_exit
+	  }
+	done
+	IFS="$save_ifs"
+
+	# Restore the uninstalled library and exit
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+	  if test -n "$convenience"; then
+	    if test -z "$whole_archive_flag_spec"; then
+	      func_show_eval '${RM}r "$gentop"'
+	    fi
+	  fi
+
+	  exit $EXIT_SUCCESS
+	fi
+
+	# Create links to the real library.
+	for linkname in $linknames; do
+	  if test "$realname" != "$linkname"; then
+	    func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+	  fi
+	done
+
+	# If -module or -export-dynamic was specified, set the dlname.
+	if test "$module" = yes || test "$export_dynamic" = yes; then
+	  # On all known operating systems, these are identical.
+	  dlname="$soname"
+	fi
+      fi
+      ;;
+
+    obj)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for objects"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for objects" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for objects"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for objects"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for objects"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for objects"
+
+      case $output in
+      *.lo)
+	test -n "$objs$old_deplibs" && \
+	  func_fatal_error "cannot build library object \`$output' from non-libtool objects"
+
+	libobj=$output
+	func_lo2o "$libobj"
+	obj=$func_lo2o_result
+	;;
+      *)
+	libobj=
+	obj="$output"
+	;;
+      esac
+
+      # Delete the old objects.
+      $opt_dry_run || $RM $obj $libobj
+
+      # Objects from convenience libraries.  This assumes
+      # single-version convenience libraries.  Whenever we create
+      # different ones for PIC/non-PIC, we'll have to duplicate
+      # the extraction.
+      reload_conv_objs=
+      gentop=
+      # reload_cmds runs $LD directly, so let us get rid of
+      # -Wl from whole_archive_flag_spec and hope we can get by with
+      # turning commas into spaces.
+      wl=
+
+      if test -n "$convenience"; then
+	if test -n "$whole_archive_flag_spec"; then
+	  eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+	  reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+	else
+	  gentop="$output_objdir/${obj}x"
+	  func_append generated " $gentop"
+
+	  func_extract_archives $gentop $convenience
+	  reload_conv_objs="$reload_objs $func_extract_archives_result"
+	fi
+      fi
+
+      # If we're not building shared, we need to use non_pic_objs
+      test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
+
+      # Create the old-style object.
+      reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+      output="$obj"
+      func_execute_cmds "$reload_cmds" 'exit $?'
+
+      # Exit if we aren't doing a library object file.
+      if test -z "$libobj"; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$build_libtool_libs" != yes; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	# Create an invalid libtool object if no PIC, so that we don't
+	# accidentally link it into a program.
+	# $show "echo timestamp > $libobj"
+	# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+	exit $EXIT_SUCCESS
+      fi
+
+      if test -n "$pic_flag" || test "$pic_mode" != default; then
+	# Only do commands if we really have different PIC objects.
+	reload_objs="$libobjs $reload_conv_objs"
+	output="$libobj"
+	func_execute_cmds "$reload_cmds" 'exit $?'
+      fi
+
+      if test -n "$gentop"; then
+	func_show_eval '${RM}r "$gentop"'
+      fi
+
+      exit $EXIT_SUCCESS
+      ;;
+
+    prog)
+      case $host in
+	*cygwin*) func_stripname '' '.exe' "$output"
+	          output=$func_stripname_result.exe;;
+      esac
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for programs"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for programs"
+
+      test "$preload" = yes \
+        && test "$dlopen_support" = unknown \
+	&& test "$dlopen_self" = unknown \
+	&& test "$dlopen_self_static" = unknown && \
+	  func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+      case $host in
+      *-*-rhapsody* | *-*-darwin1.[012])
+	# On Rhapsody, replace the C library with the System framework
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	;;
+      esac
+
+      case $host in
+      *-*-darwin*)
+	# Don't allow lazy linking, it breaks C++ global constructors
+	# But is supposedly fixed on 10.4 or later (yay!).
+	if test "$tagname" = CXX ; then
+	  case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+	    10.[0123])
+	      func_append compile_command " ${wl}-bind_at_load"
+	      func_append finalize_command " ${wl}-bind_at_load"
+	    ;;
+	  esac
+	fi
+	# Time to change all our "foo.ltframework" stuff back to "-framework foo"
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	;;
+      esac
+
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $compile_deplibs " in
+	  *" -L$path/$objdir "*)
+	    func_append new_libs " -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $compile_deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) func_append new_libs " $deplib" ;;
+	  esac
+	  ;;
+	*) func_append new_libs " $deplib" ;;
+	esac
+      done
+      compile_deplibs="$new_libs"
+
+
+      func_append compile_command " $compile_deplibs"
+      func_append finalize_command " $finalize_deplibs"
+
+      if test -n "$rpath$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	for libdir in $rpath $xrpath; do
+	  # This is the magic to use -rpath.
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append finalize_rpath " $libdir" ;;
+	  esac
+	done
+      fi
+
+      # Now hardcode the library paths
+      rpath=
+      hardcode_libdirs=
+      for libdir in $compile_rpath $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    func_append rpath " $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append perm_rpath " $libdir" ;;
+	  esac
+	fi
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$libdir:"*) ;;
+	  ::) dllsearchpath=$libdir;;
+	  *) func_append dllsearchpath ":$libdir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) func_append dllsearchpath ":$testbindir";;
+	  esac
+	  ;;
+	esac
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      compile_rpath="$rpath"
+
+      rpath=
+      hardcode_libdirs=
+      for libdir in $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    func_append rpath " $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$finalize_perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append finalize_perm_rpath " $libdir" ;;
+	  esac
+	fi
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      finalize_rpath="$rpath"
+
+      if test -n "$libobjs" && test "$build_old_libs" = yes; then
+	# Transform all the library objects into standard objects.
+	compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+      fi
+
+      func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
+
+      # template prelinking step
+      if test -n "$prelink_cmds"; then
+	func_execute_cmds "$prelink_cmds" 'exit $?'
+      fi
+
+      wrappers_required=yes
+      case $host in
+      *cegcc* | *mingw32ce*)
+        # Disable wrappers for cegcc and mingw32ce hosts; we are cross-compiling anyway.
+        wrappers_required=no
+        ;;
+      *cygwin* | *mingw* )
+        if test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      *)
+        if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      esac
+      if test "$wrappers_required" = no; then
+	# Replace the output file specification.
+	compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	link_command="$compile_command$compile_rpath"
+
+	# We have no uninstalled library dependencies, so finalize right now.
+	exit_status=0
+	func_show_eval "$link_command" 'exit_status=$?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	# Delete the generated files.
+	if test -f "$output_objdir/${outputname}S.${objext}"; then
+	  func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+	fi
+
+	exit $exit_status
+      fi
+
+      if test -n "$compile_shlibpath$finalize_shlibpath"; then
+	compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+      fi
+      if test -n "$finalize_shlibpath"; then
+	finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+      fi
+
+      compile_var=
+      finalize_var=
+      if test -n "$runpath_var"; then
+	if test -n "$perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $perm_rpath; do
+	    func_append rpath "$dir:"
+	  done
+	  compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+	if test -n "$finalize_perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $finalize_perm_rpath; do
+	    func_append rpath "$dir:"
+	  done
+	  finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+      fi
+
+      if test "$no_install" = yes; then
+	# We don't need to create a wrapper script.
+	link_command="$compile_var$compile_command$compile_rpath"
+	# Replace the output file specification.
+	link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	# Delete the old output file.
+	$opt_dry_run || $RM $output
+	# Link the executable and exit
+	func_show_eval "$link_command" 'exit $?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$hardcode_action" = relink; then
+	# Fast installation is not supported
+	link_command="$compile_var$compile_command$compile_rpath"
+	relink_command="$finalize_var$finalize_command$finalize_rpath"
+
+	func_warning "this platform does not like uninstalled shared libraries"
+	func_warning "\`$output' will be relinked during installation"
+      else
+	if test "$fast_install" != no; then
+	  link_command="$finalize_var$compile_command$finalize_rpath"
+	  if test "$fast_install" = yes; then
+	    relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+	  else
+	    # fast_install is set to needless
+	    relink_command=
+	  fi
+	else
+	  link_command="$compile_var$compile_command$compile_rpath"
+	  relink_command="$finalize_var$finalize_command$finalize_rpath"
+	fi
+      fi
+
+      # Replace the output file specification.
+      link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+      # Delete the old output files.
+      $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+      func_show_eval "$link_command" 'exit $?'
+
+      if test -n "$postlink_cmds"; then
+	func_to_tool_file "$output_objdir/$outputname"
+	postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	func_execute_cmds "$postlink_cmds" 'exit $?'
+      fi
+
+      # Now create the wrapper script.
+      func_verbose "creating $output"
+
+      # Quote the relink command for shipping.
+      if test -n "$relink_command"; then
+	# Preserve any variables that may affect compiler behavior
+	for var in $variables_saved_for_relink; do
+	  if eval test -z \"\${$var+set}\"; then
+	    relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	  elif eval var_value=\$$var; test -z "$var_value"; then
+	    relink_command="$var=; export $var; $relink_command"
+	  else
+	    func_quote_for_eval "$var_value"
+	    relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	  fi
+	done
+	relink_command="(cd `pwd`; $relink_command)"
+	relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      fi
+
+      # Only actually do things if not in dry run mode.
+      $opt_dry_run || {
+	# win32 will think the script is a binary if it has
+	# a .exe suffix, so we strip it off here.
+	case $output in
+	  *.exe) func_stripname '' '.exe' "$output"
+	         output=$func_stripname_result ;;
+	esac
+	# test for cygwin because mv fails w/o .exe extensions
+	case $host in
+	  *cygwin*)
+	    exeext=.exe
+	    func_stripname '' '.exe' "$outputname"
+	    outputname=$func_stripname_result ;;
+	  *) exeext= ;;
+	esac
+	case $host in
+	  *cygwin* | *mingw* )
+	    func_dirname_and_basename "$output" "" "."
+	    output_name=$func_basename_result
+	    output_path=$func_dirname_result
+	    cwrappersource="$output_path/$objdir/lt-$output_name.c"
+	    cwrapper="$output_path/$output_name.exe"
+	    $RM $cwrappersource $cwrapper
+	    trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_cwrapperexe_src > $cwrappersource
+
+	    # The wrapper executable is built using the $host compiler,
+	    # because it contains $host paths and files. If cross-
+	    # compiling, it, like the target executable, must be
+	    # executed on the $host or under an emulation environment.
+	    $opt_dry_run || {
+	      $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+	      $STRIP $cwrapper
+	    }
+
+	    # Now, create the wrapper script for func_source use:
+	    func_ltwrapper_scriptname $cwrapper
+	    $RM $func_ltwrapper_scriptname_result
+	    trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+	    $opt_dry_run || {
+	      # note: this script will not be executed, so do not chmod.
+	      if test "x$build" = "x$host" ; then
+		$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+	      else
+		func_emit_wrapper no > $func_ltwrapper_scriptname_result
+	      fi
+	    }
+	  ;;
+	  * )
+	    $RM $output
+	    trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_wrapper no > $output
+	    chmod +x $output
+	  ;;
+	esac
+      }
+      exit $EXIT_SUCCESS
+      ;;
+    esac
+
+    # See if we need to build an old-fashioned archive.
+    for oldlib in $oldlibs; do
+
+      if test "$build_libtool_libs" = convenience; then
+	oldobjs="$libobjs_save $symfileobj"
+	addlibs="$convenience"
+	build_libtool_libs=no
+      else
+	if test "$build_libtool_libs" = module; then
+	  oldobjs="$libobjs_save"
+	  build_libtool_libs=no
+	else
+	  oldobjs="$old_deplibs $non_pic_objects"
+	  if test "$preload" = yes && test -f "$symfileobj"; then
+	    func_append oldobjs " $symfileobj"
+	  fi
+	fi
+	addlibs="$old_convenience"
+      fi
+
+      if test -n "$addlibs"; then
+	gentop="$output_objdir/${outputname}x"
+	func_append generated " $gentop"
+
+	func_extract_archives $gentop $addlibs
+	func_append oldobjs " $func_extract_archives_result"
+      fi
+
+      # Do each command in the archive commands.
+      if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
+	cmds=$old_archive_from_new_cmds
+      else
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  func_append generated " $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  func_append oldobjs " $func_extract_archives_result"
+	fi
+
+	# POSIX demands no paths to be encoded in archives.  We have
+	# to avoid creating archives with duplicate basenames if we
+	# might have to extract them afterwards, e.g., when creating a
+	# static archive out of a convenience library, or when linking
+	# the entirety of a libtool archive into another (currently
+	# not supported by libtool).
+	if (for obj in $oldobjs
+	    do
+	      func_basename "$obj"
+	      $ECHO "$func_basename_result"
+	    done | sort | sort -uc >/dev/null 2>&1); then
+	  :
+	else
+	  echo "copying selected object files to avoid basename conflicts..."
+	  gentop="$output_objdir/${outputname}x"
+	  func_append generated " $gentop"
+	  func_mkdir_p "$gentop"
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  counter=1
+	  for obj in $save_oldobjs
+	  do
+	    func_basename "$obj"
+	    objbase="$func_basename_result"
+	    case " $oldobjs " in
+	    " ") oldobjs=$obj ;;
+	    *[\ /]"$objbase "*)
+	      while :; do
+		# Make sure we don't pick an alternate name that also
+		# overlaps.
+		newobj=lt$counter-$objbase
+		func_arith $counter + 1
+		counter=$func_arith_result
+		case " $oldobjs " in
+		*[\ /]"$newobj "*) ;;
+		*) if test ! -f "$gentop/$newobj"; then break; fi ;;
+		esac
+	      done
+	      func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+	      func_append oldobjs " $gentop/$newobj"
+	      ;;
+	    *) func_append oldobjs " $obj" ;;
+	    esac
+	  done
+	fi
+	func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+	tool_oldlib=$func_to_tool_file_result
+	eval cmds=\"$old_archive_cmds\"
+
+	func_len " $cmds"
+	len=$func_len_result
+	if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  cmds=$old_archive_cmds
+	elif test -n "$archiver_list_spec"; then
+	  func_verbose "using command file archive linking..."
+	  for obj in $oldobjs
+	  do
+	    func_to_tool_file "$obj"
+	    $ECHO "$func_to_tool_file_result"
+	  done > $output_objdir/$libname.libcmd
+	  func_to_tool_file "$output_objdir/$libname.libcmd"
+	  oldobjs=" $archiver_list_spec$func_to_tool_file_result"
+	  cmds=$old_archive_cmds
+	else
+	  # the command line is too long to link in one step, link in parts
+	  func_verbose "using piecewise archive linking..."
+	  save_RANLIB=$RANLIB
+	  RANLIB=:
+	  objlist=
+	  concat_cmds=
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  # Is there a better way of finding the last object in the list?
+	  for obj in $save_oldobjs
+	  do
+	    last_oldobj=$obj
+	  done
+	  eval test_cmds=\"$old_archive_cmds\"
+	  func_len " $test_cmds"
+	  len0=$func_len_result
+	  len=$len0
+	  for obj in $save_oldobjs
+	  do
+	    func_len " $obj"
+	    func_arith $len + $func_len_result
+	    len=$func_arith_result
+	    func_append objlist " $obj"
+	    if test "$len" -lt "$max_cmd_len"; then
+	      :
+	    else
+	      # the above command should be used before it gets too long
+	      oldobjs=$objlist
+	      if test "$obj" = "$last_oldobj" ; then
+		RANLIB=$save_RANLIB
+	      fi
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+	      objlist=
+	      len=$len0
+	    fi
+	  done
+	  RANLIB=$save_RANLIB
+	  oldobjs=$objlist
+	  if test "X$oldobjs" = "X" ; then
+	    eval cmds=\"\$concat_cmds\"
+	  else
+	    eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+	  fi
+	fi
+      fi
+      func_execute_cmds "$cmds" 'exit $?'
+    done
+
+    test -n "$generated" && \
+      func_show_eval "${RM}r$generated"
+
+    # Now create the libtool archive.
+    case $output in
+    *.la)
+      old_library=
+      test "$build_old_libs" = yes && old_library="$libname.$libext"
+      func_verbose "creating $output"
+
+      # Preserve any variables that may affect compiler behavior
+      for var in $variables_saved_for_relink; do
+	if eval test -z \"\${$var+set}\"; then
+	  relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	elif eval var_value=\$$var; test -z "$var_value"; then
+	  relink_command="$var=; export $var; $relink_command"
+	else
+	  func_quote_for_eval "$var_value"
+	  relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	fi
+      done
+      # Quote the link command for shipping.
+      relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+      relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      if test "$hardcode_automatic" = yes ; then
+	relink_command=
+      fi
+
+      # Only create the output if not a dry run.
+      $opt_dry_run || {
+	for installed in no yes; do
+	  if test "$installed" = yes; then
+	    if test -z "$install_libdir"; then
+	      break
+	    fi
+	    output="$output_objdir/$outputname"i
+	    # Replace all uninstalled libtool libraries with the installed ones
+	    newdependency_libs=
+	    for deplib in $dependency_libs; do
+	      case $deplib in
+	      *.la)
+		func_basename "$deplib"
+		name="$func_basename_result"
+		func_resolve_sysroot "$deplib"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$deplib' is not a valid libtool archive"
+		func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      -L*)
+		func_stripname -L '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		func_append newdependency_libs " -L$func_replace_sysroot_result"
+		;;
+	      -R*)
+		func_stripname -R '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		func_append newdependency_libs " -R$func_replace_sysroot_result"
+		;;
+	      *) func_append newdependency_libs " $deplib" ;;
+	      esac
+	    done
+	    dependency_libs="$newdependency_libs"
+	    newdlfiles=
+
+	    for lib in $dlfiles; do
+	      case $lib in
+	      *.la)
+	        func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      *) func_append newdlfiles " $lib" ;;
+	      esac
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+	      *.la)
+		# Only pass preopened files to the pseudo-archive (for
+		# eventual linking with the app. that links it) if we
+		# didn't already link the preopened objects directly into
+		# the library:
+		func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      esac
+	    done
+	    dlprefiles="$newdlprefiles"
+	  else
+	    newdlfiles=
+	    for lib in $dlfiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      func_append newdlfiles " $abs"
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      func_append newdlprefiles " $abs"
+	    done
+	    dlprefiles="$newdlprefiles"
+	  fi
+	  $RM $output
+	  # place dlname in correct position for cygwin
+	  # In fact, it would be nice if we could use this code for all target
+	  # systems that can't hard-code library paths into their executables
+	  # and that have no shared library path variable independent of PATH,
+	  # but it turns out we can't easily determine that from inspecting
+	  # libtool variables, so we have to hard-code the OSs to which it
+	  # applies here; at the moment, that means platforms that use the PE
+	  # object format with DLL files.  See the long comment at the top of
+	  # tests/bindir.at for full details.
+	  tdlname=$dlname
+	  case $host,$output,$installed,$module,$dlname in
+	    *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+	      # If a -bindir argument was supplied, place the dll there.
+	      if test "x$bindir" != x ;
+	      then
+		func_relative_path "$install_libdir" "$bindir"
+		tdlname=$func_relative_path_result$dlname
+	      else
+		# Otherwise fall back on heuristic.
+		tdlname=../bin/$dlname
+	      fi
+	      ;;
+	  esac
+	  $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that cannot go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+	  if test "$installed" = no && test "$need_relink" = yes; then
+	    $ECHO >> $output "\
+relink_command=\"$relink_command\""
+	  fi
+	done
+      }
+
+      # Do a symbolic link so that the libtool archive can be found in
+      # LD_LIBRARY_PATH before the program is installed.
+      func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+      ;;
+    esac
+    exit $EXIT_SUCCESS
+}
+
+{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
+    func_mode_link ${1+"$@"}
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+    $opt_debug
+    RM="$nonopt"
+    files=
+    rmforce=
+    exit_status=0
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    for arg
+    do
+      case $arg in
+      -f) func_append RM " $arg"; rmforce=yes ;;
+      -*) func_append RM " $arg" ;;
+      *) func_append files " $arg" ;;
+      esac
+    done
+
+    test -z "$RM" && \
+      func_fatal_help "you must specify an RM program"
+
+    rmdirs=
+
+    for file in $files; do
+      func_dirname "$file" "" "."
+      dir="$func_dirname_result"
+      if test "X$dir" = X.; then
+	odir="$objdir"
+      else
+	odir="$dir/$objdir"
+      fi
+      func_basename "$file"
+      name="$func_basename_result"
+      test "$opt_mode" = uninstall && odir="$dir"
+
+      # Remember odir for removal later, being careful to avoid duplicates
+      if test "$opt_mode" = clean; then
+	case " $rmdirs " in
+	  *" $odir "*) ;;
+	  *) func_append rmdirs " $odir" ;;
+	esac
+      fi
+
+      # Don't error if the file doesn't exist and rm -f was used.
+      if { test -L "$file"; } >/dev/null 2>&1 ||
+	 { test -h "$file"; } >/dev/null 2>&1 ||
+	 test -f "$file"; then
+	:
+      elif test -d "$file"; then
+	exit_status=1
+	continue
+      elif test "$rmforce" = yes; then
+	continue
+      fi
+
+      rmfiles="$file"
+
+      case $name in
+      *.la)
+	# Possibly a libtool archive, so verify it.
+	if func_lalib_p "$file"; then
+	  func_source $dir/$name
+
+	  # Delete the libtool libraries and symlinks.
+	  for n in $library_names; do
+	    func_append rmfiles " $odir/$n"
+	  done
+	  test -n "$old_library" && func_append rmfiles " $odir/$old_library"
+
+	  case "$opt_mode" in
+	  clean)
+	    case " $library_names " in
+	    *" $dlname "*) ;;
+	    *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
+	    esac
+	    test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
+	    ;;
+	  uninstall)
+	    if test -n "$library_names"; then
+	      # Do each command in the postuninstall commands.
+	      func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+
+	    if test -n "$old_library"; then
+	      # Do each command in the old_postuninstall commands.
+	      func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+	    # FIXME: should reinstall the best remaining shared library.
+	    ;;
+	  esac
+	fi
+	;;
+
+      *.lo)
+	# Possibly a libtool object, so verify it.
+	if func_lalib_p "$file"; then
+
+	  # Read the .lo file
+	  func_source $dir/$name
+
+	  # Add PIC object to the list of files to remove.
+	  if test -n "$pic_object" &&
+	     test "$pic_object" != none; then
+	    func_append rmfiles " $dir/$pic_object"
+	  fi
+
+	  # Add non-PIC object to the list of files to remove.
+	  if test -n "$non_pic_object" &&
+	     test "$non_pic_object" != none; then
+	    func_append rmfiles " $dir/$non_pic_object"
+	  fi
+	fi
+	;;
+
+      *)
+	if test "$opt_mode" = clean ; then
+	  noexename=$name
+	  case $file in
+	  *.exe)
+	    func_stripname '' '.exe' "$file"
+	    file=$func_stripname_result
+	    func_stripname '' '.exe' "$name"
+	    noexename=$func_stripname_result
+	    # $file with .exe has already been added to rmfiles,
+	    # add $file without .exe
+	    func_append rmfiles " $file"
+	    ;;
+	  esac
+	  # Do a test to see if this is a libtool program.
+	  if func_ltwrapper_p "$file"; then
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      relink_command=
+	      func_source $func_ltwrapper_scriptname_result
+	      func_append rmfiles " $func_ltwrapper_scriptname_result"
+	    else
+	      relink_command=
+	      func_source $dir/$noexename
+	    fi
+
+	    # note $name still contains .exe if it was in $file originally
+	    # as does the version of $file that was added into $rmfiles
+	    func_append rmfiles " $odir/$name $odir/${name}S.${objext}"
+	    if test "$fast_install" = yes && test -n "$relink_command"; then
+	      func_append rmfiles " $odir/lt-$name"
+	    fi
+	    if test "X$noexename" != "X$name" ; then
+	      func_append rmfiles " $odir/lt-${noexename}.c"
+	    fi
+	  fi
+	fi
+	;;
+      esac
+      func_show_eval "$RM $rmfiles" 'exit_status=1'
+    done
+
+    # Try to remove the ${objdir}s in the directories where we deleted files
+    for dir in $rmdirs; do
+      if test -d "$dir"; then
+	func_show_eval "rmdir $dir >/dev/null 2>&1"
+      fi
+    done
+
+    exit $exit_status
+}
+
+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
+    func_mode_uninstall ${1+"$@"}
+
+test -z "$opt_mode" && {
+  help="$generic_help"
+  func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+  func_fatal_help "invalid operation mode \`$opt_mode'"
+
+if test -n "$exec_cmd"; then
+  eval exec "$exec_cmd"
+  exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# in which we disable both kinds of libraries.  Given conflicting
+# choices, we go for a static library, that is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them.  This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fallback to a static-only
+# configuration.  But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+# vi:sw=2
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_compiler.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_compiler.m4
new file mode 100644
index 0000000..701ebff
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_compiler.m4
@@ -0,0 +1,108 @@
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+# _AX_CHECK_COMPILER_OPTION_WITH_VAR(language, variable, option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - The variable to add the checked compiler option to.
+#   option   - The compiler flag to check.
+#
+# Add, if supported, the specified compiler flag for the compiler selected
+# for the specified language to the provided variable.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_AX_CHECK_COMPILER_OPTION_WITH_VAR],
+[
+    AC_LANG_PUSH($1)
+    AC_MSG_CHECKING([whether the _AC_LANG compiler understands $3])
+    SAVE_[]_AC_LANG_PREFIX[]FLAGS=${_AC_LANG_PREFIX[]FLAGS}
+    SAVE_$2=${$2}
+    _AC_LANG_PREFIX[]FLAGS=$3
+    AC_TRY_COMPILE(,[;],AC_MSG_RESULT([yes]); _AC_LANG_PREFIX[]FLAGS="${SAVE_[]_AC_LANG_PREFIX[]FLAGS}"; $2="${SAVE_$2} $3",AC_MSG_RESULT([no]); _AC_LANG_PREFIX[]FLAGS=${SAVE_[]_AC_LANG_PREFIX[]FLAGS}; $2=${SAVE_$2});
+    unset SAVE_[]_AC_LANG_PREFIX[]FLAGS
+    unset SAVE_$2
+    AC_LANG_POP($1)
+])
+
+#
+# _AX_CHECK_COMPILER_OPTION(language, option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   option   - The compiler flag to check.
+#
+# Add, if supported, the specified compiler flag for the compiler selected
+# for the specified language.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_AX_CHECK_COMPILER_OPTION],
+[
+    AC_LANG_PUSH($1)
+    AC_MSG_CHECKING([whether the _AC_LANG compiler understands $2])
+    SAVE_[]_AC_LANG_PREFIX[]FLAGS=${_AC_LANG_PREFIX[]FLAGS}
+    _AC_LANG_PREFIX[]FLAGS=$2
+    AC_TRY_COMPILE(,[;],AC_MSG_RESULT([yes]); _AC_LANG_PREFIX[]FLAGS="${SAVE_[]_AC_LANG_PREFIX[]FLAGS} $2",AC_MSG_RESULT([no]); _AC_LANG_PREFIX[]FLAGS=${SAVE_[]_AC_LANG_PREFIX[]FLAGS});
+    unset SAVE_[]_AC_LANG_PREFIX[]FLAGS
+    AC_LANG_POP($1)
+])
+
+#
+# AX_CHECK_COMPILER_OPTION(language, [variable,] option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - If supplied, the variable to add the checked compiler option
+#              to.
+#   option   - The compiler flag to check.
+#
+# Add, if supported, the specified compiler flag for the compiler selected
+# for the specified language, optionally saving it to the specified variable.
+# ----------------------------------------------------------------------------
+AC_DEFUN([AX_CHECK_COMPILER_OPTION],
+[
+    ifelse($#,
+        3,
+        [_AX_CHECK_COMPILER_OPTION_WITH_VAR($1, $2, $3)],
+        [_AX_CHECK_COMPILER_OPTION($1, $2)])
+])
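+
+# Illustrative usage (the language, variable, and flags shown here are
+# example choices only, not requirements of the macro):
+#
+#   AX_CHECK_COMPILER_OPTION([C], [CFLAGS], [-Wall])
+#   AX_CHECK_COMPILER_OPTION([C++], [-fno-exceptions])
+#
+# The three-argument form appends -Wall to CFLAGS only if the C compiler
+# accepts it; the two-argument form appends the accepted flag directly to
+# the language's own flags variable (CXXFLAGS in this case).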
+
+#
+# AX_CHECK_COMPILER_OPTIONS(language, [variable,] option ...)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - If supplied, the variable to add the checked compiler option
+#              to.
+#   options  - The compiler flags to check.
+#
+# Add, if supported, the specified compiler flags for the compiler selected
+# for the specified language, optionally saving them to the specified variable.
+# ----------------------------------------------------------------------------
+AC_DEFUN([AX_CHECK_COMPILER_OPTIONS],
+[
+    ifelse($#,
+        3,
+        [
+            for ax_compiler_option in [$3]; do
+                _AX_CHECK_COMPILER_OPTION_WITH_VAR([$1], [$2], $ax_compiler_option)
+            done
+	],
+        [
+            for ax_compiler_option in [$2]; do
+                _AX_CHECK_COMPILER_OPTION([$1], $ax_compiler_option)
+            done
+	])
+])
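+
+# Illustrative usage (the flags below are example choices only): each
+# flag is checked individually and appended to CFLAGS only if the C
+# compiler accepts it.
+#
+#   AX_CHECK_COMPILER_OPTIONS([C], [CFLAGS], [-Wall -Wextra -Wshadow])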
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_file.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_file.m4
new file mode 100644
index 0000000..ccb7048
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_file.m4
@@ -0,0 +1,42 @@
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file implements a GNU M4 autoconf macro for checking for
+#      the existence of files.
+#
+#      The autoconf version of AC_CHECK_FILE is absolutely broken in
+#      that it cannot check for files when cross-compiling even though
+#      the only thing it relies upon is a shell file readability
+#      check.
+#
+
+# AX_CHECK_FILE(FILE, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# -------------------------------------------------------------
+#
+# Check for the existence of FILE.
+AC_DEFUN([AX_CHECK_FILE],
+[AS_VAR_PUSHDEF([ac_File], [ac_cv_file_$1])dnl
+AC_CACHE_CHECK([for $1], [ac_File],
+[if test -r "$1"; then
+  AS_VAR_SET([ac_File], [yes])
+else
+  AS_VAR_SET([ac_File], [no])
+fi])
+AS_VAR_IF([ac_File], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_File])dnl
+])# AX_CHECK_FILE
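+
+# Illustrative usage (the file path and actions are example choices only):
+#
+#   AX_CHECK_FILE([/usr/include/stdio.h],
+#                 [AC_MSG_NOTICE([found stdio.h])],
+#                 [AC_MSG_ERROR([stdio.h is required])])
+#
+# Unlike AC_CHECK_FILE, this is only a shell readability test, so it also
+# works when cross-compiling.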
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_preprocessor.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_preprocessor.m4
new file mode 100644
index 0000000..88fac92
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_check_preprocessor.m4
@@ -0,0 +1,116 @@
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a number of GNU autoconf M4-style macros
+#      for checking language-specific preprocessor options.
+#
+
+#
+# _AX_CHECK_PREPROCESSOR_OPTION_WITH_VAR(language, variable, option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - The variable to add the checked preprocessor option to.
+#   option   - The preprocessor flag to check.
+#
+# Add, if supported, the specified preprocessor flag for the preprocessor
+# selected for the specified language to the provided variable.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_AX_CHECK_PREPROCESSOR_OPTION_WITH_VAR],
+[
+    AC_LANG_PUSH($1)
+    AC_MSG_CHECKING([whether the _AC_LANG preprocessor understands $3])
+    SAVE_CPPFLAGS=${CPPFLAGS}
+    SAVE_$2=${$2}
+    CPPFLAGS=$3
+    AC_TRY_CPP(,AC_MSG_RESULT([yes]); CPPFLAGS="${SAVE_CPPFLAGS}"; $2="${SAVE_$2} $3",AC_MSG_RESULT([no]); CPPFLAGS=${SAVE_CPPFLAGS}; $2=${SAVE_$2});
+    unset SAVE_CPPFLAGS
+    unset SAVE_$2
+    AC_LANG_POP($1)
+])
+
+#
+# _AX_CHECK_PREPROCESSOR_OPTION(language, option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   option   - The preprocessor flag to check.
+#
+# Add, if supported, the specified preprocessor flag for the preprocessor
+# selected for the specified language.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_AX_CHECK_PREPROCESSOR_OPTION],
+[
+    AC_LANG_PUSH($1)
+    AC_MSG_CHECKING([whether the _AC_LANG preprocessor understands $2])
+    SAVE_CPPFLAGS=${CPPFLAGS}
+    CPPFLAGS=$2
+    AC_TRY_CPP(,AC_MSG_RESULT([yes]); CPPFLAGS="${SAVE_CPPFLAGS} $2",AC_MSG_RESULT([no]); CPPFLAGS=${SAVE_CPPFLAGS});
+    unset SAVE_CPPFLAGS
+    AC_LANG_POP($1)
+])
+
+#
+# AX_CHECK_PREPROCESSOR_OPTION(language, [variable,] option)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - If supplied, the variable to add the checked preprocessor option
+#              to.
+#   option   - The preprocessor flag to check.
+#
+# Add, if supported, the specified preprocessor flag for the preprocessor
+# selected for the specified language, optionally saving it to the specified
+# variable.
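+#
+# A usage sketch (the flag and the PROSPECTIVE_CPPFLAGS variable are
+# illustrative only); the two-argument form appends the flag directly to
+# CPPFLAGS:
+#
+#   AX_CHECK_PREPROCESSOR_OPTION([C], [PROSPECTIVE_CPPFLAGS], [-Wundef])
+#   AX_CHECK_PREPROCESSOR_OPTION([C], [-Wundef])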
+# ----------------------------------------------------------------------------
+AC_DEFUN([AX_CHECK_PREPROCESSOR_OPTION],
+[
+    ifelse($#,
+        3,
+        [_AX_CHECK_PREPROCESSOR_OPTION_WITH_VAR($1, $2, $3)],
+        [_AX_CHECK_PREPROCESSOR_OPTION($1, $2)])
+])
+
+#
+# AX_CHECK_PREPROCESSOR_OPTIONS(language, [variable,] option ...)
+#
+#   language - The autoconf language (C, C++, Objective C, Objective C++,
+#              etc.).
+#   variable - If supplied, the variable to add the checked preprocessor option
+#              to.
+#   options  - The preprocessor flags to check.
+#
+# Add, if supported, the specified preprocessor flags for the preprocessor
+# selected for the specified language, optionally saving them to the specified
+# variable.
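+#
+# A usage sketch (the flags and the variable name are illustrative only):
+#
+#   AX_CHECK_PREPROCESSOR_OPTIONS([C], [PROSPECTIVE_CPPFLAGS], [-Wundef -Wunused-macros])
+#   AX_CHECK_PREPROCESSOR_OPTIONS([C], [-Wundef -Wunused-macros])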
+# ----------------------------------------------------------------------------
+AC_DEFUN([AX_CHECK_PREPROCESSOR_OPTIONS],
+[
+    ifelse($#,
+        3,
+        [
+            for ax_preprocessor_option in [$3]; do
+                _AX_CHECK_PREPROCESSOR_OPTION_WITH_VAR([$1], [$2], $ax_preprocessor_option)
+            done
+	],
+        [
+            for ax_preprocessor_option in [$2]; do
+                _AX_CHECK_PREPROCESSOR_OPTION([$1], $ax_preprocessor_option)
+            done
+	])
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_jni_include_dir.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_jni_include_dir.m4
new file mode 100644
index 0000000..71aea79
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_jni_include_dir.m4
@@ -0,0 +1,130 @@
+# ===========================================================================
+#    http://www.gnu.org/software/autoconf-archive/ax_jni_include_dir.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_JNI_INCLUDE_DIR
+#
+# DESCRIPTION
+#
+#   AX_JNI_INCLUDE_DIR finds include directories needed for compiling
+#   programs using the JNI interface.
+#
+#   JNI include directories are usually in the Java distribution. This is
+#   deduced from the value of $JAVA_HOME, $JAVAC, or the path to "javac", in
+#   that order. When this macro completes, a list of directories is left in
+#   the variable JNI_INCLUDE_DIRS.
+#
+#   Example usage follows:
+#
+#     AX_JNI_INCLUDE_DIR
+#
+#     for JNI_INCLUDE_DIR in $JNI_INCLUDE_DIRS
+#     do
+#             CPPFLAGS="$CPPFLAGS -I$JNI_INCLUDE_DIR"
+#     done
+#
+#   If you want to force a specific compiler:
+#
+#   - at the configure.in level, set JAVAC=yourcompiler before calling
+#   AX_JNI_INCLUDE_DIR
+#
+#   - at the configure level, setenv JAVAC
+#
+#   Note: This macro can work with the autoconf M4 macros for Java programs.
+#   This particular macro is not part of the original set of macros.
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Don Anderson <dda@sleepycat.com>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 10
+
+AU_ALIAS([AC_JNI_INCLUDE_DIR], [AX_JNI_INCLUDE_DIR])
+AC_DEFUN([AX_JNI_INCLUDE_DIR],[
+
+JNI_INCLUDE_DIRS=""
+
+if test "x$JAVA_HOME" != x; then
+	_JTOPDIR="$JAVA_HOME"
+else
+	if test "x$JAVAC" = x; then
+		JAVAC=javac
+	fi
+	AC_PATH_PROG([_ACJNI_JAVAC], [$JAVAC], [no])
+	if test "x$_ACJNI_JAVAC" = xno; then
+		AC_MSG_ERROR([cannot find JDK; try setting \$JAVAC or \$JAVA_HOME])
+	fi
+	_ACJNI_FOLLOW_SYMLINKS("$_ACJNI_JAVAC")
+	_JTOPDIR=`echo "$_ACJNI_FOLLOWED" | sed -e 's://*:/:g' -e 's:/[[^/]]*$::'`
+fi
+
+case "$host_os" in
+        darwin*)        _JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[[^/]]*$::'`
+                        _JINC="$_JTOPDIR/Headers";;
+        *)              _JINC="$_JTOPDIR/include";;
+esac
+_AS_ECHO_LOG([_JTOPDIR=$_JTOPDIR])
+_AS_ECHO_LOG([_JINC=$_JINC])
+
+# On Mac OS X 10.6.4, jni.h is a symlink:
+# /System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers/jni.h
+# -> ../../CurrentJDK/Headers/jni.h.
+#
+# Override AC_CHECK_FILE with our own AX_CHECK_FILE to fix
+# cross-compilation issues.
+
+AX_CHECK_FILE([$_JINC/jni.h],
+	[JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JINC"],
+	[_JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[[^/]]*$::'`
+	 AX_CHECK_FILE([$_JTOPDIR/include/jni.h],
+		[JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include"],
+                AC_MSG_ERROR([cannot find JDK header files]))
+	])
+
+# get the likely subdirectories for system specific java includes
+case "$host_os" in
+bsdi*)          _JNI_INC_SUBDIRS="bsdos";;
+freebsd*)       _JNI_INC_SUBDIRS="freebsd";;
+linux*)         _JNI_INC_SUBDIRS="linux genunix";;
+osf*)           _JNI_INC_SUBDIRS="alpha";;
+solaris*)       _JNI_INC_SUBDIRS="solaris";;
+mingw*)		_JNI_INC_SUBDIRS="win32";;
+cygwin*)	_JNI_INC_SUBDIRS="win32";;
+*)              _JNI_INC_SUBDIRS="genunix";;
+esac
+
+# add any subdirectories that are present
+for JINCSUBDIR in $_JNI_INC_SUBDIRS
+do
+    if test -d "$_JTOPDIR/include/$JINCSUBDIR"; then
+         JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include/$JINCSUBDIR"
+    fi
+done
+])
+
+# _ACJNI_FOLLOW_SYMLINKS <path>
+# Follows symbolic links on <path>,
+# finally setting variable _ACJNI_FOLLOWED
+# ----------------------------------------
+AC_DEFUN([_ACJNI_FOLLOW_SYMLINKS],[
+# find the include directory relative to the javac executable
+_cur="$1"
+while ls -ld "$_cur" 2>/dev/null | grep " -> " >/dev/null; do
+        AC_MSG_CHECKING([symlink for $_cur])
+        _slink=`ls -ld "$_cur" | sed 's/.* -> //'`
+        case "$_slink" in
+        /*) _cur="$_slink";;
+        # 'X' avoids triggering unwanted echo options.
+        *) _cur=`echo "X$_cur" | sed -e 's/^X//' -e 's:[[^/]]*$::'`"$_slink";;
+        esac
+        AC_MSG_RESULT([$_cur])
+done
+_ACJNI_FOLLOWED="$_cur"
+])# _ACJNI
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_prog_doxygen.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_prog_doxygen.m4
new file mode 100644
index 0000000..44b22b0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_prog_doxygen.m4
@@ -0,0 +1,532 @@
+# ===========================================================================
+#      http://www.gnu.org/software/autoconf-archive/ax_prog_doxygen.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   DX_INIT_DOXYGEN(PROJECT-NAME, DOXYFILE-PATH, [OUTPUT-DIR])
+#   DX_DOXYGEN_FEATURE(ON|OFF)
+#   DX_DOT_FEATURE(ON|OFF)
+#   DX_HTML_FEATURE(ON|OFF)
+#   DX_CHM_FEATURE(ON|OFF)
+#   DX_CHI_FEATURE(ON|OFF)
+#   DX_MAN_FEATURE(ON|OFF)
+#   DX_RTF_FEATURE(ON|OFF)
+#   DX_XML_FEATURE(ON|OFF)
+#   DX_PDF_FEATURE(ON|OFF)
+#   DX_PS_FEATURE(ON|OFF)
+#
+# DESCRIPTION
+#
+#   The DX_*_FEATURE macros control the default setting for the given
+#   Doxygen feature. Supported features are 'DOXYGEN' itself, 'DOT' for
+#   generating graphics, 'HTML' for plain HTML, 'CHM' for compressed HTML
+#   help (for MS users), 'CHI' for generating a separate .chi file alongside the
+#   .chm file, and 'MAN', 'RTF', 'XML', 'PDF' and 'PS' for the appropriate
+#   output formats. The environment variable DOXYGEN_PAPER_SIZE may be
+#   specified to override the default 'a4wide' paper size.
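+#
+#   For example (illustrative invocation):
+#
+#     DOXYGEN_PAPER_SIZE=letter ./configure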
+#
+#   By default, HTML, PDF and PS documentation is generated as this seems to
+#   be the most popular and portable combination. MAN pages created by
+#   Doxygen are usually problematic, though by picking an appropriate subset
+#   and doing some massaging they might be better than nothing. CHM and RTF
+#   are specific for MS (note that you can't generate both HTML and CHM at
+#   the same time). The XML is rather useless unless you apply specialized
+#   post-processing to it.
+#
+#   The macros mainly control the default state of the feature. The user can
+#   override the default by specifying --enable or --disable. The macros
+#   ensure that contradictory flags are not given (e.g.,
+#   --enable-doxygen-html and --enable-doxygen-chm,
+#   --enable-doxygen-anything with --disable-doxygen, etc.) Finally, each
+#   feature will be automatically disabled (with a warning) if the required
+#   programs are missing.
+#
+#   Once all the feature defaults have been specified, call DX_INIT_DOXYGEN
+#   with the following parameters: a one-word name for the project for use
+#   as a filename base etc., an optional configuration file name (the
+#   default is 'Doxyfile', the same as Doxygen's default), and an optional
+#   output directory name (the default is 'doxygen-doc').
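+#
+#   A configure.ac sketch (the project name, config file, and output
+#   directory are illustrative only):
+#
+#     DX_HTML_FEATURE(ON)
+#     DX_PDF_FEATURE(OFF)
+#     DX_PS_FEATURE(OFF)
+#     DX_INIT_DOXYGEN([my-project], [doxygen.cfg], [doc])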
+#
+#   Automake Support
+#
+#   The following is a template aminclude.am file for use with Automake.
+#   Make targets and variables values are controlled by the various
+#   DX_COND_* conditionals set by autoconf.
+#
+#   The provided targets are:
+#
+#     doxygen-doc: Generate all doxygen documentation.
+#
+#     doxygen-run: Run doxygen, which will generate some of the
+#                  documentation (HTML, CHM, CHI, MAN, RTF, XML)
+#                  but will not do the post processing required
+#                  for the rest of it (PS, PDF, and some MAN).
+#
+#     doxygen-man: Rename some doxygen generated man pages.
+#
+#     doxygen-ps:  Generate doxygen PostScript documentation.
+#
+#     doxygen-pdf: Generate doxygen PDF documentation.
+#
+#   Note that by default these are not integrated into the automake targets.
+#   If doxygen is used to generate man pages, you can achieve this
+#   integration by setting man3_MANS to the list of man pages generated and
+#   then adding the dependency:
+#
+#     $(man3_MANS): doxygen-doc
+#
+#   This will cause make to run doxygen and generate all the documentation.
+#
+#   The following variable is intended for use in Makefile.am:
+#
+#     DX_CLEANFILES = everything to clean.
+#
+#   Then add this variable to MOSTLYCLEANFILES.
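+#
+#   For example, in Makefile.am (illustrative):
+#
+#     MOSTLYCLEANFILES = $(DX_CLEANFILES)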
+#
+#     ----- begin aminclude.am -------------------------------------
+#
+#     ## --------------------------------- ##
+#     ## Format-independent Doxygen rules. ##
+#     ## --------------------------------- ##
+#
+#     if DX_COND_doc
+#
+#     ## ------------------------------- ##
+#     ## Rules specific for HTML output. ##
+#     ## ------------------------------- ##
+#
+#     if DX_COND_html
+#
+#     DX_CLEAN_HTML = @DX_DOCDIR@/html
+#
+#     endif DX_COND_html
+#
+#     ## ------------------------------ ##
+#     ## Rules specific for CHM output. ##
+#     ## ------------------------------ ##
+#
+#     if DX_COND_chm
+#
+#     DX_CLEAN_CHM = @DX_DOCDIR@/chm
+#
+#     if DX_COND_chi
+#
+#     DX_CLEAN_CHI = @DX_DOCDIR@/@PACKAGE@.chi
+#
+#     endif DX_COND_chi
+#
+#     endif DX_COND_chm
+#
+#     ## ------------------------------ ##
+#     ## Rules specific for MAN output. ##
+#     ## ------------------------------ ##
+#
+#     if DX_COND_man
+#
+#     DX_CLEAN_MAN = @DX_DOCDIR@/man
+#
+#     endif DX_COND_man
+#
+#     ## ------------------------------ ##
+#     ## Rules specific for RTF output. ##
+#     ## ------------------------------ ##
+#
+#     if DX_COND_rtf
+#
+#     DX_CLEAN_RTF = @DX_DOCDIR@/rtf
+#
+#     endif DX_COND_rtf
+#
+#     ## ------------------------------ ##
+#     ## Rules specific for XML output. ##
+#     ## ------------------------------ ##
+#
+#     if DX_COND_xml
+#
+#     DX_CLEAN_XML = @DX_DOCDIR@/xml
+#
+#     endif DX_COND_xml
+#
+#     ## ----------------------------- ##
+#     ## Rules specific for PS output. ##
+#     ## ----------------------------- ##
+#
+#     if DX_COND_ps
+#
+#     DX_CLEAN_PS = @DX_DOCDIR@/@PACKAGE@.ps
+#
+#     DX_PS_GOAL = doxygen-ps
+#
+#     doxygen-ps: @DX_DOCDIR@/@PACKAGE@.ps
+#
+#     @DX_DOCDIR@/@PACKAGE@.ps: @DX_DOCDIR@/@PACKAGE@.tag
+#         cd @DX_DOCDIR@/latex; \
+#         rm -f *.aux *.toc *.idx *.ind *.ilg *.log *.out; \
+#         $(DX_LATEX) refman.tex; \
+#         $(MAKEINDEX_PATH) refman.idx; \
+#         $(DX_LATEX) refman.tex; \
+#         countdown=5; \
+#         while $(DX_EGREP) 'Rerun (LaTeX|to get cross-references right)' \
+#                           refman.log > /dev/null 2>&1 \
+#            && test $$countdown -gt 0; do \
+#             $(DX_LATEX) refman.tex; \
+#             countdown=`expr $$countdown - 1`; \
+#         done; \
+#         $(DX_DVIPS) -o ../@PACKAGE@.ps refman.dvi
+#
+#     endif DX_COND_ps
+#
+#     ## ------------------------------ ##
+#     ## Rules specific for PDF output. ##
+#     ## ------------------------------ ##
+#
+#     if DX_COND_pdf
+#
+#     DX_CLEAN_PDF = @DX_DOCDIR@/@PACKAGE@.pdf
+#
+#     DX_PDF_GOAL = doxygen-pdf
+#
+#     doxygen-pdf: @DX_DOCDIR@/@PACKAGE@.pdf
+#
+#     @DX_DOCDIR@/@PACKAGE@.pdf: @DX_DOCDIR@/@PACKAGE@.tag
+#         cd @DX_DOCDIR@/latex; \
+#         rm -f *.aux *.toc *.idx *.ind *.ilg *.log *.out; \
+#         $(DX_PDFLATEX) refman.tex; \
+#         $(DX_MAKEINDEX) refman.idx; \
+#         $(DX_PDFLATEX) refman.tex; \
+#         countdown=5; \
+#         while $(DX_EGREP) 'Rerun (LaTeX|to get cross-references right)' \
+#                           refman.log > /dev/null 2>&1 \
+#            && test $$countdown -gt 0; do \
+#             $(DX_PDFLATEX) refman.tex; \
+#             countdown=`expr $$countdown - 1`; \
+#         done; \
+#         mv refman.pdf ../@PACKAGE@.pdf
+#
+#     endif DX_COND_pdf
+#
+#     ## ------------------------------------------------- ##
+#     ## Rules specific for LaTeX (shared for PS and PDF). ##
+#     ## ------------------------------------------------- ##
+#
+#     if DX_COND_latex
+#
+#     DX_CLEAN_LATEX = @DX_DOCDIR@/latex
+#
+#     endif DX_COND_latex
+#
+#     .PHONY: doxygen-run doxygen-doc $(DX_PS_GOAL) $(DX_PDF_GOAL)
+#
+#     .INTERMEDIATE: doxygen-run $(DX_PS_GOAL) $(DX_PDF_GOAL)
+#
+#     doxygen-run: @DX_DOCDIR@/@PACKAGE@.tag
+#
+#     doxygen-doc: doxygen-run $(DX_PS_GOAL) $(DX_PDF_GOAL)
+#
+#     @DX_DOCDIR@/@PACKAGE@.tag: $(DX_CONFIG) $(pkginclude_HEADERS)
+#         rm -rf @DX_DOCDIR@
+#         $(DX_ENV) $(DX_DOXYGEN) $(srcdir)/$(DX_CONFIG)
+#
+#     DX_CLEANFILES = \
+#         @DX_DOCDIR@/@PACKAGE@.tag \
+#         -r \
+#         $(DX_CLEAN_HTML) \
+#         $(DX_CLEAN_CHM) \
+#         $(DX_CLEAN_CHI) \
+#         $(DX_CLEAN_MAN) \
+#         $(DX_CLEAN_RTF) \
+#         $(DX_CLEAN_XML) \
+#         $(DX_CLEAN_PS) \
+#         $(DX_CLEAN_PDF) \
+#         $(DX_CLEAN_LATEX)
+#
+#     endif DX_COND_doc
+#
+#     ----- end aminclude.am ---------------------------------------
+#
+# LICENSE
+#
+#   Copyright (c) 2009 Oren Ben-Kiki <oren@ben-kiki.org>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 12
+
+## ----------##
+## Defaults. ##
+## ----------##
+
+DX_ENV=""
+AC_DEFUN([DX_FEATURE_doc],  ON)
+AC_DEFUN([DX_FEATURE_dot],  OFF)
+AC_DEFUN([DX_FEATURE_man],  OFF)
+AC_DEFUN([DX_FEATURE_html], ON)
+AC_DEFUN([DX_FEATURE_chm],  OFF)
+AC_DEFUN([DX_FEATURE_chi],  OFF)
+AC_DEFUN([DX_FEATURE_rtf],  OFF)
+AC_DEFUN([DX_FEATURE_xml],  OFF)
+AC_DEFUN([DX_FEATURE_pdf],  ON)
+AC_DEFUN([DX_FEATURE_ps],   ON)
+
+## --------------- ##
+## Private macros. ##
+## --------------- ##
+
+# DX_ENV_APPEND(VARIABLE, VALUE)
+# ------------------------------
+# Append VARIABLE="VALUE" to DX_ENV for invoking doxygen.
+AC_DEFUN([DX_ENV_APPEND], [AC_SUBST([DX_ENV], ["$DX_ENV $1='$2'"])])
+
+# DX_DIRNAME_EXPR
+# ---------------
+# Expand into a shell expression that prints the directory part of a path.
+AC_DEFUN([DX_DIRNAME_EXPR],
+         [[expr ".$1" : '\(\.\)[^/]*$' \| "x$1" : 'x\(.*\)/[^/]*$']])
+
+# DX_IF_FEATURE(FEATURE, IF-ON, IF-OFF)
+# -------------------------------------
+# Expands according to the M4 (static) status of the feature.
+AC_DEFUN([DX_IF_FEATURE], [ifelse(DX_FEATURE_$1, ON, [$2], [$3])])
+
+# DX_REQUIRE_PROG(VARIABLE, PROGRAM)
+# ----------------------------------
+# Require the specified program to be found for the DX_CURRENT_FEATURE to work.
+AC_DEFUN([DX_REQUIRE_PROG], [
+AC_PATH_TOOL([$1], [$2])
+if test "$DX_FLAG_[]DX_CURRENT_FEATURE$$1" = 1; then
+    AC_MSG_WARN([$2 not found - will not DX_CURRENT_DESCRIPTION])
+    AC_SUBST(DX_FLAG_[]DX_CURRENT_FEATURE, 0)
+fi
+])
+
+# DX_TEST_FEATURE(FEATURE)
+# ------------------------
+# Expand to a shell expression testing whether the feature is active.
+AC_DEFUN([DX_TEST_FEATURE], [test "$DX_FLAG_$1" = 1])
+
+# DX_CHECK_DEPEND(REQUIRED_FEATURE, REQUIRED_STATE)
+# -------------------------------------------------
+# Verify that a required feature has the right state before trying to turn on
+# the DX_CURRENT_FEATURE.
+AC_DEFUN([DX_CHECK_DEPEND], [
+test "$DX_FLAG_$1" = "$2" \
+|| AC_MSG_ERROR([doxygen-DX_CURRENT_FEATURE ifelse([$2], 1,
+                            requires, contradicts) doxygen-DX_CURRENT_FEATURE])
+])
+
+# DX_CLEAR_DEPEND(FEATURE, REQUIRED_FEATURE, REQUIRED_STATE)
+# ----------------------------------------------------------
+# Turn off the DX_CURRENT_FEATURE if the required feature is off.
+AC_DEFUN([DX_CLEAR_DEPEND], [
+test "$DX_FLAG_$1" = "$2" || AC_SUBST(DX_FLAG_[]DX_CURRENT_FEATURE, 0)
+])
+
+# DX_ARG_ABLE(FEATURE, DESCRIPTION,
+#             CHECK_DEPEND, CLEAR_DEPEND,
+#             REQUIRE, DO-IF-ON, DO-IF-OFF)
+# --------------------------------------------
+# Parse the command-line option controlling a feature. CHECK_DEPEND is called
+# if the user explicitly turns the feature on (and invokes DX_CHECK_DEPEND),
+# otherwise CLEAR_DEPEND is called to turn off the default state if a required
+# feature is disabled (using DX_CLEAR_DEPEND). REQUIRE performs additional
+# requirement tests (DX_REQUIRE_PROG). Finally, an automake flag is set and
+# DO-IF-ON or DO-IF-OFF are called according to the final state of the feature.
+AC_DEFUN([DX_ARG_ABLE], [
+    AC_DEFUN([DX_CURRENT_FEATURE], [$1])
+    AC_DEFUN([DX_CURRENT_DESCRIPTION], [$2])
+    AC_ARG_ENABLE(doxygen-$1,
+                  [AS_HELP_STRING(DX_IF_FEATURE([$1], [--disable-doxygen-$1],
+                                                      [--enable-doxygen-$1]),
+                                  DX_IF_FEATURE([$1], [don't $2], [$2]))],
+                  [
+case "$enableval" in
+#(
+y|Y|yes|Yes|YES)
+    AC_SUBST([DX_FLAG_$1], 1)
+    $3
+;; #(
+n|N|no|No|NO)
+    AC_SUBST([DX_FLAG_$1], 0)
+;; #(
+*)
+    AC_MSG_ERROR([invalid value '$enableval' given to doxygen-$1])
+;;
+esac
+], [
+AC_SUBST([DX_FLAG_$1], [DX_IF_FEATURE([$1], 1, 0)])
+$4
+])
+if DX_TEST_FEATURE([$1]); then
+    $5
+    :
+fi
+AM_CONDITIONAL(DX_COND_$1, DX_TEST_FEATURE([$1]))
+if DX_TEST_FEATURE([$1]); then
+    $6
+    :
+else
+    $7
+    :
+fi
+])
+
+## -------------- ##
+## Public macros. ##
+## -------------- ##
+
+# DX_XXX_FEATURE(DEFAULT_STATE)
+# -----------------------------
+AC_DEFUN([DX_DOXYGEN_FEATURE], [AC_DEFUN([DX_FEATURE_doc],  [$1])])
+AC_DEFUN([DX_DOT_FEATURE],     [AC_DEFUN([DX_FEATURE_dot], [$1])])
+AC_DEFUN([DX_MAN_FEATURE],     [AC_DEFUN([DX_FEATURE_man],  [$1])])
+AC_DEFUN([DX_HTML_FEATURE],    [AC_DEFUN([DX_FEATURE_html], [$1])])
+AC_DEFUN([DX_CHM_FEATURE],     [AC_DEFUN([DX_FEATURE_chm],  [$1])])
+AC_DEFUN([DX_CHI_FEATURE],     [AC_DEFUN([DX_FEATURE_chi],  [$1])])
+AC_DEFUN([DX_RTF_FEATURE],     [AC_DEFUN([DX_FEATURE_rtf],  [$1])])
+AC_DEFUN([DX_XML_FEATURE],     [AC_DEFUN([DX_FEATURE_xml],  [$1])])
+AC_DEFUN([DX_PDF_FEATURE],     [AC_DEFUN([DX_FEATURE_pdf],  [$1])])
+AC_DEFUN([DX_PS_FEATURE],      [AC_DEFUN([DX_FEATURE_ps],   [$1])])
+
+# DX_INIT_DOXYGEN(PROJECT, [CONFIG-FILE], [OUTPUT-DOC-DIR])
+# ---------------------------------------------------------
+# PROJECT also serves as the base name for the documentation files.
+# The default CONFIG-FILE is "Doxyfile" and OUTPUT-DOC-DIR is "doxygen-doc".
+AC_DEFUN([DX_INIT_DOXYGEN], [
+
+# Files:
+AC_SUBST([DX_PROJECT], [$1])
+AC_SUBST([DX_CONFIG], [ifelse([$2], [], Doxyfile, [$2])])
+AC_SUBST([DX_DOCDIR], [ifelse([$3], [], doxygen-doc, [$3])])
+
+# Environment variables used inside doxygen.cfg:
+DX_ENV_APPEND(SRCDIR, $srcdir)
+DX_ENV_APPEND(PROJECT, $DX_PROJECT)
+DX_ENV_APPEND(DOCDIR, $DX_DOCDIR)
+DX_ENV_APPEND(VERSION, $PACKAGE_VERSION)
+
+# Doxygen itself:
+DX_ARG_ABLE(doc, [generate any doxygen documentation],
+            [],
+            [],
+            [DX_REQUIRE_PROG([DX_DOXYGEN], doxygen)
+             DX_REQUIRE_PROG([DX_PERL], perl)],
+            [DX_ENV_APPEND(PERL_PATH, $DX_PERL)])
+
+# Dot for graphics:
+DX_ARG_ABLE(dot, [generate graphics for doxygen documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_DOT], dot)],
+            [DX_ENV_APPEND(HAVE_DOT, YES)
+             DX_ENV_APPEND(DOT_PATH, [`DX_DIRNAME_EXPR($DX_DOT)`])],
+            [DX_ENV_APPEND(HAVE_DOT, NO)])
+
+# Man pages generation:
+DX_ARG_ABLE(man, [generate doxygen manual pages],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_MAN, YES)],
+            [DX_ENV_APPEND(GENERATE_MAN, NO)])
+
+# RTF file generation:
+DX_ARG_ABLE(rtf, [generate doxygen RTF documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_RTF, YES)],
+            [DX_ENV_APPEND(GENERATE_RTF, NO)])
+
+# XML file generation:
+DX_ARG_ABLE(xml, [generate doxygen XML documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_XML, YES)],
+            [DX_ENV_APPEND(GENERATE_XML, NO)])
+
+# (Compressed) HTML help generation:
+DX_ARG_ABLE(chm, [generate doxygen compressed HTML help documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_HHC], hhc)],
+            [DX_ENV_APPEND(HHC_PATH, $DX_HHC)
+             DX_ENV_APPEND(GENERATE_HTML, YES)
+             DX_ENV_APPEND(GENERATE_HTMLHELP, YES)],
+            [DX_ENV_APPEND(GENERATE_HTMLHELP, NO)])
+
+# Separate CHI file generation:
+DX_ARG_ABLE(chi, [generate doxygen separate compressed HTML help index file],
+            [DX_CHECK_DEPEND(chm, 1)],
+            [DX_CLEAR_DEPEND(chm, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_CHI, YES)],
+            [DX_ENV_APPEND(GENERATE_CHI, NO)])
+
+# Plain HTML pages generation:
+DX_ARG_ABLE(html, [generate doxygen plain HTML documentation],
+            [DX_CHECK_DEPEND(doc, 1) DX_CHECK_DEPEND(chm, 0)],
+            [DX_CLEAR_DEPEND(doc, 1) DX_CLEAR_DEPEND(chm, 0)],
+            [],
+            [DX_ENV_APPEND(GENERATE_HTML, YES)],
+            [DX_TEST_FEATURE(chm) || DX_ENV_APPEND(GENERATE_HTML, NO)])
+
+# PostScript file generation:
+DX_ARG_ABLE(ps, [generate doxygen PostScript documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_LATEX], latex)
+             DX_REQUIRE_PROG([DX_MAKEINDEX], makeindex)
+             DX_REQUIRE_PROG([DX_DVIPS], dvips)
+             DX_REQUIRE_PROG([DX_EGREP], egrep)])
+
+# PDF file generation:
+DX_ARG_ABLE(pdf, [generate doxygen PDF documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_PDFLATEX], pdflatex)
+             DX_REQUIRE_PROG([DX_MAKEINDEX], makeindex)
+             DX_REQUIRE_PROG([DX_EGREP], egrep)])
+
+# LaTeX generation for PS and/or PDF:
+AM_CONDITIONAL(DX_COND_latex, DX_TEST_FEATURE(ps) || DX_TEST_FEATURE(pdf))
+if DX_TEST_FEATURE(ps) || DX_TEST_FEATURE(pdf); then
+    DX_ENV_APPEND(GENERATE_LATEX, YES)
+else
+    DX_ENV_APPEND(GENERATE_LATEX, NO)
+fi
+
+# Paper size for PS and/or PDF:
+AC_ARG_VAR(DOXYGEN_PAPER_SIZE,
+           [a4wide (default), a4, letter, legal or executive])
+case "$DOXYGEN_PAPER_SIZE" in
+#(
+"")
+    AC_SUBST(DOXYGEN_PAPER_SIZE, "")
+;; #(
+a4wide|a4|letter|legal|executive)
+    DX_ENV_APPEND(PAPER_SIZE, $DOXYGEN_PAPER_SIZE)
+;; #(
+*)
+    AC_MSG_ERROR([unknown DOXYGEN_PAPER_SIZE='$DOXYGEN_PAPER_SIZE'])
+;;
+esac
+
+#For debugging:
+#echo DX_FLAG_doc=$DX_FLAG_doc
+#echo DX_FLAG_dot=$DX_FLAG_dot
+#echo DX_FLAG_man=$DX_FLAG_man
+#echo DX_FLAG_html=$DX_FLAG_html
+#echo DX_FLAG_chm=$DX_FLAG_chm
+#echo DX_FLAG_chi=$DX_FLAG_chi
+#echo DX_FLAG_rtf=$DX_FLAG_rtf
+#echo DX_FLAG_xml=$DX_FLAG_xml
+#echo DX_FLAG_pdf=$DX_FLAG_pdf
+#echo DX_FLAG_ps=$DX_FLAG_ps
+#echo DX_ENV=$DX_ENV
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_pthread.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_pthread.m4
new file mode 100644
index 0000000..4c4051e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ax_pthread.m4
@@ -0,0 +1,485 @@
+# ===========================================================================
+#        http://www.gnu.org/software/autoconf-archive/ax_pthread.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_PTHREAD([ACTION-IF-FOUND[, ACTION-IF-NOT-FOUND]])
+#
+# DESCRIPTION
+#
+#   This macro figures out how to build C programs using POSIX threads. It
+#   sets the PTHREAD_LIBS output variable to the threads library and linker
+#   flags, and the PTHREAD_CFLAGS output variable to any special C compiler
+#   flags that are needed. (The user can also force certain compiler
+#   flags/libs to be tested by setting these environment variables.)
+#
+#   Also sets PTHREAD_CC to any special C compiler that is needed for
+#   multi-threaded programs (defaults to the value of CC otherwise). (This
+#   is necessary on AIX to use the special cc_r compiler alias.)
+#
+#   NOTE: You are assumed to not only compile your program with these flags,
+#   but also to link with them as well. For example, you might link with
+#   $PTHREAD_CC $CFLAGS $PTHREAD_CFLAGS $LDFLAGS ... $PTHREAD_LIBS $LIBS
+#
+#   If you are only building threaded programs, you may wish to use these
+#   variables in your default LIBS, CFLAGS, and CC:
+#
+#     LIBS="$PTHREAD_LIBS $LIBS"
+#     CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+#     CC="$PTHREAD_CC"
+#
+#   In addition, if the PTHREAD_CREATE_JOINABLE thread-attribute constant
+#   has a nonstandard name, this macro defines PTHREAD_CREATE_JOINABLE to
+#   that name (e.g. PTHREAD_CREATE_UNDETACHED on AIX).
+#
+#   Also HAVE_PTHREAD_PRIO_INHERIT is defined if pthread is found and the
+#   PTHREAD_PRIO_INHERIT symbol is defined when compiling with
+#   PTHREAD_CFLAGS.
+#
+#   ACTION-IF-FOUND is a list of shell commands to run if a threads library
+#   is found, and ACTION-IF-NOT-FOUND is a list of commands to run if it
+#   is not found. If ACTION-IF-FOUND is not specified, the default action
+#   will define HAVE_PTHREAD.
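+#
+#   A configure.ac sketch (illustrative only; the error message is
+#   arbitrary):
+#
+#     AX_PTHREAD([
+#         LIBS="$PTHREAD_LIBS $LIBS"
+#         CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+#         CC="$PTHREAD_CC"],
+#         [AC_MSG_ERROR([POSIX threads are required but were not found])])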
+#
+#   Please let the authors know if this macro fails on any platform, or if
+#   you have any other suggestions or comments. This macro was based on work
+#   by SGJ on autoconf scripts for FFTW (http://www.fftw.org/) (with help
+#   from M. Frigo), as well as ac_pthread and hb_pthread macros posted by
+#   Alejandro Forero Cuervo to the autoconf macro repository. We are also
+#   grateful for the helpful feedback of numerous users.
+#
+#   Updated for Autoconf 2.68 by Daniel Richard G.
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Steven G. Johnson <stevenj@alum.mit.edu>
+#   Copyright (c) 2011 Daniel Richard G. <skunk@iSKUNK.ORG>
+#
+#   This program is free software: you can redistribute it and/or modify it
+#   under the terms of the GNU General Public License as published by the
+#   Free Software Foundation, either version 3 of the License, or (at your
+#   option) any later version.
+#
+#   This program is distributed in the hope that it will be useful, but
+#   WITHOUT ANY WARRANTY; without even the implied warranty of
+#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+#   Public License for more details.
+#
+#   You should have received a copy of the GNU General Public License along
+#   with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+#   As a special exception, the respective Autoconf Macro's copyright owner
+#   gives unlimited permission to copy, distribute and modify the configure
+#   scripts that are the output of Autoconf when processing the Macro. You
+#   need not follow the terms of the GNU General Public License when using
+#   or distributing such scripts, even though portions of the text of the
+#   Macro appear in them. The GNU General Public License (GPL) does govern
+#   all other use of the material that constitutes the Autoconf Macro.
+#
+#   This special exception to the GPL applies to versions of the Autoconf
+#   Macro released by the Autoconf Archive. When you make and distribute a
+#   modified version of the Autoconf Macro, you may extend this special
+#   exception to the GPL to apply to your modified version as well.
+
+#serial 23
+
+AU_ALIAS([ACX_PTHREAD], [AX_PTHREAD])
+AC_DEFUN([AX_PTHREAD], [
+AC_REQUIRE([AC_CANONICAL_HOST])
+AC_REQUIRE([AC_PROG_CC])
+AC_REQUIRE([AC_PROG_SED])
+AC_LANG_PUSH([C])
+ax_pthread_ok=no
+
+# We used to check for pthread.h first, but this fails if pthread.h
+# requires special compiler flags (e.g. on Tru64 or Sequent).
+# It gets checked for in the link test anyway.
+
+# First of all, check if the user has set any of the PTHREAD_LIBS,
+# etcetera environment variables, and if threads linking works using
+# them:
+if test "x$PTHREAD_CFLAGS$PTHREAD_LIBS" != "x"; then
+        ax_pthread_save_CC="$CC"
+        ax_pthread_save_CFLAGS="$CFLAGS"
+        ax_pthread_save_LIBS="$LIBS"
+        AS_IF([test "x$PTHREAD_CC" != "x"], [CC="$PTHREAD_CC"])
+        CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+        LIBS="$PTHREAD_LIBS $LIBS"
+        AC_MSG_CHECKING([for pthread_join using $CC $PTHREAD_CFLAGS $PTHREAD_LIBS])
+        AC_LINK_IFELSE([AC_LANG_CALL([], [pthread_join])], [ax_pthread_ok=yes])
+        AC_MSG_RESULT([$ax_pthread_ok])
+        if test "x$ax_pthread_ok" = "xno"; then
+                PTHREAD_LIBS=""
+                PTHREAD_CFLAGS=""
+        fi
+        CC="$ax_pthread_save_CC"
+        CFLAGS="$ax_pthread_save_CFLAGS"
+        LIBS="$ax_pthread_save_LIBS"
+fi
+
+# We must check for the threads library under a number of different
+# names; the ordering is very important because some systems
+# (e.g. DEC) have both -lpthread and -lpthreads, where one of the
+# libraries is broken (non-POSIX).
+
+# Create a list of thread flags to try.  Items starting with a "-" are
+# C compiler flags, and other items are library names, except for "none"
+# which indicates that we try without any flags at all, and "pthread-config"
+# which is a program returning the flags for the Pth emulation library.
+
+ax_pthread_flags="pthreads none -Kthread -pthread -pthreads -mthreads pthread --thread-safe -mt pthread-config"
+
+# The ordering *is* (sometimes) important.  Some notes on the
+# individual items follow:
+
+# pthreads: AIX (must check this before -lpthread)
+# none: in case threads are in libc; should be tried before -Kthread and
+#       other compiler flags to prevent continual compiler warnings
+# -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
+# -pthread: Linux/gcc (kernel threads), BSD/gcc (userland threads), Tru64
+#           (Note: HP C rejects this with "bad form for `-t' option")
+# -pthreads: Solaris/gcc (Note: HP C also rejects)
+# -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
+#      doesn't hurt to check since this sometimes defines pthreads and
+#      -D_REENTRANT too), HP C (must be checked before -lpthread, which
+#      is present but should not be used directly; and before -mthreads,
+#      because the compiler interprets this as "-mt" + "-hreads")
+# -mthreads: Mingw32/gcc, Lynx/gcc
+# pthread: Linux, etcetera
+# --thread-safe: KAI C++
+# pthread-config: use pthread-config program (for GNU Pth library)
+
+case $host_os in
+
+        freebsd*)
+
+        # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
+        # lthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
+
+        ax_pthread_flags="-kthread lthread $ax_pthread_flags"
+        ;;
+
+        hpux*)
+
+        # From the cc(1) man page: "[-mt] Sets various -D flags to enable
+        # multi-threading and also sets -lpthread."
+
+        ax_pthread_flags="-mt -pthread pthread $ax_pthread_flags"
+        ;;
+
+        openedition*)
+
+        # IBM z/OS requires a feature-test macro to be defined in order to
+        # enable POSIX threads at all, so give the user a hint if this is
+        # not set. (We don't define these ourselves, as they can affect
+        # other portions of the system API in unpredictable ways.)
+
+        AC_EGREP_CPP([AX_PTHREAD_ZOS_MISSING],
+            [
+#            if !defined(_OPEN_THREADS) && !defined(_UNIX03_THREADS)
+             AX_PTHREAD_ZOS_MISSING
+#            endif
+            ],
+            [AC_MSG_WARN([IBM z/OS requires -D_OPEN_THREADS or -D_UNIX03_THREADS to enable pthreads support.])])
+        ;;
+
+        solaris*)
+
+        # On Solaris (at least, for some versions), libc contains stubbed
+        # (non-functional) versions of the pthreads routines, so link-based
+        # tests will erroneously succeed. (N.B.: The stubs are missing
+        # pthread_cleanup_push, or rather a function called by this macro,
+        # so we could check for that, but who knows whether they'll stub
+        # that too in a future libc.)  So we'll check first for the
+        # standard Solaris way of linking pthreads (-mt -lpthread).
+
+        ax_pthread_flags="-mt,pthread pthread $ax_pthread_flags"
+        ;;
+esac
+
+# GCC generally uses -pthread, or -pthreads on some platforms (e.g. SPARC)
+
+AS_IF([test "x$GCC" = "xyes"],
+      [ax_pthread_flags="-pthread -pthreads $ax_pthread_flags"])
+
+# The presence of a feature test macro requesting re-entrant function
+# definitions is, on some systems, a strong hint that pthreads support is
+# correctly enabled
+
+case $host_os in
+        darwin* | hpux* | linux* | osf* | solaris*)
+        ax_pthread_check_macro="_REENTRANT"
+        ;;
+
+        aix*)
+        ax_pthread_check_macro="_THREAD_SAFE"
+        ;;
+
+        *)
+        ax_pthread_check_macro="--"
+        ;;
+esac
+AS_IF([test "x$ax_pthread_check_macro" = "x--"],
+      [ax_pthread_check_cond=0],
+      [ax_pthread_check_cond="!defined($ax_pthread_check_macro)"])
+
+# Are we compiling with Clang?
+
+AC_CACHE_CHECK([whether $CC is Clang],
+    [ax_cv_PTHREAD_CLANG],
+    [ax_cv_PTHREAD_CLANG=no
+     # Note that Autoconf sets GCC=yes for Clang as well as GCC
+     if test "x$GCC" = "xyes"; then
+        AC_EGREP_CPP([AX_PTHREAD_CC_IS_CLANG],
+            [/* Note: Clang 2.7 lacks __clang_[a-z]+__ */
+#            if defined(__clang__) && defined(__llvm__)
+             AX_PTHREAD_CC_IS_CLANG
+#            endif
+            ],
+            [ax_cv_PTHREAD_CLANG=yes])
+     fi
+    ])
+ax_pthread_clang="$ax_cv_PTHREAD_CLANG"
+
+ax_pthread_clang_warning=no
+
+# Clang needs special handling, because older versions handle the -pthread
+# option in a rather... idiosyncratic way
+
+if test "x$ax_pthread_clang" = "xyes"; then
+
+        # Clang takes -pthread; it has never supported any other flag
+
+        # (Note 1: This will need to be revisited if a system that Clang
+        # supports has POSIX threads in a separate library.  This tends not
+        # to be the way of modern systems, but it's conceivable.)
+
+        # (Note 2: On some systems, notably Darwin, -pthread is not needed
+        # to get POSIX threads support; the API is always present and
+        # active.  We could reasonably leave PTHREAD_CFLAGS empty.  But
+        # -pthread does define _REENTRANT, and while the Darwin headers
+        # ignore this macro, third-party headers might not.)
+
+        PTHREAD_CFLAGS="-pthread"
+        PTHREAD_LIBS=
+
+        ax_pthread_ok=yes
+
+        # However, older versions of Clang make a point of warning the user
+        # that, in an invocation where only linking and no compilation is
+        # taking place, the -pthread option has no effect ("argument unused
+        # during compilation").  They expect -pthread to be passed in only
+        # when source code is being compiled.
+        #
+        # Problem is, this is at odds with the way Automake and most other
+        # C build frameworks function, which is that the same flags used in
+        # compilation (CFLAGS) are also used in linking.  Many systems
+        # supported by AX_PTHREAD require exactly this for POSIX threads
+        # support, and in fact it is often not straightforward to specify a
+        # flag that is used only in the compilation phase and not in
+        # linking.  Such a scenario is extremely rare in practice.
+        #
+        # Even though use of the -pthread flag in linking would only print
+        # a warning, this can be a nuisance for well-run software projects
+        # that build with -Werror.  So if the active version of Clang has
+        # this misfeature, we search for an option to squash it.
+
+        AC_CACHE_CHECK([whether Clang needs flag to prevent "argument unused" warning when linking with -pthread],
+            [ax_cv_PTHREAD_CLANG_NO_WARN_FLAG],
+            [ax_cv_PTHREAD_CLANG_NO_WARN_FLAG=unknown
+             # Create an alternate version of $ac_link that compiles and
+             # links in two steps (.c -> .o, .o -> exe) instead of one
+             # (.c -> exe), because the warning occurs only in the second
+             # step
+             ax_pthread_save_ac_link="$ac_link"
+             ax_pthread_sed='s/conftest\.\$ac_ext/conftest.$ac_objext/g'
+             ax_pthread_link_step=`$as_echo "$ac_link" | sed "$ax_pthread_sed"`
+             ax_pthread_2step_ac_link="($ac_compile) && (echo ==== >&5) && ($ax_pthread_link_step)"
+             ax_pthread_save_CFLAGS="$CFLAGS"
+             for ax_pthread_try in '' -Qunused-arguments -Wno-unused-command-line-argument unknown; do
+                AS_IF([test "x$ax_pthread_try" = "xunknown"], [break])
+                CFLAGS="-Werror -Wunknown-warning-option $ax_pthread_try -pthread $ax_pthread_save_CFLAGS"
+                ac_link="$ax_pthread_save_ac_link"
+                AC_LINK_IFELSE([AC_LANG_SOURCE([[int main(void){return 0;}]])],
+                    [ac_link="$ax_pthread_2step_ac_link"
+                     AC_LINK_IFELSE([AC_LANG_SOURCE([[int main(void){return 0;}]])],
+                         [break])
+                    ])
+             done
+             ac_link="$ax_pthread_save_ac_link"
+             CFLAGS="$ax_pthread_save_CFLAGS"
+             AS_IF([test "x$ax_pthread_try" = "x"], [ax_pthread_try=no])
+             ax_cv_PTHREAD_CLANG_NO_WARN_FLAG="$ax_pthread_try"
+            ])
+
+        case "$ax_cv_PTHREAD_CLANG_NO_WARN_FLAG" in
+                no | unknown) ;;
+                *) PTHREAD_CFLAGS="$ax_cv_PTHREAD_CLANG_NO_WARN_FLAG $PTHREAD_CFLAGS" ;;
+        esac
+
+fi # $ax_pthread_clang = yes
+
+if test "x$ax_pthread_ok" = "xno"; then
+for ax_pthread_try_flag in $ax_pthread_flags; do
+
+        case $ax_pthread_try_flag in
+                none)
+                AC_MSG_CHECKING([whether pthreads work without any flags])
+                ;;
+
+                -mt,pthread)
+                AC_MSG_CHECKING([whether pthreads work with -mt -lpthread])
+                PTHREAD_CFLAGS="-mt"
+                PTHREAD_LIBS="-lpthread"
+                ;;
+
+                -*)
+                AC_MSG_CHECKING([whether pthreads work with $ax_pthread_try_flag])
+                PTHREAD_CFLAGS="$ax_pthread_try_flag"
+                ;;
+
+                pthread-config)
+                AC_CHECK_PROG([ax_pthread_config], [pthread-config], [yes], [no])
+                AS_IF([test "x$ax_pthread_config" = "xno"], [continue])
+                PTHREAD_CFLAGS="`pthread-config --cflags`"
+                PTHREAD_LIBS="`pthread-config --ldflags` `pthread-config --libs`"
+                ;;
+
+                *)
+                AC_MSG_CHECKING([for the pthreads library -l$ax_pthread_try_flag])
+                PTHREAD_LIBS="-l$ax_pthread_try_flag"
+                ;;
+        esac
+
+        ax_pthread_save_CFLAGS="$CFLAGS"
+        ax_pthread_save_LIBS="$LIBS"
+        CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+        LIBS="$PTHREAD_LIBS $LIBS"
+
+        # Check for various functions.  We must include pthread.h,
+        # since some functions may be macros.  (On the Sequent, we
+        # need a special flag -Kthread to make this header compile.)
+        # We check for pthread_join because it is in -lpthread on IRIX
+        # while pthread_create is in libc.  We check for pthread_attr_init
+        # due to DEC craziness with -lpthreads.  We check for
+        # pthread_cleanup_push because it is one of the few pthread
+        # functions on Solaris that doesn't have a non-functional libc stub.
+        # We try pthread_create on general principles.
+
+        AC_LINK_IFELSE([AC_LANG_PROGRAM([#include <pthread.h>
+#                       if $ax_pthread_check_cond
+#                        error "$ax_pthread_check_macro must be defined"
+#                       endif
+                        static void routine(void *a) { a = 0; }
+                        static void *start_routine(void *a) { return a; }],
+                       [pthread_t th; pthread_attr_t attr;
+                        pthread_create(&th, 0, start_routine, 0);
+                        pthread_join(th, 0);
+                        pthread_attr_init(&attr);
+                        pthread_cleanup_push(routine, 0);
+                        pthread_cleanup_pop(0) /* ; */])],
+            [ax_pthread_ok=yes],
+            [])
+
+        CFLAGS="$ax_pthread_save_CFLAGS"
+        LIBS="$ax_pthread_save_LIBS"
+
+        AC_MSG_RESULT([$ax_pthread_ok])
+        AS_IF([test "x$ax_pthread_ok" = "xyes"], [break])
+
+        PTHREAD_LIBS=""
+        PTHREAD_CFLAGS=""
+done
+fi
+
+# Various other checks:
+if test "x$ax_pthread_ok" = "xyes"; then
+        ax_pthread_save_CFLAGS="$CFLAGS"
+        ax_pthread_save_LIBS="$LIBS"
+        CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
+        LIBS="$PTHREAD_LIBS $LIBS"
+
+        # Detect AIX lossage: JOINABLE attribute is called UNDETACHED.
+        AC_CACHE_CHECK([for joinable pthread attribute],
+            [ax_cv_PTHREAD_JOINABLE_ATTR],
+            [ax_cv_PTHREAD_JOINABLE_ATTR=unknown
+             for ax_pthread_attr in PTHREAD_CREATE_JOINABLE PTHREAD_CREATE_UNDETACHED; do
+                 AC_LINK_IFELSE([AC_LANG_PROGRAM([#include <pthread.h>],
+                                                 [int attr = $ax_pthread_attr; return attr /* ; */])],
+                                [ax_cv_PTHREAD_JOINABLE_ATTR=$ax_pthread_attr; break],
+                                [])
+             done
+            ])
+        AS_IF([test "x$ax_cv_PTHREAD_JOINABLE_ATTR" != "xunknown" && \
+               test "x$ax_cv_PTHREAD_JOINABLE_ATTR" != "xPTHREAD_CREATE_JOINABLE" && \
+               test "x$ax_pthread_joinable_attr_defined" != "xyes"],
+              [AC_DEFINE_UNQUOTED([PTHREAD_CREATE_JOINABLE],
+                                  [$ax_cv_PTHREAD_JOINABLE_ATTR],
+                                  [Define to necessary symbol if this constant
+                                   uses a non-standard name on your system.])
+               ax_pthread_joinable_attr_defined=yes
+              ])
+
+        AC_CACHE_CHECK([whether more special flags are required for pthreads],
+            [ax_cv_PTHREAD_SPECIAL_FLAGS],
+            [ax_cv_PTHREAD_SPECIAL_FLAGS=no
+             case $host_os in
+             solaris*)
+             ax_cv_PTHREAD_SPECIAL_FLAGS="-D_POSIX_PTHREAD_SEMANTICS"
+             ;;
+             esac
+            ])
+        AS_IF([test "x$ax_cv_PTHREAD_SPECIAL_FLAGS" != "xno" && \
+               test "x$ax_pthread_special_flags_added" != "xyes"],
+              [PTHREAD_CFLAGS="$ax_cv_PTHREAD_SPECIAL_FLAGS $PTHREAD_CFLAGS"
+               ax_pthread_special_flags_added=yes])
+
+        AC_CACHE_CHECK([for PTHREAD_PRIO_INHERIT],
+            [ax_cv_PTHREAD_PRIO_INHERIT],
+            [AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <pthread.h>]],
+                                             [[int i = PTHREAD_PRIO_INHERIT;]])],
+                            [ax_cv_PTHREAD_PRIO_INHERIT=yes],
+                            [ax_cv_PTHREAD_PRIO_INHERIT=no])
+            ])
+        AS_IF([test "x$ax_cv_PTHREAD_PRIO_INHERIT" = "xyes" && \
+               test "x$ax_pthread_prio_inherit_defined" != "xyes"],
+              [AC_DEFINE([HAVE_PTHREAD_PRIO_INHERIT], [1], [Have PTHREAD_PRIO_INHERIT.])
+               ax_pthread_prio_inherit_defined=yes
+              ])
+
+        CFLAGS="$ax_pthread_save_CFLAGS"
+        LIBS="$ax_pthread_save_LIBS"
+
+        # More AIX lossage: compile with *_r variant
+        if test "x$GCC" != "xyes"; then
+            case $host_os in
+                aix*)
+                AS_CASE(["x/$CC"],
+                    [x*/c89|x*/c89_128|x*/c99|x*/c99_128|x*/cc|x*/cc128|x*/xlc|x*/xlc_v6|x*/xlc128|x*/xlc128_v6],
+                    [#handle absolute path differently from PATH based program lookup
+                     AS_CASE(["x$CC"],
+                         [x/*],
+                         [AS_IF([AS_EXECUTABLE_P([${CC}_r])],[PTHREAD_CC="${CC}_r"])],
+                         [AC_CHECK_PROGS([PTHREAD_CC],[${CC}_r],[$CC])])])
+                ;;
+            esac
+        fi
+fi
+
+test -n "$PTHREAD_CC" || PTHREAD_CC="$CC"
+
+AC_SUBST([PTHREAD_LIBS])
+AC_SUBST([PTHREAD_CFLAGS])
+AC_SUBST([PTHREAD_CC])
+
+# Finally, execute ACTION-IF-FOUND/ACTION-IF-NOT-FOUND:
+if test "x$ax_pthread_ok" = "xyes"; then
+        ifelse([$1],,[AC_DEFINE([HAVE_PTHREAD],[1],[Define if you have POSIX threads libraries and header files.])],[$1])
+        :
+else
+        ax_pthread_ok=no
+        $2
+fi
+AC_LANG_POP
+])dnl AX_PTHREAD
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/libtool.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/libtool.m4
new file mode 100644
index 0000000..44e0ecf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/libtool.m4
@@ -0,0 +1,7982 @@
+# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+m4_define([_LT_COPYING], [dnl
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+])
+
+# serial 57 LT_INIT
+
+
+# LT_PREREQ(VERSION)
+# ------------------
+# Complain and exit if this libtool version is less than VERSION.
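+# For example (illustrative version number): a configure.ac would typically
+# call LT_PREREQ([2.2]) before LT_INIT.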
+m4_defun([LT_PREREQ],
+[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
+       [m4_default([$3],
+		   [m4_fatal([Libtool version $1 or higher is required],
+		             63)])],
+       [$2])])
+
+
+# _LT_CHECK_BUILDDIR
+# ------------------
+# Complain if the absolute build directory name contains unusual characters
+m4_defun([_LT_CHECK_BUILDDIR],
+[case `pwd` in
+  *\ * | *\	*)
+    AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
+esac
+])
+
+
+# LT_INIT([OPTIONS])
+# ------------------
+AC_DEFUN([LT_INIT],
+[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT
+AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_BEFORE([$0], [LT_LANG])dnl
+AC_BEFORE([$0], [LT_OUTPUT])dnl
+AC_BEFORE([$0], [LTDL_INIT])dnl
+m4_require([_LT_CHECK_BUILDDIR])dnl
+
+dnl Autoconf doesn't catch unexpanded LT_ macros by default:
+m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
+m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
+dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
+dnl unless we require an AC_DEFUNed macro:
+AC_REQUIRE([LTOPTIONS_VERSION])dnl
+AC_REQUIRE([LTSUGAR_VERSION])dnl
+AC_REQUIRE([LTVERSION_VERSION])dnl
+AC_REQUIRE([LTOBSOLETE_VERSION])dnl
+m4_require([_LT_PROG_LTMAIN])dnl
+
+_LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}])
+
+dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+AC_SUBST(LIBTOOL)dnl
+
+_LT_SETUP
+
+# Only expand once:
+m4_define([LT_INIT])
+])# LT_INIT
+
+# Old names:
+AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
+AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
+dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
+
+
+# _LT_CC_BASENAME(CC)
+# -------------------
+# Calculate cc_basename.  Skip known compiler wrappers and cross-prefix.
+m4_defun([_LT_CC_BASENAME],
+[for cc_temp in $1""; do
+  case $cc_temp in
+    compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
+    distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
+    \-*) ;;
+    *) break;;
+  esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+])
+
+
+# _LT_FILEUTILS_DEFAULTS
+# ----------------------
+# It is okay to use these file commands and assume they have been set
+# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'.
+m4_defun([_LT_FILEUTILS_DEFAULTS],
+[: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+])# _LT_FILEUTILS_DEFAULTS
+
+
+# _LT_SETUP
+# ---------
+m4_defun([_LT_SETUP],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+
+_LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl
+dnl
+_LT_DECL([], [host_alias], [0], [The host system])dnl
+_LT_DECL([], [host], [0])dnl
+_LT_DECL([], [host_os], [0])dnl
+dnl
+_LT_DECL([], [build_alias], [0], [The build system])dnl
+_LT_DECL([], [build], [0])dnl
+_LT_DECL([], [build_os], [0])dnl
+dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+dnl
+AC_REQUIRE([AC_PROG_LN_S])dnl
+test -z "$LN_S" && LN_S="ln -s"
+_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
+dnl
+AC_REQUIRE([LT_CMD_MAX_LEN])dnl
+_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
+_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
+m4_require([_LT_CMD_RELOAD])dnl
+m4_require([_LT_CHECK_MAGIC_METHOD])dnl
+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
+m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_WITH_SYSROOT])dnl
+
+_LT_CONFIG_LIBTOOL_INIT([
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes INIT.
+if test -n "\${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+])
+if test -n "${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+_LT_CHECK_OBJDIR
+
+m4_require([_LT_TAG_COMPILER])dnl
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test "X${COLLECT_NAMES+set}" != Xset; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+_LT_CC_BASENAME([$compiler])
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    _LT_PATH_MAGIC
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+LT_SUPPORTED_TAG([CC])
+_LT_LANG_C_CONFIG
+_LT_LANG_DEFAULT_CONFIG
+_LT_CONFIG_COMMANDS
+])# _LT_SETUP
+
+
+# _LT_PREPARE_SED_QUOTE_VARS
+# --------------------------
+# Define a few sed substitution that help us do robust quoting.
+m4_defun([_LT_PREPARE_SED_QUOTE_VARS],
+[# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\([["`\\]]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+])
+
+# _LT_PROG_LTMAIN
+# ---------------
+# Note that this code is called both from `configure', and `config.status'
+# now that we use AC_CONFIG_COMMANDS to generate libtool.  Notably,
+# `config.status' has no value for ac_aux_dir unless we are using Automake,
+# so we pass a copy along to make sure it has a sensible value anyway.
+m4_defun([_LT_PROG_LTMAIN],
+[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
+_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
+ltmain="$ac_aux_dir/ltmain.sh"
+])# _LT_PROG_LTMAIN
+
+
+## ------------------------------------- ##
+## Accumulate code for creating libtool. ##
+## ------------------------------------- ##
+
+# So that we can recreate a full libtool script including additional
+# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
+# in macros and then make a single call at the end using the `libtool'
+# label.
+
+
+# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
+# ----------------------------------------
+# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL_INIT],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_INIT])
+
+
+# _LT_CONFIG_LIBTOOL([COMMANDS])
+# ------------------------------
+# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
+
+
+# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
+# -----------------------------------------------------
+m4_defun([_LT_CONFIG_SAVE_COMMANDS],
+[_LT_CONFIG_LIBTOOL([$1])
+_LT_CONFIG_LIBTOOL_INIT([$2])
+])
+
+
+# _LT_FORMAT_COMMENT([COMMENT])
+# -----------------------------
+# Add leading comment marks to the start of each line, and a trailing
+# full-stop to the whole comment if one is not present already.
+m4_define([_LT_FORMAT_COMMENT],
+[m4_ifval([$1], [
+m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
+              [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
+)])
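+
+# An illustrative expansion of the macro above: the call
+#
+#   _LT_FORMAT_COMMENT([A sed program that does not truncate output])
+#
+# produces the comment text
+#
+#   # A sed program that does not truncate output.
+#
+# i.e. a leading `# ' and a trailing full stop are added.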
+
+
+
+## ------------------------ ##
+## FIXME: Eliminate VARNAME ##
+## ------------------------ ##
+
+
+# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
+# -------------------------------------------------------------------
+# CONFIGNAME is the name given to the value in the libtool script.
+# VARNAME is the (base) name used in the configure script.
+# VALUE may be 0, 1 or 2 for a computed quote escaped value based on
+# VARNAME.  Any other value will be used directly.
+m4_define([_LT_DECL],
+[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
+    [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
+	[m4_ifval([$1], [$1], [$2])])
+    lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
+    m4_ifval([$4],
+	[lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
+    lt_dict_add_subkey([lt_decl_dict], [$2],
+	[tagged?], [m4_ifval([$5], [yes], [no])])])
+])
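+
+# Illustrative reading of the arguments (compare _LT_LIBTOOL_DECLARE and
+# the lt_decl_*_varnames filters below): a declaration such as
+#
+#   _LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+#
+# records that configure's $SHELL is copied into the libtool script under
+# the same name; VALUE 1 and 2 select the quote-escaped copy ($lt_SHELL),
+# escaped for single and double evaluation respectively, while 0 uses the
+# variable's value directly.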
+
+
+# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
+# --------------------------------------------------------
+m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
+
+
+# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_tag_varnames],
+[_lt_decl_filter([tagged?], [yes], $@)])
+
+
+# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
+# ---------------------------------------------------------
+m4_define([_lt_decl_filter],
+[m4_case([$#],
+  [0], [m4_fatal([$0: too few arguments: $#])],
+  [1], [m4_fatal([$0: too few arguments: $#: $1])],
+  [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
+  [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
+  [lt_dict_filter([lt_decl_dict], $@)])[]dnl
+])
+
+
+# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
+# --------------------------------------------------
+m4_define([lt_decl_quote_varnames],
+[_lt_decl_filter([value], [1], $@)])
+
+
+# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_dquote_varnames],
+[_lt_decl_filter([value], [2], $@)])
+
+
+# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_varnames_tagged],
+[m4_assert([$# <= 2])dnl
+_$0(m4_quote(m4_default([$1], [[, ]])),
+    m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
+    m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
+m4_define([_lt_decl_varnames_tagged],
+[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
+
+
+# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_all_varnames],
+[_$0(m4_quote(m4_default([$1], [[, ]])),
+     m4_if([$2], [],
+	   m4_quote(lt_decl_varnames),
+	m4_quote(m4_shift($@))))[]dnl
+])
+m4_define([_lt_decl_all_varnames],
+[lt_join($@, lt_decl_varnames_tagged([$1],
+			lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
+])
+
+
+# _LT_CONFIG_STATUS_DECLARE([VARNAME])
+# ------------------------------------
+# Quote a variable value, and forward it to `config.status' so that its
+# declaration there will have the same value as in `configure'.  VARNAME
+# must have a single quote delimited value for this to work.
+m4_define([_LT_CONFIG_STATUS_DECLARE],
+[$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`'])
+
+
+# _LT_CONFIG_STATUS_DECLARATIONS
+# ------------------------------
+# We delimit libtool config variables with single quotes, so when
+# we write them to config.status, we have to be sure to quote all
+# embedded single quotes properly.  In configure, this macro expands
+# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
+#
+#    <var>='`$ECHO "$<var>" | $SED "$delay_single_quote_subst"`'
+m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
+    [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAGS
+# ----------------
+# Output comment and list of tags supported by the script
+m4_defun([_LT_LIBTOOL_TAGS],
+[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
+available_tags="_LT_TAGS"dnl
+])
+
+
+# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
+# -----------------------------------
+# Extract the dictionary values for VARNAME (optionally with TAG) and
+# expand to a commented shell variable setting:
+#
+#    # Some comment about what VAR is for.
+#    visible_name=$lt_internal_name
+m4_define([_LT_LIBTOOL_DECLARE],
+[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
+					   [description])))[]dnl
+m4_pushdef([_libtool_name],
+    m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
+m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
+    [0], [_libtool_name=[$]$1],
+    [1], [_libtool_name=$lt_[]$1],
+    [2], [_libtool_name=$lt_[]$1],
+    [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
+m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
+])
+
+
+# _LT_LIBTOOL_CONFIG_VARS
+# -----------------------
+# Produce commented declarations of non-tagged libtool config variables
+# suitable for insertion in the LIBTOOL CONFIG section of the `libtool'
+# script.  Tagged libtool config variables (even for the LIBTOOL CONFIG
+# section) are produced by _LT_LIBTOOL_TAG_VARS.
+m4_defun([_LT_LIBTOOL_CONFIG_VARS],
+[m4_foreach([_lt_var],
+    m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAG_VARS(TAG)
+# -------------------------
+m4_define([_LT_LIBTOOL_TAG_VARS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
+
+
+# _LT_TAGVAR(VARNAME, [TAGNAME])
+# ------------------------------
+m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
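+
+# For example, given the definition above, _LT_TAGVAR(archive_cmds, CXX)
+# expands to archive_cmds_CXX, while an empty TAGNAME yields plain
+# archive_cmds.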
+
+
+# _LT_CONFIG_COMMANDS
+# -------------------
+# Send accumulated output to $CONFIG_STATUS.  Thanks to the lists of
+# variables for single and double quote escaping we saved from calls
+# to _LT_DECL, we can put quote escaped variables declarations
+# into `config.status', and then the shell code to quote escape them in
+# for loops in `config.status'.  Finally, any additional code accumulated
+# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
+m4_defun([_LT_CONFIG_COMMANDS],
+[AC_PROVIDE_IFELSE([LT_OUTPUT],
+	dnl If the libtool generation code has been placed in $CONFIG_LT,
+	dnl instead of duplicating it all over again into config.status,
+	dnl then we will have config.status run $CONFIG_LT later, so it
+	dnl needs to know what name is stored there:
+        [AC_CONFIG_COMMANDS([libtool],
+            [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
+    dnl If the libtool generation code is destined for config.status,
+    dnl expand the accumulated commands and init code now:
+    [AC_CONFIG_COMMANDS([libtool],
+        [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
+])#_LT_CONFIG_COMMANDS
+
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
+[
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+_LT_CONFIG_STATUS_DECLARATIONS
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$[]1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_quote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_dquote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+_LT_OUTPUT_LIBTOOL_INIT
+])
+
+# _LT_GENERATED_FILE_INIT(FILE, [COMMENT])
+# ------------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable.  If COMMENT is supplied, it is inserted after the
+# `#!' sequence but before initialization text begins.  After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script.  The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_ifdef([AS_INIT_GENERATED],
+[m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])],
+[m4_defun([_LT_GENERATED_FILE_INIT],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[lt_write_fail=0
+cat >$1 <<_ASEOF || lt_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || lt_write_fail=1
+AS_SHELL_SANITIZE
+_AS_PREPARE
+exec AS_MESSAGE_FD>&1
+_ASEOF
+test $lt_write_fail = 0 && chmod +x $1[]dnl
+m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT
+
+# LT_OUTPUT
+# ---------
+# This macro allows early generation of the libtool script (before
+# AC_OUTPUT is called), in case it is used in configure for compilation
+# tests.
+AC_DEFUN([LT_OUTPUT],
+[: ${CONFIG_LT=./config.lt}
+AC_MSG_NOTICE([creating $CONFIG_LT])
+_LT_GENERATED_FILE_INIT(["$CONFIG_LT"],
+[# Run this file to recreate a libtool stub with the current configuration.])
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+lt_cl_silent=false
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+  echo
+  AS_BOX([Running $as_me.])
+} >&AS_MESSAGE_LOG_FD
+
+lt_cl_help="\
+\`$as_me' creates a local libtool stub from the current configuration,
+for use in further configure time tests before the real libtool is
+generated.
+
+Usage: $[0] [[OPTIONS]]
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+  -q, --quiet     do not print progress messages
+  -d, --debug     don't remove temporary files
+
+Report bugs to <bug-libtool@gnu.org>."
+
+lt_cl_version="\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING.
+
+Copyright (C) 2011 Free Software Foundation, Inc.
+This config.lt script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+while test $[#] != 0
+do
+  case $[1] in
+    --version | --v* | -V )
+      echo "$lt_cl_version"; exit 0 ;;
+    --help | --h* | -h )
+      echo "$lt_cl_help"; exit 0 ;;
+    --debug | --d* | -d )
+      debug=: ;;
+    --quiet | --q* | --silent | --s* | -q )
+      lt_cl_silent=: ;;
+
+    -*) AC_MSG_ERROR([unrecognized option: $[1]
+Try \`$[0] --help' for more information.]) ;;
+
+    *) AC_MSG_ERROR([unrecognized argument: $[1]
+Try \`$[0] --help' for more information.]) ;;
+  esac
+  shift
+done
+
+if $lt_cl_silent; then
+  exec AS_MESSAGE_FD>/dev/null
+fi
+_LTEOF
+
+cat >>"$CONFIG_LT" <<_LTEOF
+_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
+_LTEOF
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+AC_MSG_NOTICE([creating $ofile])
+_LT_OUTPUT_LIBTOOL_COMMANDS
+AS_EXIT(0)
+_LTEOF
+chmod +x "$CONFIG_LT"
+
+# configure is writing to config.log, but config.lt does its own redirection,
+# appending to config.log, which fails on DOS, as config.log is still kept
+# open by configure.  Here we exec the FD to /dev/null, effectively closing
+# config.log, so it can be properly (re)opened and appended to by config.lt.
+lt_cl_success=:
+test "$silent" = yes &&
+  lt_config_lt_args="$lt_config_lt_args --quiet"
+exec AS_MESSAGE_LOG_FD>/dev/null
+$SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
+exec AS_MESSAGE_LOG_FD>>config.log
+$lt_cl_success || AS_EXIT(1)
+])# LT_OUTPUT
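+
+# A typical use from configure.ac (illustrative only):
+#
+#   LT_INIT
+#   LT_OUTPUT
+#   # ... configure-time checks that invoke ./libtool go here ...
+#
+# When AC_OUTPUT later runs, config.status re-runs the saved $CONFIG_LT
+# stub rather than duplicating the generation code (see _LT_CONFIG_COMMANDS
+# above).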
+
+
+# _LT_CONFIG(TAG)
+# ---------------
+# If TAG is the built-in tag, create an initial libtool script with a
+# default configuration from the untagged config vars.  Otherwise add code
+# to config.status for appending the configuration named by TAG from the
+# matching tagged config vars.
+m4_defun([_LT_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_CONFIG_SAVE_COMMANDS([
+  m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
+  m4_if(_LT_TAG, [C], [
+    # See if we are running on zsh, and set the options which allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}" ; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile="${ofile}T"
+    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+    $RM "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+_LT_COPYING
+_LT_LIBTOOL_TAGS
+
+# ### BEGIN LIBTOOL CONFIG
+_LT_LIBTOOL_CONFIG_VARS
+_LT_LIBTOOL_TAG_VARS
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+  _LT_PROG_LTMAIN
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '$q' "$ltmain" >> "$cfgfile" \
+     || (rm -f "$cfgfile"; exit 1)
+
+  _LT_PROG_REPLACE_SHELLFNS
+
+   mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+],
+[cat <<_LT_EOF >> "$ofile"
+
+dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
+dnl in a comment (i.e. after a #).
+# ### BEGIN LIBTOOL TAG CONFIG: $1
+_LT_LIBTOOL_TAG_VARS(_LT_TAG)
+# ### END LIBTOOL TAG CONFIG: $1
+_LT_EOF
+])dnl /m4_if
+],
+[m4_if([$1], [], [
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    TIMESTAMP='$TIMESTAMP'
+    RM='$RM'
+    ofile='$ofile'], [])
+])dnl /_LT_CONFIG_SAVE_COMMANDS
+])# _LT_CONFIG
+
+
+# LT_SUPPORTED_TAG(TAG)
+# ---------------------
+# Trace this macro to discover what tags are supported by the libtool
+# --tag option, using:
+#    autoconf --trace 'LT_SUPPORTED_TAG:$1'
+AC_DEFUN([LT_SUPPORTED_TAG], [])
+
+
+# C support is built-in for now
+m4_define([_LT_LANG_C_enabled], [])
+m4_define([_LT_TAGS], [])
+
+
+# LT_LANG(LANG)
+# -------------
+# Enable libtool support for the given language if not already enabled.
+AC_DEFUN([LT_LANG],
+[AC_BEFORE([$0], [LT_OUTPUT])dnl
+m4_case([$1],
+  [C],			[_LT_LANG(C)],
+  [C++],		[_LT_LANG(CXX)],
+  [Go],			[_LT_LANG(GO)],
+  [Java],		[_LT_LANG(GCJ)],
+  [Fortran 77],		[_LT_LANG(F77)],
+  [Fortran],		[_LT_LANG(FC)],
+  [Windows Resource],	[_LT_LANG(RC)],
+  [m4_ifdef([_LT_LANG_]$1[_CONFIG],
+    [_LT_LANG($1)],
+    [m4_fatal([$0: unsupported language: "$1"])])])dnl
+])# LT_LANG
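+
+# Illustrative configure.ac usage: each supported spelling maps onto the
+# corresponding tag via the m4_case above, e.g.
+#
+#   LT_LANG([C++])            # enables the CXX tag
+#   LT_LANG([Fortran 77])     # enables the F77 tag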
+
+
+# _LT_LANG(LANGNAME)
+# ------------------
+m4_defun([_LT_LANG],
+[m4_ifdef([_LT_LANG_]$1[_enabled], [],
+  [LT_SUPPORTED_TAG([$1])dnl
+  m4_append([_LT_TAGS], [$1 ])dnl
+  m4_define([_LT_LANG_]$1[_enabled], [])dnl
+  _LT_LANG_$1_CONFIG($1)])dnl
+])# _LT_LANG
+
+
+m4_ifndef([AC_PROG_GO], [
+############################################################
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_GO.  When it is available in    #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+############################################################
+m4_defun([AC_PROG_GO],
+[AC_LANG_PUSH(Go)dnl
+AC_ARG_VAR([GOC],     [Go compiler command])dnl
+AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+AC_CHECK_TOOL(GOC, gccgo)
+if test -z "$GOC"; then
+  if test -n "$ac_tool_prefix"; then
+    AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo])
+  fi
+fi
+if test -z "$GOC"; then
+  AC_CHECK_PROG(GOC, gccgo, gccgo, false)
+fi
+])#m4_defun
+])#m4_ifndef
+
+
+# _LT_LANG_DEFAULT_CONFIG
+# -----------------------
+m4_defun([_LT_LANG_DEFAULT_CONFIG],
+[AC_PROVIDE_IFELSE([AC_PROG_CXX],
+  [LT_LANG(CXX)],
+  [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_F77],
+  [LT_LANG(F77)],
+  [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_FC],
+  [LT_LANG(FC)],
+  [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
+
+dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
+dnl pulling things in needlessly.
+AC_PROVIDE_IFELSE([AC_PROG_GCJ],
+  [LT_LANG(GCJ)],
+  [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
+    [LT_LANG(GCJ)],
+    [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
+      [LT_LANG(GCJ)],
+      [m4_ifdef([AC_PROG_GCJ],
+	[m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([A][M_PROG_GCJ],
+	[m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([LT_PROG_GCJ],
+	[m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
+
+AC_PROVIDE_IFELSE([AC_PROG_GO],
+  [LT_LANG(GO)],
+  [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])])
+
+AC_PROVIDE_IFELSE([LT_PROG_RC],
+  [LT_LANG(RC)],
+  [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
+])# _LT_LANG_DEFAULT_CONFIG
+
+# Obsolete macros:
+AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
+AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
+AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
+AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
+AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
+dnl AC_DEFUN([AC_LIBTOOL_F77], [])
+dnl AC_DEFUN([AC_LIBTOOL_FC], [])
+dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
+dnl AC_DEFUN([AC_LIBTOOL_RC], [])
+
+
+# _LT_TAG_COMPILER
+# ----------------
+m4_defun([_LT_TAG_COMPILER],
+[AC_REQUIRE([AC_PROG_CC])dnl
+
+_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
+_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
+_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
+_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+])# _LT_TAG_COMPILER
+
+
+# _LT_COMPILER_BOILERPLATE
+# ------------------------
+# Check for compiler boilerplate output or warnings with
+# the simple compiler test code.
+m4_defun([_LT_COMPILER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+])# _LT_COMPILER_BOILERPLATE
+
+
+# _LT_LINKER_BOILERPLATE
+# ----------------------
+# Check for linker boilerplate output or warnings with
+# the simple link test code.
+m4_defun([_LT_LINKER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_LINKER_BOILERPLATE
+
+# _LT_REQUIRED_DARWIN_CHECKS
+# -------------------------
+m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
+  case $host_os in
+    rhapsody* | darwin*)
+    AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
+    AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
+    AC_CHECK_TOOL([LIPO], [lipo], [:])
+    AC_CHECK_TOOL([OTOOL], [otool], [:])
+    AC_CHECK_TOOL([OTOOL64], [otool64], [:])
+    _LT_DECL([], [DSYMUTIL], [1],
+      [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
+    _LT_DECL([], [NMEDIT], [1],
+      [Tool to change global to local symbols on Mac OS X])
+    _LT_DECL([], [LIPO], [1],
+      [Tool to manipulate fat objects and archives on Mac OS X])
+    _LT_DECL([], [OTOOL], [1],
+      [ldd/readelf like tool for Mach-O binaries on Mac OS X])
+    _LT_DECL([], [OTOOL64], [1],
+      [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
+
+    AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
+      [lt_cv_apple_cc_single_mod=no
+      if test -z "${LT_MULTI_MODULE}"; then
+	# By default we will add the -single_module flag. You can override
+	# by either setting the environment variable LT_MULTI_MODULE
+	# non-empty at configure time, or by adding -multi_module to the
+	# link flags.
+	rm -rf libconftest.dylib*
+	echo "int foo(void){return 1;}" > conftest.c
+	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
+	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+	# If there is a non-empty error log, and "single_module"
+	# appears in it, assume the flag caused a linker warning
+        if test -s conftest.err && $GREP single_module conftest.err; then
+	  cat conftest.err >&AS_MESSAGE_LOG_FD
+	# Otherwise, if the output was created with a 0 exit code from
+	# the compiler, it worked.
+	elif test -f libconftest.dylib && test $_lt_result -eq 0; then
+	  lt_cv_apple_cc_single_mod=yes
+	else
+	  cat conftest.err >&AS_MESSAGE_LOG_FD
+	fi
+	rm -rf libconftest.dylib*
+	rm -f conftest.*
+      fi])
+
+    AC_CACHE_CHECK([for -exported_symbols_list linker flag],
+      [lt_cv_ld_exported_symbols_list],
+      [lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+	[lt_cv_ld_exported_symbols_list=yes],
+	[lt_cv_ld_exported_symbols_list=no])
+	LDFLAGS="$save_LDFLAGS"
+    ])
+
+    AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load],
+      [lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+      echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+      $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
+      echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
+      $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -s conftest.err && $GREP force_load conftest.err; then
+	cat conftest.err >&AS_MESSAGE_LOG_FD
+      elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then
+	lt_cv_ld_force_load=yes
+      else
+	cat conftest.err >&AS_MESSAGE_LOG_FD
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+    ])
+    case $host_os in
+    rhapsody* | darwin1.[[012]])
+      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x on
+      # If running on 10.5 or later, the deployment target defaults
+      # to the OS version when on x86; on 10.4 the deployment
+      # target defaults to 10.4.  Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+	10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+	10.[[012]]*)
+	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+	10.*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+    fi
+    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
+])
+
+
+# _LT_DARWIN_LINKER_FEATURES([TAG])
+# ---------------------------------
+# Checks for linker and compiler features on darwin
+m4_defun([_LT_DARWIN_LINKER_FEATURES],
+[
+  m4_require([_LT_REQUIRED_DARWIN_CHECKS])
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_automatic, $1)=yes
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  if test "$lt_cv_ld_force_load" = "yes"; then
+    _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+    m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes],
+                  [FC],  [_LT_TAGVAR(compiler_needs_object, $1)=yes])
+  else
+    _LT_TAGVAR(whole_archive_flag_spec, $1)=''
+  fi
+  _LT_TAGVAR(link_all_deplibs, $1)=yes
+  _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined"
+  case $cc_basename in
+     ifort*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
+  if test "$_lt_dar_can_shared" = "yes"; then
+    output_verbose_link_cmd=func_echo_all
+    _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+    _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+    _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+    _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+    m4_if([$1], [CXX],
+[   if test "$lt_cv_apple_cc_single_mod" != "yes"; then
+      _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}"
+      _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}"
+    fi
+],[])
+  else
+  _LT_TAGVAR(ld_shlibs, $1)=no
+  fi
+])
+
+# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
+# ----------------------------------
+# Links a minimal program and checks the executable
+# for the system default hardcoded library path. In most cases,
+# this is /usr/lib:/lib, but when the MPI compilers are used
+# the locations of the communication and MPI libs are included too.
+# If we don't find anything, use the default library path according
+# to the AIX ld manual.
+# Store the results from the different compilers for each TAGNAME.
+# Allow overriding them for all tags through lt_cv_aix_libpath.
+m4_defun([_LT_SYS_MODULE_PATH_AIX],
+[m4_require([_LT_DECL_SED])dnl
+if test "${lt_cv_aix_libpath+set}" = set; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
+  [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
+  lt_aix_libpath_sed='[
+      /Import File Strings/,/^$/ {
+	  /^0/ {
+	      s/^0  *\([^ ]*\) *$/\1/
+	      p
+	  }
+      }]'
+  _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi],[])
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib"
+  fi
+  ])
+  aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
+fi
+])# _LT_SYS_MODULE_PATH_AIX
+
+
+# _LT_SHELL_INIT(ARG)
+# -------------------
+m4_define([_LT_SHELL_INIT],
+[m4_divert_text([M4SH-INIT], [$1
+])])# _LT_SHELL_INIT
+
+
+
+# _LT_PROG_ECHO_BACKSLASH
+# -----------------------
+# Find how we can fake an echo command that does not interpret backslash.
+# In particular, with Autoconf 2.60 or later we add some code to the start
+# of the generated configure script which will find a shell with a builtin
+# printf (which we can use as an echo command).
+m4_defun([_LT_PROG_ECHO_BACKSLASH],
+[ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+AC_MSG_CHECKING([how to print strings])
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$[]1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*" 
+}
+
+case "$ECHO" in
+  printf*) AC_MSG_RESULT([printf]) ;;
+  print*) AC_MSG_RESULT([print -r]) ;;
+  *) AC_MSG_RESULT([cat]) ;;
+esac
+
+m4_ifdef([_AS_DETECT_SUGGESTED],
+[_AS_DETECT_SUGGESTED([
+  test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test "X`printf %s $ECHO`" = "X$ECHO" \
+      || test "X`print -r -- $ECHO`" = "X$ECHO" )])])
+
+_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+_LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+])# _LT_PROG_ECHO_BACKSLASH
+
+
+# _LT_WITH_SYSROOT
+# ----------------
+AC_DEFUN([_LT_WITH_SYSROOT],
+[AC_MSG_CHECKING([for sysroot])
+AC_ARG_WITH([sysroot],
+[  --with-sysroot[=DIR] Search for dependent libraries within DIR
+                        (or the compiler's sysroot if not specified).],
+[], [with_sysroot=no])
+
+dnl lt_sysroot will always be passed unquoted.  We quote it here
+dnl in case the user passed a directory name.
+lt_sysroot=
+case ${with_sysroot} in #(
+ yes)
+   if test "$GCC" = yes; then
+     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+   fi
+   ;; #(
+ /*)
+   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+   ;; #(
+ no|'')
+   ;; #(
+ *)
+   AC_MSG_RESULT([${with_sysroot}])
+   AC_MSG_ERROR([The sysroot must be an absolute path.])
+   ;;
+esac
+
+ AC_MSG_RESULT([${lt_sysroot:-no}])
+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
+[dependent libraries, and in which our libraries should be installed.])])
+
+# _LT_ENABLE_LOCK
+# ---------------
+m4_defun([_LT_ENABLE_LOCK],
+[AC_ARG_ENABLE([libtool-lock],
+  [AS_HELP_STRING([--disable-libtool-lock],
+    [avoid locking (might break parallel builds)])])
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+	HPUX_IA64_MODE="32"
+	;;
+      *ELF-64*)
+	HPUX_IA64_MODE="64"
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out which ABI we are using.
+  echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    if test "$lt_cv_prog_gnu_ld" = yes; then
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -melf32bsmip"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -melf32bmipn32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -melf64bmip"
+	;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -32"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -n32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -64"
+	  ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_i386_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_i386"
+	    ;;
+	  ppc64-*linux*|powerpc64-*linux*)
+	    LD="${LD-ld} -m elf32ppclinux"
+	    ;;
+	  s390x-*linux*)
+	    LD="${LD-ld} -m elf_s390"
+	    ;;
+	  sparc64-*linux*)
+	    LD="${LD-ld} -m elf32_sparc"
+	    ;;
+	esac
+	;;
+      *64-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_x86_64_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_x86_64"
+	    ;;
+	  ppc*-*linux*|powerpc*-*linux*)
+	    LD="${LD-ld} -m elf64ppc"
+	    ;;
+	  s390*-*linux*|s390*-*tpf*)
+	    LD="${LD-ld} -m elf64_s390"
+	    ;;
+	  sparc*-*linux*)
+	    LD="${LD-ld} -m elf64_sparc"
+	    ;;
+	esac
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS="$CFLAGS"
+  CFLAGS="$CFLAGS -belf"
+  AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
+    [AC_LANG_PUSH(C)
+     AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
+     AC_LANG_POP])
+  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+    CFLAGS="$SAVE_CFLAGS"
+  fi
+  ;;
+*-*solaris*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*)
+        case $host in
+        i?86-*-solaris*)
+          LD="${LD-ld} -m elf_x86_64"
+          ;;
+        sparc*-*-solaris*)
+          LD="${LD-ld} -m elf64_sparc"
+          ;;
+        esac
+        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
+        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
+          LD="${LD-ld}_sol2"
+        fi
+        ;;
+      *)
+	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+	  LD="${LD-ld} -64"
+	fi
+	;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
+
+need_locks="$enable_libtool_lock"
+])# _LT_ENABLE_LOCK
+
+
+# _LT_PROG_AR
+# -----------
+m4_defun([_LT_PROG_AR],
+[AC_CHECK_TOOLS(AR, [ar], false)
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+_LT_DECL([], [AR], [1], [The archiver])
+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
+
+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
+  [lt_cv_ar_at_file=no
+   AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
+     [echo conftest.$ac_objext > conftest.lst
+      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
+      AC_TRY_EVAL([lt_ar_try])
+      if test "$ac_status" -eq 0; then
+	# Ensure the archiver fails upon bogus file names.
+	rm -f conftest.$ac_objext libconftest.a
+	AC_TRY_EVAL([lt_ar_try])
+	if test "$ac_status" -ne 0; then
+          lt_cv_ar_at_file=@
+        fi
+      fi
+      rm -f conftest.* libconftest.a
+     ])
+  ])
+
+if test "x$lt_cv_ar_at_file" = xno; then
+  archiver_list_spec=
+else
+  archiver_list_spec=$lt_cv_ar_at_file
+fi
+_LT_DECL([], [archiver_list_spec], [1],
+  [How to feed a file listing to the archiver])
+])# _LT_PROG_AR
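+
+# Illustrative only: when the probe above succeeds, archiver_list_spec is
+# `@' and libtool can hand the archiver a response file instead of a long
+# object list, along the lines of (file names here are hypothetical):
+#
+#   printf '%s\n' $objs > libfoo.lst
+#   $AR $AR_FLAGS libfoo.a @libfoo.lst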
+
+
+# _LT_CMD_OLD_ARCHIVE
+# -------------------
+m4_defun([_LT_CMD_OLD_ARCHIVE],
+[_LT_PROG_AR
+
+AC_CHECK_TOOL(STRIP, strip, :)
+test -z "$STRIP" && STRIP=:
+_LT_DECL([], [STRIP], [1], [A symbol stripping program])
+
+AC_CHECK_TOOL(RANLIB, ranlib, :)
+test -z "$RANLIB" && RANLIB=:
+_LT_DECL([], [RANLIB], [1],
+    [Commands used to install an old-style archive])
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
+fi
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+_LT_DECL([], [old_postinstall_cmds], [2])
+_LT_DECL([], [old_postuninstall_cmds], [2])
+_LT_TAGDECL([], [old_archive_cmds], [2],
+    [Commands used to build an old-style archive])
+_LT_DECL([], [lock_old_archive_extraction], [0],
+    [Whether to use a lock for old archive extraction])
+])# _LT_CMD_OLD_ARCHIVE
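+
+# Illustrative result: with a working ranlib, the settings above combine to
+#
+#   old_archive_cmds="$AR $AR_FLAGS $oldlib$oldobjs~$RANLIB $tool_oldlib"
+#
+# where `~' separates the commands libtool runs one after another.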
+
+
+# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#		[OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------------------
+# Check whether the given compiler option works
+AC_DEFUN([_LT_COMPILER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$3"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       $2=yes
+     fi
+   fi
+   $RM conftest*
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$5], , :, [$5])
+else
+    m4_if([$6], , :, [$6])
+fi
+])# _LT_COMPILER_OPTION
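+
+# A sketch of a typical invocation of the macro above (the cache variable
+# name and the success action are hypothetical):
+#
+#   _LT_COMPILER_OPTION([if $compiler supports -fno-rtti],
+#     [lt_cv_example_no_rtti], [-fno-rtti], [],
+#     [CFLAGS="$CFLAGS -fno-rtti"])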
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
+
+
+# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#                  [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------
+# Check whether the given linker option works
+AC_DEFUN([_LT_LINKER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS $3"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&AS_MESSAGE_LOG_FD
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         $2=yes
+       fi
+     else
+       $2=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$4], , :, [$4])
+else
+    m4_if([$5], , :, [$5])
+fi
+])# _LT_LINKER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
+
+
+# LT_CMD_MAX_LEN
+# --------------
+AC_DEFUN([LT_CMD_MAX_LEN],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+# find the maximum length of command line arguments
+AC_MSG_CHECKING([the maximum length of command line arguments])
+AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
+  i=0
+  teststring="ABCD"
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum linelength reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+    # This has been around since 386BSD, at least.  Likely further.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  os2*)
+    # The test takes a long time on OS/2.
+    lt_cv_sys_max_cmd_len=8192
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[	 ]]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # a 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8 ; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \
+	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+	      test $i != 17 # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+])
+if test -n "$lt_cv_sys_max_cmd_len" ; then
+  AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
+else
+  AC_MSG_RESULT(none)
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+_LT_DECL([], [max_cmd_len], [0],
+    [What is the maximum length of a command?])
+])# LT_CMD_MAX_LEN
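+
+# Worked example (illustrative): on the BSDs above, a reported kern.argmax
+# of 262144 is scaled by the safety zone to 262144 / 4 * 3 = 196608, the
+# same value hardcoded for interix.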
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
+
+
+# _LT_HEADER_DLFCN
+# ----------------
+m4_defun([_LT_HEADER_DLFCN],
+[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
+])# _LT_HEADER_DLFCN
+
+
+# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
+#                      ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
+# ----------------------------------------------------------------
+m4_defun([_LT_TRY_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "$cross_compiling" = yes; then :
+  [$4]
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+[#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+   find out it does not work on some platforms. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}]
+_LT_EOF
+  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) $1 ;;
+      x$lt_dlneed_uscore) $2 ;;
+      x$lt_dlunknown|x*) $3 ;;
+    esac
+  else :
+    # compilation failed
+    $3
+  fi
+fi
+rm -fr conftest*
+])# _LT_TRY_DLOPEN_SELF
+
+
+# LT_SYS_DLOPEN_SELF
+# ------------------
+AC_DEFUN([LT_SYS_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "x$enable_dlopen" != xyes; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
+  case $host_os in
+  beos*)
+    lt_cv_dlopen="load_add_on"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen="LoadLibrary"
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen="dlopen"
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+  # if libdl is installed we need to link against it
+    AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[
+    lt_cv_dlopen="dyld"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ])
+    ;;
+
+  *)
+    AC_CHECK_FUNC([shl_load],
+	  [lt_cv_dlopen="shl_load"],
+      [AC_CHECK_LIB([dld], [shl_load],
+	    [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"],
+	[AC_CHECK_FUNC([dlopen],
+	      [lt_cv_dlopen="dlopen"],
+	  [AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],
+	    [AC_CHECK_LIB([svld], [dlopen],
+		  [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"],
+	      [AC_CHECK_LIB([dld], [dld_link],
+		    [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"])
+	      ])
+	    ])
+	  ])
+	])
+      ])
+    ;;
+  esac
+
+  if test "x$lt_cv_dlopen" != xno; then
+    enable_dlopen=yes
+  else
+    enable_dlopen=no
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS="$CPPFLAGS"
+    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS="$LDFLAGS"
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS="$LIBS"
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    AC_CACHE_CHECK([whether a program can dlopen itself],
+	  lt_cv_dlopen_self, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
+	    lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
+    ])
+
+    if test "x$lt_cv_dlopen_self" = xyes; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
+	  lt_cv_dlopen_self_static, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
+	    lt_cv_dlopen_self_static=no,  lt_cv_dlopen_self_static=cross)
+      ])
+    fi
+
+    CPPFLAGS="$save_CPPFLAGS"
+    LDFLAGS="$save_LDFLAGS"
+    LIBS="$save_LIBS"
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+_LT_DECL([dlopen_support], [enable_dlopen], [0],
+	 [Whether dlopen is supported])
+_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
+	 [Whether dlopen of programs is supported])
+_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
+	 [Whether dlopen of statically linked programs is supported])
+])# LT_SYS_DLOPEN_SELF
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
+
+
+# _LT_COMPILER_C_O([TAGNAME])
+# ---------------------------
+# Check to see if options -c and -o are simultaneously supported by compiler.
+# This macro does not hard code the compiler like AC_PROG_CC_C_O.
+m4_defun([_LT_COMPILER_C_O],
+[m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+     fi
+   fi
+   chmod u+w . 2>&AS_MESSAGE_LOG_FD
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+])
+_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
+	[Does compiler simultaneously support -c and -o options?])
+])# _LT_COMPILER_C_O
+
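+# Editor's sketch (not part of libtool): stripped of the logging and quoting
+# machinery, the probe above amounts to the following hand-run shell check,
+# with $CC standing in for the tagged compiler:
+#
+#   mkdir conftest && cd conftest && mkdir out
+#   echo 'int main(void) { return 0; }' > conftest.c
+#   if $CC -c conftest.c -o out/conftest2.o 2>out/conftest.err &&
+#      test -s out/conftest2.o && test ! -s out/conftest.err; then
+#     echo "yes: -c and -o can be combined"
+#   fi
+#   cd .. && rm -rf conftest
+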
+
+# _LT_COMPILER_FILE_LOCKS([TAGNAME])
+# ----------------------------------
+# Check to see if we can do hard links to lock some files if needed
+m4_defun([_LT_COMPILER_FILE_LOCKS],
+[m4_require([_LT_ENABLE_LOCK])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_COMPILER_C_O([$1])
+
+hard_links="nottested"
+if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then
+  # do not overwrite the value of need_locks provided by the user
+  AC_MSG_CHECKING([if we can lock with hard links])
+  hard_links=yes
+  $RM conftest*
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  AC_MSG_RESULT([$hard_links])
+  if test "$hard_links" = no; then
+    AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe])
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
+])# _LT_COMPILER_FILE_LOCKS
+
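+# Editor's sketch (not part of libtool): the probe above, run by hand.  A
+# link attempt before the source file exists should fail, and one after it
+# exists should succeed, for hard-link locking to be trusted:
+#
+#   rm -f conftest.a conftest.b
+#   ln conftest.a conftest.b 2>/dev/null && echo "suspicious: ln of a missing file succeeded"
+#   touch conftest.a
+#   if ln conftest.a conftest.b; then
+#     echo "hard links usable: libtool can lock objects during compilation"
+#   else
+#     echo "no hard links: need_locks=warn, so 'make -j' may be unsafe"
+#   fi
+#   rm -f conftest.a conftest.b
+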
+
+# _LT_CHECK_OBJDIR
+# ----------------
+m4_defun([_LT_CHECK_OBJDIR],
+[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
+[rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null])
+objdir=$lt_cv_objdir
+_LT_DECL([], [objdir], [0],
+         [The name of the directory that contains temporary libtool files])dnl
+m4_pattern_allow([LT_OBJDIR])dnl
+AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/",
+  [Define to the sub-directory in which libtool stores uninstalled libraries.])
+])# _LT_CHECK_OBJDIR
+
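+# Editor's sketch (not part of libtool): the objdir probe by hand.  On
+# almost every host this yields ".libs"; "_libs" is the MS-DOS fallback
+# because filenames there cannot begin with a dot:
+#
+#   rm -f .libs 2>/dev/null
+#   if mkdir .libs 2>/dev/null && test -d .libs; then
+#     objdir=.libs
+#   else
+#     objdir=_libs
+#   fi
+#   rmdir .libs 2>/dev/null
+#   echo "temporary libtool files will live in $objdir/"
+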
+
+# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
+# --------------------------------------
+# Check hardcoding attributes.
+m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
+[AC_MSG_CHECKING([how to hardcode library paths into programs])
+_LT_TAGVAR(hardcode_action, $1)=
+if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
+   test -n "$_LT_TAGVAR(runpath_var, $1)" ||
+   test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then
+
+  # We can hardcode non-existent directories.
+  if test "$_LT_TAGVAR(hardcode_direct, $1)" != no &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink, otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one
+     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no &&
+     test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then
+    # Linking always hardcodes the temporary library directory.
+    _LT_TAGVAR(hardcode_action, $1)=relink
+  else
+    # We can link without hardcoding, and we can hardcode nonexistent dirs.
+    _LT_TAGVAR(hardcode_action, $1)=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  _LT_TAGVAR(hardcode_action, $1)=unsupported
+fi
+AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
+
+if test "$_LT_TAGVAR(hardcode_action, $1)" = relink ||
+   test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+     test "$enable_shared" = no; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
+_LT_TAGDECL([], [hardcode_action], [0],
+    [How to hardcode a shared library path into an executable])
+])# _LT_LINKER_HARDCODE_LIBPATH
+
+
+# _LT_CMD_STRIPLIB
+# ----------------
+m4_defun([_LT_CMD_STRIPLIB],
+[m4_require([_LT_DECL_EGREP])
+striplib=
+old_striplib=
+AC_MSG_CHECKING([whether stripping libraries is possible])
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  AC_MSG_RESULT([yes])
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP" ; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      AC_MSG_RESULT([yes])
+    else
+      AC_MSG_RESULT([no])
+    fi
+    ;;
+  *)
+    AC_MSG_RESULT([no])
+    ;;
+  esac
+fi
+_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
+_LT_DECL([], [striplib], [1])
+])# _LT_CMD_STRIPLIB
+
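+# Editor's sketch (not part of libtool): with GNU binutils the two commands
+# declared above expand roughly as follows ("libfoo" is a hypothetical
+# library name used only for illustration):
+#
+#   $STRIP --strip-debug    libfoo.a         # old_striplib: static archives
+#   $STRIP --strip-unneeded libfoo.so.1.2.3  # striplib: shared libraries
+#
+# On Darwin the equivalents selected above are "$STRIP -S" and "$STRIP -x".
+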
+
+# _LT_SYS_DYNAMIC_LINKER([TAG])
+# -----------------------------
+# PORTME Fill in your ld.so characteristics
+m4_defun([_LT_SYS_DYNAMIC_LINKER],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_OBJDUMP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+AC_MSG_CHECKING([dynamic linker characteristics])
+m4_if([$1],
+	[], [
+if test "$GCC" = yes; then
+  case $host_os in
+    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+    *) lt_awk_arg="/^libraries:/" ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;;
+    *) lt_sed_strip_eq="s,=/,/,g" ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # if the path contains ";" then we assume it to be the separator
+    # otherwise default to the standard path separator (i.e. ":") - it is
+    # assumed that no part of a normal pathname contains ";" but that should
+    # be okay in the real world where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # Ok, now we have the path, separated by spaces, we can step through it
+  # and add multilib dir if necessary.
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+    else
+      test -d "$lt_sys_path" && \
+	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+  lt_foo="";
+  lt_count=0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo="/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[[lt_foo]]++; }
+  if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
+}'`
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's,/\([[A-Za-z]]:\),\1,g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi])
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
+case $host_os in
+aix3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='${libname}${release}${shared_ext}$major'
+  ;;
+
+aix[[4-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test "$host_cpu" = ia64; then
+    # AIX 5 supports IA64
+    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependence libraries.  The import file would start with
+    # the line `#! .'.  This would cause the generated library to
+    # depend on `.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[[01]] | aix4.[[01]].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+	   echo ' yes '
+	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+	:
+      else
+	can_build_shared=no
+      fi
+      ;;
+    esac
+    # AIX (on Power*) has no versioning support, so currently we cannot hardcode the
+    # correct soname into an executable. Versioning support could probably be added
+    # to collect2, in which case additional links would be useful in the future.
+    if test "$aix_use_runtimelinking" = yes; then
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    else
+      # We preserve .a as extension for shared libraries through AIX4.2
+      # and later when we are not doing run time linking.
+      library_names_spec='${libname}${release}.a $libname.a'
+      soname_spec='${libname}${release}${shared_ext}$major'
+    fi
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='${libname}${shared_ext}'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[[45]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # the default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=".dll"
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$cc_basename in
+  yes,*)
+    # gcc
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+m4_if([$1], [],[
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"])
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    esac
+    dynamic_linker='Win32 ld.exe'
+    ;;
+
+  *,cl*)
+    # Native MSVC
+    libname_spec='$name'
+    soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+    library_names_spec='${libname}.dll.lib'
+
+    case $build_os in
+    mingw*)
+      sys_lib_search_path_spec=
+      lt_save_ifs=$IFS
+      IFS=';'
+      for lt_path in $LIB
+      do
+        IFS=$lt_save_ifs
+        # Let DOS variable expansion print the short 8.3 style file name.
+        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+      done
+      IFS=$lt_save_ifs
+      # Convert to MSYS style.
+      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
+      ;;
+    cygwin*)
+      # Convert to unix form, then to dos form, then back to unix form
+      # but this time dos style (no spaces!) so that the unix form looks
+      # like /cygdrive/c/PROGRA~1:/cygdr...
+      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      ;;
+    *)
+      sys_lib_search_path_spec="$LIB"
+      if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
+        # It is most probably a Windows format PATH.
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+      else
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      fi
+      # FIXME: find the short name or the path components, as spaces are
+      # common. (e.g. "Program Files" -> "PROGRA~1")
+      ;;
+    esac
+
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+    dynamic_linker='Win32 link.exe'
+    ;;
+
+  *)
+    # Assume MSVC wrapper
+    library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
+    dynamic_linker='Win32 ld.exe'
+    ;;
+  esac
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+  soname_spec='${libname}${release}${major}$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+m4_if([$1], [],[
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[[23]].*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2.*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[[01]]* | freebsdelf3.[[01]]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
+  freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+gnu*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+haiku*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    if test "X$HPUX_IA64_MODE" = X32; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+    fi
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, or
+  # fails outright, so set the mode after install and also override it
+  # atomically at install time:
+  postinstall_cmds='chmod 555 $lib'
+  install_override_mode=555
+  ;;
+
+interix[[3-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+	if test "$lt_cv_prog_gnu_ld" = yes; then
+		version_type=linux # correct to gnu/linux during the next big refactor
+	else
+		version_type=irix
+	fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld are patched to set DT_RUNPATH
+  AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath],
+    [lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
+	 LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
+    AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+      [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
+	 [lt_cv_shlibpath_overrides_runpath=yes])])
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+    ])
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Append ld.so.conf contents to the search path
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  Since that test was broken with cross compilers,
+  # since most powerpc-linux boxes support dynamic linking these days, and
+  # since people can always --disable-shared, the test was removed and we
+  # now assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec="/usr/lib"
+  need_lib_prefix=no
+  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+  case $host_os in
+    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
+    *)				need_version=no  ;;
+  esac
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    case $host_os in
+      openbsd2.[[89]] | openbsd2.[[89]].*)
+	shlibpath_overrides_runpath=no
+	;;
+      *)
+	shlibpath_overrides_runpath=yes
+	;;
+      esac
+  else
+    shlibpath_overrides_runpath=yes
+  fi
+  ;;
+
+os2*)
+  libname_spec='$name'
+  shrext_cmds=".dll"
+  need_lib_prefix=no
+  library_names_spec='$libname${shared_ext} $libname.a'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=LIBPATH
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test "$with_gnu_ld" = yes; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec ;then
+    version_type=linux # correct to gnu/linux during the next big refactor
+    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+    soname_spec='$libname${shared_ext}.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=freebsd-elf
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test "$with_gnu_ld" = yes; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+	;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+AC_MSG_RESULT([$dynamic_linker])
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+_LT_DECL([], [variables_saved_for_relink], [1],
+    [Variables whose values should be saved in libtool wrapper scripts and
+    restored at link time])
+_LT_DECL([], [need_lib_prefix], [0],
+    [Do we need the "lib" prefix for modules?])
+_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
+_LT_DECL([], [version_type], [0], [Library versioning type])
+_LT_DECL([], [runpath_var], [0],  [Shared library runtime path variable])
+_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
+_LT_DECL([], [shlibpath_overrides_runpath], [0],
+    [Is shlibpath searched before the hard-coded library search path?])
+_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
+_LT_DECL([], [library_names_spec], [1],
+    [[List of archive names.  First name is the real one, the rest are links.
+    The last name is the one that the linker finds with -lNAME]])
+_LT_DECL([], [soname_spec], [1],
+    [[The coded name of the library, if different from the real name]])
+_LT_DECL([], [install_override_mode], [1],
+    [Permission mode override for installation of shared libraries])
+_LT_DECL([], [postinstall_cmds], [2],
+    [Command to use after installation of a shared archive])
+_LT_DECL([], [postuninstall_cmds], [2],
+    [Command to use after uninstallation of a shared archive])
+_LT_DECL([], [finish_cmds], [2],
+    [Commands used to finish a libtool library installation in a directory])
+_LT_DECL([], [finish_eval], [1],
+    [[As "finish_cmds", except a single script fragment to be evaled but
+    not shown]])
+_LT_DECL([], [hardcode_into_libs], [0],
+    [Whether we should hardcode library paths into libraries])
+_LT_DECL([], [sys_lib_search_path_spec], [2],
+    [Compile-time system search path for libraries])
+_LT_DECL([], [sys_lib_dlsearch_path_spec], [2],
+    [Run-time system search path for libraries])
+])# _LT_SYS_DYNAMIC_LINKER
+
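+# Editor's sketch (not part of libtool): the *_spec strings above are shell
+# templates that the generated libtool script evaluates once the naming
+# variables are known.  For the GNU/Linux branch, with hypothetical values
+# libname=libfoo, release='', shared_ext=.so, major=.1 and versuffix=.1.2.3,
+# the templates expand as:
+#
+#   eval library_names=\"$library_names_spec\"
+#   # -> "libfoo.so.1.2.3 libfoo.so.1 libfoo.so"
+#   #    (real file, SONAME link, linker-name link)
+#   eval soname=\"$soname_spec\"
+#   # -> "libfoo.so.1"
+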
+
+# _LT_PATH_TOOL_PREFIX(TOOL)
+# --------------------------
+# find a file program which can recognize a shared library
+AC_DEFUN([_LT_PATH_TOOL_PREFIX],
+[m4_require([_LT_DECL_EGREP])dnl
+AC_MSG_CHECKING([for $1])
+AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
+[case $MAGIC_CMD in
+[[\\/*] |  ?:[\\/]*])
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+dnl $ac_dummy forces splitting on constant user-supplied paths.
+dnl POSIX.2 word splitting is done only on the output of word expansions,
+dnl not every word.  This closes a longstanding sh security hole.
+  ac_dummy="m4_if([$2], , $PATH, [$2])"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/$1; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/$1"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac])
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  AC_MSG_RESULT($MAGIC_CMD)
+else
+  AC_MSG_RESULT(no)
+fi
+_LT_DECL([], [MAGIC_CMD], [0],
+	 [Used to examine libraries when file_magic_cmd begins with "file"])dnl
+])# _LT_PATH_TOOL_PREFIX
+
+# Old name:
+AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
+
+
+# _LT_PATH_MAGIC
+# --------------
+# find a file program which can recognize a shared library
+m4_defun([_LT_PATH_MAGIC],
+[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
+  else
+    MAGIC_CMD=:
+  fi
+fi
+])# _LT_PATH_MAGIC
+
+
+# LT_PATH_LD
+# ----------
+# find the pathname to the GNU or non-GNU linker
+AC_DEFUN([LT_PATH_LD],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PROG_ECHO_BACKSLASH])dnl
+
+AC_ARG_WITH([gnu-ld],
+    [AS_HELP_STRING([--with-gnu-ld],
+	[assume the C compiler uses GNU ld @<:@default=no@:>@])],
+    [test "$withval" = no || with_gnu_ld=yes],
+    [with_gnu_ld=no])dnl
+
+ac_prog=ld
+if test "$GCC" = yes; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  AC_MSG_CHECKING([for ld used by $CC])
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [[\\/]]* | ?:[[\\/]]*)
+      re_direlt='/[[^/]][[^/]]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
+      test -z "$LD" && LD="$ac_prog"
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test "$with_gnu_ld" = yes; then
+  AC_MSG_CHECKING([for GNU ld])
+else
+  AC_MSG_CHECKING([for non-GNU ld])
+fi
+AC_CACHE_VAL(lt_cv_path_LD,
+[if test -z "$LD"; then
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD="$ac_dir/$ac_prog"
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+	test "$with_gnu_ld" != no && break
+	;;
+      *)
+	test "$with_gnu_ld" != yes && break
+	;;
+      esac
+    fi
+  done
+  IFS="$lt_save_ifs"
+else
+  lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi])
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+  AC_MSG_RESULT($LD)
+else
+  AC_MSG_RESULT(no)
+fi
+test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH])
+_LT_PATH_LD_GNU
+AC_SUBST([LD])
+
+_LT_TAGDECL([], [LD], [1], [The linker used to build libraries])
+])# LT_PATH_LD
+
+# Old names:
+AU_ALIAS([AM_PROG_LD], [LT_PATH_LD])
+AU_ALIAS([AC_PROG_LD], [LT_PATH_LD])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_LD], [])
+dnl AC_DEFUN([AC_PROG_LD], [])
+
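+# Editor's sketch (not part of libtool): LT_PATH_LD is normally pulled in
+# indirectly via LT_INIT, but a configure.ac that needs the linker itself
+# can invoke it directly; afterwards $LD names the selected linker and
+# $with_gnu_ld records whether it is GNU ld:
+#
+#   LT_PATH_LD
+#   AS_IF([test "x$with_gnu_ld" = xyes],
+#     [AC_MSG_NOTICE([using GNU ld: $LD])],
+#     [AC_MSG_NOTICE([using non-GNU ld: $LD])])
+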
+
+# _LT_PATH_LD_GNU
+# ---------------
+m4_defun([_LT_PATH_LD_GNU],
+[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld,
+[# I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac])
+with_gnu_ld=$lt_cv_prog_gnu_ld
+])# _LT_PATH_LD_GNU
+
+
+# _LT_CMD_RELOAD
+# --------------
+# find reload flag for linker
+#   -- PORTME Some linkers may need a different reload flag.
+m4_defun([_LT_CMD_RELOAD],
+[AC_CACHE_CHECK([for $LD option to reload object files],
+  lt_cv_ld_reload_flag,
+  [lt_cv_ld_reload_flag='-r'])
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    if test "$GCC" != yes; then
+      reload_cmds=false
+    fi
+    ;;
+  darwin*)
+    if test "$GCC" = yes; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
+_LT_TAGDECL([], [reload_flag], [1], [How to create reloadable object files])dnl
+_LT_TAGDECL([], [reload_cmds], [2])dnl
+])# _LT_CMD_RELOAD
+
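+# Editor's sketch (not part of libtool): with the default '-r' reload flag
+# the command template above reduces to an incremental (relocatable) link,
+# e.g. for two hypothetical objects:
+#
+#   $LD -r -o combined.o foo.o bar.o
+#
+# On Darwin with GCC the template instead goes through the compiler driver
+# ($LTCC $LTCFLAGS -nostdlib -Wl,-r ...), since invoking ld directly is
+# discouraged there.
+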
+
+# _LT_CHECK_MAGIC_METHOD
+# ----------------------
+# how to check for library dependencies
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_MAGIC_METHOD],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+AC_CACHE_CHECK([how to recognize dependent libraries],
+lt_cv_deplibs_check_method,
+[lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# `unknown' -- same as none, but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making test program.
+# 'file_magic [[regex]]' -- check by looking for files in library path
+# which responds to the $file_magic_cmd with a given extended regex.
+# If you have `file' or equivalent on your system and you're not sure
+# whether `pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[[4-9]]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[[45]]*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]']
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[[3-9]]*)
+  # PIC code is broken on Interix 3.x; that's why it's |\.a, not |_pic\.a, here
+  lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+])
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+  case $host_os in
+  mingw* | pw32*)
+    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+      want_nocaseglob=yes
+    else
+      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
+    fi
+    ;;
+  esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+_LT_DECL([], [deplibs_check_method], [1],
+    [Method to check whether dependent libraries are shared objects])
+_LT_DECL([], [file_magic_cmd], [1],
+    [Command to use when deplibs_check_method = "file_magic"])
+_LT_DECL([], [file_magic_glob], [1],
+    [How to find potential files when deplibs_check_method = "file_magic"])
+_LT_DECL([], [want_nocaseglob], [1],
+    [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
+])# _LT_CHECK_MAGIC_METHOD
+
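+# Editor's sketch (not part of libtool): for a 'file_magic' method the
+# generated libtool script applies $file_magic_cmd to each candidate library
+# and greps the output against the stored regex.  By hand, using the
+# bsdi-style entry above as an example:
+#
+#   file_magic_regex='ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
+#   if /usr/bin/file -L /shlib/libc.so | $EGREP "$file_magic_regex" >/dev/null; then
+#     echo "recognized as a shared library dependency"
+#   fi
+#
+# 'pass_all' skips this filtering entirely and trusts the linker instead.
+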
+
+# LT_PATH_NM
+# ----------
+# find the pathname to a BSD- or MS-compatible name lister
+AC_DEFUN([LT_PATH_NM],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
+[if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM="$NM"
+else
+  lt_nm_to_check="${ac_tool_prefix}nm"
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS="$lt_save_ifs"
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm="$ac_dir/$lt_tmp_nm"
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+	# Check to see if the nm accepts a BSD-compat flag.
+	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
+	#   nm: unknown option "B" ignored
+	# Tru64's nm complains that /dev/null is an invalid object file
+	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+	*/dev/null* | *'Invalid file or object type'*)
+	  lt_cv_path_NM="$tmp_nm -B"
+	  break
+	  ;;
+	*)
+	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+	  */dev/null*)
+	    lt_cv_path_NM="$tmp_nm -p"
+	    break
+	    ;;
+	  *)
+	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+	    continue # so that we can try to find one that supports BSD flags
+	    ;;
+	  esac
+	  ;;
+	esac
+      fi
+    done
+    IFS="$lt_save_ifs"
+  done
+  : ${lt_cv_path_NM=no}
+fi])
+if test "$lt_cv_path_NM" != "no"; then
+  NM="$lt_cv_path_NM"
+else
+  # Didn't find any BSD-compatible name lister; look for dumpbin.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :)
+    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+  AC_SUBST([DUMPBIN])
+  if test "$DUMPBIN" != ":"; then
+    NM="$DUMPBIN"
+  fi
+fi
+test -z "$NM" && NM=nm
+AC_SUBST([NM])
+_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl
+
+AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
+  [lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD)
+  cat conftest.out >&AS_MESSAGE_LOG_FD
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -f conftest*])
+])# LT_PATH_NM
+
+# Old names:
+AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
+AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_NM], [])
+dnl AC_DEFUN([AC_PROG_NM], [])
+
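+# Editor's sketch (not part of libtool): the interface probe above,
+# condensed.  A BSD-style nm prints one-letter symbol type codes, whereas MS
+# dumpbin prints an "External" column, which is what the grep keys on:
+#
+#   echo 'int some_variable = 0;' > conftest.c
+#   $CC -c conftest.c
+#   if $NM conftest.o | grep 'External.*some_variable' >/dev/null; then
+#     echo "MS dumpbin interface"
+#   else
+#     echo "BSD nm interface"
+#   fi
+#   rm -f conftest.c conftest.o
+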
+# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+# --------------------------------
+# how to determine the name of the shared library
+# associated with a specific link library.
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+m4_require([_LT_DECL_DLLTOOL])
+AC_CACHE_CHECK([how to associate runtime and link libraries],
+lt_cv_sharedlib_from_linklib_cmd,
+[lt_cv_sharedlib_from_linklib_cmd='unknown'
+
+case $host_os in
+cygwin* | mingw* | pw32* | cegcc*)
+  # two different shell functions defined in ltmain.sh
+  # decide which to use based on capabilities of $DLLTOOL
+  case `$DLLTOOL --help 2>&1` in
+  *--identify-strict*)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
+    ;;
+  *)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
+    ;;
+  esac
+  ;;
+*)
+  # fallback: assume linklib IS sharedlib
+  lt_cv_sharedlib_from_linklib_cmd="$ECHO"
+  ;;
+esac
+])
+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
+
+_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
+    [Command to associate shared and link libraries])
+])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+
+
+# _LT_PATH_MANIFEST_TOOL
+# ----------------------
+# locate the manifest tool
+m4_defun([_LT_PATH_MANIFEST_TOOL],
+[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
+AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
+  [lt_cv_path_mainfest_tool=no
+  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
+  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
+    lt_cv_path_mainfest_tool=yes
+  fi
+  rm -f conftest*])
+if test "x$lt_cv_path_mainfest_tool" != xyes; then
+  MANIFEST_TOOL=:
+fi
+_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
+])# _LT_PATH_MANIFEST_TOOL
+
+
+# LT_LIB_M
+# --------
+# check for math library
+AC_DEFUN([LT_LIB_M],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+LIBM=
+case $host in
+*-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*)
+  # These systems don't have libm, or don't need it
+  ;;
+*-ncr-sysv4.3*)
+  AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw")
+  AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
+  ;;
+*)
+  AC_CHECK_LIB(m, cos, LIBM="-lm")
+  ;;
+esac
+AC_SUBST([LIBM])
+])# LT_LIB_M
+
+# Old name:
+AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_CHECK_LIBM], [])
+
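+# Editor's sketch (not part of libtool): typical consumption of the LIBM
+# substitution from a package's own build files ("libfoo" and libfoo_la are
+# hypothetical names used only for illustration):
+#
+#   # configure.ac
+#   LT_LIB_M
+#
+#   # Makefile.am
+#   libfoo_la_LIBADD = $(LIBM)
+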
+
+# _LT_COMPILER_NO_RTTI([TAGNAME])
+# -------------------------------
+m4_defun([_LT_COMPILER_NO_RTTI],
+[m4_require([_LT_TAG_COMPILER])dnl
+
+_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+
+if test "$GCC" = yes; then
+  case $cc_basename in
+  nvcc*)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;;
+  esac
+
+  _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
+    lt_cv_prog_compiler_rtti_exceptions,
+    [-fno-rtti -fno-exceptions], [],
+    [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
+fi
+_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
+	[Compiler flag to turn off builtin functions])
+])# _LT_COMPILER_NO_RTTI
+
+
+# _LT_CMD_GLOBAL_SYMBOLS
+# ----------------------
+m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+AC_MSG_CHECKING([command to parse $NM output from $compiler object])
+AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
+[
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[[BCDEGRST]]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[[BCDT]]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[[ABCDGISTW]]'
+  ;;
+hpux*)
+  if test "$host_cpu" = ia64; then
+    symcode='[[ABCDEGRST]]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[[BCDEGRST]]'
+  ;;
+osf*)
+  symcode='[[BCDEGQRST]]'
+  ;;
+solaris*)
+  symcode='[[BDRT]]'
+  ;;
+sco3.2v5*)
+  symcode='[[DT]]'
+  ;;
+sysv4.2uw2*)
+  symcode='[[DT]]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[[ABDT]]'
+  ;;
+sysv4)
+  symcode='[[DFNSTU]]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[[ABCDGIRSTW]]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
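+
+# For example, a pipe output line of the form "T nm_test_func nm_test_func"
+# (symbol code, raw symbol, C symbol; the pipe itself is assembled below) becomes
+#   extern int nm_test_func();
+# under lt_cv_sys_global_symbol_to_cdecl, and
+#   {"nm_test_func", (void *) &nm_test_func},
+# under lt_cv_sys_global_symbol_to_c_name_address.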
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function
+    # and D for any global variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK ['"\
+"     {last_section=section; section=\$ 3};"\
+"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx]"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[	 ]]\($symcode$symcode*\)[[	 ]][[	 ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
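+  # If the pipe works, running $NM on the object compiled from this program
+  # and filtering the result through lt_cv_sys_global_symbol_pipe should give
+  # one line ending in " nm_test_var" and one ending in " nm_test_func"; that
+  # is exactly what the greps below verify.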
+  if AC_TRY_EVAL(ac_compile); then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+	mv -f "$nlist"T "$nlist"
+      else
+	rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+	  cat <<_LT_EOF > conftest.$ac_ext
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT@&t@_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT@&t@_DLSYM_CONST
+#else
+# define LT@&t@_DLSYM_CONST const
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+	  # Now generate the symbol file.
+	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+	  cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+LT@&t@_DLSYM_CONST struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[[]] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+	  cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+	  # Now try linking the two files.
+	  mv conftest.$ac_objext conftstm.$ac_objext
+	  lt_globsym_save_LIBS=$LIBS
+	  lt_globsym_save_CFLAGS=$CFLAGS
+	  LIBS="conftstm.$ac_objext"
+	  CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
+	  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
+	    pipe_works=yes
+	  fi
+	  LIBS=$lt_globsym_save_LIBS
+	  CFLAGS=$lt_globsym_save_CFLAGS
+	else
+	  echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
+	fi
+      else
+	echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
+    fi
+  else
+    echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test "$pipe_works" = yes; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+])
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  AC_MSG_RESULT(failed)
+else
+  AC_MSG_RESULT(ok)
+fi
+
+# Response file support.
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+  nm_file_list_spec='@'
+elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
+  nm_file_list_spec='@'
+fi
+
+_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
+    [Take the output of nm and produce a listing of raw symbols and C names])
+_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
+    [Transform the output of nm in a proper C declaration])
+_LT_DECL([global_symbol_to_c_name_address],
+    [lt_cv_sys_global_symbol_to_c_name_address], [1],
+    [Transform the output of nm in a C name address pair])
+_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
+    [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
+    [Transform the output of nm in a C name address pair when lib prefix is needed])
+_LT_DECL([], [nm_file_list_spec], [1],
+    [Specify filename containing input files for $NM])
+]) # _LT_CMD_GLOBAL_SYMBOLS
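+
+# The pipe and transforms declared above are what the generated libtool script
+# later uses both to build export-symbol lists (see the export_symbols_cmds
+# settings below) and to emit the preloaded-symbol tables used for -dlpreopen
+# support.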
+
+
+# _LT_COMPILER_PIC([TAGNAME])
+# ---------------------------
+m4_defun([_LT_COMPILER_PIC],
+[m4_require([_LT_TAG_COMPILER])dnl
+_LT_TAGVAR(lt_prog_compiler_wl, $1)=
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+_LT_TAGVAR(lt_prog_compiler_static, $1)=
+
+m4_if([$1], [CXX], [
+  # C++ specific cases for pic, static, wl, etc.
+  if test "$GXX" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+    aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+    mingw* | cygwin* | os2* | pw32* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+    *djgpp*)
+      # DJGPP does not support shared libraries at all
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+      ;;
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+    *qnx* | *nto*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will core dump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+  else
+    case $host_os in
+      aix[[4-9]]*)
+	# All AIX code is PIC.
+	if test "$host_cpu" = ia64; then
+	  # AIX 5 now supports IA64 processor
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	else
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+	fi
+	;;
+      chorus*)
+	case $cc_basename in
+	cxch68*)
+	  # Green Hills C++ Compiler
+	  # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
+	  ;;
+	esac
+	;;
+      mingw* | cygwin* | os2* | pw32* | cegcc*)
+	# This hack is so that the source file can tell whether it is being
+	# built for inclusion in a dll (and should export symbols for example).
+	m4_if([$1], [GCJ], [],
+	  [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+	;;
+      dgux*)
+	case $cc_basename in
+	  ec++*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  ghcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      freebsd* | dragonfly*)
+	# FreeBSD uses GNU C++
+	;;
+      hpux9* | hpux10* | hpux11*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    if test "$host_cpu" != ia64; then
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	    fi
+	    ;;
+	  aCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    case $host_cpu in
+	    hppa*64*|ia64*)
+	      # +Z the default
+	      ;;
+	    *)
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	      ;;
+	    esac
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      interix*)
+	# This is c89, which is MS Visual C++ (no shared libs)
+	# Does anyone want to do a port?
+	;;
+      irix5* | irix6* | nonstopux*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    # CC pic flag -KPIC is the default.
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+	case $cc_basename in
+	  KCC*)
+	    # KAI C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    ;;
+	  ecpc* )
+	    # old Intel C++ for x86_64 which still supported -KPIC.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  icpc* )
+	    # Intel C++, used to be incompatible with GCC.
+	    # ICC 10 doesn't accept -KPIC any more.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  pgCC* | pgcpp*)
+	    # Portland Group C++ compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  cxx*)
+	    # Compaq C++
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*)
+	    # IBM XL 8.0, 9.0 on PPC and BlueGene
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+      lynxos*)
+	;;
+      m88k*)
+	;;
+      mvs*)
+	case $cc_basename in
+	  cxx*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      netbsd*)
+	;;
+      *qnx* | *nto*)
+        # QNX uses GNU C++, but we need to define the -shared option too,
+        # otherwise it will core dump.
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+        ;;
+      osf3* | osf4* | osf5*)
+	case $cc_basename in
+	  KCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    ;;
+	  RCC*)
+	    # Rational C++ 2.4.1
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  cxx*)
+	    # Digital/Compaq C++
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      psos*)
+	;;
+      solaris*)
+	case $cc_basename in
+	  CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	    ;;
+	  gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sunos4*)
+	case $cc_basename in
+	  CC*)
+	    # Sun C++ 4.x
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  lcc*)
+	    # Lucid
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	esac
+	;;
+      tandem*)
+	case $cc_basename in
+	  NCC*)
+	    # NonStop-UX NCC 3.20
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      vxworks*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+	;;
+    esac
+  fi
+],
+[
+  if test "$GCC" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will core dump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker '
+      if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)"
+      fi
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      else
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC (with -KPIC) is the default.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu)
+      case $cc_basename in
+      # old Intel for x86_64 which still supported -KPIC.
+      ecc*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+	;;
+      nagfor*)
+	# NAG Fortran compiler
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+	# which looks to be a dead project)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+        ;;
+      ccc*)
+        _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+        # All Alpha code is PIC.
+        _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	;;
+      *)
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*)
+	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
+	  ;;
+	*Sun\ F* | *Sun*Fortran*)
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	  ;;
+	*Sun\ C*)
+	  # Sun C 5.9
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  ;;
+        *Intel*\ [[CF]]*Compiler*)
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	  ;;
+	*Portland\ Group*)
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  ;;
+	esac
+	;;
+      esac
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will core dump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # All OSF/1 code is PIC.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    rdos*)
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec ;then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    unicos*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+    esac
+  fi
+])
+case $host_os in
+  # For platforms which do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+    ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+    ;;
+esac
+
+AC_CACHE_CHECK([for $compiler option to produce PIC],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+  _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
+    [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
+    [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
+    [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
+     "" | " "*) ;;
+     *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
+     esac],
+    [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+     _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
+fi
+_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+	[Additional compiler flags for building library objects])
+
+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+	[How to pass a linker flag through the compiler])
+#
+# Check to make sure the static flag actually works.
+#
+wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
+_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
+  _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
+  $lt_tmp_static_flag,
+  [],
+  [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
+_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+	[Compiler flag to prevent dynamic linking])
+])# _LT_COMPILER_PIC
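+
+# As a rough illustration of the outcome: for a typical GCC-on-GNU/Linux C
+# configuration the macro above ends up recording
+#   lt_prog_compiler_wl='-Wl,'
+#   lt_prog_compiler_pic=' -fPIC -DPIC'
+#   lt_prog_compiler_static='-static'
+# where -DPIC is appended only for the C and CXX tags and the PIC flag is kept
+# only if the compile-time check above confirms that it actually works.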
+
+
+# _LT_LINKER_SHLIBS([TAGNAME])
+# ----------------------------
+# See if the linker supports building shared libraries.
+m4_defun([_LT_LINKER_SHLIBS],
+[AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+m4_if([$1], [CXX], [
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  case $host_os in
+  aix[[4-9]]*)
+    # If we're using GNU nm, then we don't want the "-C" option.
+    # With AIX nm, -C means demangle; with GNU nm, it means don't demangle.
+    # Also, AIX nm treats weak defined symbols like other global defined
+    # symbols, whereas GNU nm marks them as "W".
+    if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    else
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    fi
+    ;;
+  pw32*)
+    _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
+    ;;
+  cygwin* | mingw* | cegcc*)
+    case $cc_basename in
+    cl*)
+      _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+      ;;
+    *)
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+      ;;
+    esac
+    ;;
+  *)
+    _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+    ;;
+  esac
+], [
+  runpath_var=
+  _LT_TAGVAR(allow_undefined_flag, $1)=
+  _LT_TAGVAR(always_export_symbols, $1)=no
+  _LT_TAGVAR(archive_cmds, $1)=
+  _LT_TAGVAR(archive_expsym_cmds, $1)=
+  _LT_TAGVAR(compiler_needs_object, $1)=no
+  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+  _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(hardcode_automatic, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+  _LT_TAGVAR(hardcode_minus_L, $1)=no
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  _LT_TAGVAR(inherit_rpath, $1)=no
+  _LT_TAGVAR(link_all_deplibs, $1)=unknown
+  _LT_TAGVAR(module_cmds, $1)=
+  _LT_TAGVAR(module_expsym_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_new_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
+  _LT_TAGVAR(thread_safe_flag_spec, $1)=
+  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+  # include_expsyms should be a list of space-separated symbols to be *always*
+  # included in the symbol list
+  _LT_TAGVAR(include_expsyms, $1)=
+  # exclude_expsyms can be an extended regexp of symbols to exclude
+  # it will be wrapped by ` (' and `)$', so one must not match beginning or
+  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
+  # as well as any symbol that contains `d'.
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+dnl Note also adjust exclude_expsyms for C++ above.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test "$GCC" != yes; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd*)
+    with_gnu_ld=no
+    ;;
+  esac
+
+  _LT_TAGVAR(ld_shlibs, $1)=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test "$with_gnu_ld" = yes; then
+    case $host_os in
+      aix*)
+	# The AIX port of GNU ld has always aspired to compatibility
+	# with the native linker.  However, as the warning in the GNU ld
+	# block says, versions before 2.19.5* couldn't really create working
+	# shared libraries, regardless of the interface used.
+	case `$LD -v 2>&1` in
+	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+	  *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;;
+	  *\ \(GNU\ Binutils\)\ [[3-9]]*) ;;
+	  *)
+	    lt_use_gnu_ld_interface=yes
+	    ;;
+	esac
+	;;
+      *)
+	lt_use_gnu_ld_interface=yes
+	;;
+    esac
+  fi
+
+  if test "$lt_use_gnu_ld_interface" = yes; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='${wl}'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+    # ancient GNU ld didn't support --whole-archive et. al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+    else
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[[3-9]]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test "$host_cpu" != ia64; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	# support --undefined.  This deserves some investigation.  FIXME
+	_LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+      # as there is no search path for DLLs.
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=no
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	# If the export-symbols file already is a .def file (1st line
+	# is EXPORTS), use it as is; otherwise, prepend...
+	_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	  cp $export_symbols $output_objdir/$soname.def;
+	else
+	  echo EXPORTS > $output_objdir/$soname.def;
+	  cat $export_symbols >> $output_objdir/$soname.def;
+	fi~
+	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    haiku*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    interix[[3-9]]*)
+      _LT_TAGVAR(hardcode_direct, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very memory-
+      # consuming and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
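+      # (The expr below takes RANDOM, or the shell PID as a fallback, reduces
+      # it to a multiple of 262144 bytes, i.e. 256 KiB, and adds 0x50000000,
+      # which yields a base in the advertised 0x50000000..0x6FFC0000 window.)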
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test "$host_os" = linux-dietlibc; then
+	case $cc_basename in
+	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
+	esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+	 && test "$tmp_diet" = no
+      then
+	tmp_addflag=' $pic_flag'
+	tmp_sharedflag='-shared'
+	case $cc_basename,$host_cpu in
+        pgcc*)				# Portland Group C compiler
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag'
+	  ;;
+	pgf77* | pgf90* | pgf95* | pgfortran*)
+					# Portland Group f77 and f90 compilers
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag -Mnomain' ;;
+	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
+	  tmp_addflag=' -i_dynamic' ;;
+	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
+	  tmp_addflag=' -i_dynamic -nofor_main' ;;
+	ifc* | ifort*)			# Intel Fortran compiler
+	  tmp_addflag=' -nofor_main' ;;
+	lf95*)				# Lahey Fortran 8.1
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+	  tmp_sharedflag='--shared' ;;
+	xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+	  tmp_sharedflag='-qmkshrobj'
+	  tmp_addflag= ;;
+	nvcc*)	# Cuda Compiler Driver 2.2
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  ;;
+	esac
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ C*)			# Sun C 5.9
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  tmp_sharedflag='-G' ;;
+	*Sun\ F*)			# Sun Fortran 8.3
+	  tmp_sharedflag='-G' ;;
+	esac
+	_LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+        if test "x$supports_anon_versioning" = xyes; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	    echo "local: *; };" >> $output_objdir/$libname.ver~
+	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+        fi
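+        # For reference, the $output_objdir/$libname.ver file generated above
+        # is an anonymous GNU ld version script of roughly this shape:
+        #   { global:
+        #     some_exported_symbol;
+        #     another_exported_symbol;
+        #   local: *; };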
+
+	case $cc_basename in
+	xlf* | bgf* | bgxlf* | mpixlf*)
+	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+	  if test "x$supports_anon_versioning" = xyes; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	      echo "local: *; };" >> $output_objdir/$libname.ver~
+	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+	  fi
+	  ;;
+	esac
+      else
+        _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+	wlarc=
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+	;;
+	*)
+	  # For security reasons, it is highly recommended that you always
+	  # use absolute paths for naming shared libraries, and exclude the
+	  # DT_RUNPATH tag from executables and libraries.  But doing so
+	  # requires that you compile everything twice, which is a pain.
+	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+	  else
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	  fi
+	;;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+    esac
+
+    if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then
+      runpath_var=
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+	# Neither direct hardcoding nor static linking is supported with a
+	# broken collect2.
+	_LT_TAGVAR(hardcode_direct, $1)=unsupported
+      fi
+      ;;
+
+    aix[[4-9]]*)
+      if test "$host_cpu" = ia64; then
+	# On IA64, the linker does run time linking by default, so we don't
+	# have to do anything special.
+	aix_use_runtimelinking=no
+	exp_sym_flag='-Bexport'
+	no_entry_flag=""
+      else
+	# If we're using GNU nm, then we don't want the "-C" option.
+	# With AIX nm, -C means demangle; with GNU nm, it means don't demangle.
+	# Also, AIX nm treats weak defined symbols like other global
+	# defined symbols, whereas GNU nm marks them as "W".
+	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	else
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	fi
+	aix_use_runtimelinking=no
+
+	# Test if we are trying to use run time linking or normal
+	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
+	# need to do runtime linking.
+	case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	  for ld_flag in $LDFLAGS; do
+	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+	    aix_use_runtimelinking=yes
+	    break
+	  fi
+	  done
+	  ;;
+	esac
+
+	exp_sym_flag='-bexport'
+	no_entry_flag='-bnoentry'
+      fi
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow", add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      _LT_TAGVAR(archive_cmds, $1)=''
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+      if test "$GCC" = yes; then
+	case $host_os in aix4.[[012]]|aix4.[[012]].*)
+	# We only want to do this on AIX 4.2 and lower; the check
+	# below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	  # We have reworked collect2
+	  :
+	  else
+	  # We have old collect2
+	  _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	  # It fails to find uninstalled libraries when the uninstalled
+	  # path is not listed in the libpath.  Setting hardcode_minus_L
+	  # to unsupported forces relinking
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+	  ;;
+	esac
+	shared_flag='-shared'
+	if test "$aix_use_runtimelinking" = yes; then
+	  shared_flag="$shared_flag "'${wl}-G'
+	fi
+      else
+	# not using gcc
+	if test "$host_cpu" = ia64; then
+	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	# chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+	else
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag='${wl}-G'
+	  else
+	    shared_flag='${wl}-bM:SRE'
+	  fi
+	fi
+      fi
+
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      if test "$aix_use_runtimelinking" = yes; then
+	# Warning - without using the other runtime loading flags (-brtl),
+	# -berok will link without error, but may produce a broken library.
+	_LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        _LT_SYS_MODULE_PATH_AIX([$1])
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+      else
+	if test "$host_cpu" = ia64; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	  _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+	else
+	 # Determine the default libpath from the value encoded in an
+	 # empty executable.
+	 _LT_SYS_MODULE_PATH_AIX([$1])
+	 _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	  # Warning - without using the other run time loading flags,
+	  # -berok will link without error, but may produce a broken library.
+	  _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	  _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	  if test "$with_gnu_ld" = yes; then
+	    # We only use this code for GNU lds that support --whole-archive.
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	  else
+	    # Exported symbols can be pulled into shared objects from archives
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	  fi
+	  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  # This is similar to how AIX traditionally builds its shared libraries.
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+	fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[[45]]*)
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      case $cc_basename in
+      cl*)
+	# Native MSVC
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	_LT_TAGVAR(always_export_symbols, $1)=yes
+	_LT_TAGVAR(file_list_spec, $1)='@'
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	  else
+	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	  fi~
+	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	  linknames='
+	# The linker will not automatically build a static lib if we build a DLL.
+	# _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	_LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+	_LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
+	# Don't use ranlib
+	_LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+	_LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+	  lt_tool_outputfile="@TOOL_OUTPUT@"~
+	  case $lt_outputfile in
+	    *.exe|*.EXE) ;;
+	    *)
+	      lt_outputfile="$lt_outputfile.exe"
+	      lt_tool_outputfile="$lt_tool_outputfile.exe"
+	      ;;
+	  esac~
+	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	    $RM "$lt_outputfile.manifest";
+	  fi'
+	;;
+      *)
+	# Assume MSVC wrapper
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+	# The linker will automatically build a .lib file if we build a DLL.
+	_LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	# FIXME: Should let the user specify the lib program.
+	_LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+	_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	;;
+      esac
+      ;;
+
+    darwin* | rhapsody*)
+      _LT_DARWIN_LINKER_FEATURES($1)
+      ;;
+
+    dgux*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2.*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    hpux9*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+
+      # hardcode_minus_L: Not really in the search PATH,
+      # but as the default location of the library.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      ;;
+
+    hpux10*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# hardcode_minus_L: Not really in the search PATH,
+	# but as the default location of the library.
+	_LT_TAGVAR(hardcode_minus_L, $1)=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	esac
+      else
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	m4_if($1, [], [
+	  # Older versions of the 11.00 compiler do not understand -b yet
+	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+	  _LT_LINKER_OPTION([if $CC understands -b],
+	    _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b],
+	    [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'],
+	    [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])],
+	  [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'])
+	  ;;
+	esac
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	case $host_cpu in
+	hppa*64*|ia64*)
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  ;;
+	*)
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+
+	  # hardcode_minus_L: Not really in the search PATH,
+	  # but as the default location of the library.
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  ;;
+	esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	# Try to use the -exported_symbol ld option, if it does not
+	# work, assume that -exports_file does not work either and
+	# implicitly export all symbols.
+	# This should be the same for all languages, so no per-tag cache variable.
+	AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
+	  [lt_cv_irix_exported_symbol],
+	  [save_LDFLAGS="$LDFLAGS"
+	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+	   AC_LINK_IFELSE(
+	     [AC_LANG_SOURCE(
+	        [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
+			      [C++], [[int foo (void) { return 0; }]],
+			      [Fortran 77], [[
+      subroutine foo
+      end]],
+			      [Fortran], [[
+      subroutine foo
+      end]])])],
+	      [lt_cv_irix_exported_symbol=yes],
+	      [lt_cv_irix_exported_symbol=no])
+           LDFLAGS="$save_LDFLAGS"])
+	if test "$lt_cv_irix_exported_symbol" = yes; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+	fi
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(inherit_rpath, $1)=yes
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd*)
+      if test -f /usr/libexec/ld.so; then
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	else
+	  case $host_os in
+	   openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*)
+	     _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	     ;;
+	   *)
+	     _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	     ;;
+	  esac
+	fi
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    os2*)
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+      _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+      ;;
+
+    osf3*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    osf4* | osf5*)	# as osf3* with the addition of -msym flag
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+	# Both c and cxx compiler support -rpath directly
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+      if test "$GCC" = yes; then
+	wlarc='${wl}'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+	case `$CC -V 2>&1` in
+	*"Compilers 5.0"*)
+	  wlarc=''
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+	  ;;
+	*)
+	  wlarc='${wl}'
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+	  ;;
+	esac
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      case $host_os in
+      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+      *)
+	# The compiler driver will combine and reorder linker options,
+	# but understands `-z linker_flag'.  GCC discards it without `$wl',
+	# but is careful enough not to reorder.
+	# Supported since Solaris 2.6 (maybe 2.5.1?)
+	if test "$GCC" = yes; then
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+	else
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	fi
+	;;
+      esac
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    sunos4*)
+      if test "x$host_vendor" = xsequent; then
+	# Use $CC to link under sequent, because it throws in some extra .o
+	# files that make .init and .fini sections work.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+	sni)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
+	;;
+	siemens)
+	  ## LD is ld; it makes a PLAMLIB.
+	  ## CC just makes a GrossModule.
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+        ;;
+	motorola)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie
+	;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4.3*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	runpath_var=LD_RUN_PATH
+	hardcode_runpath_var=yes
+	_LT_TAGVAR(ld_shlibs, $1)=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We can NOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      _LT_TAGVAR(ld_shlibs, $1)=no
+      ;;
+    esac
+
+    if test x$host_vendor = xsni; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym'
+	;;
+      esac
+    fi
+  fi
+])
+AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
+
+_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
+_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
+_LT_DECL([], [extract_expsyms_cmds], [2],
+    [The commands to extract the exported symbol list from a shared archive])
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
+x|xyes)
+  # Assume -lc should be added
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+
+  if test "$enable_shared" = yes && test "$GCC" = yes; then
+    case $_LT_TAGVAR(archive_cmds, $1) in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc since on some
+      # systems, -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      AC_CACHE_CHECK([whether -lc should be explicitly linked in],
+	[lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1),
+	[$RM conftest*
+	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+	if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
+	  soname=conftest
+	  lib=conftest
+	  libobjs=conftest.$ac_objext
+	  deplibs=
+	  wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
+	  pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
+	  compiler_flags=-v
+	  linker_flags=-v
+	  verstring=
+	  output_objdir=.
+	  libname=conftest
+	  lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
+	  _LT_TAGVAR(allow_undefined_flag, $1)=
+	  if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
+	  then
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	  else
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  fi
+	  _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
+	else
+	  cat conftest.err 1>&5
+	fi
+	$RM conftest*
+	])
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)
+      ;;
+    esac
+  fi
+  ;;
+esac
+
+_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
+    [Whether or not to add -lc for building shared libraries])
+_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
+    [enable_shared_with_static_runtimes], [0],
+    [Whether or not to disallow shared libs when runtime libs are static])
+_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
+    [Compiler flag to allow reflexive dlopens])
+_LT_TAGDECL([], [whole_archive_flag_spec], [1],
+    [Compiler flag to generate shared objects directly from archives])
+_LT_TAGDECL([], [compiler_needs_object], [1],
+    [Whether the compiler copes with passing no objects directly])
+_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
+    [Create an old-style archive from a shared archive])
+_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
+    [Create a temporary old-style archive to link instead of a shared archive])
+_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
+_LT_TAGDECL([], [archive_expsym_cmds], [2])
+_LT_TAGDECL([], [module_cmds], [2],
+    [Commands used to build a loadable module if different from building
+    a shared archive.])
+_LT_TAGDECL([], [module_expsym_cmds], [2])
+_LT_TAGDECL([], [with_gnu_ld], [1],
+    [Whether we are building with GNU ld or not])
+_LT_TAGDECL([], [allow_undefined_flag], [1],
+    [Flag that allows shared libraries with undefined symbols to be built])
+_LT_TAGDECL([], [no_undefined_flag], [1],
+    [Flag that enforces no undefined symbols])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
+    [Flag to hardcode $libdir into a binary during linking.
+    This must work even if $libdir does not exist])
+_LT_TAGDECL([], [hardcode_libdir_separator], [1],
+    [Whether we need a single "-rpath" flag with a separated argument])
+_LT_TAGDECL([], [hardcode_direct], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary])
+_LT_TAGDECL([], [hardcode_direct_absolute], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary and the resulting library dependency is
+    "absolute", i.e impossible to change by setting ${shlibpath_var} if the
+    library is relocated])
+_LT_TAGDECL([], [hardcode_minus_L], [0],
+    [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
+    [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_automatic], [0],
+    [Set to "yes" if building a shared library automatically hardcodes DIR
+    into the library and all subsequent libraries and executables linked
+    against it])
+_LT_TAGDECL([], [inherit_rpath], [0],
+    [Set to yes if linker adds runtime paths of dependent libraries
+    to runtime path list])
+_LT_TAGDECL([], [link_all_deplibs], [0],
+    [Whether libtool must link a program against all its dependency libraries])
+_LT_TAGDECL([], [always_export_symbols], [0],
+    [Set to "yes" if exported symbols are required])
+_LT_TAGDECL([], [export_symbols_cmds], [2],
+    [The commands to list exported symbols])
+_LT_TAGDECL([], [exclude_expsyms], [1],
+    [Symbols that should not be listed in the preloaded symbols])
+_LT_TAGDECL([], [include_expsyms], [1],
+    [Symbols that must always be exported])
+_LT_TAGDECL([], [prelink_cmds], [2],
+    [Commands necessary for linking programs (against libraries) with templates])
+_LT_TAGDECL([], [postlink_cmds], [2],
+    [Commands necessary for finishing linking programs])
+_LT_TAGDECL([], [file_list_spec], [1],
+    [Specify filename containing input files])
+dnl FIXME: Not yet implemented
+dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
+dnl    [Compiler flag to generate thread safe objects])
+])# _LT_LINKER_SHLIBS
+
+
+# _LT_LANG_C_CONFIG([TAG])
+# ------------------------
+# Ensure that the configuration variables for a C compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_C_CONFIG],
+[m4_require([_LT_DECL_EGREP])dnl
+lt_save_CC="$CC"
+AC_LANG_PUSH(C)
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+_LT_TAG_COMPILER
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_SYS_DYNAMIC_LINKER($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+  LT_SYS_DLOPEN_SELF
+  _LT_CMD_STRIPLIB
+
+  # Report which library types will actually be built
+  AC_MSG_CHECKING([if libtool supports shared libraries])
+  AC_MSG_RESULT([$can_build_shared])
+
+  AC_MSG_CHECKING([whether to build shared libraries])
+  test "$can_build_shared" = "no" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test "$enable_shared" = yes && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[[4-9]]*)
+    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+      test "$enable_shared" = yes && enable_static=no
+    fi
+    ;;
+  esac
+  AC_MSG_RESULT([$enable_shared])
+
+  AC_MSG_CHECKING([whether to build static libraries])
+  # Make sure either enable_shared or enable_static is yes.
+  test "$enable_shared" = yes || enable_static=yes
+  AC_MSG_RESULT([$enable_static])
+
+  _LT_CONFIG($1)
+fi
+AC_LANG_POP
+CC="$lt_save_CC"
+])# _LT_LANG_C_CONFIG
+
+
+# _LT_LANG_CXX_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a C++ compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_CXX_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
+    ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
+    (test "X$CXX" != "Xg++"))) ; then
+  AC_PROG_CXXCPP
+else
+  _lt_caught_CXX_error=yes
+fi
+
+AC_LANG_PUSH(C++)
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(compiler_needs_object, $1)=no
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for C++ test sources.
+ac_ext=cpp
+
+# Object file extension for compiled C++ test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the CXX compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_caught_CXX_error" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="int some_variable = 0;"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC=$CC
+  lt_save_CFLAGS=$CFLAGS
+  lt_save_LD=$LD
+  lt_save_GCC=$GCC
+  GCC=$GXX
+  lt_save_with_gnu_ld=$with_gnu_ld
+  lt_save_path_LD=$lt_cv_path_LD
+  if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
+    lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
+  else
+    $as_unset lt_cv_prog_gnu_ld
+  fi
+  if test -n "${lt_cv_path_LDCXX+set}"; then
+    lt_cv_path_LD=$lt_cv_path_LDCXX
+  else
+    $as_unset lt_cv_path_LD
+  fi
+  test -z "${LDCXX+set}" || LD=$LDCXX
+  CC=${CXX-"c++"}
+  CFLAGS=$CXXFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    # We don't want -fno-exception when compiling C++ code, so set the
+    # no_builtin_flag separately
+    if test "$GXX" = yes; then
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
+    else
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+    fi
+
+    if test "$GXX" = yes; then
+      # Set up default GNU C++ configuration
+
+      LT_PATH_LD
+
+      # Check if GNU C++ uses GNU ld as the underlying linker, since the
+      # archiving commands below assume that GNU ld is being used.
+      if test "$with_gnu_ld" = yes; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+        # If archive_cmds runs LD, not CC, wlarc should be empty
+        # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
+        #     investigate it a little bit more. (MM)
+        wlarc='${wl}'
+
+        # ancient GNU ld didn't support --whole-archive et al.
+        if eval "`$CC -print-prog-name=ld` --help 2>&1" |
+	  $GREP 'no-whole-archive' > /dev/null; then
+          _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+        else
+          _LT_TAGVAR(whole_archive_flag_spec, $1)=
+        fi
+      else
+        with_gnu_ld=no
+        wlarc=
+
+        # A generic and very simple default shared library creation
+        # command for GNU C++ for the case where it uses the native
+        # linker, instead of GNU ld.  If possible, this setting should be
+        # overridden to take advantage of the native linker features on
+        # the platform it is being used on.
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+      fi
+
+      # Commands to make compiler produce verbose output that lists
+      # what "hidden" libraries, object files and flags are used when
+      # linking a shared library.
+      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+    else
+      GXX=no
+      with_gnu_ld=no
+      wlarc=
+    fi
+
+    # PORTME: fill in a description of your system's C++ link characteristics
+    AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+    _LT_TAGVAR(ld_shlibs, $1)=yes
+    case $host_os in
+      aix3*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+      aix[[4-9]]*)
+        if test "$host_cpu" = ia64; then
+          # On IA64, the linker does run time linking by default, so we don't
+          # have to do anything special.
+          aix_use_runtimelinking=no
+          exp_sym_flag='-Bexport'
+          no_entry_flag=""
+        else
+          aix_use_runtimelinking=no
+
+          # Test if we are trying to use run time linking or normal
+          # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+          # need to do runtime linking.
+          case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	    for ld_flag in $LDFLAGS; do
+	      case $ld_flag in
+	      *-brtl*)
+	        aix_use_runtimelinking=yes
+	        break
+	        ;;
+	      esac
+	    done
+	    ;;
+          esac
+
+          exp_sym_flag='-bexport'
+          no_entry_flag='-bnoentry'
+        fi
+
+        # When large executables or shared objects are built, AIX ld can
+        # have problems creating the table of contents.  If linking a library
+        # or program results in "error TOC overflow" add -mminimal-toc to
+        # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+        # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+        _LT_TAGVAR(archive_cmds, $1)=''
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+        if test "$GXX" = yes; then
+          case $host_os in aix4.[[012]]|aix4.[[012]].*)
+          # We only want to do this on AIX 4.2 and lower, the check
+          # below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	     strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	    # We have reworked collect2
+	    :
+	  else
+	    # We have old collect2
+	    _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	    # It fails to find uninstalled libraries when the uninstalled
+	    # path is not listed in the libpath.  Setting hardcode_minus_L
+	    # to unsupported forces relinking
+	    _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+          esac
+          shared_flag='-shared'
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag="$shared_flag "'${wl}-G'
+	  fi
+        else
+          # not using gcc
+          if test "$host_cpu" = ia64; then
+	  # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	  # chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+          else
+	    if test "$aix_use_runtimelinking" = yes; then
+	      shared_flag='${wl}-G'
+	    else
+	      shared_flag='${wl}-bM:SRE'
+	    fi
+          fi
+        fi
+
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+        # It seems that -bexpall does not export symbols beginning with
+        # underscore (_), so it is better to generate a list of symbols to
+	# export.
+        _LT_TAGVAR(always_export_symbols, $1)=yes
+        if test "$aix_use_runtimelinking" = yes; then
+          # Warning - without using the other runtime loading flags (-brtl),
+          # -berok will link without error, but may produce a broken library.
+          _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+          # Determine the default libpath from the value encoded in an empty
+          # executable.
+          _LT_SYS_MODULE_PATH_AIX([$1])
+          _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+        else
+          if test "$host_cpu" = ia64; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	    _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+          else
+	    # Determine the default libpath from the value encoded in an
+	    # empty executable.
+	    _LT_SYS_MODULE_PATH_AIX([$1])
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	    # Warning - without using the other run time loading flags,
+	    # -berok will link without error, but may produce a broken library.
+	    _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	    _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	    if test "$with_gnu_ld" = yes; then
+	      # We only use this code for GNU lds that support --whole-archive.
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    else
+	      # Exported symbols can be pulled into shared objects from archives
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	    fi
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	    # This is similar to how AIX traditionally builds its shared
+	    # libraries.
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+          fi
+        fi
+        ;;
+
+      beos*)
+	if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	  # support --undefined.  This deserves some investigation.  FIXME
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      chorus*)
+        case $cc_basename in
+          *)
+	  # FIXME: insert proper C++ library support
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	  ;;
+        esac
+        ;;
+
+      cygwin* | mingw* | pw32* | cegcc*)
+	case $GXX,$cc_basename in
+	,cl* | no,cl*)
+	  # Native MSVC
+	  # hardcode_libdir_flag_spec is actually meaningless, as there is
+	  # no search path for DLLs.
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  _LT_TAGVAR(always_export_symbols, $1)=yes
+	  _LT_TAGVAR(file_list_spec, $1)='@'
+	  # Tell ltmain to make .lib files, not .a files.
+	  libext=lib
+	  # Tell ltmain to make .dll files, not .so files.
+	  shrext_cmds=".dll"
+	  # FIXME: Setting linknames here is a bad hack.
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	      $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	    else
+	      $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	    fi~
+	    $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	    linknames='
+	  # The linker will not automatically build a static lib if we build a DLL.
+	  # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	  # Don't use ranlib
+	  _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+	  _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+	    lt_tool_outputfile="@TOOL_OUTPUT@"~
+	    case $lt_outputfile in
+	      *.exe|*.EXE) ;;
+	      *)
+		lt_outputfile="$lt_outputfile.exe"
+		lt_tool_outputfile="$lt_tool_outputfile.exe"
+		;;
+	    esac~
+	    func_to_tool_file "$lt_outputfile"~
+	    if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	      $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	      $RM "$lt_outputfile.manifest";
+	    fi'
+	  ;;
+	*)
+	  # g++
+	  # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+	  # as there is no search path for DLLs.
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  _LT_TAGVAR(always_export_symbols, $1)=no
+	  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+
+	  if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	    # If the export-symbols file already is a .def file (1st line
+	    # is EXPORTS), use it as is; otherwise, prepend...
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	      cp $export_symbols $output_objdir/$soname.def;
+	    else
+	      echo EXPORTS > $output_objdir/$soname.def;
+	      cat $export_symbols >> $output_objdir/$soname.def;
+	    fi~
+	    $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	  else
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	  fi
+	  ;;
+	esac
+	;;
+      darwin* | rhapsody*)
+        _LT_DARWIN_LINKER_FEATURES($1)
+	;;
+
+      dgux*)
+        case $cc_basename in
+          ec++*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          ghcx*)
+	    # Green Hills C++ Compiler
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      freebsd2.*)
+        # C++ shared libraries reported to be fairly broken before
+	# switch to ELF
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      freebsd-elf*)
+        _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+        ;;
+
+      freebsd* | dragonfly*)
+        # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
+        # conventions
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+        ;;
+
+      gnu*)
+        ;;
+
+      haiku*)
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        ;;
+
+      hpux9*)
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+				             # but as the default
+				             # location of the library.
+
+        case $cc_basename in
+          CC*)
+            # FIXME: insert proper C++ library support
+            _LT_TAGVAR(ld_shlibs, $1)=no
+            ;;
+          aCC*)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            # Commands to make compiler produce verbose output that lists
+            # what "hidden" libraries, object files and flags are used when
+            # linking a shared library.
+            #
+            # There doesn't appear to be a way to prevent this compiler from
+            # explicitly linking system object files so we need to strip them
+            # from the output so that they don't get included in the library
+            # dependencies.
+            output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+            ;;
+          *)
+            if test "$GXX" = yes; then
+              _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            else
+              # FIXME: insert proper C++ library support
+              _LT_TAGVAR(ld_shlibs, $1)=no
+            fi
+            ;;
+        esac
+        ;;
+
+      hpux10*|hpux11*)
+        if test $with_gnu_ld = no; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+          case $host_cpu in
+            hppa*64*|ia64*)
+              ;;
+            *)
+	      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+              ;;
+          esac
+        fi
+        case $host_cpu in
+          hppa*64*|ia64*)
+            _LT_TAGVAR(hardcode_direct, $1)=no
+            _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+            ;;
+          *)
+            _LT_TAGVAR(hardcode_direct, $1)=yes
+            _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+					         # but as the default
+					         # location of the library.
+            ;;
+        esac
+
+        case $cc_basename in
+          CC*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          aCC*)
+	    case $host_cpu in
+	      hppa*64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      ia64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      *)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	    esac
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test $with_gnu_ld = no; then
+	        case $host_cpu in
+	          hppa*64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          ia64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          *)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	        esac
+	      fi
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      interix[[3-9]]*)
+	_LT_TAGVAR(hardcode_direct, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+	# Instead, shared libraries are loaded at an image base (0x10000000 by
+	# default) and relocated if they conflict, which is a slow, very
+	# memory-consuming and fragmenting process.  To avoid this, we pick a random,
+	# 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+	# time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+	;;
+      irix5* | irix6*)
+        case $cc_basename in
+          CC*)
+	    # SGI C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -ar", where "CC" is the IRIX C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test "$with_gnu_ld" = no; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	      else
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+	      fi
+	    fi
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+	    ;;
+        esac
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(inherit_rpath, $1)=yes
+        ;;
+
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib'
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -Bstatic", where "CC" is the KAI C++ compiler.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
+	    ;;
+	  icpc* | ecpc* )
+	    # Intel C++
+	    with_gnu_ld=yes
+	    # version 8.0 and above of icpc choke on multiply defined symbols
+	    # if we add $predep_objects and $postdep_objects, however 7.1 and
+	    # earlier do not add the objects themselves.
+	    case `$CC -V 2>&1` in
+	      *"Version 7."*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	      *)  # Version 8.0 or newer
+	        tmp_idyn=
+	        case $host_cpu in
+		  ia64*) tmp_idyn=' -i_dynamic';;
+		esac
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	    esac
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    ;;
+          pgCC* | pgcpp*)
+            # Portland Group C++ compiler
+	    case `$CC -V` in
+	    *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*)
+	      _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
+		compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
+	      _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
+		$AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
+		$RANLIB $oldlib'
+	      _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    *) # Version 6 and above use weak symbols
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+            ;;
+	  cxx*)
+	    # Compaq C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname  -o $lib ${wl}-retain-symbols-file $wl$export_symbols'
+
+	    runpath_var=LD_RUN_PATH
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed'
+	    ;;
+	  xl* | mpixl* | bgxl*)
+	    # IBM XL 8.0 on PPC, with GNU ld
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    if test "x$supports_anon_versioning" = xyes; then
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+		cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+		echo "local: *; };" >> $output_objdir/$libname.ver~
+		$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+	    fi
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols'
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	      _LT_TAGVAR(compiler_needs_object, $1)=yes
+
+	      # Not sure whether something based on
+	      # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
+	      # would be better.
+	      output_verbose_link_cmd='func_echo_all'
+
+	      # Archives containing C++ object files must be created using
+	      # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	      # necessary to make sure instantiated templates are included
+	      # in the archive.
+	      _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+
+      lynxos*)
+        # FIXME: insert proper C++ library support
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      m88k*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      mvs*)
+        case $cc_basename in
+          cxx*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	  *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	esac
+	;;
+
+      netbsd*)
+        if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable  -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
+	  wlarc=
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	fi
+	# Work around some broken pre-1.5 toolchains
+	output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
+	;;
+
+      *nto* | *qnx*)
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+	;;
+
+      openbsd2*)
+        # C++ shared libraries are fairly broken
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      openbsd*)
+	if test -f /usr/libexec/ld.so; then
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+	  fi
+	  output_verbose_link_cmd=func_echo_all
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      osf3* | osf4* | osf5*)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Archives containing C++ object files must be created using
+	    # the KAI C++ compiler.
+	    case $host in
+	      osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;;
+	      *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
+	    esac
+	    ;;
+          RCC*)
+	    # Rational C++ 2.4.1
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          cxx*)
+	    case $host in
+	      osf3*)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+		;;
+	      *)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
+	          echo "-hidden">> $lib.exp~
+	          $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp  `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~
+	          $RM $lib.exp'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+		;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+	  *)
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	      case $host in
+	        osf3*)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	        *)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	      esac
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	      # Commands to make compiler produce verbose output that lists
+	      # what "hidden" libraries, object files and flags are used when
+	      # linking a shared library.
+	      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      psos*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      sunos4*)
+        case $cc_basename in
+          CC*)
+	    # Sun C++ 4.x
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          lcc*)
+	    # Lucid
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      solaris*)
+        case $cc_basename in
+          CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+            _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
+	    _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag}  -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	      $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	    _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	    case $host_os in
+	      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+	      *)
+		# The compiler driver will combine and reorder linker options,
+		# but understands `-z linker_flag'.
+	        # Supported since Solaris 2.6 (maybe 2.5.1?)
+		_LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	        ;;
+	    esac
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+
+	    output_verbose_link_cmd='func_echo_all'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	    ;;
+          gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+
+	    # The C++ compiler must be used to create the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    # GNU C++ compiler with Solaris linker
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
+	      if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      else
+	        # g++ 2.7 appears to require `-G' NOT `-shared' on this
+	        # platform.
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      fi
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir'
+	      case $host_os in
+		solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+		*)
+		  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+		  ;;
+	      esac
+	    fi
+	    ;;
+        esac
+        ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      case $cc_basename in
+        CC*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+      esac
+      ;;
+
+      sysv5* | sco3.2v5* | sco5v6*)
+	# Note: We can NOT use -z defs as we might desire, because we do not
+	# link with -lc, and that would cause any symbols used from libc to
+	# always be unresolved, which means just about no library would
+	# ever link correctly.  If we're not using GNU ld we use -z text
+	# though, which does catch some bad symbols but isn't as heavy-handed
+	# as -z defs.
+	_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+	_LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+	_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+	_LT_TAGVAR(link_all_deplibs, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+	runpath_var='LD_RUN_PATH'
+
+	case $cc_basename in
+          CC*)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~
+	      '"$_LT_TAGVAR(old_archive_cmds, $1)"
+	    _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~
+	      '"$_LT_TAGVAR(reload_cmds, $1)"
+	    ;;
+	  *)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    ;;
+	esac
+      ;;
+
+      tandem*)
+        case $cc_basename in
+          NCC*)
+	    # NonStop-UX NCC 3.20
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      vxworks*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      *)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+    esac
+
+    AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+    test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+    _LT_TAGVAR(GCC, $1)="$GXX"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+  LDCXX=$LD
+  LD=$lt_save_LD
+  GCC=$lt_save_GCC
+  with_gnu_ld=$lt_save_with_gnu_ld
+  lt_cv_path_LDCXX=$lt_cv_path_LD
+  lt_cv_path_LD=$lt_save_path_LD
+  lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
+  lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
+fi # test "$_lt_caught_CXX_error" != yes
+
+AC_LANG_POP
+])# _LT_LANG_CXX_CONFIG
+
+
+# _LT_FUNC_STRIPNAME_CNF
+# ----------------------
+# func_stripname_cnf prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, or percent signs; SUFFIX may contain a leading
+# dot (in which case it matches only a literal dot).
+#
+# This function is identical to the (non-XSI) version of func_stripname,
+# except this one can be used by m4 code that may be executed by configure,
+# rather than the libtool script.
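+# For example:
+#   func_stripname_cnf 'lib' '.la' 'libfoo.la'  # func_stripname_result=foo
+#   func_stripname_cnf ''    '.o'  'bar.o'      # func_stripname_result=bar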
+m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
+AC_REQUIRE([_LT_DECL_SED])
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
+func_stripname_cnf ()
+{
+  case ${2} in
+  .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+  *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+  esac
+} # func_stripname_cnf
+])# _LT_FUNC_STRIPNAME_CNF
+
+# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
+# ---------------------------------
+# Figure out "hidden" library dependencies from verbose
+# compiler output when linking a shared library.
+# Parse the compiler output and extract the necessary
+# objects, libraries and library flags.
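+# As an illustration, for hypothetical verbose link output along the
+# lines of
+#   ... crtbeginS.o -L/some/gcc/libdir conftest.o -lstdc++ -lm crtendS.o ...
+# the parse loop records crtbeginS.o as a predep object, the -L directory
+# in compiler_lib_search_path, -lstdc++ and -lm in postdeps, and
+# crtendS.o as a postdep object.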
+m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
+# Dependencies to place before and after the object being linked:
+_LT_TAGVAR(predep_objects, $1)=
+_LT_TAGVAR(postdep_objects, $1)=
+_LT_TAGVAR(predeps, $1)=
+_LT_TAGVAR(postdeps, $1)=
+_LT_TAGVAR(compiler_lib_search_path, $1)=
+
+dnl we can't use the lt_simple_compile_test_code here,
+dnl because it contains code intended for an executable,
+dnl not a library.  It's possible we should let each
+dnl tag define a new lt_????_link_test_code variable,
+dnl but it's only used here...
+m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
+int a;
+void foo (void) { a = 0; }
+_LT_EOF
+], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
+class Foo
+{
+public:
+  Foo (void) { a = 0; }
+private:
+  int a;
+};
+_LT_EOF
+], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer*4 a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
+public class foo {
+  private int a;
+  public void bar () {
+    a = 0;
+  }
+};
+_LT_EOF
+], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF
+package foo
+func foo() {
+}
+_LT_EOF
+])
+
+_lt_libdeps_save_CFLAGS=$CFLAGS
+case "$CC $CFLAGS " in #(
+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
+*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
+esac
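+# (The LTO-related flags are neutralized above, presumably so that the
+# verbose link used for this probe emits ordinary object files and
+# library names rather than LTO intermediates; any LTO objects that do
+# appear are skipped in the parse loop below.)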
+
+dnl Parse the compiler output and extract the necessary
+dnl objects, libraries and library flags.
+if AC_TRY_EVAL(ac_compile); then
+  # Parse the compiler output and extract the necessary
+  # objects, libraries and library flags.
+
+  # Sentinel used to keep track of whether or not we are before
+  # the conftest object file.
+  pre_test_object_deps_done=no
+
+  for p in `eval "$output_verbose_link_cmd"`; do
+    case ${prev}${p} in
+
+    -L* | -R* | -l*)
+       # Some compilers place a space between "-{L,R}" and the path.
+       # Remove the space.
+       if test $p = "-L" ||
+          test $p = "-R"; then
+	 prev=$p
+	 continue
+       fi
+
+       # Expand the sysroot to ease extracting the directories later.
+       if test -z "$prev"; then
+         case $p in
+         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
+         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
+         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
+         esac
+       fi
+       case $p in
+       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
+       esac
+       if test "$pre_test_object_deps_done" = no; then
+	 case ${prev} in
+	 -L | -R)
+	   # Internal compiler library paths should come after those
+	   # provided by the user.  The postdeps already come after the
+	   # user supplied libs so there is no need to process them.
+	   if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}"
+	   else
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}"
+	   fi
+	   ;;
+	 # The "-l" case would never come before the object being
+	 # linked, so don't bother handling this case.
+	 esac
+       else
+	 if test -z "$_LT_TAGVAR(postdeps, $1)"; then
+	   _LT_TAGVAR(postdeps, $1)="${prev}${p}"
+	 else
+	   _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
+	 fi
+       fi
+       prev=
+       ;;
+
+    *.lto.$objext) ;; # Ignore GCC LTO objects
+    *.$objext)
+       # This assumes that the test object file only shows up
+       # once in the compiler output.
+       if test "$p" = "conftest.$objext"; then
+	 pre_test_object_deps_done=yes
+	 continue
+       fi
+
+       if test "$pre_test_object_deps_done" = no; then
+	 if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
+	   _LT_TAGVAR(predep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
+	 fi
+       else
+	 if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
+	   _LT_TAGVAR(postdep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
+	 fi
+       fi
+       ;;
+
+    *) ;; # Ignore the rest.
+
+    esac
+  done
+
+  # Clean up.
+  rm -f a.out a.exe
+else
+  echo "libtool.m4: error: problem compiling $1 test program"
+fi
+
+$RM -f conftest.$objext
+CFLAGS=$_lt_libdeps_save_CFLAGS
+
+# PORTME: override above test on systems where it is broken
+m4_if([$1], [CXX],
+[case $host_os in
+interix[[3-9]]*)
+  # Interix 3.5 installs completely hosed .la files for C++, so rather than
+  # hack all around it, let's just trust "g++" to DTRT.
+  _LT_TAGVAR(predep_objects,$1)=
+  _LT_TAGVAR(postdep_objects,$1)=
+  _LT_TAGVAR(postdeps,$1)=
+  ;;
+
+linux*)
+  case `$CC -V 2>&1 | sed 5q` in
+  *Sun\ C*)
+    # Sun C++ 5.9
+
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+
+solaris*)
+  case $cc_basename in
+  CC* | sunCC*)
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    # Adding this requires a known-good setup of shared libraries for
+    # Sun compiler versions before 5.6, else PIC objects from an old
+    # archive will be linked into the output, leading to subtle bugs.
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+esac
+])
+
+case " $_LT_TAGVAR(postdeps, $1) " in
+*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
+esac
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=
+if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
+fi
+_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
+    [The directories searched by this compiler when creating a shared library])
+_LT_TAGDECL([], [predep_objects], [1],
+    [Dependencies to place before and after the objects being linked to
+    create a shared library])
+_LT_TAGDECL([], [postdep_objects], [1])
+_LT_TAGDECL([], [predeps], [1])
+_LT_TAGDECL([], [postdeps], [1])
+_LT_TAGDECL([], [compiler_lib_search_path], [1],
+    [The library search path used internally by the compiler when linking
+    a shared library])
+])# _LT_SYS_HIDDEN_LIBDEPS
+
+
+# _LT_LANG_F77_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a Fortran 77 compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_F77_CONFIG],
+[AC_LANG_PUSH(Fortran 77)
+if test -z "$F77" || test "X$F77" = "Xno"; then
+  _lt_disable_F77=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for f77 test sources.
+ac_ext=f
+
+# Object file extension for compiled f77 test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the F77 compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_F77" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${F77-"f77"}
+  CFLAGS=$FFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+  GCC=$G77
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$G77"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC="$lt_save_CC"
+  CFLAGS="$lt_save_CFLAGS"
+fi # test "$_lt_disable_F77" != yes
+
+AC_LANG_POP
+])# _LT_LANG_F77_CONFIG
+
+
+# _LT_LANG_FC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for a Fortran compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_FC_CONFIG],
+[AC_LANG_PUSH(Fortran)
+
+if test -z "$FC" || test "X$FC" = "Xno"; then
+  _lt_disable_FC=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for fc test sources.
+ac_ext=${ac_fc_srcext-f}
+
+# Object file extension for compiled fc test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the FC compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_FC" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${FC-"f95"}
+  CFLAGS=$FCFLAGS
+  compiler=$CC
+  GCC=$ac_cv_fc_compiler_gnu
+
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+fi # test "$_lt_disable_FC" != yes
+
+AC_LANG_POP
+])# _LT_LANG_FC_CONFIG
+
+
+# _LT_LANG_GCJ_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Java Compiler compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GCJ_CONFIG],
+[AC_REQUIRE([LT_PROG_GCJ])dnl
+AC_LANG_SAVE
+
+# Source file extension for Java test sources.
+ac_ext=java
+
+# Object file extension for compiled Java test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="class foo {}"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GCJ-"gcj"}
+CFLAGS=$GCJFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# GCJ did not yet exist back when GCC did not link libc in implicitly,
+# so the archive commands never need to add -lc themselves.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GCJ_CONFIG
+
+
+# _LT_LANG_GO_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Go compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GO_CONFIG],
+[AC_REQUIRE([LT_PROG_GO])dnl
+AC_LANG_SAVE
+
+# Source file extension for Go test sources.
+ac_ext=go
+
+# Object file extension for compiled Go test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="package main; func main() { }"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='package main; func main() { }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GOC-"gccgo"}
+CFLAGS=$GOFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# Go did not yet exist back when GCC did not link libc in implicitly,
+# so the archive commands never need to add -lc themselves.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GO_CONFIG
+
+
+# _LT_LANG_RC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the Windows resource compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_RC_CONFIG],
+[AC_REQUIRE([LT_PROG_RC])dnl
+AC_LANG_SAVE
+
+# Source file extension for RC test sources.
+ac_ext=rc
+
+# Object file extension for compiled RC test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
+
+# Code to be used in simple link tests
+lt_simple_link_test_code="$lt_simple_compile_test_code"
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC="$CC"
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=
+CC=${RC-"windres"}
+CFLAGS=
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_CC_BASENAME([$compiler])
+_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+
+if test -n "$compiler"; then
+  :
+  _LT_CONFIG($1)
+fi
+
+GCC=$lt_save_GCC
+AC_LANG_RESTORE
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_RC_CONFIG
+
+
+# LT_PROG_GCJ
+# -----------
+AC_DEFUN([LT_PROG_GCJ],
+[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
+  [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
+    [AC_CHECK_TOOL(GCJ, gcj,)
+      test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2"
+      AC_SUBST(GCJFLAGS)])])[]dnl
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
+
+
+# LT_PROG_GO
+# ----------
+AC_DEFUN([LT_PROG_GO],
+[AC_CHECK_TOOL(GOC, gccgo,)
+])
+
+
+# LT_PROG_RC
+# ----------
+AC_DEFUN([LT_PROG_RC],
+[AC_CHECK_TOOL(RC, windres,)
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_RC], [])
+
+
+# _LT_DECL_EGREP
+# --------------
+# If we don't have a new enough Autoconf to choose the best grep
+# available, choose the first one found in the user's PATH.
+m4_defun([_LT_DECL_EGREP],
+[AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_REQUIRE([AC_PROG_FGREP])dnl
+test -z "$GREP" && GREP=grep
+_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
+_LT_DECL([], [EGREP], [1], [An ERE matcher])
+_LT_DECL([], [FGREP], [1], [A literal string matcher])
+dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
+AC_SUBST([GREP])
+])
+
+
+# _LT_DECL_OBJDUMP
+# ----------------
+# If we don't have a new enough Autoconf to choose the best objdump
+# available, choose the first one found in the user's PATH.
+m4_defun([_LT_DECL_OBJDUMP],
+[AC_CHECK_TOOL(OBJDUMP, objdump, false)
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+AC_SUBST([OBJDUMP])
+])
+
+# _LT_DECL_DLLTOOL
+# ----------------
+# Ensure DLLTOOL variable is set.
+m4_defun([_LT_DECL_DLLTOOL],
+[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
+AC_SUBST([DLLTOOL])
+])
+
+# _LT_DECL_SED
+# ------------
+# Check for a fully functional sed program that truncates
+# as few characters as possible.  Prefer GNU sed if found.
+m4_defun([_LT_DECL_SED],
+[AC_PROG_SED
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
+_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
+    [Sed that helps us avoid accidentally triggering echo(1) options like -n])
+])# _LT_DECL_SED
+
+m4_ifndef([AC_PROG_SED], [
+############################################################
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_SED.  When it is available in   #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+############################################################
+
+m4_defun([AC_PROG_SED],
+[AC_MSG_CHECKING([for a sed that does not truncate output])
+AC_CACHE_VAL(lt_cv_path_SED,
+[# Loop through the user's path and test for sed and gsed.
+# Then use that list of sed programs to test for truncation.
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for lt_ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
+        lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
+      fi
+    done
+  done
+done
+IFS=$as_save_IFS
+lt_ac_max=0
+lt_ac_count=0
+# Add /usr/xpg4/bin/sed as it is typically found on Solaris
+# along with /bin/sed that truncates output.
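+# Each candidate below is fed a file that starts at 10 characters and is
+# doubled on every pass; the sed that survives the most doublings (up to
+# roughly 10 KiB) wins, and GNU sed is taken immediately when found.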
+for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
+  test ! -f $lt_ac_sed && continue
+  cat /dev/null > conftest.in
+  lt_ac_count=0
+  echo $ECHO_N "0123456789$ECHO_C" >conftest.in
+  # Check for GNU sed and select it if it is found.
+  if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
+    lt_cv_path_SED=$lt_ac_sed
+    break
+  fi
+  while true; do
+    cat conftest.in conftest.in >conftest.tmp
+    mv conftest.tmp conftest.in
+    cp conftest.in conftest.nl
+    echo >>conftest.nl
+    $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
+    cmp -s conftest.out conftest.nl || break
+    # 10000 chars as input seems more than enough
+    test $lt_ac_count -gt 10 && break
+    lt_ac_count=`expr $lt_ac_count + 1`
+    if test $lt_ac_count -gt $lt_ac_max; then
+      lt_ac_max=$lt_ac_count
+      lt_cv_path_SED=$lt_ac_sed
+    fi
+  done
+done
+])
+SED=$lt_cv_path_SED
+AC_SUBST([SED])
+AC_MSG_RESULT([$SED])
+])#AC_PROG_SED
+])#m4_ifndef
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_SED], [])
+
+
+# _LT_CHECK_SHELL_FEATURES
+# ------------------------
+# Find out whether the shell is Bourne or XSI compatible,
+# or has some other useful features.
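+# The XSI constructs probed below are parameter expansions such as
+# ${var##*/}, ${var%/*} and ${#var}, plus $(( ... )) arithmetic.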
+m4_defun([_LT_CHECK_SHELL_FEATURES],
+[AC_MSG_CHECKING([whether the shell understands some XSI constructs])
+# Try some XSI features
+xsi_shell=no
+( _lt_dummy="a/b/c"
+  test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
+      = c,a/b,b/c, \
+    && eval 'test $(( 1 + 1 )) -eq 2 \
+    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+  && xsi_shell=yes
+AC_MSG_RESULT([$xsi_shell])
+_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell'])
+
+AC_MSG_CHECKING([whether the shell understands "+="])
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \
+    >/dev/null 2>&1 \
+  && lt_shell_append=yes
+AC_MSG_RESULT([$lt_shell_append])
+_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append'])
+
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
+
+# test EBCDIC or ASCII
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
+_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
+_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+])# _LT_CHECK_SHELL_FEATURES
+
+
+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY)
+# ------------------------------------------------------
+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and
+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY.
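+# For example, _LT_PROG_FUNCTION_REPLACE([func_len],
+#   [    func_len_result=${#1}])
+# swaps the portable func_len body in $cfgfile for the ${#1} form, as is
+# done in _LT_PROG_REPLACE_SHELLFNS below.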
+m4_defun([_LT_PROG_FUNCTION_REPLACE],
+[dnl {
+sed -e '/^$1 ()$/,/^} # $1 /c\
+$1 ()\
+{\
+m4_bpatsubsts([$2], [$], [\\], [^\([	 ]\)], [\\\1])
+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+])
+
+
+# _LT_PROG_REPLACE_SHELLFNS
+# -------------------------
+# Replace existing portable implementations of several shell functions with
+# equivalent extended shell implementations where those features are available.
+m4_defun([_LT_PROG_REPLACE_SHELLFNS],
+[if test x"$xsi_shell" = xyes; then
+  _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac])
+
+  _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl
+    func_basename_result="${1##*/}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+    func_basename_result="${1##*/}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl
+    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+    # positional parameters, so assign one to ordinary parameter first.
+    func_stripname_result=${3}
+    func_stripname_result=${func_stripname_result#"${1}"}
+    func_stripname_result=${func_stripname_result%"${2}"}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl
+    func_split_long_opt_name=${1%%=*}
+    func_split_long_opt_arg=${1#*=}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl
+    func_split_short_opt_arg=${1#??}
+    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl
+    case ${1} in
+      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+      *)    func_lo2o_result=${1} ;;
+    esac])
+
+  _LT_PROG_FUNCTION_REPLACE([func_xform], [    func_xform_result=${1%.*}.lo])
+
+  _LT_PROG_FUNCTION_REPLACE([func_arith], [    func_arith_result=$(( $[*] ))])
+
+  _LT_PROG_FUNCTION_REPLACE([func_len], [    func_len_result=${#1}])
+fi
+
+if test x"$lt_shell_append" = xyes; then
+  _LT_PROG_FUNCTION_REPLACE([func_append], [    eval "${1}+=\\${2}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl
+    func_quote_for_eval "${2}"
+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \
+    eval "${1}+=\\\\ \\$func_quote_for_eval_result"])
+
+  # Save a `func_append' function call where possible by direct use of '+='
+  sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+else
+  # Save a `func_append' function call even when '+=' is not available
+  sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+fi
+
+if test x"$_lt_function_replace_fail" = x":"; then
+  AC_MSG_WARN([Unable to substitute extended shell functions in $ofile])
+fi
+])
+
+# _LT_PATH_CONVERSION_FUNCTIONS
+# -----------------------------
+# Determine which file name conversion functions should be used by
+# func_to_host_file (and, implicitly, by func_to_host_path).  These are needed
+# for certain cross-compile configurations and native mingw.
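+# For example, in a hypothetical cross build with build=x86_64-pc-cygwin
+# and host=i686-w64-mingw32, the case below selects
+# func_convert_file_cygwin_to_w32 so that Cygwin paths are rewritten into
+# the Windows form the MinGW toolchain expects.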
+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_MSG_CHECKING([how to convert $build file names to $host format])
+AC_CACHE_VAL(lt_cv_to_host_file_cmd,
+[case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+        ;;
+    esac
+    ;;
+  *-*-cygwin* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_noop
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+        ;;
+    esac
+    ;;
+  * ) # unhandled hosts (and "normal" native builds)
+    lt_cv_to_host_file_cmd=func_convert_file_noop
+    ;;
+esac
+])
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
+         [0], [convert $build file names to $host format])dnl
+
+AC_MSG_CHECKING([how to convert $build file names to toolchain format])
+AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
+[#assume ordinary cross tools, or native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+        ;;
+    esac
+    ;;
+esac
+])
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
+         [0], [convert $build files to toolchain format])dnl
+])# _LT_PATH_CONVERSION_FUNCTIONS
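+
+# Illustrative note (an editorial sketch, not part of upstream libtool): for a
+# cross build with build=x86_64-pc-cygwin and host=x86_64-w64-mingw32, the case
+# statement above selects func_convert_file_cygwin_to_w32, whose ltmain.sh
+# implementation typically relies on `cygpath -m' to turn a path such as
+# /cygdrive/c/src/foo.c into C:/src/foo.c before handing it to the toolchain.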
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltoptions.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltoptions.m4
new file mode 100644
index 0000000..5d9acd8
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltoptions.m4
@@ -0,0 +1,384 @@
+# Helper functions for option handling.                    -*- Autoconf -*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation,
+#   Inc.
+#   Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 7 ltoptions.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+
+
+# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
+# ------------------------------------------
+m4_define([_LT_MANGLE_OPTION],
+[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
+
+
+# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
+# ---------------------------------------
+# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
+# matching handler defined, dispatch to it.  Other OPTION-NAMEs are
+# saved as a flag.
+m4_define([_LT_SET_OPTION],
+[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
+m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
+        _LT_MANGLE_DEFUN([$1], [$2]),
+    [m4_warning([Unknown $1 option `$2'])])[]dnl
+])
+
+
+# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
+# ------------------------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+m4_define([_LT_IF_OPTION],
+[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
+
+
+# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
+# -------------------------------------------------------
+# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
+# are set.
+m4_define([_LT_UNLESS_OPTIONS],
+[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+	    [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
+		      [m4_define([$0_found])])])[]dnl
+m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
+])[]dnl
+])
+
+
+# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
+# ----------------------------------------
+# OPTION-LIST is a space-separated list of Libtool options associated
+# with MACRO-NAME.  If any OPTION has a matching handler declared with
+# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
+# the unknown option and exit.
+m4_defun([_LT_SET_OPTIONS],
+[# Set options
+m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+    [_LT_SET_OPTION([$1], _LT_Option)])
+
+m4_if([$1],[LT_INIT],[
+  dnl
+  dnl Simply set some default values (i.e., off) if boolean options were not
+  dnl specified:
+  _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
+  ])
+  _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
+  ])
+  dnl
+  dnl If no reference was made to various pairs of opposing options, then
+  dnl we run the default mode handler for the pair.  For example, if neither
+  dnl `shared' nor `disable-shared' was passed, we enable building of shared
+  dnl archives by default:
+  _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
+  _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
+  		   [_LT_ENABLE_FAST_INSTALL])
+  ])
+])# _LT_SET_OPTIONS
+
+
+## --------------------------------- ##
+## Macros to handle LT_INIT options. ##
+## --------------------------------- ##
+
+# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
+# -----------------------------------------
+m4_define([_LT_MANGLE_DEFUN],
+[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
+
+
+# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
+# -----------------------------------------------
+m4_define([LT_OPTION_DEFINE],
+[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
+])# LT_OPTION_DEFINE
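+
+# Illustrative sketch (editorial addition, not upstream code; the option name
+# `nonstd' is hypothetical): a new LT_INIT option could be declared as
+#
+#   LT_OPTION_DEFINE([LT_INIT], [nonstd], [enable_nonstd=yes])
+#
+# after which `LT_INIT([nonstd])' would dispatch through _LT_SET_OPTIONS and
+# _LT_SET_OPTION to that handler; an option name without a handler is merely
+# recorded as a flag and reported via m4_warning.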
+
+
+# dlopen
+# ------
+LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
+])
+
+AU_DEFUN([AC_LIBTOOL_DLOPEN],
+[_LT_SET_OPTION([LT_INIT], [dlopen])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `dlopen' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
+
+
+# win32-dll
+# ---------
+# Declare package support for building win32 dll's.
+LT_OPTION_DEFINE([LT_INIT], [win32-dll],
+[enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+  AC_CHECK_TOOL(AS, as, false)
+  AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+  AC_CHECK_TOOL(OBJDUMP, objdump, false)
+  ;;
+esac
+
+test -z "$AS" && AS=as
+_LT_DECL([], [AS],      [1], [Assembler program])dnl
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
+])# win32-dll
+
+AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+_LT_SET_OPTION([LT_INIT], [win32-dll])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `win32-dll' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
+
+
+# _LT_ENABLE_SHARED([DEFAULT])
+# ----------------------------
+# implement the --enable-shared flag, and support the `shared' and
+# `disable-shared' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_SHARED],
+[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([shared],
+    [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
+	[build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_shared=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
+
+    _LT_DECL([build_libtool_libs], [enable_shared], [0],
+	[Whether or not to build shared libraries])
+])# _LT_ENABLE_SHARED
+
+LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
+])
+
+AC_DEFUN([AC_DISABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], [disable-shared])
+])
+
+AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
+AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_SHARED], [])
+dnl AC_DEFUN([AM_DISABLE_SHARED], [])
+
+
+
+# _LT_ENABLE_STATIC([DEFAULT])
+# ----------------------------
+# implement the --enable-static flag, and support the `static' and
+# `disable-static' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_STATIC],
+[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([static],
+    [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
+	[build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+      enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_static=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
+
+    _LT_DECL([build_old_libs], [enable_static], [0],
+	[Whether or not to build static libraries])
+])# _LT_ENABLE_STATIC
+
+LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
+])
+
+AC_DEFUN([AC_DISABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], [disable-static])
+])
+
+AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
+AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_STATIC], [])
+dnl AC_DEFUN([AM_DISABLE_STATIC], [])
+
+
+
+# _LT_ENABLE_FAST_INSTALL([DEFAULT])
+# ----------------------------------
+# implement the --enable-fast-install flag, and support the `fast-install'
+# and `disable-fast-install' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_FAST_INSTALL],
+[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([fast-install],
+    [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
+    [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_fast_install=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
+
+_LT_DECL([fast_install], [enable_fast_install], [0],
+	 [Whether or not to optimize for fast installation])dnl
+])# _LT_ENABLE_FAST_INSTALL
+
+LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
+
+# Old names:
+AU_DEFUN([AC_ENABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `fast-install' option into LT_INIT's first parameter.])
+])
+
+AU_DEFUN([AC_DISABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `disable-fast-install' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
+dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
+
+
+# _LT_WITH_PIC([MODE])
+# --------------------
+# implement the --with-pic flag, and support the `pic-only' and `no-pic'
+# LT_INIT options.
+# MODE is either `yes' or `no'.  If omitted, it defaults to `both'.
+m4_define([_LT_WITH_PIC],
+[AC_ARG_WITH([pic],
+    [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@],
+	[try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
+    [lt_p=${PACKAGE-default}
+    case $withval in
+    yes|no) pic_mode=$withval ;;
+    *)
+      pic_mode=default
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for lt_pkg in $withval; do
+	IFS="$lt_save_ifs"
+	if test "X$lt_pkg" = "X$lt_p"; then
+	  pic_mode=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [pic_mode=default])
+
+test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
+
+_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
+])# _LT_WITH_PIC
+
+LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
+
+# Old name:
+AU_DEFUN([AC_LIBTOOL_PICMODE],
+[_LT_SET_OPTION([LT_INIT], [pic-only])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `pic-only' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
+
+## ----------------- ##
+## LTDL_INIT Options ##
+## ----------------- ##
+
+m4_define([_LTDL_MODE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
+		 [m4_define([_LTDL_MODE], [nonrecursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [recursive],
+		 [m4_define([_LTDL_MODE], [recursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [subproject],
+		 [m4_define([_LTDL_MODE], [subproject])])
+
+m4_define([_LTDL_TYPE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [installable],
+		 [m4_define([_LTDL_TYPE], [installable])])
+LT_OPTION_DEFINE([LTDL_INIT], [convenience],
+		 [m4_define([_LTDL_TYPE], [convenience])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltsugar.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltsugar.m4
new file mode 100644
index 0000000..9000a05
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltsugar.m4
@@ -0,0 +1,123 @@
+# ltsugar.m4 -- libtool m4 base layer.                         -*-Autoconf-*-
+#
+# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 6 ltsugar.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
+
+
+# lt_join(SEP, ARG1, [ARG2...])
+# -----------------------------
+# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
+# associated separator.
+# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
+# versions in m4sugar had bugs.
+m4_define([lt_join],
+[m4_if([$#], [1], [],
+       [$#], [2], [[$2]],
+       [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
+m4_define([_lt_join],
+[m4_if([$#$2], [2], [],
+       [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
+
+
+# lt_car(LIST)
+# lt_cdr(LIST)
+# ------------
+# Manipulate m4 lists.
+# These macros are necessary as long as we still need to support
+# Autoconf-2.59 which quotes differently.
+m4_define([lt_car], [[$1]])
+m4_define([lt_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+       [$#], 1, [],
+       [m4_dquote(m4_shift($@))])])
+m4_define([lt_unquote], $1)
+
+
+# lt_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'.
+# Note that neither SEPARATOR nor STRING are expanded; they are appended
+# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
+# No SEPARATOR is output if MACRO-NAME was previously undefined (different
+# than defined and empty).
+#
+# This macro is needed until we can rely on Autoconf 2.62, since earlier
+# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
+m4_define([lt_append],
+[m4_define([$1],
+	   m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
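+
+# For instance (illustrative sketch, not part of upstream ltsugar.m4; the
+# macro name `my_flags' is hypothetical):
+#
+#   lt_append([my_flags], [-Wall])           my_flags expands to `-Wall'
+#   lt_append([my_flags], [-Werror], [ ])    my_flags expands to `-Wall -Werror'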
+
+
+
+# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
+# ----------------------------------------------------------
+# Produce a SEP delimited list of all paired combinations of elements of
+# PREFIX-LIST with SUFFIX1 through SUFFIXn.  Each element of the list
+# has the form PREFIXmINFIXSUFFIXn.
+# Needed until we can rely on m4_combine added in Autoconf 2.62.
+m4_define([lt_combine],
+[m4_if(m4_eval([$# > 3]), [1],
+       [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
+[[m4_foreach([_Lt_prefix], [$2],
+	     [m4_foreach([_Lt_suffix],
+		]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
+	[_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
+
+
+# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
+# -----------------------------------------------------------------------
+# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
+# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
+m4_define([lt_if_append_uniq],
+[m4_ifdef([$1],
+	  [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
+		 [lt_append([$1], [$2], [$3])$4],
+		 [$5])],
+	  [lt_append([$1], [$2], [$3])$4])])
+
+
+# lt_dict_add(DICT, KEY, VALUE)
+# -----------------------------
+m4_define([lt_dict_add],
+[m4_define([$1($2)], [$3])])
+
+
+# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
+# --------------------------------------------
+m4_define([lt_dict_add_subkey],
+[m4_define([$1($2:$3)], [$4])])
+
+
+# lt_dict_fetch(DICT, KEY, [SUBKEY])
+# ----------------------------------
+m4_define([lt_dict_fetch],
+[m4_ifval([$3],
+	m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
+    m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
+
+
+# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
+# -----------------------------------------------------------------
+m4_define([lt_if_dict_fetch],
+[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
+	[$5],
+    [$6])])
+
+
+# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
+# --------------------------------------------------------------
+m4_define([lt_dict_filter],
+[m4_if([$5], [], [],
+  [lt_join(m4_quote(m4_default([$4], [[, ]])),
+           lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
+		      [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
+])
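+
+# Taken together (illustrative sketch, not part of upstream ltsugar.m4; the
+# dictionary name `lt_langs' is hypothetical), the dict macros behave as:
+#
+#   lt_dict_add([lt_langs], [C], [enabled])
+#   lt_dict_add_subkey([lt_langs], [C], [compiler], [gcc])
+#   lt_dict_fetch([lt_langs], [C], [compiler])                 expands to `gcc'
+#   lt_if_dict_fetch([lt_langs], [C], [], [enabled], [y], [n])  expands to `y'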
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltversion.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltversion.m4
new file mode 100644
index 0000000..07a8602
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/ltversion.m4
@@ -0,0 +1,23 @@
+# ltversion.m4 -- version numbers			-*- Autoconf -*-
+#
+#   Copyright (C) 2004 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# @configure_input@
+
+# serial 3337 ltversion.m4
+# This file is part of GNU Libtool
+
+m4_define([LT_PACKAGE_VERSION], [2.4.2])
+m4_define([LT_PACKAGE_REVISION], [1.3337])
+
+AC_DEFUN([LTVERSION_VERSION],
+[macro_version='2.4.2'
+macro_revision='1.3337'
+_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+_LT_DECL(, macro_revision, 0)
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/lt~obsolete.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/lt~obsolete.m4
new file mode 100644
index 0000000..c573da9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/lt~obsolete.m4
@@ -0,0 +1,98 @@
+# lt~obsolete.m4 -- aclocal satisfying obsolete definitions.    -*-Autoconf-*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004.
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 lt~obsolete.m4
+
+# These exist entirely to fool aclocal when bootstrapping libtool.
+#
+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
+# which have later been changed to m4_define as they aren't part of the
+# exported API, or moved to Autoconf or Automake where they belong.
+#
+# The trouble is, aclocal is a bit thick.  It'll see the old AC_DEFUN
+# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
+# using a macro with the same name in our local m4/libtool.m4 it'll
+# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
+# and doesn't know about Autoconf macros at all.)
+#
+# So we provide this file, which has a silly filename so it's always
+# included after everything else.  This provides aclocal with the
+# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
+# because those macros already exist, or will be overwritten later.
+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. 
+#
+# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
+# Yes, that means every name once taken will need to remain here until
+# we give up compatibility with versions before 1.7, at which point
+# we need to keep only those names which we still refer to.
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
+
+m4_ifndef([AC_LIBTOOL_LINKER_OPTION],	[AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
+m4_ifndef([AC_PROG_EGREP],		[AC_DEFUN([AC_PROG_EGREP])])
+m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_AC_SHELL_INIT],		[AC_DEFUN([_LT_AC_SHELL_INIT])])
+m4_ifndef([_LT_AC_SYS_LIBPATH_AIX],	[AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
+m4_ifndef([_LT_PROG_LTMAIN],		[AC_DEFUN([_LT_PROG_LTMAIN])])
+m4_ifndef([_LT_AC_TAGVAR],		[AC_DEFUN([_LT_AC_TAGVAR])])
+m4_ifndef([AC_LTDL_ENABLE_INSTALL],	[AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
+m4_ifndef([AC_LTDL_PREOPEN],		[AC_DEFUN([AC_LTDL_PREOPEN])])
+m4_ifndef([_LT_AC_SYS_COMPILER],	[AC_DEFUN([_LT_AC_SYS_COMPILER])])
+m4_ifndef([_LT_AC_LOCK],		[AC_DEFUN([_LT_AC_LOCK])])
+m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE],	[AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
+m4_ifndef([_LT_AC_TRY_DLOPEN_SELF],	[AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
+m4_ifndef([AC_LIBTOOL_PROG_CC_C_O],	[AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
+m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
+m4_ifndef([AC_LIBTOOL_OBJDIR],		[AC_DEFUN([AC_LIBTOOL_OBJDIR])])
+m4_ifndef([AC_LTDL_OBJDIR],		[AC_DEFUN([AC_LTDL_OBJDIR])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
+m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP],	[AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
+m4_ifndef([AC_PATH_MAGIC],		[AC_DEFUN([AC_PATH_MAGIC])])
+m4_ifndef([AC_PROG_LD_GNU],		[AC_DEFUN([AC_PROG_LD_GNU])])
+m4_ifndef([AC_PROG_LD_RELOAD_FLAG],	[AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
+m4_ifndef([AC_DEPLIBS_CHECK_METHOD],	[AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
+m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS],	[AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
+m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP],	[AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
+m4_ifndef([LT_AC_PROG_EGREP],		[AC_DEFUN([LT_AC_PROG_EGREP])])
+m4_ifndef([LT_AC_PROG_SED],		[AC_DEFUN([LT_AC_PROG_SED])])
+m4_ifndef([_LT_CC_BASENAME],		[AC_DEFUN([_LT_CC_BASENAME])])
+m4_ifndef([_LT_COMPILER_BOILERPLATE],	[AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
+m4_ifndef([_LT_LINKER_BOILERPLATE],	[AC_DEFUN([_LT_LINKER_BOILERPLATE])])
+m4_ifndef([_AC_PROG_LIBTOOL],		[AC_DEFUN([_AC_PROG_LIBTOOL])])
+m4_ifndef([AC_LIBTOOL_SETUP],		[AC_DEFUN([AC_LIBTOOL_SETUP])])
+m4_ifndef([_LT_AC_CHECK_DLFCN],		[AC_DEFUN([_LT_AC_CHECK_DLFCN])])
+m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER],	[AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
+m4_ifndef([_LT_AC_TAGCONFIG],		[AC_DEFUN([_LT_AC_TAGCONFIG])])
+m4_ifndef([AC_DISABLE_FAST_INSTALL],	[AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
+m4_ifndef([_LT_AC_LANG_CXX],		[AC_DEFUN([_LT_AC_LANG_CXX])])
+m4_ifndef([_LT_AC_LANG_F77],		[AC_DEFUN([_LT_AC_LANG_F77])])
+m4_ifndef([_LT_AC_LANG_GCJ],		[AC_DEFUN([_LT_AC_LANG_GCJ])])
+m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
+m4_ifndef([_LT_AC_LANG_C_CONFIG],	[AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
+m4_ifndef([_LT_AC_LANG_CXX_CONFIG],	[AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
+m4_ifndef([_LT_AC_LANG_F77_CONFIG],	[AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
+m4_ifndef([_LT_AC_LANG_GCJ_CONFIG],	[AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
+m4_ifndef([_LT_AC_LANG_RC_CONFIG],	[AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
+m4_ifndef([AC_LIBTOOL_CONFIG],		[AC_DEFUN([AC_LIBTOOL_CONFIG])])
+m4_ifndef([_LT_AC_FILE_LTDLL_C],	[AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
+m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS],	[AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
+m4_ifndef([_LT_AC_PROG_CXXCPP],		[AC_DEFUN([_LT_AC_PROG_CXXCPP])])
+m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS],	[AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
+m4_ifndef([_LT_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_PROG_F77],		[AC_DEFUN([_LT_PROG_F77])])
+m4_ifndef([_LT_PROG_FC],		[AC_DEFUN([_LT_PROG_FC])])
+m4_ifndef([_LT_PROG_CXX],		[AC_DEFUN([_LT_PROG_CXX])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_coverage.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_coverage.m4
new file mode 100644
index 0000000..1d064e4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_coverage.m4
@@ -0,0 +1,107 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-coverage configuration option to the package and
+#      controls whether the package will be built for code coverage.
+#
+
+#
+# NL_ENABLE_COVERAGE(default)
+#
+#   default - Whether the option should be enabled (yes) or disabled (no)
+#             by default.
+#
+# Adds an --enable-coverage configuration option to the package with a
+# default value of 'default' (should be either 'no' or 'yes') and controls
+# whether the package will be built with or without code coverage.
+#
+# The value 'nl_cv_build_coverage' will be set to the result. In
+# addition, NL_COVERAGE_CPPFLAGS and NL_COVERAGE_LDFLAGS will be set
+# to the appropriate values to pass to the compiler and linker,
+# respectively.
+#
+# NOTE: This is only supported at present for GCC or GCC-compatible
+#       toolchains.
+#
+# NOTE: The behavior of this is influenced by nl_cv_build_optimized from
+#       NL_ENABLE_OPTIMIZATION
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_COVERAGE],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'yes' or 'no'])])
+
+    AC_CACHE_CHECK([whether to build code-coverage instances of programs and libraries],
+        nl_cv_build_coverage,
+        [
+            AC_ARG_ENABLE(coverage,
+                [AS_HELP_STRING([--enable-coverage],[Enable the generation of code-coverage instances @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    no|yes)
+                        nl_cv_build_coverage=${enableval}
+
+                        if test "${nl_cv_build_optimized}" = "yes"; then
+                            AC_MSG_ERROR([both --enable-optimization and --enable-coverage cannot be used. Please choose one or the other to enable.])
+                        fi
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-coverage])
+                        ;;
+
+                    esac
+                ],
+                [
+                    if test "${nl_cv_build_optimized}" = "yes"; then
+                        AC_MSG_WARN([--enable-optimization was specified, coverage disabled])
+                        nl_cv_build_coverage=no
+            
+                    else
+                        nl_cv_build_coverage=$1
+            
+                    fi
+                ])
+
+            if test "${nl_cv_build_coverage}" = "yes"; then         
+                if test "${GCC}" != "yes"; then
+                    AC_MSG_ERROR([GCC or a GCC-compatible toolchain is required for --enable-coverage])
+
+                else
+                    NL_COVERAGE_CPPFLAGS="--coverage"
+                    NL_COVERAGE_LDFLAGS="-lgcov"            
+
+                fi
+            fi
+    ])
+])
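+
+# Example (editorial sketch, not part of the imported file): a hypothetical
+# configure.ac could invoke this macro and forward the resulting flags to
+# automake as follows:
+#
+#   NL_ENABLE_COVERAGE([no])
+#   AC_SUBST(NL_COVERAGE_CPPFLAGS)
+#   AC_SUBST(NL_COVERAGE_LDFLAGS)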
+
+
+
+
+
+
+
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_coverage_reporting.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_coverage_reporting.m4
new file mode 100644
index 0000000..d20e3a5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_coverage_reporting.m4
@@ -0,0 +1,149 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-coverage configuration option to the package and
+#      controls whether the package will be built for code coverage
+#      reporting, using the LCOV package.
+#
+
+#
+# NL_ENABLE_COVERAGE_REPORTS(default)
+#
+#   default - Whether the option should be automatic (auto), enabled
+#             (yes), or disabled (no) by default.
+#
+# Adds an --enable-coverage-reports configuration option to the
+# package with a default value of 'default' (should be 'auto', 'no' or
+# 'yes') and controls whether the package will be built with or
+# without code coverage reports, using the LCOV package.
+#
+# The value 'nl_cv_build_coverage_reports' will be set to the result. In
+# addition, LCOV will be set to the path of the 'lcov' tool and GENHTML
+# will be set to the path of the 'genhtml' tool.
+#
+# NOTE: The behavior of this is influenced by nl_cv_build_coverage from
+#       NL_ENABLE_COVERAGE.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_COVERAGE_REPORTS],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [auto],[],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'auto', 'yes' or 'no'])])
+
+    # Check for the presence of lcov and genhtml, required
+    # to build and generate the coverage reports.
+
+    AC_PATH_PROG(LCOV, lcov)
+    AC_PATH_PROG(GENHTML, genhtml)
+
+    AC_CACHE_CHECK([whether to build graphical code coverage reports],
+        nl_cv_build_coverage_reports,
+        [
+            AC_ARG_ENABLE(coverage-reports,
+                [AS_HELP_STRING([--enable-coverage-reports],[Enable the generation of code coverage reports (requires lcov)  @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    auto|no|yes)
+                        nl_cv_build_coverage_reports=${enableval}
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-coverage-reports])
+                        ;;
+
+                    esac
+                ],
+                [
+                    nl_cv_build_coverage_reports=$1
+                ])
+
+            # If coverage is not enabled, then coverage reports
+            # default to 'no' if the setting is 'auto', or fail if it
+            # is 'yes'. Otherwise, the availability of lcov and
+            # genhtml conditions the behavior: for 'yes', a missing
+            # tool results in failure, whereas for 'auto' coverage
+            # reports default to 'no'.
+
+            case "${nl_cv_build_coverage}" in
+
+            no)
+                case "${nl_cv_build_coverage_reports}" in
+
+                    auto)
+                        nl_cv_build_coverage_reports="no"
+                        ;;
+
+                    yes)
+                        AC_MSG_ERROR([--enable-coverage must be asserted to use --enable-coverage-reports.])
+                        ;;
+
+                    no)
+                        ;;
+
+                esac
+                ;;
+
+            yes)
+                case "${nl_cv_build_coverage_reports}" in
+
+                    auto)
+                        # Both lcov and genhtml must exist to successfully
+                        # enable coverage reports.
+
+                        if test "x${LCOV}" = "x" || test "x${GENHTML}" = "x"; then
+                            nl_cv_build_coverage_reports="no"
+
+                        else
+                            nl_cv_build_coverage_reports="yes"
+
+                        fi
+                        ;;
+
+                    yes)
+                        # Both lcov and genhtml must exist to successfully
+                        # enable coverage reports. Since the default or user
+                        # ask is 'yes', we must fail if lcov or genhtml cannot
+                        # be found.
+
+                        if test "x${LCOV}" = "x"; then
+                            AC_MSG_ERROR([Cannot find 'lcov'. You must have the lcov package installed to use coverage reports.])
+
+                        elif test "x${GENHTML}" = "x"; then
+                            AC_MSG_ERROR([Cannot find 'genhtml'. You must have the lcov package installed to use coverage reports.])
+
+                        elif test "${nl_cv_build_coverage_reports}" = "auto"; then
+                            nl_cv_build_coverage_reports="yes"
+
+                        fi
+                        ;;
+
+                    no)
+                        ;;
+
+                esac
+                ;;
+
+            esac
+    ])
+])
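+
+# Example (editorial sketch, not part of the imported file; the conditional
+# name is hypothetical): a configure.ac might combine this with the coverage
+# option and expose the result to automake:
+#
+#   NL_ENABLE_COVERAGE([no])
+#   NL_ENABLE_COVERAGE_REPORTS([auto])
+#   AM_CONDITIONAL([NL_BUILD_COVERAGE_REPORTS],
+#                  [test "${nl_cv_build_coverage_reports}" = "yes"])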
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_debug.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_debug.m4
new file mode 100644
index 0000000..e83f43d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_debug.m4
@@ -0,0 +1,79 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-debug configuration option to the package and controls
+#      whether the package will be built for debug instances of programs
+#      and libraries.
+#
+
+#
+# NL_ENABLE_DEBUG(default)
+#
+#   default - Whether the option should be enabled (yes) or disabled (no)
+#             by default.
+#
+# Adds an --enable-debug configuration option to the package with a
+# default value of 'default' (should be either 'no' or 'yes') and controls
+# whether the package will be built with or without -DDEBUG enabled.
+#
+# The value 'nl_cv_build_debug' will be set to the result. In
+# addition, the contents of CFLAGS, CXXFLAGS, OBJCFLAGS, and
+# OBJCXXFLAGS may be altered by the use of this macro, adding -DDEBUG
+# if this option is asserted.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_DEBUG],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'yes' or 'no'])])
+
+    AC_CACHE_CHECK([whether to build debug instances of programs and libraries],
+        nl_cv_build_debug,
+        [
+            AC_ARG_ENABLE(debug,
+                [AS_HELP_STRING([--enable-debug],[Enable the generation of debug instances @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    no|yes)
+                        nl_cv_build_debug=${enableval}
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-debug])
+                        ;;
+
+                    esac
+                ],
+                [
+                    nl_cv_build_debug=$1
+                ])
+
+            if test "${nl_cv_build_debug}" = "yes"; then
+                CFLAGS="${CFLAGS} -DDEBUG"
+                CXXFLAGS="${CXXFLAGS} -DDEBUG"
+                OBJCFLAGS="${OBJCFLAGS} -DDEBUG"
+                OBJCXXFLAGS="${OBJCXXFLAGS} -DDEBUG"
+            fi
+    ])
+])
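+
+# Example (editorial sketch, not part of the imported file): invoking
+#
+#   NL_ENABLE_DEBUG([no])
+#
+# from configure.ac leaves the default build non-debug, while a user running
+# `configure --enable-debug' gets -DDEBUG appended to CFLAGS, CXXFLAGS,
+# OBJCFLAGS, and OBJCXXFLAGS as shown above.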
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_docs.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_docs.m4
new file mode 100644
index 0000000..a7b3566
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_docs.m4
@@ -0,0 +1,117 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --disable-docs configuration option to the package and controls
+#      whether the package will be built with or without documentation.
+#
+
+#
+# NL_ENABLE_DOCS(default, dot_default)
+#
+#   default     - Whether the option should be automatic (auto), enabled
+#                 (yes), or disabled (no) by default.
+#   dot_default - Whether Doxygen should use (YES) or not use (NO)
+#                 GraphViz dot.
+#
+# Adds an --disable-docs configuration option to the package with a
+# default value of 'default' (should be 'auto', 'no' or 'yes') and
+# controls whether the package will be built with or without Doxygen-based
+# documentation.
+#
+# The value 'nl_cv_build_docs' will be set to the result. In addition:
+#
+#   DOXYGEN         - Will be set to the path of the Doxygen executable.
+#   DOT             - Will be set to the path of the GraphViz dot
+#                     executable.
+#   DOXYGEN_USE_DOT - Will be set to 'NO' or 'YES' depending on whether
+#                     GraphViz dot is available.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_DOCS],
+[
+    # Check whether or not the 'default' value is sane.
+
+    m4_case([$1],
+        [auto],[],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'auto', 'yes' or 'no'])])
+
+    # Check whether or not the 'dot_default' value is sane.
+
+    m4_case([$2],
+        [YES],[],
+        [NO],[],
+        [m4_fatal([$0: invalid default value '$2'; must be 'YES' or 'NO'])])
+
+    DOXYGEN_USE_DOT=$2
+
+    AC_ARG_VAR(DOXYGEN, [Doxygen executable])
+    AC_ARG_VAR(DOT,     [GraphViz 'dot' executable, which may be used, when present, to generate Doxygen class graphs])
+
+    AC_PATH_PROG(DOXYGEN, doxygen)
+    AC_PATH_PROG(DOT, dot)
+
+    AC_CACHE_CHECK([whether to build documentation],
+        nl_cv_build_docs,
+        [
+	    AC_ARG_ENABLE(docs,
+		[AS_HELP_STRING([--disable-docs],[Enable building documentation (requires Doxygen) @<:@default=$1@:>@.])],
+		[
+		    case "${enableval}" in 
+
+		    auto|no|yes)
+			nl_cv_build_docs=${enableval}
+			;;
+
+		    *)
+			AC_MSG_ERROR([Invalid value ${enableval} for --disable-docs])
+			;;
+
+		    esac
+		],
+		[nl_cv_build_docs=$1])
+
+	    if test "x${DOXYGEN}" != "x"; then
+		nl_cv_have_doxygen=yes
+	    else
+		nl_cv_have_doxygen=no
+	    fi
+
+	    if test "${nl_cv_build_docs}" = "auto"; then
+		if test "${nl_cv_have_doxygen}" = "no"; then
+		    nl_cv_build_docs=no
+		else
+		    nl_cv_build_docs=yes
+		fi
+	    fi
+
+	    if test "${nl_cv_build_docs}" = "yes"; then
+		if test "${nl_cv_have_doxygen}" = "no"; then
+		    AC_MSG_ERROR([Building docs was explicitly requested but Doxygen cannot be found])
+		elif test "${nl_cv_have_doxygen}" = "yes"; then
+		    if test "x${DOT}" != "x"; then
+			DOXYGEN_USE_DOT=YES
+		    fi
+		fi
+	    fi
+    ])
+
+    AC_SUBST(DOXYGEN_USE_DOT)
+])
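+
+# Example (editorial sketch, not part of the imported file; the conditional
+# name is hypothetical): a configure.ac might use
+#
+#   NL_ENABLE_DOCS([auto],[NO])
+#   AM_CONDITIONAL([NL_BUILD_DOCS], [test "${nl_cv_build_docs}" = "yes"])
+#
+# so that Doxygen-based documentation is built only when doxygen is found,
+# with DOXYGEN_USE_DOT upgraded to YES when GraphViz dot is also present.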
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_optimization.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_optimization.m4
new file mode 100644
index 0000000..f6c941e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_optimization.m4
@@ -0,0 +1,92 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-optimization configuration option to the package and
+#      controls whether the package will be built with or without code
+#      optimization.
+#
+
+#
+# NL_ENABLE_OPTIMIZATION(default)
+#
+#   default - Whether the option should be enabled (yes) or disabled (no)
+#             by default.
+#
+# Adds an --enable-optimization configuration option to the package with a
+# default value of 'default' (should be either 'no' or 'yes') and controls
+# whether the package will be built with or without code optimization.
+#
+# The value 'nl_cv_build_optimized' will be set to the result. In
+# addition, the contents of CFLAGS, CXXFLAGS, OBJCFLAGS, and OBJCXXFLAGS may
+# be altered by the use of this macro, converting -O<something> to -O0.
+#
+# NOTE: The behavior of this is influenced by nl_cv_build_coverage from
+#       NL_ENABLE_COVERAGE
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_OPTIMIZATION],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'yes' or 'no'])])
+
+    AC_CACHE_CHECK([whether to build code-optimized instances of programs and libraries],
+        nl_cv_build_optimized,
+        [
+            AC_ARG_ENABLE(optimization,
+                [AS_HELP_STRING([--enable-optimization],[Enable the generation of code-optimized instances @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    no|yes)
+                        nl_cv_build_optimized=${enableval}
+
+                        if test "${nl_cv_build_coverage}" = "yes" && test "${nl_cv_build_optimized}" = "yes"; then
+                            AC_MSG_ERROR([both --enable-optimization and --enable-coverage cannot be used. Please choose one or the other to enable.])
+                        fi
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-optimization])
+                        ;;
+
+                    esac
+                ],
+                [
+                    if test "${nl_cv_build_coverage}" = "yes"; then
+                        AC_MSG_WARN([--enable-coverage was specified, optimization disabled])
+                        nl_cv_build_optimized=no
+            
+                    else
+                        nl_cv_build_optimized=$1
+            
+                    fi
+                ])
+
+            if test "${nl_cv_build_optimized}" = "no"; then
+                CFLAGS="`echo ${CFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+                CXXFLAGS="`echo ${CXXFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+                OBJCFLAGS="`echo ${OBJCFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+                OBJCXXFLAGS="`echo ${OBJCXXFLAGS} | sed -e 's,-O[[[:alnum:]]]*,-O0,g'`"
+            fi
+    ])
+])
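+
+# Example (editorial sketch, not part of the imported file): invoking
+#
+#   NL_ENABLE_OPTIMIZATION([yes])
+#
+# from configure.ac keeps optimized builds by default; `configure
+# --disable-optimization' rewrites any existing -O<level> flags to -O0 as
+# shown above, and an asserted --enable-coverage causes optimization to
+# default to 'no' as well.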
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_tests.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_tests.m4
new file mode 100644
index 0000000..25b7914
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_tests.m4
@@ -0,0 +1,71 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-tests configuration option to the package and controls
+#      whether the package will be built with or without unit and
+#      integration tests.
+#
+
+#
+# NL_ENABLE_TESTS(default)
+#
+#   default - Whether the option should be enabled (yes) or disabled (no)
+#             by default.
+#
+# Adds an --enable-tests configuration option to the package with a
+# default value of 'default' (should be either 'no' or 'yes') and
+# controls whether the package will be built with or without unit and
+# integration tests.
+#
+# The value 'nl_cv_build_tests' will be set to the result.
+#
+#------------------------------------------------------------------------------
+
+AC_DEFUN([NL_ENABLE_TESTS],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'yes' or 'no'])])
+
+    AC_CACHE_CHECK([whether to build tests],
+        nl_cv_build_tests,
+        [
+            AC_ARG_ENABLE(tests,
+                [AS_HELP_STRING([--enable-tests],[Enable building of tests @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    no|yes)
+                        nl_cv_build_tests=${enableval}
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-tests])
+                        ;;
+
+                    esac
+                ],
+                [
+                    nl_cv_build_tests=$1
+                ])
+    ])
+])
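+
+# Example (editorial sketch, not part of the imported file; the conditional
+# name is hypothetical): a configure.ac might use
+#
+#   NL_ENABLE_TESTS([yes])
+#   AM_CONDITIONAL([NL_BUILD_TESTS], [test "${nl_cv_build_tests}" = "yes"])
+#
+# and a top-level Makefile.am could then add its tests subdirectory to
+# SUBDIRS only when NL_BUILD_TESTS is true.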
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_werror.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_werror.m4
new file mode 100644
index 0000000..85032ab
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_enable_werror.m4
@@ -0,0 +1,78 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro that adds an
+#      --enable-warnings-as-errors configuration option to the package
+#      and controls whether the package will be built to treat all
+#      compilation warnings as errors.
+#
+
+#
+# NL_ENABLE_WERROR(default)
+#
+#   default - Whether the option should be enabled (yes) or disabled (no)
+#             by default.
+#
+# Adds an --enable-warnings-as-errors configuration option to the
+# package with a default value of 'default' (should be either 'no' or
+# 'yes') and controls whether the package will be built with or
+# without -Werror enabled.
+#
+# The value 'nl_cv_warnings_as_errors' will be set to the result. In
+# addition, the variable NL_WERROR_CPPFLAGS will be set to the
+# compiler-specific flag necessary to assert this option.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_ENABLE_WERROR],
+[
+    # Check whether or not a default value has been passed in.
+
+    m4_case([$1],
+        [yes],[],
+        [no],[],
+        [m4_fatal([$0: invalid default value '$1'; must be 'yes' or 'no'])])
+
+    AC_CACHE_CHECK([whether to treat all compilation warnings as errors],
+        nl_cv_warnings_as_errors,
+        [
+            AC_ARG_ENABLE(warnings-as-errors,
+                [AS_HELP_STRING([--enable-warnings-as-errors],[Treat all compilation warnings as errors @<:@default=$1@:>@.])],
+                [
+                    case "${enableval}" in 
+
+                    no|yes)
+                        nl_cv_warnings_as_errors=${enableval}
+                        ;;
+
+                    *)
+                        AC_MSG_ERROR([Invalid value ${enableval} for --enable-warnings-as-errors])
+                        ;;
+
+                    esac
+                ],
+                [
+                    nl_cv_warnings_as_errors=$1
+                ])
+    ])
+
+    if test "${nl_cv_warnings_as_errors}" = "yes"; then
+        AX_CHECK_COMPILER_OPTION([C], NL_WERROR_CPPFLAGS, [-Werror])
+        if test "x${NL_WERROR_CPPFLAGS}" = "x"; then
+            AC_MSG_ERROR([Could not determine how to treat warnings as errors for your compiler ${CC}])
+        fi
+    fi
+])
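+
+# Example (editorial sketch, not part of the imported file): a configure.ac
+# might assert warnings-as-errors by default and propagate the resulting
+# compiler flag itself:
+#
+#   NL_ENABLE_WERROR([yes])
+#   CPPFLAGS="${CPPFLAGS} ${NL_WERROR_CPPFLAGS}"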
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_filtered_canonical.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_filtered_canonical.m4
new file mode 100644
index 0000000..aafdb3c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_filtered_canonical.m4
@@ -0,0 +1,97 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro for filtering
+#      the autoconf canonical build, host, or target.
+#
+#      Mac OS X / Darwin ends up putting some versioning cruft on the
+#      end of its tuples that most users of these variables rarely
+#      care about.
+#
+
+#
+# _NL_FILTERED_CANONICAL(name)
+#
+#   name - The existing autoconf variable to filter
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'name'
+#   generated from AC_CANONICAL_<NAME> and saves it in
+#   'nl_filtered_<name>'.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([_NL_FILTERED_CANONICAL],
+[
+    AC_CACHE_CHECK([filtered $1 system type],
+        nl_cv_filtered_$1,
+        nl_cv_filtered_$1=`echo ${$1} | sed -e 's/[[[[:digit:].]]]*$//g'`
+        nl_filtered_$1=${nl_cv_filtered_$1})
+])
+
+#
+# NL_FILTERED_CANONICAL_BUILD
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'build'
+#   generated from AC_CANONICAL_BUILD and saves it in
+#   'nl_filtered_build'.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_FILTERED_CANONICAL_BUILD],
+[
+    AC_REQUIRE([AC_CANONICAL_BUILD])
+    _NL_FILTERED_CANONICAL(build)
+])
+
+#
+# NL_FILTERED_CANONICAL_HOST
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'host'
+#   generated from AC_CANONICAL_HOST and saves it in
+#   'nl_filtered_host'.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_FILTERED_CANONICAL_HOST],
+[
+    AC_REQUIRE([AC_CANONICAL_HOST])
+    _NL_FILTERED_CANONICAL(host)
+])
+
+#
+# NL_FILTERED_CANONICAL_TARGET
+#
+#   Mac OS X / Darwin ends up putting some versioning cruft on the end
+#   of its tuples that most users of these variables rarely care about.
+#
+#   This filters such versioning cruft from the variable 'target'
+#   generated from AC_CANONICAL_TARGET and saves it in
+#   'nl_filtered_target'.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_FILTERED_CANONICAL_TARGET],
+[
+    AC_REQUIRE([AC_CANONICAL_TARGET])
+    _NL_FILTERED_CANONICAL(target)
+])
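+
+# Example (editorial sketch, not part of the imported file): after
+#
+#   NL_FILTERED_CANONICAL_HOST
+#
+# a Darwin host tuple such as `x86_64-apple-darwin15.6.0' should be available
+# as `x86_64-apple-darwin' in ${nl_filtered_host}, which is usually a more
+# convenient key for per-platform build logic.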
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_prog_lndir.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_prog_lndir.m4
new file mode 100644
index 0000000..2a7b22b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_prog_lndir.m4
@@ -0,0 +1,76 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro for checking
+#      for a build host-based tool that can shadow a directory using
+#      symbolic links, ostensibly either GNU cp or X11 lndir.
+#
+
+AC_DEFUN([_NL_CHECK_CP_RS],
+[
+    $1 -Rs 2>&1 | grep 'missing file operand' > /dev/null
+])
+
+AC_DEFUN([_NL_CHECK_LNDIR],
+[
+    $1 -silent 2>&1 | grep 'usage: lndir' > /dev/null
+])
+
+#
+# NL_PROG_LNDIR([fallback GNU cp path to test, fallback GNU cp path to set])
+#
+#   test path    - The fallback GNU cp path and arguments to test if a system
+#                  GNU cp cannot be found.
+#   set path     - The fallback GNU cp path and arguments to set to LNDIR if
+#                  the test path succeeds.
+#
+# Determine and assign to LNDIR, a build host-based tool that can shadow
+# a directory using symbolic links, attempting either GNU cp or X11 lndir
+# as preferred defaults.
+#
+# If the host doesn't have GNU cp natively, the caller can specify
+# both a GNU cp path to test and a GNU cp path to set if the test path
+# was successful.
+#
+# ----------------------------------------------------------------------------
+AC_DEFUN([NL_PROG_LNDIR],
+[
+    AC_ARG_VAR(LNDIR, [Program and arguments to create a shadow directory of symbolic links to another directory tree (e.g. 'cp -Rs')])
+
+    AC_MSG_CHECKING([how to shadow a directory tree])
+
+    if test "x${LNDIR}" = "x"; then
+	if `_NL_CHECK_CP_RS(cp)`; then
+	    LNDIR="cp -Rs"
+
+	elif `_NL_CHECK_LNDIR(lndir)`; then
+	    LNDIR="lndir -silent"
+
+	elif `_NL_CHECK_CP_RS(${ac_abs_confdir}/build/tools/host/${filtered_build}/bin/cp)`; then
+	    LNDIR="\${top_srcdir}/build/tools/host/${filtered_build}/bin/cp -Rs"
+
+	else
+	    AC_MSG_ERROR([Cannot determine how to shadow a directory tree. Neither 'cp -Rs' nor 'lndir -silent' appear to be available or functional. Please consider installing or making available in your PATH one of: GNU coreutils <http://www.gnu.org/software/coreutils/>, XQuartz (Mac OS X-only) <http://xquartz.macosforge.org/>, or lndir <http://www.mit.edu/afs/sipb/project/sipbsrc/rt/lndir/>.])
+
+	fi
+    fi
+
+    AC_MSG_RESULT(${LNDIR})
+
+    AC_SUBST(LNDIR)
+])
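+
+# Illustrative usage (a hypothetical configure.ac sketch, not part of the
+# upstream file): the macro is normally invoked with no arguments and
+# AC_SUBSTs LNDIR, so generated makefiles can refer to @LNDIR@.
+#
+#   NL_PROG_LNDIR
+#
+#   dnl On a typical Linux host this resolves LNDIR to 'cp -Rs'; on a
+#   dnl Mac OS X host with XQuartz installed it resolves to 'lndir -silent'.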
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_werror.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_werror.m4
new file mode 100644
index 0000000..84872d1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_werror.m4
@@ -0,0 +1,104 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines GNU autoconf M4-style macros that ensure the
+#      -Werror compiler option for GCC-based or -compatible compilers
+#      does not break some autoconf tests (see
+#      http://lists.gnu.org/archive/html/autoconf-patches/2008-09/msg00014.html).
+#
+#      If -Werror has been passed transform it into -Wno-error for
+#      CPPFLAGS, CFLAGS, CXXFLAGS, OBJCFLAGS, and OBJCXXFLAGS with
+#      NL_SAVE_WERROR. Transform them back again with
+#      NL_RESTORE_WERROR.
+#
+
+# 
+# _NL_SAVE_WERROR_FOR_VAR(variable)
+#
+#   variable - The compiler flags variable to scan for the presence of
+#              -Werror and, if present, transform to -Wno-error.
+#
+# This transforms, for the specified compiler flags variable, -Werror
+# to -Wno-error, if it was present. The original state may be
+# restored by invoking _NL_RESTORE_WERROR_FOR_VAR([variable]).
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([_NL_SAVE_WERROR_FOR_VAR],
+[
+    if echo "${$1}" | grep -q '\-Werror'; then
+	$1="`echo ${$1} | sed -e 's,-Werror\([[[:space:]]]\),-Wno-error\1,g'`"
+	nl_had_$1_werror=yes
+    else
+	nl_had_$1_werror=no
+    fi
+])
+
+#
+# _NL_RESTORE_WERROR_FOR_VAR(variable)
+#
+#   variable - The compiler flag for which to restore -Wno-error back
+#              to -Werror if it was originally passed in by the user as
+#              such.
+#
+# This restores, for the specified compiler flags variable, -Werror
+# from -Wno-error, if it was initially set as -Werror at the time
+# _NL_SAVE_WERROR_FOR_VAR([variable]) was invoked.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([_NL_RESTORE_WERROR_FOR_VAR],
+[
+    if test "${nl_had_$1_werror}" = "yes"; then
+	$1="`echo ${$1} | sed -e 's,-Wno-error\([[[:space:]]]\),-Werror\1,g'`"
+    fi
+
+    unset nl_had_$1_werror
+])
+
+# 
+# NL_SAVE_WERROR
+#
+# This transforms, for each of CPPFLAGS, CFLAGS, CXXFLAGS, OBJCFLAGS,
+# and OBJCXXFLAGS, -Werror to -Wno-error, if it was present. The
+# original state may be restored by invoking NL_RESTORE_WERROR.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_SAVE_WERROR],
+[
+    _NL_SAVE_WERROR_FOR_VAR([CPPFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([CFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([CXXFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([OBJCFLAGS])
+    _NL_SAVE_WERROR_FOR_VAR([OBJCXXFLAGS])
+])
+
+#
+# NL_RESTORE_WERROR
+#
+# This restores, for each of OBJCXXFLAGS, OBJCFLAGS, CXXFLAGS, CFLAGS,
+# and CPPFLAGS, -Werror from -Wno-error, if it was initially set as -Werror
+# at the time NL_SAVE_WERROR was invoked.
+#
+#------------------------------------------------------------------------------
+AC_DEFUN([NL_RESTORE_WERROR],
+[
+    _NL_RESTORE_WERROR_FOR_VAR([OBJCXXFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([OBJCFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([CXXFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([CFLAGS])
+    _NL_RESTORE_WERROR_FOR_VAR([CPPFLAGS])
+])
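+
+# Illustrative usage (a hypothetical configure.ac sketch, not part of the
+# upstream file), bracketing a probe that would otherwise be broken by
+# -Werror in the user's flags:
+#
+#   NL_SAVE_WERROR
+#
+#   AC_CHECK_HEADERS([stdlib.h string.h])
+#
+#   NL_RESTORE_WERROR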
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_with_package.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_with_package.m4
new file mode 100644
index 0000000..fc5eaec
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/nl_with_package.m4
@@ -0,0 +1,755 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines a GNU autoconf M4-style macro for checking
+#      the presence and viability of both required and optional
+#      dependent packages, which may be internal or external to the
+#      current package.
+#
+#      Five (5) macros are made available:
+#
+#        * NL_WITH_REQUIRED_EXTERNAL_PACKAGE
+#        * NL_WITH_OPTIONAL_EXTERNAL_PACKAGE
+#        * NL_WITH_REQUIRED_INTERNAL_PACKAGE
+#        * NL_WITH_OPTIONAL_INTERNAL_PACKAGE
+#        * NL_WITH_PACKAGE
+#
+#      Note, however, that NL_WITH_PACKAGE is deprecated and maps to
+#      NL_WITH_REQUIRED_INTERNAL_PACKAGE.
+#
+
+# _NL_ARG_WITH_INCLUDES_OR_LIBS(PACKAGE-DISPOSITION,
+#                               PACKAGE-SOURCE,
+#                               PACKAGE-PRETTY-NAME,
+#                               PACKAGE-VARIABLE-PREFIX,
+#                               PACKAGE-SUCCINCT-NAME,
+#                               PACKAGE-WITH-OPTION,
+#                               PLURAL-WITH-OPTION-HELP-WORD,
+#                               WITH-DIR-VARIABLE-MODIFIER)
+# ----------------------------------------------------------------------------
+# This is a wrapper around AC_ARG_WITH that provides the ability to
+# optionally specify a dependent package include and link library
+# directories independently as
+# --with-<PACKAGE-SUCCINCT-NAME>-<PACKAGE-WITH-OPTION>=<DIR>.
+#
+# If the package is specified as required, the use of
+# --without-<PACKAGE-SUCCINCT-NAME> or the use of
+# --with-<PACKAGE-SUCCINCT-NAME>=no will result in a fatal error.
+#
+# At the successful conclusion of the execution of the macro, 
+# two variables will be defined:
+#
+#   * nl_with_<PACKAGE-SUCCINCT-NAME>
+#   * <PACKAGE-SUCCINCT-NAME>_<WITH-DIR-VARIABLE-MODIFIER>_dir
+#
+# The package disposition and source are specified by:
+#
+#   PACKAGE-DISPOSITION     : This may be either 'required' if the dependent
+#                             package is required or 'optional' if
+#                             not.
+#   PACKAGE-SOURCE          : This may be either 'internal' if the dependent
+#                             package may be provided either
+#                             internally or externally to the current
+#                             package or 'external' if the dependent
+#                             package may only be provided outside of
+#                             the current package. This also serves as
+#                             the default value for where the
+#                             configuration script expects to find the
+#                             package.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_NL_ARG_WITH_INCLUDES_OR_LIBS],
+[
+    dnl Check whether or not the package is required
+
+    m4_case([$1],
+        [required],[],
+        [optional],[],
+        [m4_fatal([$0: invalid disposition value '$1'; must be 'required' or 'optional'])])
+
+    dnl Check whether or not the package is internal
+
+    m4_case([$2],
+        [internal],[],
+        [external],[],
+        [m4_fatal([$0: invalid source value '$2'; must be 'internal' or 'external'])])
+
+    AC_ARG_WITH($5-$6,
+	AS_HELP_STRING([--with-$5-$6=DIR],
+		   [Specify location of $1 $3 $7 @<:@default=$2@:>@.]),
+	[
+	    if test "${withval}" = "no"; then
+                m4_if([$1],
+                      [required],
+                      [AC_MSG_ERROR([${PACKAGE_NAME} requires the $3 package.])],
+                      [nl_with_$5=${withval}])
+	    fi
+	
+	    if test "x${$5_dir}" != "x"; then
+		AC_MSG_WARN([overriding --with-$5=${$5_dir}])
+	    fi
+
+	    if test "${withval}" = "internal"; then
+		$5_$8_dir=${withval}
+		nl_with_$5=${withval}
+	    else
+		$5_$8_dir=${withval}
+		nl_with_$5=external
+	    fi
+	],
+	[
+	    $5_$8_dir=;
+	    if test "x${nl_with_$5}" = "x"; then
+		nl_with_$5=$2
+	    fi
+	])
+])
+
+# _NL_ARG_WITH_INCLUDES(PACKAGE-DISPOSITION,
+#                       PACKAGE-SOURCE,
+#                       PACKAGE-PRETTY-NAME,
+#                       PACKAGE-VARIABLE-PREFIX,
+#                       PACKAGE-SUCCINCT-NAME)
+# ----------------------------------------------------------------------------
+# This is a wrapper around AC_ARG_WITH that provides the ability to
+# optionally specify a dependent package include directory
+# independently as --with-<PACKAGE-SUCCINCT-NAME>-includes=<DIR>.
+#
+# If the package is specified as required, the use of
+# --without-<PACKAGE-SUCCINCT-NAME>-includes or the use of
+# --with-<PACKAGE-SUCCINCT-NAME>-includes=no will result in a fatal error.
+#
+# At the successful conclusion of the execution of the macro, 
+# two variables will be defined:
+#
+#   * nl_with_<PACKAGE-SUCCINCT-NAME>
+#   * <PACKAGE-SUCCINCT-NAME>_header_dir
+#
+# The package disposition and source are specified by:
+#
+#   PACKAGE-DISPOSITION     : This may be either 'required' if the dependent
+#                             package is required or 'optional' if
+#                             not.
+#   PACKAGE-SOURCE          : This may be either 'internal' if the dependent
+#                             package may be provided either
+#                             internally or externally to the current
+#                             package or 'external' if the dependent
+#                             package may only be provided outside of
+#                             the current package. This also serves as
+#                             the default value for where the
+#                             configuration script expects to find the
+#                             package.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_NL_ARG_WITH_INCLUDES],
+[
+    _NL_ARG_WITH_INCLUDES_OR_LIBS([$1], [$2], [$3], [$4], [$5], [includes], [headers], [header])dnl
+])
+
+# _NL_ARG_WITH_LIBS(PACKAGE-DISPOSITION,
+#                   PACKAGE-SOURCE,
+#                   PACKAGE-PRETTY-NAME,
+#                   PACKAGE-VARIABLE-PREFIX,
+#                   PACKAGE-SUCCINCT-NAME)
+# ----------------------------------------------------------------------------
+# This is a wrapper around AC_ARG_WITH that provides the ability to
+# optionally specify a dependent package link library directory
+# independently as --with-<PACKAGE-SUCCINCT-NAME>-libs=<DIR>.
+#
+# If the package is specified as required, the use of
+# --without-<PACKAGE-SUCCINCT-NAME>-libs or the use of
+# --with-<PACKAGE-SUCCINCT-NAME>-libs=no will result in a fatal error.
+#
+# At the successful conclusion of the execution of the macro, 
+# two variables will be defined:
+#
+#   * nl_with_<PACKAGE-SUCCINCT-NAME>
+#   * <PACKAGE-SUCCINCT-NAME>_library_dir
+#
+# The package disposition and source are specified by:
+#
+#   PACKAGE-DISPOSITION     : This may be either 'required' if the dependent
+#                             package is required or 'optional' if
+#                             not.
+#   PACKAGE-SOURCE          : This may be either 'internal' if the dependent
+#                             package may be provided either
+#                             internally or externally to the current
+#                             package or 'external' if the dependent
+#                             package may only be provided outside of
+#                             the current package. This also serves as
+#                             the default value for where the
+#                             configuration script expects to find the
+#                             package.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_NL_ARG_WITH_LIBS],
+[
+    _NL_ARG_WITH_INCLUDES_OR_LIBS([$1], [$2], [$3], [$4], [$5], [libs], [libraries], [library])dnl
+])
+
+# _NL_ARG_WITH_PACKAGE(PACKAGE-DISPOSITION,
+#                      PACKAGE-SOURCE,
+#                      PACKAGE-PRETTY-NAME,
+#                      PACKAGE-VARIABLE-PREFIX,
+#                      PACKAGE-SUCCINCT-NAME)
+# ----------------------------------------------------------------------------
+# This is a wrapper around AC_ARG_WITH that provides the ability to
+# optionally specify a dependent package as
+# --with-<PACKAGE-SUCCINCT-NAME>=<DIR> or to independently specify the
+# include and link library directories independently as
+# --with-<PACKAGE-SUCCINCT-NAME>-includes=<DIR> and
+# --with-<PACKAGE-SUCCINCT-NAME>-libs=<DIR>.
+#
+# If the package is specified as required, the use of
+# --without-<PACKAGE-SUCCINCT-NAME>* or the use of
+# --with-<PACKAGE-SUCCINCT-NAME>*=no will result in a fatal error.
+#
+# At the successful conclusion of the execution of the macro, 
+# two or more variables will be defined:
+#
+#   * nl_with_<PACKAGE-SUCCINCT-NAME>
+#   * <PACKAGE-SUCCINCT-NAME>_dir
+#   * <PACKAGE-SUCCINCT-NAME>_header_dir
+#   * <PACKAGE-SUCCINCT-NAME>_library_dir
+#
+# The package disposition and source are specified by:
+#
+#   PACKAGE-DISPOSITION     : This may be either 'required' if the dependent
+#                             package is required or 'optional' if
+#                             not.
+#   PACKAGE-SOURCE          : This may be either 'internal' if the dependent
+#                             package may be provided either
+#                             internally or externally to the current
+#                             package or 'external' if the dependent
+#                             package may only be provided outside of
+#                             the current package. This also serves as
+#                             the default value for where the
+#                             configuration script expects to find the
+#                             package.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_NL_ARG_WITH_PACKAGE],
+[
+    AC_ARG_WITH($5,
+	AS_HELP_STRING([--with-$5=DIR],
+		   [Specify location of the $1 $3 headers and libraries @<:@default=$2@:>@.]),
+	[
+	    if test "${withval}" = "no"; then
+                m4_if([$1],
+                      [required],
+                      [AC_MSG_ERROR([${PACKAGE_NAME} requires the $3 package.])],
+                      [nl_with_$5=${withval}])
+	    elif test "${withval}" = "internal"; then
+		$5_dir=${withval}
+		nl_with_$5=${withval}
+	    else
+		$5_dir=${withval}
+		nl_with_$5=external
+	    fi
+	],
+	[$5_dir=; nl_with_$5=$2])
+
+    # Allow users to specify external headers and libraries independently.
+
+    _NL_ARG_WITH_INCLUDES([$1], [$2], [$3], [$4], [$5])dnl
+
+    _NL_ARG_WITH_LIBS([$1], [$2], [$3], [$4], [$5])dnl
+])
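+
+# Illustrative usage (a hypothetical sketch, not part of the upstream
+# file): for the placeholder succinct name 'mgsp' used in the comments
+# above, the wiring in _NL_ARG_WITH_PACKAGE yields two equivalent ways to
+# point configure at an external copy of the package:
+#
+#   ./configure --with-mgsp=/opt/mgsp
+#   ./configure --with-mgsp-includes=/opt/mgsp/include --with-mgsp-libs=/opt/mgsp/lib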
+
+# _NL_WITH_PACKAGE(PACKAGE-DISPOSITION,
+#                  PACKAGE-SOURCE,
+#                  PACKAGE-PRETTY-NAME,
+#                  PACKAGE-VARIABLE-PREFIX,
+#                  PACKAGE-SUCCINCT-NAME,
+#                  [DEFAULT-PACKAGE-LIBS],
+#                  [ACTIONS-TO-RUN-IF-NOT-EXTERNAL],
+#                  [ACTIONS-TO-RUN-IF-NOT-INTERNAL])
+# ----------------------------------------------------------------------------
+# This macro is used to test for the presence, with pkg-config if it
+# is available, of the specified, optional or required dependent
+# package. The dependent package may be provided externally or may
+# exist within the current package itself.
+#
+# If the package is specified as required, failure to find the
+# dependent package will result in a fatal error.
+#
+# At the successful conclusion of the execution of the macro, three
+# variables will be defined:
+#
+#   * <PACKAGE-VARIABLE-PREFIX>_CPPFLAGS
+#   * <PACKAGE-VARIABLE-PREFIX>_LDFLAGS
+#   * <PACKAGE-VARIABLE-PREFIX>_LIBS
+#
+# In addition, the variable:
+#
+#   * nl_with_<PACKAGE-SUCCINCT-NAME>
+#
+# will unconditionally be set to the source of the package if it is to
+# be used and is found; otherwise, 'no' if it is not to be used.
+#
+# The package disposition and source are specified by:
+#
+#   PACKAGE-DISPOSITION     : This may be either 'required' if the dependent
+#                             package is required or 'optional' if
+#                             not.
+#   PACKAGE-SOURCE          : This may be either 'internal' if the dependent
+#                             package may be provided either
+#                             internally or externally to the current
+#                             package or 'external' if the dependent
+#                             package may only be provided outside of
+#                             the current package. This also serves as
+#                             the default value for where the
+#                             configuration script expects to find the
+#                             package.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+#
+# In addition, if any additional, default link libraries are required
+# for use with the package, these are specified as:
+#
+#   DEFAULT-PACKAGE-LIBS    : Default link libraries required for use with
+#                             the package. These are used if pkg-config is
+#                             not available or cannot identify any
+#                             such libraries. For example, '-lmgsp'.
+#
+# If the package is specified, either internally or externally, optional
+# actions are run.
+#
+#   ACTIONS-TO-RUN-IF-NOT-EXTERNAL : Optional actions to run if the package
+#                                    is not external.
+#   ACTIONS-TO-RUN-IF-NOT-INTERNAL : Optional actions to run if the package
+#                                    is not internal.
+# ----------------------------------------------------------------------------
+AC_DEFUN([_NL_WITH_PACKAGE],
+[
+    # Influential external variables for the package support
+
+    AC_ARG_VAR($4_CPPFLAGS, [$3 C preprocessor flags])
+    AC_ARG_VAR($4_LDFLAGS,  [$3 linker flags])
+    AC_ARG_VAR($4_LIBS,     [$3 linker libraries])
+
+    # Allow the user to specify both external headers and libraries
+    # together (or internal).
+
+    _NL_ARG_WITH_PACKAGE([$1], [$2], [$3], [$4], [$5])dnl
+
+    if test "${nl_with_$5}" == "no"; then
+	AC_MSG_CHECKING([whether to use the $3 package])
+	AC_MSG_RESULT([${nl_with_$5}])
+
+    else
+	AC_MSG_CHECKING([source of the $3 package])
+	AC_MSG_RESULT([${nl_with_$5}])
+
+	# If the user has selected or has defaulted into the internal $3
+	# package, set the values appropriately. Otherwise, run through the
+	# usual routine.
+
+	if test "${nl_with_$5}" = "internal"; then
+	    $7
+
+	else
+	    # We always prefer checking the values of the various '--with-$5-...' 
+	    # options first to using pkg-config because the former might be used
+	    # in a cross-compilation environment on a system that also contains
+	    # pkg-config. In such a case, the user wants what he/she specified
+	    # rather than what pkg-config indicates.
+
+	    if test "x${$5_dir}" != "x" -o "x${$5_header_dir}" != "x" -o "x${$5_library_dir}" != "x"; then
+		if test "x${$5_dir}" != "x"; then
+		    if test -d "${$5_dir}"; then
+			if test -d "${$5_dir}/include"; then
+			    $4_CPPFLAGS="-I${$5_dir}/include"
+			else
+			    $4_CPPFLAGS="-I${$5_dir}"
+			fi
+
+			if test -d "${$5_dir}/lib"; then
+			    $4_LDFLAGS="-L${$5_dir}/lib"
+			else
+			    $4_LDFLAGS="-L${$5_dir}"
+			fi
+		    else
+			AC_MSG_ERROR([No such directory ${$5_dir}])
+		    fi
+		fi
+
+		if test "x${$5_header_dir}" != "x"; then
+		    if test -d "${$5_header_dir}"; then
+			$4_CPPFLAGS="-I${$5_header_dir}"
+		    else
+			AC_MSG_ERROR([No such directory ${$5_header_dir}])
+		    fi
+		fi
+
+		if test "x${$5_library_dir}" != "x"; then
+		    if test -d "${$5_library_dir}"; then
+			$4_LDFLAGS="-L${$5_library_dir}"
+		    else
+			AC_MSG_ERROR([No such directory ${$5_library_dir}])
+		    fi
+		fi
+
+		$4_LIBS="${$4_LDFLAGS} $6"
+
+	    elif test "x${PKG_CONFIG}" != "x"; then
+                if ${PKG_CONFIG} --exists "$5"; then
+		    $4_CPPFLAGS="`${PKG_CONFIG} --cflags $5`"
+		    $4_LDFLAGS="`${PKG_CONFIG} --libs-only-L $5`"
+		    $4_LIBS="`${PKG_CONFIG} --libs-only-l $5`"
+                else
+                    m4_if([$1],
+                          [required],
+                          [AC_MSG_ERROR([Cannot find the $5 package with ${PKG_CONFIG}. ${PACKAGE_NAME} requires the $5 package. Try installing the package or use the relevant --with options to configure.])],
+                          [nl_with_$5="no"])
+                fi
+
+	    else
+                m4_if([$1],
+                      [required],
+                      [AC_MSG_ERROR([Cannot find the $3 package. ${PACKAGE_NAME} requires the $3 package.])],
+                      [nl_with_$5="no"])
+	    fi
+	fi
+
+	AC_SUBST($4_CPPFLAGS)
+	AC_SUBST($4_LDFLAGS)
+	AC_SUBST($4_LIBS)
+
+	if test "${nl_with_$5}" != "internal" -a "${nl_with_$5}" != "no"; then
+	    nl_saved_CPPFLAGS="${CPPFLAGS}"
+	    nl_saved_LDFLAGS="${LDFLAGS}"
+	    nl_saved_LIBS="${LIBS}"
+
+	    CPPFLAGS="${CPPFLAGS} ${$4_CPPFLAGS}"
+	    LDFLAGS="${LDFLAGS} ${$4_LDFLAGS}"
+	    LIBS="${LIBS} ${$4_LIBS}"
+
+	    $8
+
+	    CPPFLAGS="${nl_saved_CPPFLAGS}"
+	    LDFLAGS="${nl_saved_LDFLAGS}"
+	    LIBS="${nl_saved_LIBS}"
+	fi
+    fi
+])
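+
+# Illustrative usage (a hypothetical Makefile.am sketch, not part of the
+# upstream file): because the per-package _CPPFLAGS, _LDFLAGS, and _LIBS
+# variables are AC_SUBSTed, a client of the placeholder 'mgsp' package
+# could consume them as:
+#
+#   AM_CPPFLAGS   = $(MGSP_CPPFLAGS)
+#   foo_LDFLAGS   = $(MGSP_LDFLAGS)
+#   foo_LDADD     = $(MGSP_LIBS)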
+
+# NL_WITH_REQUIRED_EXTERNAL_PACKAGE(PACKAGE-PRETTY-NAME,
+#                                   PACKAGE-VARIABLE-PREFIX,
+#                                   PACKAGE-SUCCINCT-NAME,
+#                                   [DEFAULT-PACKAGE-LIBS],
+#                                   [ACTIONS-TO-RUN])
+# ----------------------------------------------------------------------------
+# This macro is used to test for the presence, with pkg-config if it
+# is available, of the specified, required external dependent package.
+#
+# Failure to find the dependent package will result in a fatal error.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+#
+# In addition, if any additional, default link libraries are required
+# for use with the package, these are specified as:
+#
+#   DEFAULT-PACKAGE-LIBS    : Default link libraries required for use with
+#                             the package. These are used if pkg-config is
+#                             not available or cannot identify any
+#                             such libraries. For example, '-lmgsp'.
+#
+# If the package is specified, optional actions are run.
+#
+#   ACTIONS-TO-RUN-IF-FOUND : Optional actions to run if the package is found.
+# ----------------------------------------------------------------------------
+AC_DEFUN([NL_WITH_REQUIRED_EXTERNAL_PACKAGE],
+[
+    _NL_WITH_PACKAGE([required], [external], [$1], [$2], [$3], [$4], [:], [$5])dnl
+])
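+
+# Illustrative usage (a hypothetical configure.ac sketch, not part of the
+# upstream file; 'mgsp' is a placeholder). The final argument runs with
+# the package's flags applied to CPPFLAGS/LDFLAGS/LIBS:
+#
+#   NL_WITH_REQUIRED_EXTERNAL_PACKAGE([My Great Software Package],
+#       [MGSP],
+#       [mgsp],
+#       [-lmgsp],
+#       [AC_CHECK_HEADERS([mgsp/mgsp.h])])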
+
+# NL_WITH_OPTIONAL_EXTERNAL_PACKAGE(PACKAGE-PRETTY-NAME,
+#                                   PACKAGE-VARIABLE-PREFIX,
+#                                   PACKAGE-SUCCINCT-NAME,
+#                                   [DEFAULT-PACKAGE-LIBS],
+#                                   [ACTIONS-TO-RUN])
+# ----------------------------------------------------------------------------
+# This macro is used to test for the presence, with pkg-config if it
+# is available, of the specified, optional external dependent package.
+#
+# Failure to find the dependent package will NOT result in a fatal error.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+#
+# In addition, if any additional, default link libraries are required
+# for use with the package, these are specified as:
+#
+#   DEFAULT-PACKAGE-LIBS    : Default link libraries required for use with
+#                             the package. These are used if pkg-config is
+#                             not available or cannot identify any
+#                             such libraries. For example, '-lmgsp'.
+#
+# If the package is specified, optional actions are run.
+#
+#   ACTIONS-TO-RUN-IF-FOUND : Optional actions to run if the package is found.
+# ----------------------------------------------------------------------------
+AC_DEFUN([NL_WITH_OPTIONAL_EXTERNAL_PACKAGE],
+[
+    _NL_WITH_PACKAGE([optional], [external], [$1], [$2], [$3], [$4], [:], [$5])dnl
+])
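+
+# Illustrative usage (a hypothetical configure.ac sketch, not part of the
+# upstream file; 'mgsp' is a placeholder). When the optional package
+# cannot be found, nl_with_mgsp ends up as 'no', which can drive an
+# automake conditional:
+#
+#   NL_WITH_OPTIONAL_EXTERNAL_PACKAGE([My Great Software Package],
+#       [MGSP],
+#       [mgsp],
+#       [-lmgsp])
+#
+#   AM_CONDITIONAL([WITH_MGSP], [test "${nl_with_mgsp}" != "no"])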
+
+# NL_WITH_REQUIRED_INTERNAL_PACKAGE(PACKAGE-PRETTY-NAME,
+#                                   PACKAGE-VARIABLE-PREFIX,
+#                                   PACKAGE-SUCCINCT-NAME,
+#                                   [DEFAULT-PACKAGE-LIBS],
+#                                   [ACTIONS-TO-RUN-IF-NOT-EXTERNAL],
+#                                   [ACTIONS-TO-RUN-IF-NOT-INTERNAL])
+# ----------------------------------------------------------------------------
+# This macro is used to test for the presence, with pkg-config if it
+# is available, of the specified, required dependent package. The dependent
+# package may be provided externally or may exist within the current
+# package itself.
+#
+# Failure to find the dependent package will result in a fatal error.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+#
+# In addition, if any additional, default link libraries are required
+# for use with the package, these are specified as:
+#
+#   DEFAULT-PACKAGE-LIBS    : Default link libraries required for use with
+#                             the package. These are used if pkg-config is
+#                             not available or cannot identify any
+#                             such libraries. For example, '-lmgsp'.
+#
+# If the package is specified, either internally or externally, optional
+# actions are run.
+#
+#   ACTIONS-TO-RUN-IF-NOT-EXTERNAL : Optional actions to run if the package
+#                                    is not external.
+#   ACTIONS-TO-RUN-IF-NOT-INTERNAL : Optional actions to run if the package
+#                                    is not internal.
+# ----------------------------------------------------------------------------
+AC_DEFUN([NL_WITH_REQUIRED_INTERNAL_PACKAGE],
+[
+    _NL_WITH_PACKAGE([required], [internal], [$1], [$2], [$3], [$4], [$5], [$6])dnl
+])
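+
+# Illustrative usage (a hypothetical configure.ac sketch, not part of the
+# upstream file; 'mgsp' and its paths are placeholders). The fifth
+# argument runs when the in-package copy is used and typically points the
+# build at in-tree paths; the sixth runs when an external copy is used:
+#
+#   NL_WITH_REQUIRED_INTERNAL_PACKAGE([My Great Software Package],
+#       [MGSP],
+#       [mgsp],
+#       [-lmgsp],
+#       [
+#           MGSP_CPPFLAGS="-I\${abs_top_srcdir}/third_party/mgsp/include"
+#           MGSP_LIBS="\${abs_top_builddir}/third_party/mgsp/libmgsp.la"
+#       ],
+#       [AC_CHECK_HEADERS([mgsp/mgsp.h])])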
+
+# NL_WITH_OPTIONAL_INTERNAL_PACKAGE(PACKAGE-PRETTY-NAME,
+#                                   PACKAGE-VARIABLE-PREFIX,
+#                                   PACKAGE-SUCCINCT-NAME,
+#                                   [DEFAULT-PACKAGE-LIBS],
+#                                   [ACTIONS-TO-RUN-IF-NOT-EXTERNAL],
+#                                   [ACTIONS-TO-RUN-IF-NOT-INTERNAL])
+# ----------------------------------------------------------------------------
+# This macro is used to test for the presence, with pkg-config if it
+# is available, of the specified, optional dependent package. The dependent
+# package may be provided externally or may exist within the current
+# package itself.
+#
+# Failure to find the dependent package will NOT result in a fatal error.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+#
+# In addition, if any additional, default link libraries are required
+# for use with the package, these are specified as:
+#
+#   DEFAULT-PACKAGE-LIBS    : Default link libraries required for use with
+#                             the package. These are used if pkg-config is
+#                             not available or cannot identify any
+#                             such libraries. For example, '-lmgsp'.
+#
+# If the package is specified, either internally or externally, optional
+# actions are run.
+#
+#   ACTIONS-TO-RUN-IF-NOT-EXTERNAL : Optional actions to run if the package
+#                                    is not external.
+#   ACTIONS-TO-RUN-IF-NOT-INTERNAL : Optional actions to run if the package
+#                                    is not internal.
+# ----------------------------------------------------------------------------
+AC_DEFUN([NL_WITH_OPTIONAL_INTERNAL_PACKAGE],
+[
+    _NL_WITH_PACKAGE([optional], [internal], [$1], [$2], [$3], [$4], [$5], [$6])dnl
+])
+
+# NL_WITH_PACKAGE(PACKAGE-PRETTY-NAME, PACKAGE-VARIABLE-PREFIX,
+#                 PACKAGE-SUCCINCT-NAME, [DEFAULT-PACKAGE-LIBS],
+#                 [ACTIONS-TO-RUN-IF-NOT-EXTERNAL],
+#                 [ACTIONS-TO-RUN-IF-NOT-INTERNAL])
+# ----------------------------------------------------------------------------
+# This macro is used to test for the presence, with pkg-config if it
+# is available, of the specified dependent package. The dependent
+# package may be provided externally or may exist within the current
+# package itself.
+#
+# The dependent package is specified in three forms:
+#
+#   PACKAGE-PRETTY-NAME     : The human-readable name by which the package
+#                             will be referred for any diagnostic output. For
+#                             example, "My Great Software Package".
+#   PACKAGE-VARIABLE-PREFIX : The package-specific prefix applied to variables
+#                             defined as a result of running this macro for the
+#                             packages. For example, "MGSP" here is transformed
+#                             into MGSP_CPPFLAGS.
+#   PACKAGE-SUCCINCT-NAME   : The package-specific name used for pkg-config,
+#                             in temporary variables and on the
+#                             configure command line.  For example,
+#                             "mgsp" here is used for --with-mgsp=DIR
+#                             or 'pkg-config --cflags mgsp'.
+#
+# In addition, if any additional, default link libraries are required
+# for use with the package, these are specified as:
+#
+#   DEFAULT-PACKAGE-LIBS    : Default link libraries required for use with
+#                             the package. These are used if pkg-config is
+#                             not available or cannot identify any
+#                             such libraries. For example, '-lmgsp'.
+#
+# If the package is specified, either internally or externally, optional
+# actions are run.
+#
+#   ACTIONS-TO-RUN-IF-NOT-EXTERNAL : Optional actions to run if the package
+#                                    is not external.
+#   ACTIONS-TO-RUN-IF-NOT-INTERNAL : Optional actions to run if the package
+#                                    is not internal.
+# ----------------------------------------------------------------------------
+AC_DEFUN([NL_WITH_PACKAGE],
+[
+    m4_warn(obsolete, [$0: this macro has been deprecated. Consider using NL_WITH_REQUIRED_INTERNAL_PACKAGE instead.])
+
+    NL_WITH_REQUIRED_INTERNAL_PACKAGE([$1], [$2], [$3], [$4], [$5], [$6])dnl
+])
+
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/pkg.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/pkg.m4
new file mode 100644
index 0000000..9a71878
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/m4/pkg.m4
@@ -0,0 +1,159 @@
+# pkg.m4 - Macros to locate and utilise pkg-config.            -*- Autoconf -*-
+# serial 1 (pkg-config-0.24)
+# 
+# Copyright © 2004 Scott James Remnant <scott@netsplit.com>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# PKG_PROG_PKG_CONFIG([MIN-VERSION])
+# ----------------------------------
+AC_DEFUN([PKG_PROG_PKG_CONFIG],
+[m4_pattern_forbid([^_?PKG_[A-Z_]+$])
+m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$])
+m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$])
+AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility])
+AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path])
+AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path])
+
+if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then
+	AC_PATH_TOOL([PKG_CONFIG], [pkg-config])
+fi
+if test -n "$PKG_CONFIG"; then
+	_pkg_min_version=m4_default([$1], [0.9.0])
+	AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version])
+	if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then
+		AC_MSG_RESULT([yes])
+	else
+		AC_MSG_RESULT([no])
+		PKG_CONFIG=""
+	fi
+fi[]dnl
+])# PKG_PROG_PKG_CONFIG
+
+# PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+#
+# Check to see whether a particular set of modules exists.  Similar
+# to PKG_CHECK_MODULES(), but does not set variables or print errors.
+#
+# Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG])
+# only at the first occurrence in configure.ac, so if the first place
+# it's called might be skipped (such as if it is within an "if"), you
+# have to call PKG_CHECK_EXISTS manually.
+# --------------------------------------------------------------
+AC_DEFUN([PKG_CHECK_EXISTS],
+[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
+if test -n "$PKG_CONFIG" && \
+    AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then
+  m4_default([$2], [:])
+m4_ifvaln([$3], [else
+  $3])dnl
+fi])
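+
+# Illustrative usage (a hypothetical sketch, not part of the upstream
+# pkg.m4): a check that records availability without failing:
+#
+#   PKG_CHECK_EXISTS([glib-2.0 >= 2.40],
+#                    [have_glib=yes],
+#                    [have_glib=no])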
+
+# _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES])
+# ---------------------------------------------
+m4_define([_PKG_CONFIG],
+[if test -n "$$1"; then
+    pkg_cv_[]$1="$$1"
+ elif test -n "$PKG_CONFIG"; then
+    PKG_CHECK_EXISTS([$3],
+                     [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null`
+		      test "x$?" != "x0" && pkg_failed=yes ],
+		     [pkg_failed=yes])
+ else
+    pkg_failed=untried
+fi[]dnl
+])# _PKG_CONFIG
+
+# _PKG_SHORT_ERRORS_SUPPORTED
+# -----------------------------
+AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED],
+[AC_REQUIRE([PKG_PROG_PKG_CONFIG])
+if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then
+        _pkg_short_errors_supported=yes
+else
+        _pkg_short_errors_supported=no
+fi[]dnl
+])# _PKG_SHORT_ERRORS_SUPPORTED
+
+
+# PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND],
+# [ACTION-IF-NOT-FOUND])
+#
+#
+# Note that if there is a possibility the first call to
+# PKG_CHECK_MODULES might not happen, you should be sure to include an
+# explicit call to PKG_PROG_PKG_CONFIG in your configure.ac
+#
+#
+# --------------------------------------------------------------
+AC_DEFUN([PKG_CHECK_MODULES],
+[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
+AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl
+AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl
+
+pkg_failed=no
+AC_MSG_CHECKING([for $1])
+
+_PKG_CONFIG([$1][_CFLAGS], [cflags], [$2])
+_PKG_CONFIG([$1][_LIBS], [libs], [$2])
+
+m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS
+and $1[]_LIBS to avoid the need to call pkg-config.
+See the pkg-config man page for more details.])
+
+if test $pkg_failed = yes; then
+   	AC_MSG_RESULT([no])
+        _PKG_SHORT_ERRORS_SUPPORTED
+        if test $_pkg_short_errors_supported = yes; then
+	        $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1`
+        else 
+	        $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1`
+        fi
+	# Put the nasty error message in config.log where it belongs
+	echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD
+
+	m4_default([$4], [AC_MSG_ERROR(
+[Package requirements ($2) were not met:
+
+$$1_PKG_ERRORS
+
+Consider adjusting the PKG_CONFIG_PATH environment variable if you
+installed software in a non-standard prefix.
+
+_PKG_TEXT])[]dnl
+        ])
+elif test $pkg_failed = untried; then
+     	AC_MSG_RESULT([no])
+	m4_default([$4], [AC_MSG_FAILURE(
+[The pkg-config script could not be found or is too old.  Make sure it
+is in your PATH or set the PKG_CONFIG environment variable to the full
+path to pkg-config.
+
+_PKG_TEXT
+
+To get pkg-config, see <http://pkg-config.freedesktop.org/>.])[]dnl
+        ])
+else
+	$1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS
+	$1[]_LIBS=$pkg_cv_[]$1[]_LIBS
+        AC_MSG_RESULT([yes])
+	$3
+fi[]dnl
+])# PKG_CHECK_MODULES
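+
+# Illustrative usage (a hypothetical configure.ac sketch, not part of the
+# upstream pkg.m4): on success, GLIB_CFLAGS and GLIB_LIBS are set and
+# substituted into the generated makefiles:
+#
+#   PKG_CHECK_MODULES([GLIB], [glib-2.0 >= 2.40],
+#       [AC_DEFINE([HAVE_GLIB], [1], [Define to 1 if GLib is available.])],
+#       [AC_MSG_WARN([GLib not found; building without GLib support])])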
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/missing b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/missing
new file mode 100755
index 0000000..db98974
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/missing
@@ -0,0 +1,215 @@
+#! /bin/sh
+# Common wrapper for a few potentially missing GNU programs.
+
+scriptversion=2013-10-28.13; # UTC
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+# Originally written by François Pinard <pinard@iro.umontreal.ca>, 1996.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
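+# Illustrative, hypothetical fragment (not part of the upstream script):
+# automake-generated makefiles typically route maintainer tools through
+# this wrapper so that a missing or outdated tool yields advice instead of
+# a cryptic failure, along these lines:
+#
+#   ACLOCAL  = ${SHELL} '/path/to/build-aux/missing' aclocal-1.14
+#   AUTOCONF = ${SHELL} '/path/to/build-aux/missing' autoconf
+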
+if test $# -eq 0; then
+  echo 1>&2 "Try '$0 --help' for more information"
+  exit 1
+fi
+
+case $1 in
+
+  --is-lightweight)
+    # Used by our autoconf macros to check whether the available missing
+    # script is modern enough.
+    exit 0
+    ;;
+
+  --run)
+    # Back-compat with the calling convention used by older automake.
+    shift
+    ;;
+
+  -h|--h|--he|--hel|--help)
+    echo "\
+$0 [OPTION]... PROGRAM [ARGUMENT]...
+
+Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due
+to PROGRAM being missing or too old.
+
+Options:
+  -h, --help      display this help and exit
+  -v, --version   output version information and exit
+
+Supported PROGRAM values:
+  aclocal   autoconf  autoheader   autom4te  automake  makeinfo
+  bison     yacc      flex         lex       help2man
+
+Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and
+'g' are ignored when checking the name.
+
+Send bug reports to <bug-automake@gnu.org>."
+    exit $?
+    ;;
+
+  -v|--v|--ve|--ver|--vers|--versi|--versio|--version)
+    echo "missing $scriptversion (GNU Automake)"
+    exit $?
+    ;;
+
+  -*)
+    echo 1>&2 "$0: unknown '$1' option"
+    echo 1>&2 "Try '$0 --help' for more information"
+    exit 1
+    ;;
+
+esac
+
+# Run the given program, remember its exit status.
+"$@"; st=$?
+
+# If it succeeded, we are done.
+test $st -eq 0 && exit 0
+
+# Also exit now if it failed (or wasn't found), and '--version' was
+# passed; such an option is passed most likely to detect whether the
+# program is present and works.
+case $2 in --version|--help) exit $st;; esac
+
+# Exit code 63 means version mismatch.  This often happens when the user
+# tries to use an ancient version of a tool on a file that requires a
+# minimum version.
+if test $st -eq 63; then
+  msg="probably too old"
+elif test $st -eq 127; then
+  # Program was missing.
+  msg="missing on your system"
+else
+  # Program was found and executed, but failed.  Give up.
+  exit $st
+fi
+
+perl_URL=http://www.perl.org/
+flex_URL=http://flex.sourceforge.net/
+gnu_software_URL=http://www.gnu.org/software
+
+program_details ()
+{
+  case $1 in
+    aclocal|automake)
+      echo "The '$1' program is part of the GNU Automake package:"
+      echo "<$gnu_software_URL/automake>"
+      echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:"
+      echo "<$gnu_software_URL/autoconf>"
+      echo "<$gnu_software_URL/m4/>"
+      echo "<$perl_URL>"
+      ;;
+    autoconf|autom4te|autoheader)
+      echo "The '$1' program is part of the GNU Autoconf package:"
+      echo "<$gnu_software_URL/autoconf/>"
+      echo "It also requires GNU m4 and Perl in order to run:"
+      echo "<$gnu_software_URL/m4/>"
+      echo "<$perl_URL>"
+      ;;
+  esac
+}
+
+give_advice ()
+{
+  # Normalize program name to check for.
+  normalized_program=`echo "$1" | sed '
+    s/^gnu-//; t
+    s/^gnu//; t
+    s/^g//; t'`
+
+  printf '%s\n' "'$1' is $msg."
+
+  configure_deps="'configure.ac' or m4 files included by 'configure.ac'"
+  case $normalized_program in
+    autoconf*)
+      echo "You should only need it if you modified 'configure.ac',"
+      echo "or m4 files included by it."
+      program_details 'autoconf'
+      ;;
+    autoheader*)
+      echo "You should only need it if you modified 'acconfig.h' or"
+      echo "$configure_deps."
+      program_details 'autoheader'
+      ;;
+    automake*)
+      echo "You should only need it if you modified 'Makefile.am' or"
+      echo "$configure_deps."
+      program_details 'automake'
+      ;;
+    aclocal*)
+      echo "You should only need it if you modified 'acinclude.m4' or"
+      echo "$configure_deps."
+      program_details 'aclocal'
+      ;;
+   autom4te*)
+      echo "You might have modified some maintainer files that require"
+      echo "the 'autom4te' program to be rebuilt."
+      program_details 'autom4te'
+      ;;
+    bison*|yacc*)
+      echo "You should only need it if you modified a '.y' file."
+      echo "You may want to install the GNU Bison package:"
+      echo "<$gnu_software_URL/bison/>"
+      ;;
+    lex*|flex*)
+      echo "You should only need it if you modified a '.l' file."
+      echo "You may want to install the Fast Lexical Analyzer package:"
+      echo "<$flex_URL>"
+      ;;
+    help2man*)
+      echo "You should only need it if you modified a dependency" \
+           "of a man page."
+      echo "You may want to install the GNU Help2man package:"
+      echo "<$gnu_software_URL/help2man/>"
+    ;;
+    makeinfo*)
+      echo "You should only need it if you modified a '.texi' file, or"
+      echo "any other file indirectly affecting the aspect of the manual."
+      echo "You might want to install the Texinfo package:"
+      echo "<$gnu_software_URL/texinfo/>"
+      echo "The spurious makeinfo call might also be the consequence of"
+      echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might"
+      echo "want to install GNU make:"
+      echo "<$gnu_software_URL/make/>"
+      ;;
+    *)
+      echo "You might have modified some files without having the proper"
+      echo "tools for further handling them.  Check the 'README' file, it"
+      echo "often tells you about the needed prerequisites for installing"
+      echo "this package.  You may also peek at any GNU archive site, in"
+      echo "case some other package contains this missing '$1' program."
+      ;;
+  esac
+}
+
+give_advice "$1" | sed -e '1s/^/WARNING: /' \
+                       -e '2,$s/^/         /' >&2
+
+# Propagate the correct exit status (expected to be 127 for a program
+# not found, 63 for a program that failed due to version mismatch).
+exit $st
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs
new file mode 100755
index 0000000..4191a45
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/mkinstalldirs
@@ -0,0 +1,162 @@
+#! /bin/sh
+# mkinstalldirs --- make directory hierarchy
+
+scriptversion=2009-04-28.21; # UTC
+
+# Original author: Noah Friedman <friedman@prep.ai.mit.edu>
+# Created: 1993-05-16
+# Public domain.
+#
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+nl='
+'
+IFS=" ""	$nl"
+errstatus=0
+dirmode=
+
+usage="\
+Usage: mkinstalldirs [-h] [--help] [--version] [-m MODE] DIR ...
+
+Create each directory DIR (with mode MODE, if specified), including all
+leading file name components.
+
+Report bugs to <bug-automake@gnu.org>."
+
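+# Illustrative usage (a hypothetical example, not part of the upstream
+# script): a single invocation can create whole hierarchies, e.g.
+#
+#   ./mkinstalldirs -m 0755 /tmp/stage/usr/include /tmp/stage/usr/lib
+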
+# process command line arguments
+while test $# -gt 0 ; do
+  case $1 in
+    -h | --help | --h*)         # -h for help
+      echo "$usage"
+      exit $?
+      ;;
+    -m)                         # -m PERM arg
+      shift
+      test $# -eq 0 && { echo "$usage" 1>&2; exit 1; }
+      dirmode=$1
+      shift
+      ;;
+    --version)
+      echo "$0 $scriptversion"
+      exit $?
+      ;;
+    --)                         # stop option processing
+      shift
+      break
+      ;;
+    -*)                         # unknown option
+      echo "$usage" 1>&2
+      exit 1
+      ;;
+    *)                          # first non-opt arg
+      break
+      ;;
+  esac
+done
+
+for file
+do
+  if test -d "$file"; then
+    shift
+  else
+    break
+  fi
+done
+
+case $# in
+  0) exit 0 ;;
+esac
+
+# Solaris 8's mkdir -p isn't thread-safe.  If you mkdir -p a/b and
+# mkdir -p a/c at the same time, both will detect that a is missing,
+# one will create a, then the other will try to create a and die with
+# a "File exists" error.  This is a problem when calling mkinstalldirs
+# from a parallel make.  We use --version in the probe to restrict
+# ourselves to GNU mkdir, which is thread-safe.
+case $dirmode in
+  '')
+    if mkdir -p --version . >/dev/null 2>&1 && test ! -d ./--version; then
+      echo "mkdir -p -- $*"
+      exec mkdir -p -- "$@"
+    else
+      # On NextStep and OpenStep, the `mkdir' command does not
+      # recognize any option.  It will interpret all options as
+      # directories to create, and then abort because `.' already
+      # exists.
+      test -d ./-p && rmdir ./-p
+      test -d ./--version && rmdir ./--version
+    fi
+    ;;
+  *)
+    if mkdir -m "$dirmode" -p --version . >/dev/null 2>&1 &&
+       test ! -d ./--version; then
+      echo "mkdir -m $dirmode -p -- $*"
+      exec mkdir -m "$dirmode" -p -- "$@"
+    else
+      # Clean up after NextStep and OpenStep mkdir.
+      for d in ./-m ./-p ./--version "./$dirmode";
+      do
+        test -d $d && rmdir $d
+      done
+    fi
+    ;;
+esac
+
+for file
+do
+  case $file in
+    /*) pathcomp=/ ;;
+    *)  pathcomp= ;;
+  esac
+  oIFS=$IFS
+  IFS=/
+  set fnord $file
+  shift
+  IFS=$oIFS
+
+  for d
+  do
+    test "x$d" = x && continue
+
+    pathcomp=$pathcomp$d
+    case $pathcomp in
+      -*) pathcomp=./$pathcomp ;;
+    esac
+
+    if test ! -d "$pathcomp"; then
+      echo "mkdir $pathcomp"
+
+      mkdir "$pathcomp" || lasterr=$?
+
+      if test ! -d "$pathcomp"; then
+	errstatus=$lasterr
+      else
+	if test ! -z "$dirmode"; then
+	  echo "chmod $dirmode $pathcomp"
+	  lasterr=
+	  chmod "$dirmode" "$pathcomp" || lasterr=$?
+
+	  if test ! -z "$lasterr"; then
+	    errstatus=$lasterr
+	  fi
+	fi
+      fi
+    fi
+
+    pathcomp=$pathcomp/
+  done
+done
+
+exit $errstatus
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/py-compile b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/py-compile
new file mode 100755
index 0000000..46ea866
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/py-compile
@@ -0,0 +1,170 @@
+#!/bin/sh
+# py-compile - Compile a Python program
+
+scriptversion=2011-06-08.12; # UTC
+
+# Copyright (C) 2000-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+if [ -z "$PYTHON" ]; then
+  PYTHON=python
+fi
+
+me=py-compile
+
+usage_error ()
+{
+  echo "$me: $*" >&2
+  echo "Try '$me --help' for more information." >&2
+  exit 1
+}
+
+basedir=
+destdir=
+while test $# -ne 0; do
+  case "$1" in
+    --basedir)
+      if test $# -lt 2; then
+        usage_error "option '--basedir' requires an argument"
+      else
+        basedir=$2
+      fi
+      shift
+      ;;
+    --destdir)
+      if test $# -lt 2; then
+        usage_error "option '--destdir' requires an argument"
+      else
+        destdir=$2
+      fi
+      shift
+      ;;
+    -h|--help)
+      cat <<\EOF
+Usage: py-compile [--help] [--version] [--basedir DIR] [--destdir DIR] FILES..."
+
+Byte compile some python scripts FILES.  Use --destdir to specify any
+leading directory path to the FILES that you don't want to include in the
+byte compiled file.  Specify --basedir for any additional path information you
+do want to be shown in the byte compiled file.
+
+Example:
+  py-compile --destdir /tmp/pkg-root --basedir /usr/share/test test.py test2.py
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+      exit $?
+      ;;
+    -v|--version)
+      echo "$me $scriptversion"
+      exit $?
+      ;;
+    --)
+      shift
+      break
+      ;;
+    -*)
+      usage_error "unrecognized option '$1'"
+      ;;
+    *)
+      break
+      ;;
+  esac
+  shift
+done
+
+files=$*
+if test -z "$files"; then
+    usage_error "no files given"
+fi
+
+# if basedir was given, then it should be prepended to filenames before
+# byte compilation.
+if [ -z "$basedir" ]; then
+    pathtrans="path = file"
+else
+    pathtrans="path = os.path.join('$basedir', file)"
+fi
+
+# if destdir was given, then it needs to be prepended to the filename to
+# byte compile but not go into the compiled file.
+if [ -z "$destdir" ]; then
+    filetrans="filepath = path"
+else
+    filetrans="filepath = os.path.normpath('$destdir' + os.sep + path)"
+fi
+
+$PYTHON -c "
+import sys, os, py_compile, imp
+
+files = '''$files'''
+
+sys.stdout.write('Byte-compiling python modules...\n')
+for file in files.split():
+    $pathtrans
+    $filetrans
+    if not os.path.exists(filepath) or not (len(filepath) >= 3
+                                            and filepath[-3:] == '.py'):
+	    continue
+    sys.stdout.write(file)
+    sys.stdout.flush()
+    if hasattr(imp, 'get_tag'):
+        py_compile.compile(filepath, imp.cache_from_source(filepath), path)
+    else:
+        py_compile.compile(filepath, filepath + 'c', path)
+sys.stdout.write('\n')" || exit $?
+
+# this will fail for python < 1.5, but that doesn't matter ...
+$PYTHON -O -c "
+import sys, os, py_compile, imp
+
+# pypy does not use .pyo optimization
+if hasattr(sys, 'pypy_translation_info'):
+    sys.exit(0)
+
+files = '''$files'''
+sys.stdout.write('Byte-compiling python modules (optimized versions) ...\n')
+for file in files.split():
+    $pathtrans
+    $filetrans
+    if not os.path.exists(filepath) or not (len(filepath) >= 3
+                                            and filepath[-3:] == '.py'):
+	    continue
+    sys.stdout.write(file)
+    sys.stdout.flush()
+    if hasattr(imp, 'get_tag'):
+        py_compile.compile(filepath, imp.cache_from_source(filepath, False), path)
+    else:
+        py_compile.compile(filepath, filepath + 'o', path)
+sys.stdout.write('\n')" 2>/dev/null || :
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/test-driver b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/test-driver
new file mode 100755
index 0000000..d306056
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/autoconf/test-driver
@@ -0,0 +1,139 @@
+#! /bin/sh
+# test-driver - basic testsuite driver script.
+
+scriptversion=2013-07-13.22; # UTC
+
+# Copyright (C) 2011-2013 Free Software Foundation, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+# Make unconditional expansion of undefined variables an error.  This
+# helps a lot in preventing typo-related bugs.
+set -u
+
+usage_error ()
+{
+  echo "$0: $*" >&2
+  print_usage >&2
+  exit 2
+}
+
+print_usage ()
+{
+  cat <<END
+Usage:
+  test-driver --test-name=NAME --log-file=PATH --trs-file=PATH
+              [--expect-failure={yes|no}] [--color-tests={yes|no}]
+              [--enable-hard-errors={yes|no}] [--]
+              TEST-SCRIPT [TEST-SCRIPT-ARGUMENTS]
+The '--test-name', '--log-file' and '--trs-file' options are mandatory.
+END
+}
+
+test_name= # Used for reporting.
+log_file=  # Where to save the output of the test script.
+trs_file=  # Where to save the metadata of the test run.
+expect_failure=no
+color_tests=no
+enable_hard_errors=yes
+while test $# -gt 0; do
+  case $1 in
+  --help) print_usage; exit $?;;
+  --version) echo "test-driver $scriptversion"; exit $?;;
+  --test-name) test_name=$2; shift;;
+  --log-file) log_file=$2; shift;;
+  --trs-file) trs_file=$2; shift;;
+  --color-tests) color_tests=$2; shift;;
+  --expect-failure) expect_failure=$2; shift;;
+  --enable-hard-errors) enable_hard_errors=$2; shift;;
+  --) shift; break;;
+  -*) usage_error "invalid option: '$1'";;
+   *) break;;
+  esac
+  shift
+done
+
+missing_opts=
+test x"$test_name" = x && missing_opts="$missing_opts --test-name"
+test x"$log_file"  = x && missing_opts="$missing_opts --log-file"
+test x"$trs_file"  = x && missing_opts="$missing_opts --trs-file"
+if test x"$missing_opts" != x; then
+  usage_error "the following mandatory options are missing:$missing_opts"
+fi
+
+if test $# -eq 0; then
+  usage_error "missing argument"
+fi
+
+if test $color_tests = yes; then
+  # Keep this in sync with 'lib/am/check.am:$(am__tty_colors)'.
+  red='' # Red.
+  grn='' # Green.
+  lgn='' # Light green.
+  blu='' # Blue.
+  mgn='' # Magenta.
+  std=''     # No color.
+else
+  red= grn= lgn= blu= mgn= std=
+fi
+
+do_exit='rm -f $log_file $trs_file; (exit $st); exit $st'
+trap "st=129; $do_exit" 1
+trap "st=130; $do_exit" 2
+trap "st=141; $do_exit" 13
+trap "st=143; $do_exit" 15
+
+# Test script is run here.
+"$@" >$log_file 2>&1
+estatus=$?
+if test $enable_hard_errors = no && test $estatus -eq 99; then
+  estatus=1
+fi
+
+case $estatus:$expect_failure in
+  0:yes) col=$red res=XPASS recheck=yes gcopy=yes;;
+  0:*)   col=$grn res=PASS  recheck=no  gcopy=no;;
+  77:*)  col=$blu res=SKIP  recheck=no  gcopy=yes;;
+  99:*)  col=$mgn res=ERROR recheck=yes gcopy=yes;;
+  *:yes) col=$lgn res=XFAIL recheck=no  gcopy=yes;;
+  *:*)   col=$red res=FAIL  recheck=yes gcopy=yes;;
+esac
+
+# Report outcome to console.
+echo "${col}${res}${std}: $test_name"
+
+# Register the test result, and other relevant metadata.
+echo ":test-result: $res" > $trs_file
+echo ":global-test-result: $res" >> $trs_file
+echo ":recheck: $recheck" >> $trs_file
+echo ":copy-in-global-log: $gcopy" >> $trs_file
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post.am
new file mode 100644
index 0000000..245fa8a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post.am
@@ -0,0 +1,23 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the generic "tail" or post automake header that may
+#      be included in any automakefile used in the build tree.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post/rules.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules.am
new file mode 100644
index 0000000..1836910
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules.am
@@ -0,0 +1,25 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the automake footer for all common
+#      (i.e. non-toolchain-specific) rules.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post/rules/coverage.am
+include $(abs_top_nlbuild_autotools_dir)/automake/post/rules/pretty.am
+include $(abs_top_nlbuild_autotools_dir)/automake/post/rules/headers.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/coverage.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/coverage.am
new file mode 100644
index 0000000..aa54820
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/coverage.am
@@ -0,0 +1,39 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the automake footer for all code-coverage related
+#      targets and rules.
+#
+#      This represents the minimum integration with GNU autotools
+#      (automake in particular) such that 'make coverage' may be invoked
+#      at the top of the tree and all the prerequisites occur such
+#      that it executes successfully with no intervening make target
+#      invocations. The 'check-am' and '$(BUILT_SOURCES)' are the key
+#      automake-specific dependencies to ensure that happens.
+#
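+#      As an illustrative sketch (assuming a tree that has already been
+#      configured), the expected top-of-tree invocation is simply:
+#
+#        % make coverage
+#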
+
+.PHONY: coverage coverage-recursive
+ 
+coverage: coverage-recursive
+
+coverage: check-am
+
+coverage: $(BUILT_SOURCES)
+
+coverage-recursive:
+	$(nl-make-subdirs)
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/headers.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/headers.am
new file mode 100644
index 0000000..41ed0c6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/headers.am
@@ -0,0 +1,45 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the automake footer for installing header files
+#      independently of the rest of a package.
+#
+#      Packages that wish to avail themselves of this target may wish
+#      to add the following goals and dependencies:
+#
+#        install-headers: install-includeHEADERS
+#
+#      in places where the package uses and defines 'include_HEADERS' or
+#
+#        install-headers: install-data
+#
+#      where the package uses and defines a more complex 'dist_*_HEADERS'.
+#
+#      This represents the minimum integration with GNU autotools
+#      (automake in particular) such that 'make install-headers' may be
+#      invoked at the top of the tree and all the prerequisites occur
+#      such that it executes successfully with no intervening make
+#      target invocations.
+#
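+#      As an illustrative sketch only (the header file names are
+#      hypothetical, not part of this package), a package Makefile.am
+#      using 'include_HEADERS' might hook into this target as follows:
+#
+#        include_HEADERS = nlfoo.h nlfoo-version.h
+#
+#        install-headers: install-includeHEADERS
+#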
+
+.PHONY: install-headers install-headers-recursive
+
+install-headers: install-headers-recursive
+
+install-headers-recursive:
+	$(nl-make-subdirs)
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/pretty.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/pretty.am
new file mode 100644
index 0000000..0d8afc6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/post/rules/pretty.am
@@ -0,0 +1,79 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the automake footer for all coding style-related
+#      targets and rules.
+#
+#      The recursive target 'pretty', invoked against '$(PRETTY_SUBDIRS)', 
+#      is intended to reformat a collection of source files, defined by 
+#      '$(PRETTY_FILES)' using the program '$(PRETTY)' with the arguments 
+#      '$(PRETTY_ARGS)'.
+#
+#      The recursive target 'pretty-check' (and its alias 'lint'),
+#      invoked against '$(PRETTY_SUBDIRS)', is intended to only check 
+#      but NOT reformat a collection of source files, defined by
+#      '$(PRETTY_FILES)' using the program '$(PRETTY_CHECK)' with the
+#      arguments '$(PRETTY_CHECK_ARGS)'.
+#
+#      This represents the minimum integration with GNU autotools
+#      (automake in particular) such that 'make pretty' and 'make
+#      pretty-check' may be invoked at the top of the tree and all
+#      the prerequisites occur such that they execute successfully
+#      with no intervening make target invocations. '$(BUILT_SOURCES)'
+#      are the key automake-specific dependencies to ensure that happens.
+#
+
+# nl-make-pretty <TERSE OUTPUT COMMAND> <COMMAND> <COMMAND ARGUMENTS> <PATHS>
+#
+# This function iterates over PATHS, invoking COMMAND with
+# COMMAND ARGUMENTS on each file. If a non-verbose make has been
+# requested, TERSE OUTPUT COMMAND is emitted to standard output.
+
+define nl-make-pretty
+$(AM_V_at)for file in $(4); do \
+    $(1) \
+    if test -f $${file}; then d=.; else d=$(srcdir); fi; \
+    $(2) $(3) $${d}/$${file} \
+    || exit 1; \
+done
+endef
+
+.PHONY: pretty pretty-recursive pretty-check pretty-check-recursive lint
+
+pretty: pretty-recursive
+
+pretty pretty-check: $(BUILT_SOURCES)
+
+# Map the build action 'lint' to the more vernacular 'pretty-check'.
+
+lint: pretty-check
+
+pretty-check: pretty-check-recursive
+
+pretty-recursive pretty-check-recursive:
+	$(call nl-make-subdirs-with-dirs,$(PRETTY_SUBDIRS))
+
+pretty: $(PRETTY_FILES)
+ifneq ($(PRETTY),)
+	$(call nl-make-pretty,$(AM_V_PRETTY),$(PRETTY),$(PRETTY_ARGS),$(filter-out $(@)-recursive,$(^)))
+endif
+
+pretty-check: $(PRETTY_FILES)
+ifneq ($(PRETTY_CHECK),)
+	$(call nl-make-pretty,$(AM_V_PRETTY_CHECK),$(PRETTY_CHECK),$(PRETTY_CHECK_ARGS),$(filter-out $(@)-recursive,$(^)))
+endif
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre.am
new file mode 100644
index 0000000..2745b5a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre.am
@@ -0,0 +1,23 @@
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the generic "head" or pre automake header that may
+#      be included in any automakefile used in the build tree.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre/macros.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros.am
new file mode 100644
index 0000000..8122bea
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros.am
@@ -0,0 +1,85 @@
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines automake macros common to all other automake
+#      headers and files.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre/macros/constants.am
+include $(abs_top_nlbuild_autotools_dir)/automake/pre/macros/paths.am
+include $(abs_top_nlbuild_autotools_dir)/automake/pre/macros/pretty.am
+include $(abs_top_nlbuild_autotools_dir)/automake/pre/macros/verbosity.am
+include $(abs_top_nlbuild_autotools_dir)/automake/pre/macros/coverage.am
+include $(abs_top_nlbuild_autotools_dir)/automake/pre/macros/subdirs.am
+
+#
+# create-directory
+#
+# Create the directory with the name of the $(@) variable.
+#
+define create-directory
+$(AM_V_MKDIR_P)$(MKDIR_P) $(@)
+endef # create-directory
+
+#
+# create-link
+#
+# Create the symbolic link with the source of the $(<) variable and
+# the destination of the $(@) variable.
+#
+define create-link
+$(AM_V_LN_S)$(LN_S) $(<) $(@)
+endef # create-link
+
+#
+# check-file <macro suffix>
+#
+# Check whether a file, referenced by the $(@) variable, should be
+# updated / regenerated based on its dependencies, referenced by the
+# $(<) variable, by running the make macro check-file-<macro suffix>.
+#
+# The $(<) is passed as the first argument, in case the macro wants to
+# process it, and the prospective new output file, which the macro MUST
+# generate, is passed as the second.
+#
+# This macro will ensure that any required parent directories are created
+# prior to invoking check-file-<macro suffix>.
+#
+# This macro is similar to and inspired by that from Linux Kbuild and
+# elsewhere.
+#
+#   <macro suffix> - The name, suffixed to "check-file-", which indicates
+#                    the make macro to invoke.
+#
+#
+define check-file
+$(AM_V_at)set -e;                                   \
+echo '  CHECK    $(@)';                             \
+$(MKDIR_P) $(dir $(@));                             \
+$(call check-file-$(1),$(<),$(@).N);                \
+if [ -r "$(@)" ] && $(CMP) -s "$(@)" "$(@).N"; then \
+    rm -f "$(@).N";                                 \
+else                                                \
+    echo '  GEN      $(@)';                         \
+    mv -f "$(@).N" "$(@)";                          \
+fi
+endef # check-file
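+
+#
+# The following is an illustrative sketch only (the 'foo' rule and the
+# 'check-file-foo' macro are hypothetical, not part of this package). It
+# shows the calling convention: define a macro named check-file-<macro
+# suffix> that may read its first argument and MUST write its second,
+# then invoke check-file from a rule using that suffix:
+#
+#   define check-file-foo
+#   sed -e 's/@FOO@/foo/g' < "$(1)" > "$(2)"
+#   endef # check-file-foo
+#
+#   foo.h: foo.h.in
+#       $(call check-file,foo)
+#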
+
+
+
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/constants.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/constants.am
new file mode 100644
index 0000000..1277a29
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/constants.am
@@ -0,0 +1,30 @@
+#
+#    Copyright 2010-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines automake file constants common to all other
+#      automake headers and files.
+#
+
+#
+# Character constants for those characters that make does not otherwise
+# handle well or naturally.
+#
+
+# Path separator
+
+Slash                   := /
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/coverage.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/coverage.am
new file mode 100644
index 0000000..f3a762a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/coverage.am
@@ -0,0 +1,106 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines automake variables and macros common to all
+#      other automake headers and files for code coverage.
+#
+
+# Suffixes
+
+#
+# Suffix for the code coverage report "bundle".
+#
+NL_COVERAGE_BUNDLE_SUFFIX = .lcov
+
+#
+# Suffix for the lcov "info" file inside the code coverage report bundle.
+#
+NL_COVERAGE_INFO_SUFFIX   = .info
+
+#
+# Verbosity macros and flags
+#
+
+NL_V_LCOV             = $(nl__v_LCOV_$(V))
+nl__v_LCOV_           = $(nl__v_LCOV_$(AM_DEFAULT_VERBOSITY))
+nl__v_LCOV_0          = @echo "  LCOV     $(@)";
+nl__v_LCOV_1          = 
+
+NL_V_LCOV_FLAGS       = $(nl__v_LCOV_FLAGS_$(V))
+nl__v_LCOV_FLAGS_     = $(nl__v_LCOV_FLAGS_$(AM_DEFAULT_VERBOSITY))
+nl__v_LCOV_FLAGS_0    = --quiet
+nl__v_LCOV_FLAGS_1    =
+
+NL_V_GENHTML          = $(nl__v_GENHTML_$(V))
+nl__v_GENHTML_        = $(nl__v_GENHTML_$(AM_DEFAULT_VERBOSITY))
+nl__v_GENHTML_0       = @echo "  GENHTML  $(@)";
+nl__v_GENHTML_1       = 
+
+NL_V_GENHTML_FLAGS    = $(nl__v_GENHTML_FLAGS_$(V))
+nl__v_GENHTML_FLAGS_  = $(nl__v_GENHTML_FLAGS_$(AM_DEFAULT_VERBOSITY))
+nl__v_GENHTML_FLAGS_0 = --quiet 
+nl__v_GENHTML_FLAGS_1 =
+
+#
+# generate-coverage-report <directory>
+#
+# Capture, using lcov, a coverage report from the specified directory 'directory'
+# with a final output "info" file as specified by the target variable.
+#
+#   <directory> - The directory from which lcov should search for coverage data (*.gcno & *.gcda)
+#
+#   - create baseline coverage data file (base.info) with '-i|--initial' option
+#   - create test coverage data file (test.info)
+#   - combine baseline and test coverage data to create the final "info" file
+#
+# Then, on success, generate an HTML-based coverage report using genhtml.
+#
+define generate-coverage-report
+$(NL_V_LCOV)$(LCOV) $(NL_V_LCOV_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" --initial --capture --directory "$(1)" --output-file "base.info"
+$(NL_V_LCOV)$(LCOV) $(NL_V_LCOV_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" --capture --directory "$(1)" --output-file "test.info"
+$(NL_V_LCOV)$(LCOV) $(NL_V_LCOV_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" --add-tracefile "base.info" --add-tracefile "test.info" --output-file "$(@)"
+$(NL_V_GENHTML)$(GENHTML) $(NL_V_GENHTML_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" "$(@)" --output-directory "$(@D)"
+endef # generate-coverage-report
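+
+# As an illustrative sketch only (the bundle names, the 'check'
+# prerequisite, and the 'src' directory are hypothetical, not part of
+# this package), a package might produce a coverage bundle with:
+#
+#   coverage_name = $(PACKAGE)$(NL_COVERAGE_BUNDLE_SUFFIX)
+#   coverage_info = $(coverage_name)/$(PACKAGE)$(NL_COVERAGE_INFO_SUFFIX)
+#
+#   $(coverage_info): check
+#       $(call generate-coverage-report,$(top_builddir)/src)
+#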
+
+#
+# generate-coverage-report-with-filter <directory> <remove_filter>
+#
+# Capture, using lcov, a coverage report from the specified directory 'directory'
+# and a filter 'remove_filter', with a final output "info" file as specified by
+# the target variable.
+#
+#   <directory>      - The directory from which lcov should search for coverage data (*.gcno & *.gcda)
+#
+#   <remove_filter>  - A whitespace-separated list of shell wildcard patterns
+#                      (note that they may need to be escaped to prevent the
+#                      shell from expanding them first). Every file entry in the
+#                      tracefile that matches at least one of those patterns
+#                      will be removed.
+#
+#   - create baseline coverage data file (base.info) with '-i|--initial' option
+#   - create test coverage data file (test.info)
+#   - combine baseline and test coverage data to create the final "info" file
+#   - remove particular coverage data specified by the remove_filter (no change if remove_filter is null)
+#
+# Then, on success, generate an HTML-based coverage report using genhtml.
+#
+define generate-coverage-report-with-filter
+$(NL_V_LCOV)$(LCOV) $(NL_V_LCOV_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" --initial --capture --directory "$(1)" --output-file "base.info"
+$(NL_V_LCOV)$(LCOV) $(NL_V_LCOV_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" --capture --directory "$(1)" --output-file "test.info"
+$(NL_V_LCOV)$(LCOV) $(NL_V_LCOV_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" --add-tracefile "base.info" --add-tracefile "test.info" --output-file "$(@)"
+$(NL_V_LCOV)$(LCOV) $(NL_V_LCOV_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" --remove "$(@)" $(foreach pattern,$(2),"$(pattern)") --output-file "$(@)"
+$(NL_V_GENHTML)$(GENHTML) $(NL_V_GENHTML_FLAGS) --config-file="$(abs_top_nlbuild_autotools_dir)/etc/lcov.config" "$(@)" --output-directory "$(@D)"
+endef # generate-coverage-report-with-filter
+
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/paths.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/paths.am
new file mode 100644
index 0000000..892b7cc
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/paths.am
@@ -0,0 +1,42 @@
+#
+#    Copyright 2010-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines automake macros for manipulating and querying
+#      paths.
+#
+
+# Deslashify <path> ...
+#
+# Ensure that there is NO trailing directory delimiter at the end of
+# the specified path.
+
+Deslashify          = $(patsubst %$(Slash),%,$(1))
+
+# Slashify <path> ...
+#
+# Ensure that there is a single trailing directory delimiter at the
+# end of the specified path.
+
+Slashify            = $(addsuffix $(Slash),$(call Deslashify,$(1)))
+
+#
+# IsRealPath <path>
+#
+# If the path is a real rather than a symbolic link path, the path is
+# returned; otherwise, the empty string is returned.
+
+IsRealPath          = $(if $(patsubst $(realpath $(1)),,$(abspath $(1))),,$(1))
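+
+# For illustration (a sketch; the paths shown are hypothetical):
+#
+#   $(call Slashify,/usr/local)     yields '/usr/local/'
+#   $(call Slashify,/usr/local/)    yields '/usr/local/'
+#   $(call Deslashify,/usr/local/)  yields '/usr/local'
+#
+# and '$(call IsRealPath,DIR)' expands to 'DIR' when the path resolves
+# to itself, and to the empty string otherwise.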
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/pretty.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/pretty.am
new file mode 100644
index 0000000..a983c29
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/pretty.am
@@ -0,0 +1,52 @@
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines automake variables for performing coding
+#      style formatting and checking.
+#
+
+AM_V_PRETTY          = $(am__v_PRETTY_$(V))
+am__v_PRETTY_        = $(am__v_PRETTY_$(AM_DEFAULT_VERBOSITY))
+am__v_PRETTY_0       = echo "  PRETTY   $${file}";
+am__v_PRETTY_1       = 
+
+AM_V_PRETTY_CHECK    = $(am__v_PRETTY_CHECK_$(V))
+am__v_PRETTY_CHECK_  = $(am__v_PRETTY_CHECK_$(AM_DEFAULT_VERBOSITY))
+am__v_PRETTY_CHECK_0 = echo "  PRETTY   $${file}";
+am__v_PRETTY_CHECK_1 = 
+
+# PRETTY_FILES
+#
+# This defines the collection of files against which the PRETTY and
+# PRETTY_CHECK profiles will be invoked for the 'pretty' and
+# 'pretty-check' targets, respectively.
+#
+# This defaults to SOURCES and HEADERS and may be overridden or
+# appended to.
+
+PRETTY_FILES    ?= $(SOURCES) $(HEADERS)
+
+# PRETTY_SUBDIRS
+#
+# This defines the subdirectories against which the recursive 'pretty' and
+# 'pretty-check' targets will run.
+#
+# This defaults to SUBDIRS and may be overridden or appended to.
+
+PRETTY_SUBDIRS  ?= $(SUBDIRS)
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/subdirs.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/subdirs.am
new file mode 100644
index 0000000..174cefe
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/subdirs.am
@@ -0,0 +1,78 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines automake variables and macros common to all
+#      other automake headers and files for invoking make in a list
+#      of subdirectories with a list of target goals.
+#
+
+#
+# nl-make-subdirs-with-dirs-and-goals <subdirectories> <goals>
+#
+# This iteratively invokes make with the provided target goals in each
+# subdirectory specified.
+#
+#   subdirectories  A space-delimited list of subdirectories in which to
+#                   invoke make with the provided target goals.
+#
+#   goals           A space-delimited list of target goals to
+#                   attempt to make in each specified subdirectory.
+#
+define nl-make-subdirs-with-dirs-and-goals
++$(AM_V_at)for subdir in $(1); do \
+    echo "Making $(2) in $${subdir}"; \
+    $(MAKE) -C "$${subdir}" $(AM_MAKEFLAGS) $(2) \
+    || exit 1; \
+done
+endef # nl-make-subdirs-with-dirs-and-goals
+
+#
+# nl-make-subdirs-with-dirs <subdirectories>
+#
+# This iteratively invokes make with the target goals defined in
+# MAKECMDGOALS in each subdirectory specified.
+#
+#   subdirectories  A space-delimited list of subdirectories in which to
+#                   invoke make with the provided target goals.
+#
+define nl-make-subdirs-with-dirs
+$(call nl-make-subdirs-with-dirs-and-goals,$(1),$(MAKECMDGOALS))
+endef # nl-make-subdirs-with-dirs
+
+#
+# nl-make-subdirs-with-goals <goals>
+#
+# This iteratively invokes make with the provided target goals in the
+# subdirectories defined in SUBDIRS.
+#
+#   goals           A space-delimited list of target goals to
+#                   attempt to make in SUBDIRS.
+#
+define nl-make-subdirs-with-goals
+$(call nl-make-subdirs-with-dirs-and-goals,$(SUBDIRS),$(1))
+endef # nl-make-subdirs-with-goals
+
+#
+# nl-make-subdirs
+#
+# This iteratively invokes make with the target goals defined in
+# MAKECMDGOALS in the subdirectories defined in SUBDIRS.
+#
+define nl-make-subdirs
+$(call nl-make-subdirs-with-dirs-and-goals,$(SUBDIRS),$(MAKECMDGOALS))
+endef # nl-make-subdirs
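+
+# As an illustrative sketch only (the 'docs' goal and the 'doc'
+# subdirectory are hypothetical, not part of this package), a recursive
+# target might be wired up with these macros as follows:
+#
+#   .PHONY: docs docs-recursive
+#
+#   docs: docs-recursive
+#
+#   docs-recursive:
+#       $(call nl-make-subdirs-with-dirs-and-goals,doc,docs)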
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/verbosity.am b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/verbosity.am
new file mode 100644
index 0000000..6f72bd6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/automake/pre/macros/verbosity.am
@@ -0,0 +1,35 @@
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file defines automake variables common to all other
+#      automake headers and files for silent-rules-style (V=[0|1]) build
+#      verbosity output.
+#
+
+AM_V_LN_S       = $(am__v_LN_S_$(V))
+am__v_LN_S_     = $(am__v_LN_S_$(AM_DEFAULT_VERBOSITY))
+am__v_LN_S_0    = @echo "  LN       $(@)";
+am__v_LN_S_1    = 
+
+AM_V_MKDIR_P    = $(am__v_MKDIR_P_$(V))
+am__v_MKDIR_P_  = $(am__v_MKDIR_P_$(AM_DEFAULT_VERBOSITY))
+am__v_MKDIR_P_0 = @echo "  MKDIR    $(@)";
+am__v_MKDIR_P_1 = 
+
+
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/etc/lcov.config b/nl-unit-test/third_party/nlbuild-autotools/repo/etc/lcov.config
new file mode 100644
index 0000000..d4ee495
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/etc/lcov.config
@@ -0,0 +1,148 @@
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is an lcov configuration file based on the default
+#      from /etc/lcovrc. The key difference is the inclusion of
+#      'genhtml_branch_coverage = 1' and 'lcov_branch_coverage = 1' to
+#      ensure that branch coverage is reported.
+#
+
+# External style sheet file
+#genhtml_css_file = gcov.css
+
+# Coverage rate limits
+genhtml_hi_limit = 90
+genhtml_med_limit = 75
+
+# Width of line coverage field in source code view
+genhtml_line_field_width = 12
+
+# Width of branch coverage field in source code view
+genhtml_branch_field_width = 16
+
+# Width of overview image
+genhtml_overview_width = 80
+
+# Resolution of overview navigation
+genhtml_nav_resolution = 4
+
+# Offset for source code navigation
+genhtml_nav_offset = 10
+
+# Do not remove unused test descriptions if non-zero
+genhtml_keep_descriptions = 0
+
+# Do not remove prefix from directory names if non-zero
+genhtml_no_prefix = 0
+
+# Do not create source code view if non-zero
+genhtml_no_source = 0
+
+# Specify size of tabs
+genhtml_num_spaces = 8
+
+# Highlight lines with converted-only data if non-zero
+genhtml_highlight = 0
+
+# Include color legend in HTML output if non-zero
+genhtml_legend = 0
+
+# Include HTML file at start of HTML output
+#genhtml_html_prolog = prolog.php
+
+# Include HTML file at end of HTML output
+#genhtml_html_epilog = epilog.php
+
+# Use custom HTML file extension
+#genhtml_html_extension = html
+
+# Compress all generated html files with gzip.
+#genhtml_html_gzip = 1
+
+# Include sorted overview pages
+genhtml_sort = 1
+
+# Include function coverage data display
+#genhtml_function_coverage = 1
+
+# Include branch coverage data display
+genhtml_branch_coverage = 1
+
+# Specify the character set of all generated HTML pages
+genhtml_charset=UTF-8
+
+# Allow HTML markup in test case description text if non-zero
+genhtml_desc_html=0
+
+# Location of the gcov tool
+#geninfo_gcov_tool = gcov
+
+# Adjust test names if non-zero
+#geninfo_adjust_testname = 0
+
+# Calculate a checksum for each line if non-zero
+geninfo_checksum = 0
+
+# Enable libtool compatibility mode if non-zero
+geninfo_compat_libtool = 0
+
+# Specify whether to capture coverage data for external source
+# files
+#geninfo_external = 1
+
+# Use gcov's --all-blocks option if non-zero
+#geninfo_gcov_all_blocks = 1
+
+# Specify compatibility modes (same as --compat option
+# of geninfo)
+#geninfo_compat = libtool=on, hammer=auto, split_crc=auto
+
+# Adjust path to source files by removing or changing path
+# components that match the specified pattern (Perl regular
+# expression format)
+#geninfo_adjust_src_path = /tmp/build => /usr/src
+
+# Specify if geninfo should try to automatically determine
+# the base-directory when collecting coverage data.
+geninfo_auto_base = 1
+
+# Directory containing gcov kernel files
+lcov_gcov_dir = /proc/gcov
+
+# Location for temporary directories
+lcov_tmp_dir = /tmp
+
+# Show full paths during list operation if non-zero
+lcov_list_full_path = 0
+
+# Specify the maximum width for list output. This value is
+# ignored when lcov_list_full_path is non-zero.
+lcov_list_width = 80
+
+# Specify the maximum percentage of file names which may be
+# truncated when choosing a directory prefix in list output.
+# This value is ignored when lcov_list_full_path is non-zero.
+lcov_list_truncate_max = 20
+
+# Specify if function coverage data should be collected and
+# processed.
+lcov_function_coverage = 1
+
+# Specify if branch coverage data should be collected and
+# processed.
+lcov_branch_coverage = 1
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Doxyfile.in b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Doxyfile.in
new file mode 100644
index 0000000..dd1c6ea
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Doxyfile.in
@@ -0,0 +1,2371 @@
+# Doxyfile 1.8.6
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file describes the settings to be used by the
+#      documentation system doxygen (www.doxygen.org).
+#
+#      This was initially autogenerated by 'doxywizard' and then hand-tuned.
+#
+#      All text after a hash (#) is considered a comment and will be
+#      ignored.
+#
+#      The format is:
+#
+#          TAG = value [value, ...]
+#
+#      For lists, items can also be appended using:
+#
+#          TAG += value [value, ...]
+#
+#      Values that contain spaces should be placed between quotes (" ")
+#
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all text
+# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
+# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
+# for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING      = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME           = @PACKAGE_NAME@
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER         = @PACKAGE_VERSION@
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF          =
+
+# With the PROJECT_LOGO tag one can specify an logo or icon that is included in
+# the documentation. The maximum height of the logo should not exceed 55 pixels
+# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo
+# to the output directory.
+
+PROJECT_LOGO           =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY       = @abs_builddir@
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise causes
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS         = YES
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE        = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC      = YES
+
+# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF           = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity):The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF       = "The $name class" \
+                         "The $name widget" \
+                         "The $name file" \
+                         is \
+                         provides \
+                         specifies \
+                         contains \
+                         represents \
+                         a \
+                         an \
+                         the
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC    = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB  = NO
+
+# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path
+# before files name in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used
+# The default value is: YES.
+
+FULL_PATH_NAMES        = YES
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH        = @abs_top_srcdir@ \
+                         @abs_top_builddir@
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH    = @abs_top_srcdir@
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful is your file systems doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES            = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF      = YES
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF           = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that rational rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS           = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a
+# new page for each member. If set to NO, the documentation of a member will be
+# part of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES  = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE               = 4
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines.
+
+ALIASES                =
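+#
+# As an illustration only (kept commented out here), the alias from the example
+# in the description above could be declared as:
+#
+#   ALIASES += "sideeffect=@par Side Effects:\n"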
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST              =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C  = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA   = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN   = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL   = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make
+# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C
+# (default is Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without an extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING      =
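+#
+# For illustration (left unset above), the mapping from the example in the
+# description would be written as:
+#
+#   EXTENSION_MAPPING = inc=Fortran f=C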
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT       = YES
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word
+# or globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT       = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match function declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT    = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT        = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT            = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen replace the get and set methods with a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT   = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC   = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING            = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS  = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class, and the struct will be named TypeS. This is typically
+# useful for C code when the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT   = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE      = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL            = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE        = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE        = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC         = YES
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES  = YES
+
+# This flag is only useful for Objective-C code. When set to YES local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS  = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous
+# namespaces are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES   = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS     = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO these classes will be included in the various overviews. This option has
+# no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES     = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS  = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS      = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS          = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES       = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES       = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES     = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC  = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES   = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO            = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS       = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS        = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES       = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME     = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING  = NO
+
+# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the
+# todo list. This list is created by putting \todo commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST      = YES
+
+# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the
+# test list. This list is created by putting \test commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST      = YES
+
+# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST       = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS       =
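+#
+# Purely illustrative (the section label below is hypothetical); blocks marked
+# with \if INTERNAL_ONLY ... \endif would only be generated when that label is
+# listed here:
+#
+#   ENABLED_SECTIONS = INTERNAL_ONLY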
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES  = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES the list
+# will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES        = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES             = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES        = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command command input-file, where command is the value of the
+# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
+# by doxygen. Whatever the program writes to standard output is used as the file
+# version. For an example see the documentation.
+
+FILE_VERSION_FILTER    =
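+#
+# A minimal illustration (left unset above); the script name below is purely
+# hypothetical and would need to print a version string for the file it is
+# given:
+#
+#   FILE_VERSION_FILTER = "/usr/local/bin/print-file-version"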
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE            =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. Do not use file names with spaces, bibtex cannot handle them. See
+# also \cite for info how to create references.
+
+CITE_BIB_FILES         =
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET                  = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS               = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED   = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR      = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO doxygen will only warn about wrong or incomplete parameter
+# documentation, but not about the absence of documentation.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC       = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT            = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE           =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces.
+# Note: If this tag is empty the current directory is searched.
+
+INPUT                  = @abs_top_builddir@/src \
+                         @abs_top_srcdir@/include \
+                         @abs_top_srcdir@/doc
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING         = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank the
+# following patterns are tested: *.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
+# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
+# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
+# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
+# *.qsf, *.as and *.js.
+
+FILE_PATTERNS          = *.c \
+                         *.cc \
+                         *.cxx \
+                         *.cpp \
+                         *.c++ \
+                         *.d \
+                         *.java \
+                         *.ii \
+                         *.ixx \
+                         *.ipp \
+                         *.i++ \
+                         *.inl \
+                         *.h \
+                         *.hh \
+                         *.hxx \
+                         *.hpp \
+                         *.h++ \
+                         *.idl \
+                         *.odl \
+                         *.cs \
+                         *.php \
+                         *.php3 \
+                         *.inc \
+                         *.m \
+                         *.mm \
+                         *.dox \
+                         *.py \
+                         *.f90 \
+                         *.f \
+                         *.for \
+                         *.vhd \
+                         *.vhdl
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE              = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE                =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS       = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS       =
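+#
+# For example (kept commented out here), all test directories could be skipped
+# with the pattern from the description above:
+#
+#   EXCLUDE_PATTERNS = */test/*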
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS        =
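+#
+# For illustration only (the symbol names are the hypothetical ones from the
+# description above):
+#
+#   EXCLUDE_SYMBOLS = ANamespace AClass ANamespace::*Test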
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH           =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS       = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE      = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH             =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+
+INPUT_FILTER           =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS        =
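+#
+# Illustrative only; my_cpp_filter is the hypothetical filter program named in
+# the description above:
+#
+#   FILTER_PATTERNS = *.cpp=my_cpp_filter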
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES    = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on for instance GitHub
+# and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER         = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES         = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS    = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION    = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES, then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS        = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see http://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS              = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS       = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX     = YES
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX    = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX          =
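+#
+# For illustration (kept commented out): if, hypothetically, most classes in
+# this project carried an "nl" prefix, the index headers could ignore it with:
+#
+#   IGNORE_PREFIX = nl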
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML          = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT            = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION    = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML you need a header file that includes any scripts and style
+# sheets that doxygen needs, which depend on the configuration options used
+# (e.g. the setting GENERATE_TREEVIEW). It is highly recommended to start with a
+# default header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER            =
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER            =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET        =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user-
+# defined cascading style sheet that is included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet file to the output directory. For an example
+# see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET  =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES       =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the stylesheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE    = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT    = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA  = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP         = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS  = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries to 1 will produce a fully collapsed tree by default. 0 is a special
+# value representing an infinite number of entries and will result in a fully
+# expanded tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET        = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME  = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP      = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE               =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler ( hhc.exe). If non-empty
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION           =
+
+# The GENERATE_CHI flag controls if a separate .chi index file is generated (
+# YES) or that it should be included in the master .chm file ( NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI           = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING     =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated (
+# YES) or a normal table of contents ( NO) in the .chm file.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC             = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND             = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP           = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE               =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE          = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER     = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME   =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS  =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS  =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION           =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated that, together with the HTML files, form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files need
+# to be copied into the plugins directory of Eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying, Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP   = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID         = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX          = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW      = YES
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE   = 1
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH         = 250
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW    = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE       = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes have effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT    = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using prerendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output. When
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX            = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT         = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS     =
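+#
+# The example from the description above, kept commented out (it is only
+# relevant if USE_MATHJAX were set to YES):
+#
+#   MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols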
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE       =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow; in that
+# case, enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE           = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavours of web server based searching depending on the
+# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for
+# searching and an index file used by the script. When EXTERNAL_SEARCH is
+# enabled the indexing and searching needs to be provided by external tools. See
+# the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH    = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH        = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL       =
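+#
+# Illustrative sketch only (the host and path below are hypothetical, not part
+# of this package): with EXTERNAL_SEARCH enabled and the bundled doxysearch.cgi
+# deployed behind a web server, the URL might look like:
+#
+# SEARCHENGINE_URL = http://docs.example.com/cgi-bin/doxysearch.cgi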
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE        = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID     =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
+# of a tag file to a relative location where the documentation can be found.
+# The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS  =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX         = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT           = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when enabling USE_PDFLATEX this option is only used for generating
+# bitmaps for formulas in the HTML output, but not in the Makefile that is
+# written to the output directory.
+# The default file is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME         = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME     = makeindex
+
+# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX          = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE             = letter
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. To get the times font for
+# instance you can specify
+# EXTRA_PACKAGES=times
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES         =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber. Doxygen will
+# replace them by respectively the title of the page, the current date and time,
+# only the current date, the version number of doxygen, the project name (see
+# PROJECT_NAME), or the project number (see PROJECT_NUMBER).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER           =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER           =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES      =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS         = YES
+
+# If the LATEX_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES to get a
+# higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX           = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE        = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES     = NO
+
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_SOURCE_CODE      = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE        = plain
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF           = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT             = rtf
+
+# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF            = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS         = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's config
+# file, i.e. a series of assignments. You only have to provide replacements;
+# missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE    =
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's config file. A template extensions file can be generated
+# using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE    =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN           = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT             = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION          = .3
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS              = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML           = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT             = xml
+
+# The XML_SCHEMA tag can be used to specify an XML schema, which can be used by a
+# validating XML parser to check the syntax of the XML files.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_SCHEMA             =
+
+# The XML_DTD tag can be used to specify an XML DTD, which can be used by a
+# validating XML parser to check the syntax of the XML files.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_DTD                =
+
+# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING     = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK       = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT         = docbook
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen
+# Definitions (see http://autogen.sf.net) file that captures the structure of
+# the code including all documentation. Note that this feature is still
+# experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF   = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD       = NO
+
+# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX          = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY         = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING   = YES
+
+# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names
+# in the source code. If set to NO only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION        = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF     = NO
+
+# If the SEARCH_INCLUDES tag is set to YES the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES        = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH           =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS  =
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+PREDEFINED             =
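+#
+# A minimal illustrative sketch (the macro names below are hypothetical
+# examples, not used by this package): hiding documentation-only guarded
+# blocks and neutralizing an export decoration could look like:
+#
+# PREDEFINED = DOXYGEN_SHOULD_SKIP_THIS NL_DLL_EXPORT=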
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED      =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have an
+# all uppercase name, and do not end with a semicolon. Such function macros are
+# typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS   = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES               =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE       =
+
+# If the ALLEXTERNALS tag is set to YES all external classes will be listed in the
+# class index. If set to NO only the inherited external classes will be listed.
+# The default value is: NO.
+
+ALLEXTERNALS           = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in
+# the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS        = YES
+
+# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES         = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of 'which perl').
+# The default file (with absolute path) is: /usr/bin/perl.
+
+PERL_PATH              = @PERL@
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
+
+CLASS_DIAGRAMS         = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see:
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH            =
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH               =
+
+# If set to YES, the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS   = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO
+# The default value is: NO.
+
+HAVE_DOT               = @DOXYGEN_USE_DOT@
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS        = 0
+
+# When you want a differently looking font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTNAME           = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTSIZE           = 10
+
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH           =
+
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CLASS_GRAPH            = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH    = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS           = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK               = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LIMIT_NUM_FIELDS   = 10
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS     = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH          = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH      = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH             = YES
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH           = YES
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
+# graphical hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY    = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH        = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot.
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif and svg.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT       = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INTERACTIVE_SVG        = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH               =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS           =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS           =
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS           =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES, then the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES    = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lie
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH    = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_TRANSPARENT        = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS      = YES
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND        = YES
+
+# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP            = YES
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-doc.am b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-doc.am
new file mode 100644
index 0000000..ffd7ef1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-doc.am
@@ -0,0 +1,102 @@
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the @PACKAGE_DESCRIPTION@
+#      in-package documentation.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+EXTRA_DIST                                      = \
+    $(srcdir)/Doxyfile.in                         \
+    $(NULL)
+
+#
+# Override autotools' default notion of the package version variables.
+# This ensures that when we create a doc distribution, the version is
+# always the current version, not the version at the time the package
+# was bootstrapped.
+#
+PACKAGE_VERSION                                 = $(shell cat $(top_builddir)/.local-version)
+VERSION                                         = $(PACKAGE_VERSION)
+
+
+docdistdir                                     ?= .
+
+@PACKAGE_SHORT_LOWER@_docdist_alias             = \
+    $(PACKAGE_TARNAME)-docs
+
+@PACKAGE_SHORT_LOWER@_docdist_name              = \
+    $(@PACKAGE_SHORT_LOWER@_docdist_alias)-$(VERSION)
+
+@PACKAGE_SHORT_LOWER@_docdist_archive           = \
+    $(docdistdir)/$(@PACKAGE_SHORT_LOWER@_docdist_name).tar.gz
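+
+#
+# Worked illustration (values hypothetical): with PACKAGE_TARNAME set to
+# "mypackage" and a current .local-version of "1.2.3", the variables above
+# expand to an alias of "mypackage-docs", a name of "mypackage-docs-1.2.3",
+# and an archive of "./mypackage-docs-1.2.3.tar.gz" given the default
+# docdistdir of ".".
+#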
+
+CLEANFILES                                      = \
+    Doxyfile                                      \
+    $(@PACKAGE_SHORT_LOWER@_docdist_archive)      \
+    $(NULL)
+
+if @PACKAGE_SHORT_UPPER@_BUILD_DOCS
+
+all-local: html/index.html
+
+#
+# We choose to manually transform Doxyfile.in into Doxyfile here in
+# the makefile rather than in the configure script so that we can take
+# advantage of live, at build time (rather than at configure time),
+# updating of the package version number.
+#
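+# For example (version value hypothetical): if $(top_builddir)/.local-version
+# reads "1.2.3" when this rule runs, the sed command below stamps 1.2.3 into
+# the generated Doxyfile wherever the package version placeholder appears,
+# even if the package was configured at an earlier version.
+#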
+
+Doxyfile: $(srcdir)/Doxyfile.in Makefile
+	$(AM_V_GEN)$(SED)                                     \
+	    -e "s,\@DOXYGEN_USE_DOT\@,$(DOXYGEN_USE_DOT),g"   \
+	    -e "s,\@PACKAGE_NAME\@,$(PACKAGE_NAME),g"         \
+	    -e "s,\@PACKAGE_VERSION\@,$(PACKAGE_VERSION),g"   \
+	    -e "s,\@PERL\@,$(PERL),g"                         \
+	    -e "s,\@abs_builddir\@,$(abs_builddir),g"         \
+	    -e "s,\@abs_srcdir\@,$(abs_srcdir),g"             \
+	    -e "s,\@abs_top_builddir\@,$(abs_top_builddir),g" \
+	    -e "s,\@abs_top_srcdir\@,$(abs_top_srcdir),g"     \
+	    < "$(srcdir)/Doxyfile.in" > "$(@)"
+
+html/index.html: Doxyfile
+	$(AM_V_GEN)$(DOXYGEN) $(<)
+
+#
+# Additional rules and commands to create a documentation-only
+# distribution of @PACKAGE_SHORT_LOWER@
+#
+
+$(@PACKAGE_SHORT_LOWER@_docdist_name): html/index.html
+	$(AM_V_at)rm -f -r $(@)
+	$(call create-directory)
+	$(AM_V_at)cp -R html $(@)
+
+$(@PACKAGE_SHORT_LOWER@_docdist_archive): $(@PACKAGE_SHORT_LOWER@_docdist_name)
+	$(AM_V_at)echo "  TAR      $(@)"
+	$(AM_V_at)tardir="$(<)" && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c > "$(@)" && rm -rf $(<)
+
+docdist $(@PACKAGE_SHORT_LOWER@_docdist_alias): $(@PACKAGE_SHORT_LOWER@_docdist_archive)
+
+clean-local:
+	$(AM_V_at)rm -f -r html
+
+endif # @PACKAGE_SHORT_UPPER@_BUILD_DOCS
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-fps.am b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-fps.am
new file mode 100644
index 0000000..4d51baf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-fps.am
@@ -0,0 +1,28 @@
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the @PACKAGE_DESCRIPTION@
+#      first-party (e.g. Nest Labs) software directory.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+SUBDIRS                  = \
+    $(NULL)
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-src.am b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-src.am
new file mode 100644
index 0000000..655cbcb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-src.am
@@ -0,0 +1,42 @@
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the @PACKAGE_DESCRIPTION@.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+lib_LIBRARIES                       = lib@PACKAGE_SHORT_LOWER@.a
+
+lib@PACKAGE_SHORT_LOWER@_a_CPPFLAGS = \
+    -I$(top_srcdir)/include           \
+    $(NULL)
+
+lib@PACKAGE_SHORT_LOWER@_a_SOURCES  = @PACKAGE_SHORT_LOWER@.c
+
+include_HEADERS                     = \
+    @PACKAGE_SHORT_LOWER@.h           \
+    $(NULL)
+
+install-headers: install-includeHEADERS
+
+if @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE
+CLEANFILES                          = $(wildcard *.gcda *.gcno)
+endif # @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-tests.am b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-tests.am
new file mode 100644
index 0000000..74b94d2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-tests.am
@@ -0,0 +1,107 @@
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the @PACKAGE_DESCRIPTION@
+#      unit tests.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+#
+# Local headers to build against and distribute but not to install
+# since they are not part of the package.
+#
+noinst_HEADERS                                 = \
+    @PACKAGE_SHORT_LOWER@-test.h                 \
+    $(NULL)
+
+#
+# Other files we do want to distribute with the package.
+#
+EXTRA_DIST                                     = \
+    $(NULL)
+
+if @PACKAGE_SHORT_UPPER@_BUILD_TESTS
+# C preprocessor option flags that will apply to all compiled objects in this
+# makefile.
+
+AM_CPPFLAGS                                    = \
+    -I$(top_srcdir)/include                      \
+    $(NULL)
+
+COMMON_LDADD                                   = \
+    -L${top_builddir}/src -l@PACKAGE_SHORT_LOWER@ \
+    $(NULL)
+
+# Test applications that should be run when the 'check' target is run.
+
+check_PROGRAMS                                 = \
+    @PACKAGE_SHORT_LOWER@-test                   \
+    @PACKAGE_SHORT_LOWER@-test-cxx               \
+    $(NULL)
+
+# Test applications and scripts that should be built and run when the
+# 'check' target is run.
+
+TESTS                                          = \
+    $(check_PROGRAMS)                            \
+    $(NULL)
+
+# The additional environment variables and their values that will be
+# made available to all programs and scripts in TESTS.
+
+TESTS_ENVIRONMENT                              = \
+    $(NULL)
+
+# Source, compiler, and linker options for test programs.
+
+@PACKAGE_SHORT_LOWER@_test_LDADD               = $(COMMON_LDADD)
+@PACKAGE_SHORT_LOWER@_test_SOURCES             = @PACKAGE_SHORT_LOWER@-test.c
+
+@PACKAGE_SHORT_LOWER@_test_cxx_LDADD           = $(COMMON_LDADD)
+@PACKAGE_SHORT_LOWER@_test_cxx_SOURCES         = @PACKAGE_SHORT_LOWER@-test-cxx.cpp
+
+if @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE
+CLEANFILES                                     = $(wildcard *.gcda *.gcno)
+
+if @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE_REPORTS
+# The bundle must be qualified with the absolute build
+# path. Otherwise, VPATH will get auto-prefixed to it if there is
+# already such a directory in the non-colocated source tree.
+
+@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE                           = ${abs_builddir}/${PACKAGE}${NL_COVERAGE_BUNDLE_SUFFIX}
+@PACKAGE_SHORT_UPPER@_COVERAGE_INFO                             = ${@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE}/${PACKAGE}${NL_COVERAGE_INFO_SUFFIX}
+
+$(@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE):
+	$(call create-directory)
+
+$(@PACKAGE_SHORT_UPPER@_COVERAGE_INFO): check | $(dir $(@PACKAGE_SHORT_UPPER@_COVERAGE_INFO))
+	$(call generate-coverage-report,${top_builddir})
+
+coverage: $(@PACKAGE_SHORT_UPPER@_COVERAGE_INFO)
+
+clean-local: clean-local-coverage
+
+.PHONY: clean-local-coverage
+clean-local-coverage:
+	-$(AM_V_at)rm -rf $(@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE)
+endif # @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE_REPORTS
+endif # @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE
+endif # @PACKAGE_SHORT_UPPER@_BUILD_TESTS
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-toplevel.am b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-toplevel.am
new file mode 100644
index 0000000..5f058ae
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/Makefile-toplevel.am
@@ -0,0 +1,153 @@
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU automake template for the @PACKAGE_DESCRIPTION@.
+#
+
+include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
+
+AM_MAKEFLAGS                              = --no-print-directory
+
+SUBDIRS                                   = \
+    include                                 \
+    src                                     \
+    tests                                   \
+    doc                                     \
+    $(NULL)
+
+EXTRA_DIST                                = \
+    .default-version                        \
+    bootstrap                               \
+    bootstrap-configure                     \
+    $(srcdir)/third_party/nlbuild-autotools \
+    $(NULL)
+
+BUILT_SOURCES                             = \
+    .local-version                          \
+    $(NULL)
+
+dist_doc_DATA                             = \
+    $(NULL)
+
+DISTCLEANFILES                            = \
+    .local-version                          \
+    $(NULL)
+
+#
+# Package version files:
+#
+# .default-version - The default package version. This file is ALWAYS checked
+#                    in and should always represent the current baseline
+#                    version of the package.
+#
+# .dist-version    - The distributed package version. This file is NEVER
+#                    checked in within the upstream repository, is auto-
+#                    generated, and is only found in the package distribution.
+#
+# .local-version   - The current source code controlled package version. This
+#                    file is NEVER checked in within the upstream repository,
+#                    is auto-generated, and can always be found in both the
+#                    build tree and distribution.
+#
+# When present, the .local-version file is preferred first, the
+# .dist-version second, and the .default-version last.
+#
+
+VERSION_FILE                      := $(if $(wildcard $(builddir)/.local-version),$(builddir)/.local-version,$(if $(wildcard $(srcdir)/.dist-version),$(srcdir)/.dist-version,$(srcdir)/.default-version))
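+
+# Illustrative note: in a fresh checkout neither .local-version nor
+# .dist-version exists, so VERSION_FILE initially resolves to
+# $(srcdir)/.default-version; the first build then generates
+# $(builddir)/.local-version via the rules below, which is preferred on
+# subsequent builds.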
+
+#
+# Override autotools' default notion of the package version variables.
+# This ensures that when we create a source distribution, the version is
+# always the current version, not the version at the time the package
+# was bootstrapped.
+#
+
+@PACKAGE_SHORT_UPPER@_VERSION     ?= $(shell cat $(VERSION_FILE) 2> /dev/null)
+
+PACKAGE_VERSION                    = $(@PACKAGE_SHORT_UPPER@_VERSION)
+VERSION                            = $(PACKAGE_VERSION)
+
+#
+# check-file-.local-version
+#
+# Speculatively regenerate .local-version and check to see if it needs
+# to be updated.
+#
+# If @PACKAGE_SHORT_UPPER@_VERSION has been supplied anywhere other than in this file
+# (which is implicitly the contents of .local-version), then use that;
+# otherwise, attempt to generate it from the SCM system.
+#
+# This is called from $(call check-file,.local-version).
+#
+define check-file-.local-version
+if [ "$(origin @PACKAGE_SHORT_UPPER@_VERSION)" != "file" ]; then \
+    echo "$(@PACKAGE_SHORT_UPPER@_VERSION)" > "$(2)";            \
+else                                                             \
+    $(abs_top_nlbuild_autotools_dir)/scripts/mkversion           \
+        -b "$(@PACKAGE_SHORT_UPPER@_VERSION)" "$(top_srcdir)"    \
+        > "$(2)";                                                \
+fi
+endef
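+
+# For example, invoking "make @PACKAGE_SHORT_UPPER@_VERSION=2.0.1" (version
+# value illustrative) writes 2.0.1 verbatim into .local-version, whereas a
+# plain "make" derives the version from source control via the
+# nlbuild-autotools mkversion script.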
+
+#
+# check-file-.dist-version
+#
+# Speculatively regenerate .dist-version and check to see if it needs
+# to be updated.
+#
+# This is called from $(call check-file,.dist-version).
+#
+define check-file-.dist-version
+cat "$(1)" > "$(2)"
+endef
+
+#
+# A convenience target to allow package users to easily rerun the
+# package configuration according to the current configuration.
+#
+.PHONY: reconfigure
+reconfigure: $(builddir)/config.status
+	$(AM_V_at)$(<) --recheck
+
+#
+# Version file regeneration rules.
+#
+.PHONY: force
+
+$(builddir)/.local-version: $(srcdir)/.default-version force
+
+$(distdir)/.dist-version: $(builddir)/.local-version force
+
+$(distdir)/.dist-version $(builddir)/.local-version:
+	$(call check-file,$(@F))
+
+dist distcheck: $(BUILT_SOURCES)
+
+dist-hook: $(distdir)/.dist-version
+
+#
+# Top-level convenience target for making a documentation-only
+# distribution whose results appear at the top level of the build tree
+# in the same fashion that the distribution would be for 'make dist'.
+#
+
+.PHONY: docdist
+docdist: $(BUILT_SOURCES)
+	$(MAKE) -C doc docdistdir=$(abs_builddir) $(@)
+
+include $(abs_top_nlbuild_autotools_dir)/automake/post.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/bootstrap b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/bootstrap
new file mode 100755
index 0000000..60504b4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/bootstrap
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is a trampoline script to the nlbuild-autotools
+#      bootstrap script and augments it by providing the path to the
+#      nlbuild-autotools repository for this project.
+#
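+#
+#      A typical invocation (illustrative; the exact configure options depend
+#      on the package) runs this script from the top of the source tree and
+#      then configures and builds:
+#
+#        ./bootstrap
+#        ./configure && make && make check
+#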
+
+# Set this to the relative location of nlbuild-autotools to this script
+
+nlbuild_autotools_stem="third_party/nlbuild-autotools/repo"
+
+# Establish some key directories
+
+srcdir=`dirname ${0}`
+abs_srcdir=`cd "${srcdir}" && pwd`
+abs_top_srcdir="${abs_srcdir}"
+
+exec ${srcdir}/${nlbuild_autotools_stem}/scripts/bootstrap -I "${abs_top_srcdir}/${nlbuild_autotools_stem}" "$@"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/examples/configure.ac b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/configure.ac
new file mode 100644
index 0000000..e94d9df
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/examples/configure.ac
@@ -0,0 +1,382 @@
+#                                               -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+
+#
+#    Copyright 2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is the GNU autoconf input source file for
+#      @PACKAGE_DESCRIPTION@.
+#
+
+#
+# Declare autoconf version requirements
+#
+AC_PREREQ([2.68])
+
+#
+# Initialize autoconf for the package
+#
+AC_INIT([@PACKAGE_SHORT_UPPER@],
+        m4_esyscmd([@NLBUILD_AUTOTOOLS_STEM@/scripts/mkversion -b `cat .default-version` .]),
+        [@PACKAGE_EMAIL@],
+        [@PACKAGE_SHORT_LOWER@],
+        [@PACKAGE_URL@])
+
+# Tell the rest of the build system the absolute path at which the
+# nlbuild-autotools repository is rooted.
+
+AC_SUBST(nlbuild_autotools_stem,[@NLBUILD_AUTOTOOLS_STEM@])
+AC_SUBST(abs_top_nlbuild_autotools_dir,[\${abs_top_srcdir}/\${nlbuild_autotools_stem}])
+
+#
+# @PACKAGE_SHORT_UPPER@ interface current, revision, and age versions.
+#
+# Maintainers: Please manage these fields as follows:
+#
+#   Interfaces removed:    CURRENT++, AGE = 0, REVISION = 0
+#   Interfaces added:      CURRENT++, AGE++,   REVISION = 0
+#   No interfaces changed:                     REVISION++
+#
+#
+AC_SUBST(LIB@PACKAGE_SHORT_UPPER@_VERSION_CURRENT,  [1])
+AC_SUBST(LIB@PACKAGE_SHORT_UPPER@_VERSION_AGE,      [0])
+AC_SUBST(LIB@PACKAGE_SHORT_UPPER@_VERSION_REVISION, [0])
+AC_SUBST(LIB@PACKAGE_SHORT_UPPER@_VERSION_INFO,     [${LIB@PACKAGE_SHORT_UPPER@_VERSION_CURRENT}:${LIB@PACKAGE_SHORT_UPPER@_VERSION_REVISION}:${LIB@PACKAGE_SHORT_UPPER@_VERSION_AGE}])
+
+#
+# Check the sanity of the source directory by checking for the
+# presence of a key watch file
+#
+AC_CONFIG_SRCDIR([include/@PACKAGE_SHORT_LOWER@.h])
+
+#
+# Tell autoconf where to find auxiliary build tools (e.g. config.guess,
+# install-sh, missing, etc.)
+#
+AC_CONFIG_AUX_DIR([@NLBUILD_AUTOTOOLS_STEM@/autoconf])
+
+#
+# Tell autoconf where to find auxiliary M4 macros
+#
+AC_CONFIG_MACRO_DIR([@NLBUILD_AUTOTOOLS_STEM@/autoconf/m4])
+
+#
+# Tell autoconf what file the package is using to aggregate C preprocessor
+# defines.
+#
+AC_CONFIG_HEADERS([include/@PACKAGE_SHORT_LOWER@-config.h])
+
+#
+# Figure out what the canonical build, host and target tuples are.
+#
+AC_CANONICAL_BUILD
+AC_CANONICAL_HOST
+AC_CANONICAL_TARGET
+
+#
+# Mac OS X / Darwin ends up putting some versioning cruft on the end of its
+# tuple that we don't care about in this script. Create "clean" variables
+# devoid of it.
+#
+
+NL_FILTERED_CANONICAL_BUILD
+NL_FILTERED_CANONICAL_HOST
+NL_FILTERED_CANONICAL_TARGET
+
+#
+# Configure automake with the desired options, indicating that this is not
+# a native GNU package, that we want "silent" build rules, and that we want
+# objects built in the same subdirectory as their source rather than collapsed
+# together at the top-level directory.
+#
+# Disable silent build rules by either passing --disable-silent-rules to
+# configure or passing V=1 to make
+#
+AM_INIT_AUTOMAKE([1.14 foreign silent-rules subdir-objects tar-pax])
+
+#
+# Silent build rules require at least automake-1.11. Employ
+# techniques for not breaking earlier versions of automake.
+#
+m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([yes])])
+
+#
+# Enable maintainer mode to prevent the package from constantly trying
+# to rebuild configure, Makefile.in, etc. Rebuilding such files rarely,
+# if ever, needs to be done "in the field".
+#
+# Use the included 'bootstrap' script instead when necessary.
+#
+AM_MAINTAINER_MODE
+
+#
+# Checks for build host programs
+#
+
+# If we are cross-compiling and we are on an embedded target that
+# doesn't support independent, standalone executables, then all
+# compiler tests that attempt to create an executable will fail. In
+# such circumstances, set AC_NO_EXECUTABLES (see http://sourceware.org/
+# ml/newlib/2006/msg00353.html).
+
+AC_MSG_CHECKING([whether to disable executable checking])
+if test "$cross_compiling" = yes; then
+    AC_NO_EXECUTABLES
+    AC_MSG_RESULT([yes])
+else
+    AC_MSG_RESULT([no])
+fi
+
+# Passing -Werror to GCC-based or -compatible compilers breaks some
+# autoconf tests (see
+# http://lists.gnu.org/archive/html/autoconf-patches/2008-09/msg00014.html).
+#
+# If -Werror has been passed, transform it into -Wno-error. We'll
+# transform it back later with NL_RESTORE_WERROR.
+
+NL_SAVE_WERROR
+
+# Check for compilers.
+#
+# These should be checked BEFORE we check for and, implicitly,
+# initialize libtool such that libtool knows what languages it has to
+# work with.
+
+AC_PROG_CPP
+AC_PROG_CPP_WERROR
+
+AC_PROG_CC
+AC_PROG_CC_C_O
+
+AC_PROG_CXXCPP
+
+AC_PROG_CXX
+AC_PROG_CXX_C_O
+
+# Check for other compiler toolchain tools.
+
+AC_CHECK_TOOL(AR, ar)
+AC_CHECK_TOOL(RANLIB, ranlib)
+AC_CHECK_TOOL(OBJCOPY, objcopy)
+AC_CHECK_TOOL(STRIP, strip)
+
+# Check for other host tools.
+
+AC_PROG_INSTALL
+AC_PROG_LN_S
+
+AC_PATH_PROG(CMP, cmp)
+AC_PATH_PROG(PERL, perl)
+
+#
+# Checks for specific compiler characteristics
+#
+
+#
+# Common compiler flags we would like to have.
+#
+#   -Wall                        CC, CXX
+#
+
+PROSPECTIVE_CFLAGS="-Wall"
+PROSPECTIVE_CXXFLAGS=""
+
+AX_CHECK_COMPILER_OPTIONS([C],   ${PROSPECTIVE_CFLAGS})
+AX_CHECK_COMPILER_OPTIONS([C++], ${PROSPECTIVE_CFLAGS} ${PROSPECTIVE_CXXFLAGS})
+
+# Check for and initialize libtool
+
+LT_INIT
+
+#
+# Debug instances
+#
+AC_MSG_NOTICE([checking whether to build debug instances])
+
+# Debug
+
+NL_ENABLE_DEBUG([no])
+
+AM_CONDITIONAL([@PACKAGE_SHORT_UPPER@_BUILD_DEBUG], [test "${nl_cv_build_debug}" = "yes"])
+
+#
+# Code coverage and compiler optimization
+#
+
+# Coverage
+
+NL_ENABLE_COVERAGE([no])
+
+AM_CONDITIONAL([@PACKAGE_SHORT_UPPER@_BUILD_COVERAGE], [test "${nl_cv_build_coverage}" = "yes"])
+
+NL_ENABLE_COVERAGE_REPORTS([auto])
+
+AM_CONDITIONAL([@PACKAGE_SHORT_UPPER@_BUILD_COVERAGE_REPORTS], [test "${nl_cv_build_coverage_reports}" = "yes"])
+
+# Optimization
+
+NL_ENABLE_OPTIMIZATION([yes])
+
+AM_CONDITIONAL([@PACKAGE_SHORT_UPPER@_BUILD_OPTIMIZED], [test "${nl_cv_build_optimized}" = "yes"])
+
+#
+# Tests
+#
+AC_MSG_NOTICE([checking whether to build tests])
+
+# Tests
+
+NL_ENABLE_TESTS([yes])
+
+AM_CONDITIONAL([@PACKAGE_SHORT_UPPER@_BUILD_TESTS], [test "${nl_cv_build_tests}" = "yes"])
+
+#
+# Documentation
+#
+
+# Determine whether documentation (via Doxygen) should be built, with
+# 'auto' as the default, and establish a default support
+# value for GraphViz 'dot' support.
+
+NL_ENABLE_DOCS([auto],[NO])
+
+AM_CONDITIONAL(@PACKAGE_SHORT_UPPER@_BUILD_DOCS, [test "${nl_cv_build_docs}" = "yes"])
+
+#
+# Checks for libraries and packages.
+#
+# At minimum, the following packages are optional, depending on
+# configuration:
+#
+#   * TBD
+#
+AC_MSG_NOTICE([checking required package dependencies])
+
+# NL_WITH_PACKAGE(...)
+
+# Check if the build host has pkg-config
+
+AC_PATH_PROG([PKG_CONFIG],[pkg-config])
+
+#
+# Check for headers
+#
+AC_HEADER_STDBOOL
+AC_HEADER_STDC
+
+AC_CHECK_HEADERS([stdint.h])
+AC_CHECK_HEADERS([string.h])
+
+#
+# Check for types and structures
+#
+AC_TYPE_INT8_T
+AC_TYPE_INT16_T
+AC_TYPE_INT32_T
+AC_TYPE_INT64_T
+AC_TYPE_UINT8_T
+AC_TYPE_UINT16_T
+AC_TYPE_UINT32_T
+AC_TYPE_UINT64_T
+
+#
+# Checks for library functions
+#
+
+if test "${ac_no_link}" != "yes"; then
+    AC_CHECK_FUNCS([memcpy])
+fi
+
+# Add any code coverage CPPFLAGS and LDFLAGS
+
+CPPFLAGS="${CPPFLAGS} ${NL_COVERAGE_CPPFLAGS}"
+LDFLAGS="${LDFLAGS} ${NL_COVERAGE_LDFLAGS}"
+
+# At this point, we can restore the compiler flags to whatever the
+# user passed in, now that we're clear of any -Werror issues by
+# transforming -Wno-error back to -Werror.
+
+NL_RESTORE_WERROR
+
+#
+# Identify the various makefiles and auto-generated files for the package
+#
+AC_CONFIG_FILES([
+Makefile
+third_party/Makefile
+src/Makefile
+tests/Makefile
+doc/Makefile
+])
+
+#
+# Generate the auto-generated files for the package
+#
+AC_OUTPUT
+
+#
+# Summarize the package configuration
+#
+
+AC_MSG_NOTICE([
+
+  Configuration Summary
+  ---------------------
+  Package                                   : ${PACKAGE_NAME}
+  Version                                   : ${PACKAGE_VERSION}
+  Interface                                 : ${LIB@PACKAGE_SHORT_UPPER@_VERSION_INFO//:/.}
+  Build system                              : ${build}
+  Host system                               : ${host}
+  Target system                             : ${target}
+  Target architecture                       : ${target_cpu}
+  Target OS                                 : ${target_os}
+  Cross compiling                           : ${cross_compiling}
+  Build shared libraries                    : ${enable_shared}
+  Build static libraries                    : ${enable_static}
+  Build debug libraries                     : ${nl_cv_build_debug}
+  Build optimized libraries                 : ${nl_cv_build_optimized}
+  Build coverage libraries                  : ${nl_cv_build_coverage}
+  Build coverage reports                    : ${nl_cv_build_coverage_reports}
+  Lcov                                      : ${LCOV:--}
+  Genhtml                                   : ${GENHTML:--}
+  Build tests                               : ${nl_cv_build_tests}
+  Prefix                                    : ${prefix}
+  Shadow directory program                  : ${LNDIR}
+  Documentation support                     : ${nl_cv_build_docs}
+  Doxygen                                   : ${DOXYGEN:--}
+  GraphViz dot                              : ${DOT:--}
+  PERL                                      : ${PERL:--}
+  C Preprocessor                            : ${CPP}
+  C Compiler                                : ${CC}
+  C++ Preprocessor                          : ${CXXCPP}
+  C++ Compiler                              : ${CXX}
+  Archiver                                  : ${AR}
+  Archive Indexer                           : ${RANLIB}
+  Symbol Stripper                           : ${STRIP}
+  Object Copier                             : ${OBJCOPY}
+  C Preprocessor flags                      : ${CPPFLAGS:--}
+  C Compile flags                           : ${CFLAGS:--}
+  C++ Compile flags                         : ${CXXFLAGS:--}
+  Link flags                                : ${LDFLAGS:--}
+  Link libraries                            : ${LIBS}
+
+])
+
+
+
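The @PACKAGE_...@ and @NLBUILD_AUTOTOOLS_STEM@ placeholders above are filled in by the mkskeleton script that appears later in this import. The NL_ENABLE_* macros presumably surface the usual configure switches; the option names below are inferred from the macro names and are not confirmed here.

    # Hypothetical, out-of-tree configuration of the generated package.
    ../configure --enable-debug --enable-coverage --disable-optimization --disable-docs
    ../configure --help | grep -E 'debug|coverage|optimization|tests|docs'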
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/bootstrap b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/bootstrap
new file mode 100755
index 0000000..40c168e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/bootstrap
@@ -0,0 +1,243 @@
+#!/bin/sh
+
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is a convenience script that will bootstrap the GNU
+#      autotools system for a project after any build system changes.
+#
+
+#
+# usage
+#
+# Display program usage.
+#
+usage() {
+    name=`basename $0`
+
+    echo "Usage: ${name} [ options ] [ -w <what> ]"
+
+    if [ $1 -ne 0 ]; then
+        echo "Try '${name} -h' for more information."
+    fi
+
+    if [ $1 -ne 1 ]; then
+        echo ""
+        echo "  -h, --help       Print this help, then exit."
+        echo "  -I DIR           Specify directory DIR as the root of the "
+        echo "                   nlbuild-autotools repository."
+        echo "  -v, --verbose    Verbosely report bootstrap progress."
+        echo "  -w, --what WHAT  Specify what part of the package should be "
+        echo "                   bootstrapped: all, config, make, or none "
+        echo "                   (default: all)."
+        echo ""
+    fi
+
+    exit $1
+}
+
+#
+# removetmp
+#
+# Remove temporary files and directories used during the run of this
+# script.
+#
+removetmp() {
+    rm -f "${LIBTOOLIZE}"
+    rm -f "${AUTOM4TE_CFG}"
+    rm -r -f "${BOOTSTRAP_TMPDIR}"
+}
+
+what="all"
+verbose=
+nlbuild_autotools_dir=
+
+# Parse out any command line options
+
+while [ ${#} -gt 0 ]; do
+    case ${1} in
+    -h|--help)
+        usage 0
+        ;;
+
+    -I)
+        nlbuild_autotools_dir="${2}"
+        shift 2
+        ;;
+
+    -v|--verbose)
+        verbose="--verbose"
+        shift 1
+        ;;
+
+    -w|--what)
+        case "${2}" in
+        all|make*|conf*|none)
+            what="${2}"
+            shift 2
+            ;;
+
+        *)
+	    echo "Unknown what value '${2}'."
+            usage 1
+            ;;
+
+        esac
+        ;;
+
+    *)
+        usage 1
+        ;;
+
+    esac
+done
+
+# Check to ensure that the location of the nlbuild-autotools directory
+# is sane.
+
+if [ -z "${nlbuild_autotools_dir}" ]; then
+    echo "$0: No -I option specified. Please provide the location of the nlbuild-autotools directory."
+    exit 1
+
+elif [ ! -d "${nlbuild_autotools_dir}" ]; then
+    echo "$0: No such directory: ${nlbuild_autotools_dir}. Please provide a valid path to the nlbuild-autotools directory."
+    exit 1
+
+fi
+
+# Establish some key directories
+
+srcdir=`dirname ${0}`
+abs_srcdir=`pwd`
+abs_top_srcdir="${abs_srcdir}"
+
+abs_top_hostdir="${nlbuild_autotools_dir}/tools/host"
+
+# Figure out what sort of build host we are running on, stripping off
+# any trailing version number information typically included on Darwin
+# / Mac OS X.
+
+host=`${nlbuild_autotools_dir}/autoconf/config.guess | sed -e 's/[[:digit:].]*$//g'`
+
+# Attempt to be self-sufficient, relying on GNU autotools executables
+# installed along with the SDK itself.
+
+export PATH="${abs_top_hostdir}/bin:${abs_top_hostdir}/${host}/bin:${PATH}"
+
+export ACLOCAL=`which aclocal`
+export AUTOCONF="`which autoconf`"
+export AUTOHEADER="`which autoheader`"
+export AUTOM4TE="`which autom4te`"
+export AUTOMAKE="`which automake`"
+export M4=`which m4`
+
+# Establish some SDK-specific directories needed to override various
+# paths in GNU autotools that otherwise expect to be absolute
+# (e.g. /usr/share, etc.).
+
+export AC_MACRODIR="${abs_top_hostdir}/share/autoconf"
+
+export autom4te_perllibdir="${abs_top_hostdir}/share/autoconf"
+export PERL5LIB="${abs_top_hostdir}/share/automake-1.14:${PERL5LIB}"
+
+# Both autom4te.cfg and libtoolize, as installed from source, want to
+# use absolute file system paths that cannot be
+# overridden. Consequently, we create temporary, local versions of
+# these, patched up with SDK-specific paths.
+
+BOOTSTRAP_TMPDIR="`mktemp -d /tmp/tmp.bootstrapXXXXXX`"
+
+trap "removetmp" 1 2 3 9 15
+
+export AUTOM4TE_CFG="${BOOTSTRAP_TMPDIR}/autom4te.cfg"
+export LIBTOOLIZE="${BOOTSTRAP_TMPDIR}/libtoolize"
+
+#
+# Generate any temporary files that need to be patched at run time
+# with the location of the SDK tree, including:
+#
+#   -  The autom4te configuration file
+#   -  The libtoolize executable script
+#
+
+sed -e "s,//share/autoconf,${abs_top_hostdir}/share/autoconf,g" < "${abs_top_hostdir}/share/autoconf/autom4te.cfg" > "${AUTOM4TE_CFG}"
+
+sed -e "s,//share/libtool,${abs_top_hostdir}/share/libtool,g" -e "s,//share/aclocal,${abs_top_hostdir}/share/aclocal,g" < "${abs_top_hostdir}/${host}/bin/libtoolize" > "${LIBTOOLIZE}"
+chmod 775 "${LIBTOOLIZE}"
+
+if [ -n "${verbose}" ]; then
+    echo ACLOCAL="${ACLOCAL}"
+    echo AUTOCONF="${AUTOCONF}"
+    echo AUTOHEADER="${AUTOHEADER}"
+    echo AUTOM4TE="${AUTOM4TE}"
+    echo AUTOMAKE="${AUTOMAKE}"
+    echo LIBTOOLIZE="${LIBTOOLIZE}"
+    echo M4="${M4}"
+
+    echo AC_MACRODIR="${AC_MACRODIR}"
+    echo AUTOM4TE_CFG="${AUTOM4TE_CFG}"
+    echo PERL5LIB="${PERL5LIB}"
+    echo autom4te_perllibdir="${autom4te_perllibdir}"
+fi
+
+# Set up the default actions for each bootstrap stage.
+
+local_action="${ACLOCAL} ${verbose} --automake-acdir=${abs_top_hostdir}/share/aclocal-1.14 --system-acdir=${abs_top_hostdir}/share/aclocal -I${nlbuild_autotools_dir}/autoconf/m4"
+header_action="${AUTOHEADER} ${verbose}"
+tool_action="${LIBTOOLIZE} ${verbose} --automake --copy --force"
+make_action="${AUTOMAKE} ${verbose} --libdir ${abs_top_hostdir}/share/automake-1.14 --add-missing --copy"
+config_action="${AUTOCONF} ${verbose}"
+
+# Determine what needs to be short-circuited based on the
+# user-specified "what".
+
+case "${what}" in
+
+    all)
+        ;;
+
+    conf*)
+        local_action=true
+        header_action=true
+        tool_action=true
+        make_action=true
+        ;;
+
+    make*)
+        local_action=true
+        header_action=true
+        config_action=true
+        ;;
+
+    none)
+        local_action=true
+        header_action=true
+        tool_action=true
+        make_action=true
+        config_action=true
+        ;;
+
+esac
+
+# Bootstrap the package.
+
+${local_action} && ${header_action} && ${tool_action} && ${make_action} && ${config_action}
+
+# Clean up any temporary files created.
+
+removetmp
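Hypothetical invocations of this script, assuming the repository stem used elsewhere in this import and a working directory at the project root; the -w option short-circuits stages exactly as the case statement above maps them.

    stem=third_party/nlbuild-autotools/repo
    ${stem}/scripts/bootstrap -I ${stem}             # all five stages
    ${stem}/scripts/bootstrap -I ${stem} -w config   # autoconf only
    ${stem}/scripts/bootstrap -I ${stem} -w make     # libtoolize and automake only
    ${stem}/scripts/bootstrap -I ${stem} -w none -v  # report tool paths; run nothing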
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/bootstrap-configure b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/bootstrap-configure
new file mode 100755
index 0000000..b745e1e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/bootstrap-configure
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file implements a script which, when run from a project
+#      build directory (either disparate or colocated with the
+#      source), will attempt to clean the build directory, rebootstrap
+#      the package, and then rerun the configuration script for the
+#      package with the provided arguments.
+#
+#      This script is particularly useful when you are changing the
+#      configuration script and testing those changes.
+#
+
+srcdir=`dirname ${0}`
+builddir=.
+
+# Bring the package build back to a pristine state.
+
+if [ -f config.status ]; then
+	make maintainer-clean
+fi
+
+# Change directories to the package source and rebootstrap the package.
+
+# Use a subshell rather than 'pushd'/'popd', which are bash builtins and
+# not guaranteed to exist under /bin/sh.
+
+(cd "${srcdir}" && ./bootstrap)
+bootstrap_status=$?
+
+# If the package was successfully bootstrapped, configure it.
+
+if [ ${bootstrap_status} -eq 0 ]; then
+    ${srcdir}/configure "$@"
+else
+   exit ${bootstrap_status}
+fi
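A hypothetical iteration loop while editing configure.ac, run from a separate build directory; the flags shown are placeholders and are passed through to configure unchanged.

    mkdir -p build && cd build
    ../bootstrap-configure --prefix=/opt/example --enable-debug
    make && make check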
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/mkskeleton b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/mkskeleton
new file mode 100755
index 0000000..7349044
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/mkskeleton
@@ -0,0 +1,237 @@
+#!/bin/bash
+
+#
+#    Copyright 2015-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is a convenience script that will create a skeleton
+#      build layout using the nlbuild-autotools package.
+#
+
+YEAR=`date "+%Y"`
+NAME=""
+DESCRIPTION=""
+DIR=""
+VERSION="1.0"
+verbose=1
+
+#
+# usage
+#
+# Display program usage.
+#
+usage() {
+    name=`basename $0`
+
+    echo "Usage: ${name} [ options ]"
+
+    if [ $1 -ne 0 ]; then
+        echo "Try '${name} -h' for more information."
+    fi
+
+    if [ $1 -ne 1 ]; then
+        echo ""
+        echo "  -h, --help                  Print this help, then exit."
+        echo "  -I DIR                      Specify directory DIR as the root of the "
+        echo "                              nlbuild-autotools repository."
+        echo "  -v, --verbose               Verbosely report mkskeleton progress (default: yes)."
+        echo "  --package-description DESC  Specify description DESC as a the "
+        echo "                              package description."
+        echo "  --package-name NAME         Specify name NAME as the package name."
+        echo "  --package-version VERSION   Specify version VERSION as the initial "
+        echo "                              version for the package (default: ${VERSION})."
+        echo "  --package-year YEAR         Specify year YEAR as the creation "
+        echo "                              year for the package (default: ${YEAR})."
+        echo "  -q, --quiet                 Do not verbosely report mkskeleton progress (default: no)."
+
+        echo ""
+    fi
+
+    exit $1
+}
+
+#
+# log
+#
+log() {
+    if [ ! -z ${verbose} ] && [ ${verbose} -eq 1 ]; then
+        echo $*
+    fi
+}
+
+transform() {
+    local from="${1}"
+    local to="${2}"
+
+    sed                                                   \
+        -e "s,\@NLBUILD_AUTOTOOLS_STEM\@,${DIR},g"        \
+        -e "s,\@PACKAGE_DESCRIPTION\@,${DESCRIPTION},g"   \
+        -e "s,\@PACKAGE_SHORT_LOWER\@,${LOWER},g"         \
+        -e "s,\@PACKAGE_SHORT_UPPER\@,${UPPER},g"         \
+        -e "s,\@PACKAGE_YEAR\@,${YEAR},g"                 \
+        < "${from}" > "${to}"
+}
+
+populate() {
+    local nlbuild="${1}"
+    local perm=${2}
+    local file="${3}"
+    local directory="${4}"
+    local stem="${file%.*}"
+    local extension="${file##*.}"
+    local from
+    local to
+
+    if [ -r "${nlbuild}/examples/${file}" ]; then
+        from="${nlbuild}/examples/${file}"
+
+    elif [ -r "${nlbuild}/examples/${stem}-${directory}.${extension}" ]; then
+        from="${nlbuild}/examples/${stem}-${directory}.${extension}"
+
+    elif [ "${directory}" = "." ] && [ -r "${nlbuild}/examples/${stem}-toplevel.${extension}" ]; then
+        from="${nlbuild}/examples/${stem}-toplevel.${extension}"
+
+    else
+        echo "Cannot find an example file \"${file}\" for directory \"${directory}\"."
+        exit 1
+
+    fi
+
+    to="${directory}/${file}"
+
+    log "Creating \"${to}\"..."
+
+    transform "${from}" "${to}"
+
+    if [ $? = 0 ]; then
+        chmod ${perm} "${to}"
+    fi
+}
+
+#
+# link
+#
+link() {
+    local nlbuild="${1}"
+    local perm=${2}
+    local source="${3}"
+    local directory="${4}"
+    local from="${nlbuild}/scripts/${source}"
+    local to="${directory}/${source}"
+
+    log "Creating \"${to}\"..."
+
+    ln -sf "${from}" "${to}"
+}
+
+# Parse out any command line options
+
+while [ ${#} -gt 0 ]; do
+    if [ ${1} == "-h" ] || [ ${1} == "--help" ]; then
+        usage 0
+
+    elif [ ${1} == "-I" ]; then
+        DIR="${2}"
+        shift 2
+
+    elif [ ${1} == "--package-description" ]; then
+        DESCRIPTION="${2}"
+        shift 2
+
+    elif [ ${1} == "--package-name" ]; then
+        NAME="${2}"
+        shift 2
+
+        UPPER="`echo ${NAME} | tr '[[:lower:]]' '[[:upper:]]'`"
+        LOWER="`echo ${NAME} | tr '[[:upper:]]' '[[:lower:]]'`"
+
+    elif [ ${1} == "--package-version" ]; then
+        VERSION="${2}"
+        shift 2
+
+    elif [ ${1} == "--package-year" ]; then
+        YEAR="${2}"
+        shift 2
+
+    elif [ ${1} == "-q" ] || [ ${1} == "--quiet" ]; then
+        verbose=0
+        shift 1
+
+    elif [ ${1} == "-v" ] || [ ${1} == "--verbose" ]; then
+        verbose=1
+        shift 1
+
+    else
+        usage 1
+
+    fi
+done
+
+# Sanity check the command line arguments
+
+if [ -z "${DIR}" ]; then
+    echo "$0: No -I option specified. Please provide the location of the nlbuild-autotools directory."
+    exit 1
+
+fi
+
+if [ -z "${DESCRIPTION}" ]; then
+    echo "$0: Please provide a package description via --package-description."
+    exit 1
+fi
+
+if [ -z "${NAME}" ]; then
+    echo "$0: Please provide a package name via --package-name."
+    exit 1
+fi
+
+if [ -z "${VERSION}" ]; then
+    echo "$0: Please provide a package default version via --package-version."
+    exit 1
+fi
+
+if [ -z "${YEAR}" ]; then
+    echo "$0: Please provide a package creation year via --package-year."
+    exit 1
+fi
+
+# Create the skeleton directories
+
+DIRS="doc third_party include src tests"
+
+for dir in ${DIRS}; do
+    log "Creating \"${dir}\"..."
+    mkdir -p "${dir}"
+done
+
+# Populate the skeleton directories
+
+populate "${DIR}" 664 configure.ac        .
+populate "${DIR}" 775 bootstrap           .
+populate "${DIR}" 664 Makefile.am         .
+populate "${DIR}" 664 Makefile.am         doc
+populate "${DIR}" 664 Doxyfile.in         doc
+populate "${DIR}" 664 Makefile.am         third_party 
+populate "${DIR}" 664 Makefile.am         src
+populate "${DIR}" 664 Makefile.am         tests
+link     "${DIR}" 775 bootstrap-configure .
+
+# Create the default package version
+
+log "Creating default version ${VERSION}..."
+
+echo "${VERSION}" > ".default-version"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/mkversion b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/mkversion
new file mode 100755
index 0000000..aa0b9f8
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/scripts/mkversion
@@ -0,0 +1,245 @@
+#!/bin/bash
+
+#
+#    Copyright 2011-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file attempts to find and generate a package version
+#      including, if necessary, the number of commits from the last
+#      GIT tag and the current GIT hash corresponding to HEAD for the
+#      current branch.
+#
+#      This is largely cobbled together from similar scripts in other
+#      packages that are maintained in GIT (linux, u-boot, parted, etc.).
+#
+#      This can produce version information such as:
+#
+#        1.0.1
+#        1.0.1-dirty
+#        1.0.1-00032-gab50dbb
+#        1.0.1-00032-gab50dbb-dirty
+#
+
+# Constants
+
+ROOTDIR=${PREFIX}/
+
+BINDIR=${ROOTDIR}bin
+DATADIR=${ROOTDIR}share
+DEVICEDIR=${ROOTDIR}dev
+CONFDIR=${ROOTDIR}etc
+LIBDIR=${ROOTDIR}lib
+LIBEXECDIR=${ROOTDIR}libexec
+VARDIR=${ROOTDIR}var
+LOGDIR=${VARDIR}/log
+MANDIR=${ROOTDIR}man
+SBINDIR=${ROOTDIR}sbin
+
+USRDIR=${ROOTDIR}usr
+USRBINDIR=${USRDIR}/bin
+USRDATADIR=${USRDIR}/share
+USRLIBDIR=${USRDIR}/lib
+USRLIBEXECDIR=${USRDIR}/libexec
+USRSBINDIR=${USRDIR}/sbin
+
+AWK=${USRBINDIR}/awk
+BASENAME=${USRBINDIR}/basename
+CAT=${BINDIR}/cat
+ECHO="${BINDIR}/echo"
+NULL=${DEVICEDIR}/null
+PRINTF=${USRBINDIR}/printf
+RM=${BINDIR}/rm
+SED=${BINDIR}/sed
+
+VERSION=""
+
+#
+# usage <status>
+#
+# Description:
+#   This routine prints out the proper command line usage for this
+#   program and then exits with the specified status.
+#
+# Input(s):
+#   status - Exit status to exit the program with.
+#
+# Returns:
+#   This subroutine does not return.
+#
+usage() {
+	local name=`${BASENAME} ${0}`
+
+	${ECHO} "Usage: ${name} [options] [ <project root> ]"
+
+	if [ ${1} -ne 0 ]; then
+		${ECHO} "Try '${name} -h' for more information."
+	fi
+
+	if [ ${1} -ne 1 ]; then
+${CAT} << EOF
+  -b, --build-version=VERSION  Specify VERSION as the build version to generate
+                               extra build information against.
+  -h, --help                   Print this help, then exit.
+EOF
+	fi
+
+	exit ${1}
+}
+
+#
+# gitversion <string> <directory> <version>
+#
+# Description:
+#   This routine prints out any GIT version information appended to the
+#   end of the package version, including the number of commits from
+#   the last GIT tag and the current GIT hash corresponding to HEAD
+#   for the current branch.
+#
+# Input(s):
+#   string    - The current version string which may be empty.
+#   directory - The current directory.
+#   version   - The optional current package version.
+#
+# Returns:
+#   N/A
+#
+gitversion() {
+	local string="${1}"
+	local dir="${2}"
+	local version="${3}"
+	local head
+	local exact
+	local dtag
+	local gitversion
+
+	# Retrieve the shortened, unique GIT hash associated with the
+	# 'HEAD' GIT object
+
+	head=`test -d .git && git rev-parse --verify --short HEAD 2> ${NULL}`
+
+	# If we found a hash, we are actually in a GIT repository; continue.
+
+	if [ -n "${head}" ]; then
+	    	# Check to see if we have a position in GIT that is
+	    	# exactly at an existing tag (e.g. 1.0.2). If we are,
+	    	# just use it and add a dirty qualifier. Otherwise,
+	    	# work through the logic to determine how far off the
+	    	# tag the tree is.
+
+	    	exact="`git describe --exact-match 2> ${NULL}`"
+
+		if [ -z "${exact}" ] || [ -n "${version}" ] && [ "${version}" != "${exact}" ]; then
+			dtag="`git describe 2> ${NULL}`"
+
+			# If we are n commits away from a tag, then
+			# print n and a shortened version of the
+			# hash. Otherwise, just print the hash.
+			#
+			# If we are at an exact version, then there
+			# won't be a delta or a hash, just use the
+			# exact tag.
+
+			if [ -n "${dtag}" ]; then
+				if [ "${dtag}" == "${exact}" ]; then
+					gitversion="${dtag}"
+				else
+					gitversion=`${PRINTF} "${dtag}" | ${AWK} -F '-' '{printf("%s-%05d-%s", $(NF-2),$(NF-1),$(NF))}' 2> ${NULL}`
+				fi
+
+			else
+				gitversion=`${PRINTF} "g${head}"`
+
+			fi
+
+                else
+			gitversion="${exact}"
+
+		fi
+
+		# Update the index if we are in a writable directory
+		# so that we can successfully check for a dirty (has
+		# uncommitted changes or unresolved merges) tree.
+
+		if [ -w "${dir}" ]; then
+			git update-index --refresh --unmerged > ${NULL}
+		fi
+
+		# Now check for such a dirty tree and add to the "string"
+		# if we found one.
+
+		if git diff-index --name-only HEAD | read dummy; then
+		    	if [ -n "${gitversion}" ]; then
+				gitversion="${gitversion}-dirty"
+			else
+				gitversion="dirty"
+			fi
+    		fi
+
+	else
+		gitversion="${version}"
+
+	fi
+
+	if [ -n "${string}" ] && [ -n "${gitversion}" ]; then
+		string="${string}-${gitversion}"
+	else
+		string="${gitversion}"
+	fi
+
+	${PRINTF} "${string}"
+}
+
+#
+# Main Program Body
+#
+
+while [ ${#} -gt 0 ]; do
+    	if [ ${1:0:1} == "-" ]; then
+		if [ "${1}" == "-h" ] || [ "${1}" == "--help" ]; then
+			usage 0
+
+		elif [ "${1}" == "-b" ] || [ "${1}" == "--build-version" ]; then
+			version="${2}"
+			shift 2
+
+		else
+		    	${ECHO} "Unknown argument '${1}'."
+			usage 1
+
+		fi
+
+	else
+		break
+
+	fi
+done
+
+if [ ${#} -gt 1 ]; then
+	usage 1
+elif [ ${#} -eq 1 ]; then
+	tree="${1}"
+else
+	tree="."
+fi
+
+if [ "${tree}" != "." ]; then
+	cd "${tree}"
+fi
+
+VERSION="`gitversion \"${VERSION}\" . ${version}`"
+
+${PRINTF} "${VERSION}"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/aclocal b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/aclocal
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/aclocal
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 file in $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order into which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in first files we have scanned should override those from
+# later files.  So they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted, we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directory containing extra m4 files for macros definition,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # reraise default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # the destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However working around this is not
+	      # worth the trouble since nobody run aclocal on a
+	      # worth the trouble since nobody runs aclocal on a
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  This makes the 'm4'
+      # directory to occur twice here and fail on the second call to
+      # scan_m4_dirs([m4]) when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there be rather a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on white lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # File included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code .= "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contain harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add looks like it is outside the project, copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+  # creates aclocal.m4t and then renames it to aclocal.m4, but the
+  # rebuild rules generated by Automake create aclocal.m4 directly --
+  # this would give two ways to get the same file, with a different
+  # name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in a loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings (default)
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, add any directory listed in the 'dirlist' file.
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
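+  # For example, ACLOCAL_PATH=/a:/b (both directories existing) leaves
+  # @system_includes starting with (/a, /b): /b is unshifted first, then
+  # /a in front of it.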
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some files have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directory specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options nor AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/aclocal-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/aclocal-1.14
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/aclocal-1.14
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 files into $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order in which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in the first files we scanned should override those from
+# later files.  So they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted, we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
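+# For example, a line like "# serial 7 foo.m4" yields the serial "7";
+# serials are compared component-wise by list_compare below, so "1.10"
+# is considered newer than "1.9".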
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directory containing extra m4 files for macro definitions,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # Re-raise the signal with the default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However, working around this is not
+	      # worth the trouble since nobody runs aclocal on a
+	      # read-only tree anyway.
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  This makes the 'm4'
+      # directory occur twice here, and the second call to
+      # scan_m4_dirs([m4]) fail when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there rather be a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on white lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from an M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However, some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # Files included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
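+  # For example, if m4/bar.m4 does "m4_include([m4/foo.m4])" and bar.m4
+  # is output after foo.m4, the separate entry for foo.m4 is redundant
+  # and is dropped here; bar.m4 already pulls it in at a later position.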
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code .= "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contain harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
+
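+  # Each traced call is printed by autom4te as one line of the form
+  # "FILE::MACRO" or "FILE::MACRO::ARG1::...", with '::' as the field
+  # separator; only the first argument is ever used below.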
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add looks like it is outside the project, copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+  # creates aclocal.m4t and then renames it to aclocal.m4, but the
+  # rebuild rules generated by Automake create aclocal.m4 directly --
+  # this would give two ways to get the same file, with a different
+  # name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in a loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings (default)
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, add any directory listed in the 'dirlist' file.
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
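+  # For example, ACLOCAL_PATH=/a:/b (both directories existing) leaves
+  # @system_includes starting with (/a, /b): /b is unshifted first, then
+  # /a in front of it.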
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some files have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directory specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options nor AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoconf
new file mode 100755
index 0000000..8bfd5fc
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoconf
@@ -0,0 +1,500 @@
+#! /bin/sh
+# Generated from autoconf.in; do not edit by hand.
+# autoconf -- create `configure' using m4 macros
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1999, 2000, 2001, 2002, 2003,
+# 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+# Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
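+# (The default separator is ':'; it is switched to ';' only when a PATH
+# written with ';' lets 'sh -c :' run while the same PATH written with
+# ':' does not.)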
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to an empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+
+# as_fn_error STATUS ERROR
+# ------------------------
+# Output "`basename $0`: error: ERROR" to stderr. Then exit the script with
+# STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+usage="\
+Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Generate a configuration script from a TEMPLATE-FILE if given, or
+\`configure.ac' if present, or else \`configure.in'.  Output is sent
+to the standard output if TEMPLATE-FILE is given, else into
+\`configure'.
+
+Operation modes:
+  -h, --help                print this help, then exit
+  -V, --version             print version number, then exit
+  -v, --verbose             verbosely report processing
+  -d, --debug               don't remove temporary files
+  -f, --force               consider all files obsolete
+  -o, --output=FILE         save output in FILE (stdout is the default)
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY [syntax]
+
+Warning categories include:
+  \`cross'         cross compilation issues
+  \`obsolete'      obsolete constructs
+  \`syntax'        dubious syntactic constructs
+  \`all'           all the warnings
+  \`no-CATEGORY'   turn off the warnings on CATEGORY
+  \`none'          turn off all the warnings
+  `error'         warnings are errors
+
+The environment variables \`M4' and \`WARNINGS' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the list of calls to MACRO
+  -i, --initialization        also trace Autoconf's initialization process
+
+In tracing mode, no configuration script is created.  FORMAT defaults
+to \`\$f:\$l:\$n:\$%'; see \`autom4te --help' for information about FORMAT.
+
+Report bugs to <bug-autoconf@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>."
+
+version="\
+autoconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille."
+
+help="\
+Try \`$as_me --help' for more information."
+
+exit_missing_arg='
+  as_fn_error $? "option \`$1'\'' requires an argument$as_nl$help"'
+# restore font-lock: '
+
+# Variables.
+: ${AUTOM4TE='/i686-pc-cygwin/bin/autom4te'}
+autom4te_options=
+outfile=
+verbose=false
+
+# Parse command line.
+while test $# -gt 0 ; do
+  option=`expr "x$1" : 'x\(--[^=]*\)' \| \
+	       "x$1" : 'x\(-.\)'`
+  optarg=`expr "x$1" : 'x--[^=]*=\(.*\)' \| \
+	       "x$1" : 'x-.\(.*\)'`
+  case $1 in
+    --version | -V )
+       echo "$version" ; exit ;;
+    --help | -h )
+       $as_echo "$usage"; exit ;;
+
+    --verbose | -v )
+       verbose=:
+       autom4te_options="$autom4te_options $1"; shift ;;
+
+    # Arguments passed as is to autom4te.
+    --debug      | -d   | \
+    --force      | -f   | \
+    --include=*  | -I?* | \
+    --prepend-include=* | -B?* | \
+    --warnings=* | -W?* )
+       case $1 in
+	 *\'*) arg=`$as_echo "$1" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$1 ;;
+       esac
+       autom4te_options="$autom4te_options '$arg'"; shift ;;
+    # Options with separated arg passed as is to autom4te.
+    --include  | -I | \
+    --prepend-include  | -B | \
+    --warnings | -W )
+       test $# = 1 && eval "$exit_missing_arg"
+       case $2 in
+	 *\'*) arg=`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$2 ;;
+       esac
+       autom4te_options="$autom4te_options $option '$arg'"
+       shift; shift ;;
+
+    --trace=* | -t?* )
+       traces="$traces --trace='"`$as_echo "$optarg" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift ;;
+    --trace | -t )
+       test $# = 1 && eval "$exit_missing_arg"
+       traces="$traces --trace='"`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift; shift ;;
+    --initialization | -i )
+       autom4te_options="$autom4te_options --melt"
+       shift;;
+
+    --output=* | -o?* )
+       outfile=$optarg
+       shift ;;
+    --output | -o )
+       test $# = 1 && eval "$exit_missing_arg"
+       outfile=$2
+       shift; shift ;;
+
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       exec >&2
+       as_fn_error $? "invalid option \`$1'$as_nl$help" ;; #`
+    * )
+       break ;;
+  esac
+done
+
+# Find the input file.
+case $# in
+  0)
+    if test -f configure.ac; then
+      if test -f configure.in; then
+	$as_echo "$as_me: warning: both \`configure.ac' and \`configure.in' are present." >&2
+	$as_echo "$as_me: warning: proceeding with \`configure.ac'." >&2
+      fi
+      infile=configure.ac
+    elif test -f configure.in; then
+      infile=configure.in
+    else
+      as_fn_error $? "no input file"
+    fi
+    test -z "$traces" && test -z "$outfile" && outfile=configure;;
+  1)
+    infile=$1 ;;
+  *) exec >&2
+     as_fn_error $? "invalid number of arguments$as_nl$help" ;;
+esac
+
+# Unless specified, the output is stdout.
+test -z "$outfile" && outfile=-
+
+# Run autom4te with expansion.
+eval set x "$autom4te_options" \
+  --language=autoconf --output=\"\$outfile\" "$traces" \"\$infile\"
+shift
+$verbose && $as_echo "$as_me: running $AUTOM4TE $*" >&2
+exec "$AUTOM4TE" "$@"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoheader b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoheader
new file mode 100755
index 0000000..5537fc6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoheader
@@ -0,0 +1,304 @@
+#! /usr/bin/perl
+# -*- Perl -*-
+# Generated from autoheader.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoheader -- create `config.h.in' from `configure.ac'
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Roland McGrath.
+# Rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, "$pkgdatadir";
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use strict;
+
+# Using `do FILE', we need `local' vars.
+use vars qw ($config_h %verbatim %symbol);
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/i686-pc-cygwin/bin/autom4te';
+local $config_h;
+my $config_h_in;
+my @prepend_include;
+my @include;
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Create a template file of C \`\#define\' statements for \`configure\' to
+use.  To this end, scan TEMPLATE-FILE, or \`configure.ac\' if present,
+or else \`configure.in\'.
+
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -f, --force              consider all files obsolete
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+" . Autom4te::ChannelDefs::usage () . "
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "autoheader (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Roland McGrath and Akim Demaille.
+";
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  parse_WARNINGS;
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'W|warnings=s'        => \&parse_warnings);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('ah');
+switch_warning 'obsolete';
+parse_args;
+
+# Preach.
+my $config_h_top = find_file ("config.h.top?",
+			      reverse (@prepend_include), @include);
+my $config_h_bot = find_file ("config.h.bot?",
+			      reverse (@prepend_include), @include);
+my $acconfig_h = find_file ("acconfig.h?",
+			    reverse (@prepend_include), @include);
+if ($config_h_top || $config_h_bot || $acconfig_h)
+  {
+    my $msg = << "END";
+    Using auxiliary files such as \`acconfig.h\', \`config.h.bot\'
+    and \`config.h.top\', to define templates for \`config.h.in\'
+    is deprecated and discouraged.
+
+    Using the third argument of \`AC_DEFINE\' and
+    \`AC_DEFINE_UNQUOTED\' allows one to define a template without
+    \`acconfig.h\':
+
+      AC_DEFINE([NEED_FUNC_MAIN], 1,
+		[Define if a function \`main\' is needed.])
+
+    More sophisticated templates can also be produced, see the
+    documentation.
+END
+    $msg =~ s/^    /WARNING: /gm;
+    msg 'obsolete', $msg;
+  }
+
+# Set up autoconf.
+my $autoconf = "'$autom4te' --language=autoconf ";
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+$autoconf .= ' --debug' if $debug;
+$autoconf .= ' --force' if $force;
+$autoconf .= ' --verbose' if $verbose;
+
+# ----------------------- #
+# Real work starts here.  #
+# ----------------------- #
+
+# Source what the traces are trying to tell us.
+verb "$me: running $autoconf to trace from $ARGV[0]";
+my $quoted_tmp = shell_quote ($tmp);
+xsystem ("$autoconf"
+	 # If you change this list, update the
+	 # `Autoheader-preselections' section of autom4te.in.
+	 . ' --trace AC_CONFIG_HEADERS:\'$$config_h ||= \'"\'"\'$1\'"\'"\';\''
+	 . ' --trace AH_OUTPUT:\'$$verbatim{\'"\'"\'$1\'"\'"\'} = \'"\'"\'$2\'"\'"\';\''
+	 . ' --trace AC_DEFINE_TRACE_LITERAL:\'$$symbol{\'"\'"\'$1\'"\'"\'} = 1;\''
+	 . " " . shell_quote ($ARGV[0]) . " >$quoted_tmp/traces.pl");
+
+local (%verbatim, %symbol);
+debug "$me: \`do'ing $tmp/traces.pl:\n" . `sed 's/^/| /' $quoted_tmp/traces.pl`;
+do "$tmp/traces.pl";
+warn "couldn't parse $tmp/traces.pl: $@" if $@;
+unless ($config_h)
+  {
+    error "error: AC_CONFIG_HEADERS not found in $ARGV[0]";
+    exit 1;
+  }
+
+# We template only the first CONFIG_HEADER.
+$config_h =~ s/ .*//;
+# Support "outfile[:infile]", defaulting infile="outfile.in".
+($config_h, $config_h_in) = split (':', $config_h, 2);
+$config_h_in ||= "$config_h.in";
+
+# %SYMBOL might contain things like `F77_FUNC(name,NAME)', but we keep
+# only the name of the macro.
+%symbol = map { s/\(.*//; $_ => 1 } keys %symbol;
+
+my $out = new Autom4te::XFile ("> " . open_quote ("$tmp/config.hin"));
+
+# Don't write "do not edit" -- it will get copied into the
+# config.h, which it's ok to edit.
+print $out "/* $config_h_in.  Generated from $ARGV[0] by autoheader.  */\n";
+
+# Dump the top.
+if ($config_h_top)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_top));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+# Dump `acconfig.h', except for its bottom portion.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    while ($_ = $in->getline)
+      {
+	last if /\@BOTTOM\@/;
+	next if /\@TOP\@/;
+	print $out $_;
+      }
+  }
+
+# Dump the templates from `configure.ac'.
+foreach (sort keys %verbatim)
+  {
+    print $out "\n$verbatim{$_}\n";
+  }
+
+# Dump bottom portion of `acconfig.h'.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    my $dump = 0;
+    while ($_ = $in->getline)
+      {
+	print $out $_ if $dump;
+	$dump = 1  if /\@BOTTOM\@/;
+      }
+  }
+
+# Dump the bottom.
+if ($config_h_bot)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_bot));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+$out->close;
+
+# Check that all the symbols have a template.
+{
+  my $in = new Autom4te::XFile ("< " . open_quote ("$tmp/config.hin"));
+  my $suggest_ac_define = 1;
+  while ($_ = $in->getline)
+    {
+      my ($symbol) = /^\#\s*\w+\s+(\w+)/
+	or next;
+      delete $symbol{$symbol};
+    }
+  foreach (sort keys %symbol)
+    {
+      msg 'syntax', "warning: missing template: $_";
+      if ($suggest_ac_define)
+	{
+	  msg 'syntax',  "Use AC_DEFINE([$_], [], [Description])";
+	  $suggest_ac_define = 0;
+	}
+
+    }
+  exit 1
+    if keys %symbol;
+}
+
+update_file ("$tmp/config.hin", "$config_h_in", $force);
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
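A hedged sketch of a typical invocation of the autoheader script above, assuming configure.ac declares AC_CONFIG_HEADERS([config.h]); the -I directory is an assumption, not something the script requires:

    autoheader -I m4 --force    # regenerates config.h.in from configure.ac

When a traced symbol lacks a template, the script warns about the missing template and points at the three-argument form of AC_DEFINE, e.g. AC_DEFINE([NEED_FUNC_MAIN], 1, [Define if a function `main' is needed.]).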
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autom4te b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autom4te
new file mode 100755
index 0000000..d29ca29
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autom4te
@@ -0,0 +1,1075 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autom4te.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autom4te - Wrapper around M4 libraries.
+# Copyright (C) 2001, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::C4che;
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Data directory.
+my $pkgdatadir = $ENV{'AC_MACRODIR'} || '//share/autoconf';
+
+# $LANGUAGE{LANGUAGE} -- Automatic options for LANGUAGE.
+my %language;
+
+my $output = '-';
+
+# Mode of the output file except for traces.
+my $mode = "0666";
+
+# If melt, don't use frozen files.
+my $melt = 0;
+
+# Names of the cache directory, cache directory index, trace cache
+# prefix, and output cache prefix.  And the IO object for the index.
+my $cache;
+my $icache;
+my $tcache;
+my $ocache;
+my $icache_file;
+
+my $flock_implemented = 'yes';
+
+# The macros to trace mapped to their format, as specified by the
+# user.
+my %trace;
+
+# The macros the user will want to trace in the future.
+# We need `include' to get the included file, `m4_pattern_forbid' and
+# `m4_pattern_allow' to check the output.
+#
+# FIXME: What about `sinclude'?
+my @preselect = ('include',
+		 'm4_pattern_allow', 'm4_pattern_forbid',
+		 '_m4_warn');
+
+# M4 include path.
+my @include;
+
+# Do we freeze?
+my $freeze = 0;
+
+# $M4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+# Some non-GNU m4's don't reject the --help option, so give them /dev/null.
+fatal "need GNU m4 1.4 or later: $m4"
+  if system "$m4 --help </dev/null 2>&1 | grep reload-state >/dev/null";
+
+# Set some high recursion limit as the default limit, 250, has already
+# been hit with AC_OUTPUT.  Don't override the user's choice.
+$m4 .= ' --nesting-limit=1024'
+  if " $m4 " !~ / (--nesting-limit(=[0-9]+)?|-L[0-9]*) /;
+
+
+# @M4_BUILTIN -- M4 builtins and a useful comment.
+my @m4_builtin = `echo dumpdef | $m4 2>&1 >/dev/null`;
+map { s/:.*//;s/\W// } @m4_builtin;
+
+
+# %M4_BUILTIN_ALTERNATE_NAME
+# --------------------------
+# The builtins are renamed, e.g., `define' is renamed `m4_define'.
+# So map `define' to `m4_define' and conversely.
+# Some macros don't follow this scheme: be sure to properly map to their
+# alternate name too.
+#
+# FIXME: Trace status of renamed builtins was fixed in M4 1.4.5, which
+# we now depend on; do we still need to do this mapping?
+#
+# So we will merge them, i.e., tracing `BUILTIN' or tracing
+# `m4_BUILTIN' will be the same: tracing both, but honoring the
+# *last* trace specification.
+#
+# FIXME: This is not enough: in the output `$0' will be `BUILTIN'
+# sometimes and `m4_BUILTIN' at others.  We should return a unique name,
+# the one specified by the user.
+#
+# FIXME: To be absolutely rigorous, I would say that given that we
+# _redefine_ divert (instead of _copying_ it), divert and the like
+# should not be part of this list.
+my %m4_builtin_alternate_name;
+@m4_builtin_alternate_name{"$_", "m4_$_"} = ("m4_$_", "$_")
+  foreach (grep { !/m4wrap|m4exit|dnl|ifelse|__.*__/ } @m4_builtin);
+@m4_builtin_alternate_name{"ifelse", "m4_if"}   = ("m4_if", "ifelse");
+@m4_builtin_alternate_name{"m4exit", "m4_exit"} = ("m4_exit", "m4exit");
+@m4_builtin_alternate_name{"m4wrap", "m4_wrap"} = ("m4_wrap", "m4wrap");
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILES]
+
+Run GNU M4 on the FILES, avoiding useless runs.  Output the traces if tracing,
+the frozen file if freezing, otherwise the expansion of the FILES.
+
+If some of the FILES are named \`FILE.m4f\' they are considered to be M4
+frozen files of all the previous files (which are therefore not loaded).
+If \`FILE.m4f\' is not found, then \`FILE.m4\' will be used, together with
+all the previous files.
+
+Some files may be optional, i.e., will only be processed if found in the
+include path, but then must end in \`.m4?\';  the question mark is not part of
+the actual file name.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -o, --output=FILE        save output in FILE (defaults to \`-\', stdout)
+  -f, --force              don\'t rely on cached values
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+  -l, --language=LANG      specify the set of M4 macros to use
+  -C, --cache=DIRECTORY    preserve results for future runs in DIRECTORY
+      --no-cache           disable the cache
+  -m, --mode=OCTAL         change the non trace output file mode (0666)
+  -M, --melt               don\'t use M4 frozen files
+
+Languages include:
+  \`Autoconf\'   create Autoconf configure scripts
+  \`Autotest\'   create Autotest test suites
+  \`M4sh\'       create M4sh shell scripts
+  \`M4sugar\'    create M4sugar output
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variables \`M4\' and \`WARNINGS\' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the MACRO invocations
+  -p, --preselect=MACRO       prepare to trace MACRO in a future run
+
+Freezing:
+  -F, --freeze   produce an M4 frozen state file for FILES
+
+FORMAT defaults to \`\$f:\$l:\$n:\$%\', and can use the following escapes:
+  \$\$     literal \$
+  \$f     file where macro was called
+  \$l     line where macro was called
+  \$d     nesting depth of macro call
+  \$n     name of the macro
+  \$NUM   argument NUM, unquoted and with newlines
+  \$SEP\@  all arguments, with newlines, quoted, and separated by SEP
+  \$SEP*  all arguments, with newlines, unquoted, and separated by SEP
+  \$SEP%  all arguments, without newlines, unquoted, and separated by SEP
+SEP can be empty for the default (comma for \@ and *, colon for %),
+a single character for that character, or {STRING} to use a string.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version =  <<"EOF";
+autom4te (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Akim Demaille.
+EOF
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# $OPTION
+# files_to_options (@FILE)
+# ------------------------
+# Transform Autom4te conventions (e.g., using foo.m4f to designate a frozen
+# file) into a suitable command line for M4 (e.g., using --reload-state).
+# parse_args guarantees that we will see at most one frozen file, and that
+# if a frozen file is present, it is the first argument.
+sub files_to_options (@)
+{
+  my (@file) = @_;
+  my @res;
+  foreach my $file (@file)
+    {
+      my $arg = shell_quote ($file);
+      if ($file =~ /\.m4f$/)
+	{
+	  $arg = "--reload-state=$arg";
+	  # If the user downgraded M4 from 1.6 to 1.4.x after freezing
+	  # the file, then we ensure the frozen __m4_version__ will
+	  # not cause m4_init to make the wrong decision about the
+	  # current M4 version.
+	  $arg .= " --undefine=__m4_version__"
+	    unless grep {/__m4_version__/} @m4_builtin;
+	}
+      push @res, $arg;
+    }
+  return join ' ', @res;
+}
+
+
+# load_configuration ($FILE)
+# --------------------------
+# Load the configuration $FILE.
+sub load_configuration ($)
+{
+  my ($file) = @_;
+  use Text::ParseWords;
+
+  my $cfg = new Autom4te::XFile ("< " . open_quote ($file));
+  my $lang;
+  while ($_ = $cfg->getline)
+    {
+      chomp;
+      # Comments.
+      next
+	if /^\s*(\#.*)?$/;
+
+      my @words = shellwords ($_);
+      my $type = shift @words;
+      if ($type eq 'begin-language:')
+	{
+	  fatal "$file:$.: end-language missing for: $lang"
+	    if defined $lang;
+	  $lang = lc $words[0];
+	}
+      elsif ($type eq 'end-language:')
+	{
+	  error "$file:$.: end-language mismatch: $lang"
+	    if $lang ne lc $words[0];
+	  $lang = undef;
+	}
+      elsif ($type eq 'args:')
+	{
+	  fatal "$file:$.: no current language"
+	    unless defined $lang;
+	  push @{$language{$lang}}, @words;
+	}
+      else
+	{
+	  error "$file:$.: unknown directive: $type";
+	}
+    }
+}
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  # We want to look for the early options, which should not be found
+  # in the configuration file.  Prepend to the user arguments.
+  # Perform this repeatedly so that we can use --language in language
+  # definitions.  Beware that there can be several --language
+  # invocations.
+  my @language;
+  do {
+    @language = ();
+    use Getopt::Long;
+    Getopt::Long::Configure ("pass_through", "permute");
+    GetOptions ("l|language=s" => \@language);
+
+    foreach (@language)
+      {
+	error "unknown language: $_"
+	  unless exists $language{lc $_};
+	unshift @ARGV, @{$language{lc $_}};
+      }
+  } while @language;
+
+  # --debug is useless: it is parsed below.
+  if (exists $ENV{'AUTOM4TE_DEBUG'})
+    {
+      print STDERR "$me: concrete arguments:\n";
+      foreach my $arg (@ARGV)
+	{
+	  print STDERR "| $arg\n";
+	}
+    }
+
+  # Process the arguments for real this time.
+  my @trace;
+  my @prepend_include;
+  parse_WARNINGS;
+  getopt
+    (
+     # Operation modes:
+     "o|output=s"   => \$output,
+     "W|warnings=s" => \&parse_warnings,
+     "m|mode=s"     => \$mode,
+     "M|melt"       => \$melt,
+
+     # Library directories:
+     "B|prepend-include=s" => \@prepend_include,
+     "I|include=s"         => \@include,
+
+     # Tracing:
+     # Using a hash for traces is tempting.  Unfortunately, upon `-t FOO',
+     # instead of mapping `FOO' to undef, Getopt maps it to `1', preventing
+     # us from distinguishing `-t FOO' from `-t FOO=1'.  So let's do it
+     # by hand.
+     "t|trace=s"     => \@trace,
+     "p|preselect=s" => \@preselect,
+
+     # Freezing.
+     "F|freeze" => \$freeze,
+
+     # Caching.
+     "C|cache=s" => \$cache,
+     "no-cache"  => sub { $cache = undef; },
+    );
+
+  fatal "too few arguments
+Try `$me --help' for more information."
+    unless @ARGV;
+
+  # Freezing:
+  # We cannot trace at the same time (well, we can, but it sounds insane).
+  # And it implies melting: there is a risk of not updating properly using
+  # old frozen files, and worse yet: we could load a frozen file and
+  # refreeze it!  A sort of caching :)
+  fatal "cannot freeze and trace"
+    if $freeze && @trace;
+  $melt = 1
+    if $freeze;
+
+  # Names of the cache directory, cache directory index, trace cache
+  # prefix, and output cache prefix.  If the cache is not to be
+  # preserved, default to a temporary directory (automatically removed
+  # on exit).
+  $cache = $tmp
+    unless $cache;
+  $icache = "$cache/requests";
+  $tcache = "$cache/traces.";
+  $ocache = "$cache/output.";
+
+  # Normalize the includes: the first occurrence is enough, several is
+  # a pain since it introduces a useless difference in the path which
+  # invalidates the cache.  And strip `.' which is implicit and always
+  # first.
+  @include = grep { !/^\.$/ } uniq (reverse(@prepend_include), @include);
+
+  # Convert @trace to %trace, and work around the M4 builtins tracing
+  # problem.
+  # The default format is `$f:$l:$n:$%'.
+  foreach (@trace)
+    {
+      /^([^:]+)(?::(.*))?$/ms;
+      $trace{$1} = defined $2 ? $2 : '$f:$l:$n:$%';
+      $trace{$m4_builtin_alternate_name{$1}} = $trace{$1}
+	if exists $m4_builtin_alternate_name{$1};
+    }
+
+  # Work around the M4 builtins tracing problem for @PRESELECT.
+  # FIXME: Is this still needed, now that we rely on M4 1.4.5?
+  push (@preselect,
+	map { $m4_builtin_alternate_name{$_} }
+	grep { exists $m4_builtin_alternate_name{$_} } @preselect);
+
+  # If we find a frozen file, then all the files before it are
+  # discarded: the frozen file is supposed to include them all.
+  #
+  # We don't want to depend upon m4's --include to find the top level
+  # files, so we use `find_file' here.  Try to get a canonical name,
+  # as it's part of the key for caching.  And some files are optional
+  # (also handled by `find_file').
+  my @argv;
+  foreach (@ARGV)
+    {
+      if ($_ eq '-')
+	{
+	  push @argv, $_;
+	}
+      elsif (/\.m4f$/)
+	{
+	  # Frozen files are optional => pass a `?' to `find_file'.
+	  my $file = find_file ("$_?", @include);
+	  if (!$melt && $file)
+	    {
+	      @argv = ($file);
+	    }
+	  else
+	    {
+	      s/\.m4f$/.m4/;
+	      push @argv, find_file ($_, @include);
+	    }
+	}
+      else
+	{
+	  my $file = find_file ($_, @include);
+	  push @argv, $file
+	    if $file;
+	}
+    }
+  @ARGV = @argv;
+}
+
+
+# handle_m4 ($REQ, @MACRO)
+# ------------------------
+# Run m4 on the input files, and save the traces on the @MACRO.
+sub handle_m4 ($@)
+{
+  my ($req, @macro) = @_;
+
+  # GNU m4 appends when using --debugfile/--error-output.
+  unlink ($tcache . $req->id . "t");
+
+  # Run m4.
+  #
+  # We don't output directly to the cache files, to avoid problems
+  # when we are interrupted (that leaves corrupted files).
+  xsystem ("$m4 --gnu"
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . ' --debug=aflq'
+	   . (!exists $ENV{'AUTOM4TE_NO_FATAL'} ? ' --fatal-warning' : '')
+	   . " --debugfile=" . shell_quote ("$tcache" . $req->id . "t")
+	   . join (' --trace=', '', map { shell_quote ($_) } sort @macro)
+	   . " " . files_to_options (@ARGV)
+	   . " > " . shell_quote ("$ocache" . $req->id . "t"));
+
+  # Everything went ok: preserve the outputs.
+  foreach my $file (map { $_ . $req->id } ($tcache, $ocache))
+    {
+      use File::Copy;
+      move ("${file}t", "$file")
+	or fatal "cannot rename ${file}t as $file: $!";
+    }
+}
+
+
+# warn_forbidden ($WHERE, $WORD, %FORBIDDEN)
+# ------------------------------------------
+# $WORD is forbidden.  Warn with a dedicated error message if in
+# %FORBIDDEN, otherwise a simple `error: possibly undefined macro'
+# will do.
+my $first_warn_forbidden = 1;
+sub warn_forbidden ($$%)
+{
+  my ($where, $word, %forbidden) = @_;
+  my $message;
+
+  for my $re (sort keys %forbidden)
+    {
+      if ($word =~ $re)
+	{
+	  $message = $forbidden{$re};
+	  last;
+	}
+    }
+  $message ||= "possibly undefined macro: $word";
+  warn "$where: error: $message\n";
+  if ($first_warn_forbidden)
+    {
+      warn <<EOF;
+      If this token and others are legitimate, please use m4_pattern_allow.
+      See the Autoconf documentation.
+EOF
+      $first_warn_forbidden = 0;
+    }
+}
+
+
+# handle_output ($REQ, $OUTPUT)
+# -----------------------------
+# Run m4 on the input files, perform quadrigraph substitution, check for
+# forbidden tokens, and save into $OUTPUT.
+sub handle_output ($$)
+{
+  my ($req, $output) = @_;
+
+  verb "creating $output";
+
+  # Load the forbidden/allowed patterns.
+  handle_traces ($req, "$tmp/patterns",
+		 ('m4_pattern_forbid' => 'forbid:$1:$2',
+		  'm4_pattern_allow'  => 'allow:$1'));
+  my @patterns = new Autom4te::XFile ("< " . open_quote ("$tmp/patterns"))->getlines;
+  chomp @patterns;
+  my %forbidden =
+    map { /^forbid:([^:]+):.+$/ => /^forbid:[^:]+:(.+)$/ } @patterns;
+  my $forbidden = join ('|', map { /^forbid:([^:]+)/ } @patterns) || "^\$";
+  my $allowed   = join ('|', map { /^allow:([^:]+)/  } @patterns) || "^\$";
+
+  verb "forbidden tokens: $forbidden";
+  verb "forbidden token : $_ => $forbidden{$_}"
+    foreach (sort keys %forbidden);
+  verb "allowed   tokens: $allowed";
+
+  # Read the (cached) raw M4 output, produce the actual result.  We
+  # have to use the 2nd arg to have Autom4te::XFile honor the third, but then
+  # stdout is to be handled by hand :(.  Don't use fdopen as it means
+  # we will close STDOUT, which we already do in END.
+  my $out = new Autom4te::XFile;
+  if ($output eq '-')
+    {
+      $out->open (">$output");
+    }
+  else
+    {
+      $out->open($output, O_CREAT | O_WRONLY | O_TRUNC, oct ($mode));
+    }
+  fatal "cannot create $output: $!"
+    unless $out;
+  my $in = new Autom4te::XFile ("< " . open_quote ($ocache . $req->id));
+
+  my %prohibited;
+  my $res;
+  while ($_ = $in->getline)
+    {
+      s/\s+$//;
+      s/__oline__/$./g;
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+
+      $res = $_;
+
+      # Don't complain in comments.  Well, until we have something
+      # better, don't consider `#include' etc. to be comments.
+      s/\#.*//
+	unless /^\#\s*(if|include|endif|ifdef|ifndef|define)\b/;
+      foreach (split (/\W+/))
+	{
+	  $prohibited{$_} = $.
+	    if !/^$/ && /$forbidden/o && !/$allowed/o && ! exists $prohibited{$_};
+	}
+
+      # Performed *last*: the empty quadrigraph.
+      $res =~ s/\@&t\@//g;
+
+      print $out "$res\n";
+    }
+
+  $out->close();
+
+  # If no forbidden words, we're done.
+  return
+    if ! %prohibited;
+
+  # Locate the forbidden words in the last input file.
+  # This is unsatisfying but...
+  $exit_code = 1;
+  if ($ARGV[$#ARGV] ne '-')
+    {
+      my $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+      my $file = new Autom4te::XFile ("< " . open_quote ($ARGV[$#ARGV]));
+
+      while ($_ = $file->getline)
+	{
+	  # Don't complain in comments.  Well, until we have something
+	  # better, don't consider `#include' etc. to be comments.
+	  s/\#.*//
+	    unless /^\#(if|include|endif|ifdef|ifndef|define)\b/;
+
+	  # Complain once per word, but possibly several times per line.
+	  while (/$prohibited/)
+	    {
+	      my $word = $1;
+	      warn_forbidden ("$ARGV[$#ARGV]:$.", $word, %forbidden);
+	      delete $prohibited{$word};
+	      # If we're done, exit.
+	      return
+		if ! %prohibited;
+	      $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+	    }
+	}
+    }
+  warn_forbidden ("$output:$prohibited{$_}", $_, %forbidden)
+    foreach (sort { $prohibited{$a} <=> $prohibited{$b} } keys %prohibited);
+}
+
+
+## --------------------- ##
+## Handling the traces.  ##
+## --------------------- ##
+
+
+# $M4_MACRO
+# trace_format_to_m4 ($FORMAT)
+# ----------------------------
+# Convert a trace $FORMAT into a M4 trace processing macro's body.
+sub trace_format_to_m4 ($)
+{
+  my ($format) = @_;
+  my $underscore = $_;
+  my %escape = (# File name.
+		'f' => '$1',
+		# Line number.
+		'l' => '$2',
+		# Depth.
+		'd' => '$3',
+		# Name (also available as $0).
+		'n' => '$4',
+		# Escaped dollar.
+		'$' => '$');
+
+  my $res = '';
+  $_ = $format;
+  while ($_)
+    {
+      # $n -> $(n + 4)
+      if (s/^\$(\d+)//)
+	{
+	  $res .= "\$" . ($1 + 4);
+	}
+      # $x, no separator given.
+      elsif (s/^\$([fldn\$])//)
+	{
+	  $res .= $escape{$1};
+	}
+      # $.x or ${sep}x.
+      elsif (s/^\$\{([^}]*)\}([@*%])//
+	    || s/^\$(.?)([@*%])//)
+	{
+	  # $@, list of quoted effective arguments.
+	  if ($2 eq '@')
+	    {
+	      $res .= ']at_at([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $*, list of unquoted effective arguments.
+	  elsif ($2 eq '*')
+	    {
+	      $res .= ']at_star([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $%, list of flattened unquoted effective arguments.
+	  elsif ($2 eq '%')
+	    {
+	      $res .= ']at_percent([' . ($1 ? $1 : ':') . '], $@)[';
+	    }
+	}
+      elsif (/^(\$.)/)
+	{
+	  error "invalid escape: $1";
+	}
+      else
+	{
+	  s/^([^\$]+)//;
+	  $res .= $1;
+	}
+    }
+
+  $_ = $underscore;
+  return '[[' . $res . ']]';
+}
+
+
+# handle_traces($REQ, $OUTPUT, %TRACE)
+# ------------------------------------
+# We use M4 itself to process the traces.  But to avoid name clashes when
+# processing the traces, the builtins are disabled, and moved into `at_'.
+# Actually, all the low level processing macros are in `at_' (and `_at_').
+# To avoid clashes between user macros and `at_' macros, the macros which
+# implement tracing are in `AT_'.
+#
+# Having $REQ is needed to neutralize the macros which have been traced,
+# but are not wanted now.
+sub handle_traces ($$%)
+{
+  my ($req, $output, %trace) = @_;
+
+  verb "formatting traces for `$output': " . join (', ', sort keys %trace);
+
+  # Processing the traces.
+  my $trace_m4 = new Autom4te::XFile ("> " . open_quote ("$tmp/traces.m4"));
+
+  $_ = <<'EOF';
+  divert(-1)
+  changequote([, ])
+  # _at_MODE(SEPARATOR, ELT1, ELT2...)
+  # ----------------------------------
+  # List the elements, separating them with SEPARATOR.
+  # MODE can be:
+  #  `at'       -- the elements are enclosed in brackets.
+  #  `star'     -- the elements are listed as are.
+  #  `percent'  -- the elements are `flattened': spaces are singled out,
+  #                and no new line remains.
+  define([_at_at],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[[$2]]],
+	     [[[$2]][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_percent],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [at_flatten([$2])],
+	     [at_flatten([$2])[$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_star],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[$2]],
+	     [[$2][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  # FLATTEN quotes its result.
+  # Note that the second pattern is `newline, tab or space'.  Don't lose
+  # the tab!
+  define([at_flatten],
+  [at_patsubst(at_patsubst([[[$1]]], [\\\n]), [[\n\t ]+], [ ])])
+
+  define([at_args],    [at_shift(at_shift(at_shift(at_shift(at_shift($@)))))])
+  define([at_at],      [_$0([$1], at_args($@))])
+  define([at_percent], [_$0([$1], at_args($@))])
+  define([at_star],    [_$0([$1], at_args($@))])
+
+EOF
+  s/^  //mg;s/\\t/\t/mg;s/\\n/\n/mg;
+  print $trace_m4 $_;
+
+  # If you trace `define', then on `define([m4_exit], defn([m4exit]))' you
+  # will produce
+  #
+  #    AT_define([m4sugar.m4], [115], [1], [define], [m4_exit], <m4exit>)
+  #
+  # Since `<m4exit>' is not quoted, the outer m4, when processing
+  # `trace.m4' will exit prematurely.  Hence, move all the builtins to
+  # the `at_' name space.
+
+  print $trace_m4 "# Copy the builtins.\n";
+  map { print $trace_m4 "define([at_$_], defn([$_]))\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+  print $trace_m4 "# Disable them.\n";
+  map { print $trace_m4 "at_undefine([$_])\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+
+  # Neutralize traces: we don't want traces of cached requests (%REQUEST).
+  print $trace_m4
+   "## -------------------------------------- ##\n",
+   "## By default neutralize all the traces.  ##\n",
+   "## -------------------------------------- ##\n",
+   "\n";
+  print $trace_m4 "at_define([AT_$_], [at_dnl])\n"
+    foreach (sort keys %{$req->macro});
+  print $trace_m4 "\n";
+
+  # Implement traces for current requests (%TRACE).
+  print $trace_m4
+    "## ------------------------- ##\n",
+    "## Trace processing macros.  ##\n",
+    "## ------------------------- ##\n",
+    "\n";
+  foreach (sort keys %trace)
+    {
+      # Trace requests can embed \n.
+      (my $comment = "Trace $_:$trace{$_}") =~ s/^/\# /;
+      print $trace_m4 "$comment\n";
+      print $trace_m4 "at_define([AT_$_],\n";
+      print $trace_m4 trace_format_to_m4 ($trace{$_}) . ")\n\n";
+    }
+  print $trace_m4 "\n";
+
+  # Reenable output.
+  print $trace_m4 "at_divert(0)at_dnl\n";
+
+  # Transform the traces from m4 into an m4 input file.
+  # Typically, transform:
+  #
+  # | m4trace:configure.ac:3: -1- AC_SUBST([exec_prefix], [NONE])
+  #
+  # into
+  #
+  # | AT_AC_SUBST([configure.ac], [3], [1], [AC_SUBST], [exec_prefix], [NONE])
+  #
+  # Note that the file name might include colons, if under DOS
+  # for instance, so we don't use `[^:]+'.
+  my $traces = new Autom4te::XFile ("< " . open_quote ($tcache . $req->id));
+  while ($_ = $traces->getline)
+    {
+      # Trace with arguments, as the example above.  We don't try
+      # to match the trailing parenthesis as it might be on a
+      # separate line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^(]+)\((.*)$}
+       {AT_$4([$1], [$2], [$3], [$4], $5};
+      # Traces without arguments, always on a single line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^)]*)\n$}
+       {AT_$4([$1], [$2], [$3], [$4])\n};
+      print $trace_m4 "$_";
+    }
+  $trace_m4->close;
+
+  my $in = new Autom4te::XFile ("$m4 " . shell_quote ("$tmp/traces.m4") . " |");
+  my $out = new Autom4te::XFile ("> " . open_quote ($output));
+
+  # This is dubious: should we really transform the quadrigraphs in
+  # traces?  It might break balanced [ ] etc. in the output.  The
+  # consensus seems to be that traces are more useful this way.
+  while ($_ = $in->getline)
+    {
+      # It makes no sense to try to transform __oline__.
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+      s/\@&t\@//g;
+      print $out $_;
+    }
+}
+
+
+# $BOOL
+# up_to_date ($REQ)
+# -----------------
+# Are the cache files of $REQ up to date?
+# $REQ is `valid' if it corresponds to the request and exists, which
+# does not mean it is up to date.  It is up to date if, in addition,
+# its files are younger than its dependencies.
+sub up_to_date ($)
+{
+  my ($req) = @_;
+
+  return 0
+    if ! $req->valid;
+
+  my $tfile = $tcache . $req->id;
+  my $ofile = $ocache . $req->id;
+
+  # We can't answer properly if the traces are not computed since we
+  # need to know what other files were included.  Actually, if any of
+  # the cache files is missing, we are not up to date.
+  return 0
+    if ! -f $tfile || ! -f $ofile;
+
+  # The youngest of the cache files must be older than the oldest of
+  # the dependencies.
+  my $tmtime = mtime ($tfile);
+  my $omtime = mtime ($ofile);
+  my ($file, $mtime) = ($tmtime < $omtime
+			? ($ofile, $omtime) : ($tfile, $tmtime));
+
+  # We depend at least upon the arguments.
+  my @dep = @ARGV;
+
+  # stdin is always out of date.
+  if (grep { $_ eq '-' } @dep)
+    { return 0 }
+
+  # Files may include others.  We can use traces since we just checked
+  # if they are available.
+  handle_traces ($req, "$tmp/dependencies",
+		 ('include'    => '$1',
+		  'm4_include' => '$1'));
+  my $deps = new Autom4te::XFile ("< " . open_quote ("$tmp/dependencies"));
+  while ($_ = $deps->getline)
+    {
+      chomp;
+      my $file = find_file ("$_?", @include);
+      # If a file which used to be included is no longer there, then
+      # don't say it's missing (it might no longer be included).  But
+      # of course, that causes the output to be outdated (as if the
+      # time stamp of that missing file was newer).
+      return 0
+	if ! $file;
+      push @dep, $file;
+    }
+
+  # If $FILE is younger than one of its dependencies, it is outdated.
+  return up_to_date_p ($file, @dep);
+}
+
+
+## ---------- ##
+## Freezing.  ##
+## ---------- ##
+
+# freeze ($OUTPUT)
+# ----------------
+sub freeze ($)
+{
+  my ($output) = @_;
+
+  # When processing the file with diversion disabled, there must be no
+  # output but comments and empty lines.
+  my $result = xqx ("$m4"
+		    . ' --fatal-warning'
+		    . join (' --include=', '', map { shell_quote ($_) } @include)
+		    . ' --define=divert'
+		    . " " . files_to_options (@ARGV)
+		    . ' </dev/null');
+  $result =~ s/#.*\n//g;
+  $result =~ s/^\n//mg;
+
+  fatal "freezing produced output:\n$result"
+    if $result;
+
+  # If freezing produces output, something went wrong: a bad `divert',
+  # or an improper paren etc.
+  xsystem ("$m4"
+	   . ' --fatal-warning'
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . " --freeze-state=" . shell_quote ($output)
+	   . " " . files_to_options (@ARGV)
+	   . ' </dev/null');
+}
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('am4t');
+load_configuration ($ENV{'AUTOM4TE_CFG'} || "$pkgdatadir/autom4te.cfg");
+load_configuration ("$ENV{'HOME'}/.autom4te.cfg")
+  if exists $ENV{'HOME'} && -f "$ENV{'HOME'}/.autom4te.cfg";
+load_configuration (".autom4te.cfg")
+  if -f ".autom4te.cfg";
+parse_args;
+
+# Freezing does not involve the cache.
+if ($freeze)
+  {
+    freeze ($output);
+    exit $exit_code;
+  }
+
+# We need our cache directory.  Don't fail with parallel creation.
+if (! -d "$cache")
+  {
+    mkdir "$cache", 0755
+      or -d "$cache"
+      or fatal "cannot create $cache: $!";
+  }
+
+# Open the index for update, and lock it.  autom4te handles several
+# files, but the index is the first and last file to be updated, so
+# locking it is sufficient.
+$icache_file = new Autom4te::XFile $icache, O_RDWR|O_CREAT;
+$icache_file->lock (LOCK_EX)
+  if ($flock_implemented eq "yes");
+
+# Read the cache index if available and older than autom4te itself.
+# If autom4te is younger, then some structures such as C4che might
+# have changed, which would corrupt its processing.
+Autom4te::C4che->load ($icache_file)
+  if -f $icache && mtime ($icache) > mtime ($0);
+
+# Add the new trace requests.
+my $req = Autom4te::C4che->request ('input' => \@ARGV,
+				    'path'  => \@include,
+				    'macro' => [keys %trace, @preselect]);
+
+# If $REQ's cache files are not up to date, or simply if the user
+# discarded them (-f), declare it invalid.
+$req->valid (0)
+  if $force || ! up_to_date ($req);
+
+# We now know whether we can trust the Request object.  Say it.
+verb "the trace request object is:\n" . $req->marshall;
+
+# We need to run M4 if (i) the user wants it (--force), or (ii) $REQ is
+# invalid.
+handle_m4 ($req, keys %{$req->macro})
+  if $force || ! $req->valid;
+
+# Issue the warnings each time autom4te was run.
+my $separator = "\n" . ('-' x 25) . " END OF WARNING " . ('-' x 25) . "\n\n";
+handle_traces ($req, "$tmp/warnings",
+	       ('_m4_warn' => "\$1::\$f:\$l::\$2::\$3$separator"));
+# Swallow excessive newlines.
+for (split (/\n*$separator\n*/o, contents ("$tmp/warnings")))
+{
+  # The message looks like:
+  # | syntax::input.as:5::ouch
+  # | ::input.as:4: baz is expanded from...
+  # | input.as:2: bar is expanded from...
+  # | input.as:3: foo is expanded from...
+  # | input.as:5: the top level
+  # In particular, m4_warn guarantees that either $stackdump is empty, or
+  # it consists of lines where only the last line ends in "top level".
+  my ($cat, $loc, $msg, $stacktrace) = split ('::', $_, 4);
+  msg $cat, $loc, "warning: $msg",
+    partial => ($stacktrace =~ /top level$/) + 0;
+  for (split /\n/, $stacktrace)
+    {
+      my ($loc, $trace) = split (': ', $_, 2);
+      msg $cat, $loc, $trace, partial => ($trace !~ /top level$/) + 0;
+    }
+}
+
+# Now output...
+if (%trace)
+  {
+    # Always produce traces, since even if the output is young enough,
+    # there is no guarantee that the traces use the same *format*
+    # (e.g., `-t FOO:foo' and `-t FOO:bar' are both using the same M4
+    # traces, hence the M4 traces cache is usable, but its formatting
+    # will yield different results).
+    handle_traces ($req, $output, %trace);
+  }
+else
+  {
+    # Actual M4 expansion, if the user wants it, or if $output is old
+    # (STDOUT is pretty old).
+    handle_output ($req, $output)
+      if $force || mtime ($output) < mtime ($ocache . $req->id);
+  }
+
+# If we ran up to here, the cache is valid.
+$req->valid (1);
+Autom4te::C4che->save ($icache_file);
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
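A hedged sketch of a direct autom4te trace request, of the kind the autoconf and autoheader wrappers above assemble internally; the traced macro, cache directory, and output file are illustrative:

    autom4te --language=Autoconf --cache=autom4te.cache \
             --trace='AC_SUBST:$f:$l: $1' configure.ac > subst-report.txt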
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/automake b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/automake
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/automake
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns out other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing single-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sorts of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN='[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
+
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
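+
+# For example (illustration only, not upstream text): prefixes such as
+# 'bin', 'pkglib' and 'myexecbin' match this pattern and are therefore
+# handled during 'install-exec', while 'data' or 'man1' do not match and
+# fall under 'install-data'.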
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location to a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINK/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps languages names onto objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# Maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# on $KNOWN_EXTENSIONS_PATTERN which is used when parsing
+# the input am file.
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
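+
+# Illustrative walk-through (not part of upstream automake): a Makefile.am
+# line such as
+#
+#   SUFFIXES = .tex .dvi
+#
+# reaches the trigger with $type eq '' and $value eq '.tex .dvi', so the
+# hook registered above ends up calling accept_extensions ('.tex', '.dvi').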
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries = ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
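+
+# Illustrative example (not upstream text): subst ('AMDEP_TRUE') returns
+# the string '@AMDEP_TRUE@', i.e. the spelling used for that substitution
+# in the generated Makefile.in.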
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with non-strictly-increasing paths, e.g., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
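+
+# Worked examples, restating the documentation above (illustration only):
+#
+#   backname ('src/foo')     # => '../..'
+#   backname ('src/../lib')  # => '..'
+#   backname ('.')           # => '.'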
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For  silent rules, setup VAR and dispatcher, to expand to
+# VAL-IF-SILENT if silent, to VAL-IF-VERBOSE (defaulting to
+# empty) if not.
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of %SILENT%: variable to expand to '@' when silent.
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
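+
+# Sketch of the resulting Makefile.in text (illustrative; assumes the 'GEN'
+# tag that handle_silent defines unconditionally below):
+#
+#   AM_V_GEN = $(am__v_GEN_@AM_V@)
+#   am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+#   am__v_GEN_0 = @echo "  GEN     " $@;
+#   am__v_GEN_1 =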
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
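+
+# Usage sketch for Makefile.am authors (illustrative, not upstream text):
+# because $(AM_V_P) expands to 'false' in silent mode and to ':' otherwise,
+# a recipe line like
+#
+#   $(AM_V_P) && echo "building $@ verbosely" || :
+#
+# prints its message only in verbose builds.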
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
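+
+# Illustrative example (assumption, not upstream text): for a variable FOO
+# whose value is 'a b' under one condition and 'c' under another,
+# shadow_unconditionally ('FOO', $where) defines
+#
+#   am__FOO_DIST = a b c
+#
+# and returns '$(am__FOO_DIST)'; for an unconditionally defined FOO it
+# simply returns '$(FOO)'.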
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Make sure each variable VAR in @LIST does not exist, suggest using AM_VAR
+# otherwise.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
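+
+# Illustrative example (assumption, not upstream text): given
+#
+#   bin_PROGRAMS = foo @EXTRA_PROGS@ bar$(EXEEXT)
+#
+# a call such as  append_exeext { 1 } 'bin_PROGRAMS'  rewrites the list to
+# 'foo$(EXEEXT) @EXTRA_PROGS@ bar$(EXEEXT)': the @substitution@ and the
+# entry already ending in $(EXEEXT) are left untouched.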
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE the source file to transform
+#   %TRANSFORM contains extras arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # Since the next major version of automake (2.0) will
+                  # make mandatory the behaviour that so far is only
+                  # activated by the 'subdir-objects' option, it's better
+                  # if we start warning users who are not using that option.
+                  # As suggested by Peter Johansson, we strive to avoid
+                  # the warning when it would be irrelevant, i.e., if
+                  # all source files sit in "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least a source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+of the corresponding sources.
+You are advised to start using the 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inference rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of four values followed by additional transform flags for
+		# file_contents.  The four values are the derived flag prefix
+		# (e.g. for 'foo_CFLAGS', it is 'foo'), the name of the
+		# source file, the base name of the output file, and
+		# the extension for the object file.
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the source they are
+	    # derived from is not.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  Not only is this more
+                # efficient than listing the object files to be removed
+                # individually (which would cause an 'rm' invocation for
+                # each of them -- very inefficient, see bug#10697), it
+                # also avoids leaving stale object files behind in the
+                # subdirectory whenever a source file there is removed
+                # or renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
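+        # For example (illustrative): an object 'sub/dir/foo.$(OBJEXT)'
+        # is recorded as the dependency file 'sub/dir/$(DEPDIR)/foo.Po',
+        # and a libtool object 'sub/dir/foo.lo' as
+        # 'sub/dir/$(DEPDIR)/foo.Plo'.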
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# --------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated and returned).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# The result $LINKER is a boolean: true if a linker is needed to deal
+# with the objects.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.$(OBJEXT)' or '.lo')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
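+	# For example (illustrative): with 'AM_DEFAULT_SOURCE_EXT = .cpp'
+	# in Makefile.am, the default source for libfoo.la becomes
+	# 'libfoo.cpp' instead of 'libfoo.c'.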
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
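+#
+# For illustration (hypothetical Makefile.am fragment):
+#     foo_LDADD = libbar.a @LIBOBJS@ -lm
+# 'libbar.a' is copied into foo_DEPENDENCIES, '@LIBOBJS@' triggers
+# handle_LIBOBJS below (and makes this function return 1), while '-lm'
+# is skipped silently, since '-l' and '-L' flags are allowed here.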
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlopreopen
+	       # libraries or programs, for which -dlopen and -dlpreopen
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
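+  # For example (illustrative): with AC_CONFIG_LIBOBJ_DIR([lib]) in
+  # configure.ac and the current Makefile.am in 'src/', this defines
+  # LIBOBJDIR as '../lib/' (assuming 'subdir-objects' is in effect).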
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clear LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
+
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
+
+# Canonicalize the input parameter.
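+# For example (illustrative): canonicalize ('libfoo-bar.la') returns
+# 'libfoo_bar_la', since every character outside [A-Za-z0-9_@] is
+# turned into '_'.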
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
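+# For example (illustrative): check_canonical_spelling ('libfoo.la',
+# '_LIBADD') returns 'libfoo_la' and rejects any definition of
+# 'libfoo.la_LIBADD', telling the user to write 'libfoo_la_LIBADD'.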
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+   return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use `-I.@am__isrc@`
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
+
+
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we look at this.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true there is only one condition and
+	      # there is no point defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to cleanup .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+# See if any _SOURCES variables were misspelled.
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
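+#
+# For instance (illustrative), a manual containing the lines
+#     @setfilename foo.info
+#     @include version.texi
+# makes this function return ('foo.info', 'version.texi').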
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice), while the latter need to be output for
+  # each Texinfo source.
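+  #
+  # For example (illustrative): with 'info_TEXINFOS = doc/foo.texi' the
+  # output 'doc/foo.info' lies in a subdirectory, so the "specific"
+  # rules are used and the extra makeinfo include flags become
+  # '-I doc -I $(srcdir)/doc'.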
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuses Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
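+  # For example (illustrative): 'DISTCLEANFILES = texinfo*.info*' would
+  # contribute the pattern 'texinfo[^/]*\.info[^/]*' to this regex.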
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files is important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #	  |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files always get built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date source-tree copy of the .info file existed,
+      #       make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) are triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
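+
+# Editorial example (not part of the imported source): the nested loops
+# above recognize man page variables built from an optional 'notrans_'
+# prefix, an optional 'dist_'/'nodist_' prefix, and a section letter or
+# digit.  Hypothetical Makefile.am fragments they would accept:
+#
+#   man_MANS               = foo.1 bar.8   # sections taken from the suffixes
+#   man5_MANS              = foo.conf.5    # explicit section 5
+#   notrans_dist_man8_MANS = mydaemon.8    # untransformed name, distributed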
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents for no reason.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
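+
+# Editorial note: the reserved-name check above is case-insensitive
+# ('\Q...\E' inside an /i match), so a hypothetical
+#
+#   SUBDIRS = Aux
+#
+# would be flagged just like 'aux', since such device names are reserved
+# on W32 and DOS regardless of case.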
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check all items in variable $VARIABLE as directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
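+
+# Editorial example, assuming 'bindir' is AC_SUBSTed (and thus known to
+# Automake) while 'mystery' is not:
+#
+#   substitute_ac_subst_variables ('${bindir}/prog ${mystery}')
+#     # => '@bindir@/prog ${mystery}'
+#
+# Unknown tokens are left as-is, so later "=~ /\$/" checks can still
+# detect unresolved shell variables.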
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
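+
+# Editorial example: with $relative_dir set to 'sub', the rewrite above
+# behaves like
+#
+#   prepend_srcdir ('sub/foo.in', 'other/bar.in')
+#     # => ('$(srcdir)/foo.in', '$(top_srcdir)/other/bar.in')
+#
+# i.e. same-directory inputs become $(srcdir)-relative and everything
+# else is anchored at $(top_srcdir).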
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUTS[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by other AC_CONFIG_FOOs.
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
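+
+# Editorial note: for a hypothetical
+# AC_CONFIG_FILES(sub/Makefile:sub/Makefile.in:common.in) processed in
+# directory 'sub', the loop above would yield '$(srcdir)/Makefile.in' and
+# '$(top_srcdir)/common.in' as dependencies and distribute both, whereas an
+# input that is itself the output of another AC_CONFIG_FOO would be
+# referenced through $(top_builddir) and not distributed.  Inputs containing
+# shell variables are skipped entirely.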
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_conf_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %MAKE_LIST.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
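+
+# Editorial example: with a hypothetical 'SUFFIXES = .x' in Makefile.am and
+# '.c' and '.o' known from inference rules, the code above emits roughly
+#
+#   .SUFFIXES:
+#   .SUFFIXES: .x .c .o
+#
+# user suffixes first (unsorted), then the remaining known suffixes sorted,
+# so user-provided suffix rules keep priority.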
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by changing the order of dependencies to the "all" because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If $file is a variable, make sure we don't call 'rm -f' without args.
+      $rm = "test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
+
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
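+
+# Editorial example: sorting with target_cmp keeps dot-targets last, e.g.
+#
+#   sort target_cmp qw(.PHONY all-am .MAKE install-am)
+#     # => ('all-am', 'install-am', '.MAKE', '.PHONY')
+#
+# Ordinary targets stay in lexical order; '.MAKE' and '.PHONY' end up at
+# the end of the gathered-target output.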
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing about this guy, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
+  # The "test driver" program, deputed to handle tests protocol used by
+  # test scripts.  By default, it's assumed that no protocol is used, so
+  # we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
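+
+# Editorial examples: '.test' and '.sh' are accepted because they match
+# /^\.[a-zA-Z_][a-zA-Z0-9_]*$/; the literal value of subst ('EXEEXT'),
+# i.e. '@EXEEXT@', is accepted when EXEEXT is AC_SUBSTed; whereas
+# '.foo-bar' or a suffix lacking the leading dot would be rejected.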
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
+	  my $nhelper=1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
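+
+# Editorial note: with the default TEST_EXTENSIONS and an AC_SUBSTed EXEEXT,
+# the helper-variable chain built in the loop above amounts to roughly
+#
+#   am__test_logs1 = $(TESTS:=.log)
+#   am__test_logs2 = $(am__test_logs1:@EXEEXT@.log=.log)
+#   TEST_LOGS      = $(am__test_logs2:.test.log=.log)
+#
+# i.e. every test name is mapped to a '.log' file whatever its suffix.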
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
+
+  return ($output, @inputs);
+}
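+
+# Editorial examples:
+#
+#   split_config_file_spec ('Makefile')
+#     # => ('Makefile', 'Makefile.in')
+#   split_config_file_spec ('lib/Makefile:top.in:bot.in')
+#     # => ('lib/Makefile', 'top.in', 'bot.in')
+#
+# A missing input list defaults to "$output.in".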
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as Makefile:top.in:mid.in:bot.in.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
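+
+# Editorial example: assuming only 'mid.am' exists on disk,
+#
+#   locate_am ('top.in', 'mid.in', 'bot.in')   # => 'mid.in'
+#
+# and undef is returned when none of the *.in inputs has a matching *.am.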
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
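+
+# Editorial note: for a hypothetical
+# AC_CONFIG_FILES([Makefile sub/output:sub/input.in]), the loop above
+# records the input 'Makefile.in' in %make_list (assuming Makefile.am
+# exists, so automake itself must generate Makefile.in), while
+# 'sub/output:sub/input.in' lands in @other_input_files because no matching
+# *.am is found; in both cases the output's location (and any enclosing
+# AM_COND_IF condition) is recorded for later diagnostics.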
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
+  @cond_stack = ();
+  my $where;
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-argument forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 2.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate a version mismatch
+		     # to the 'missing' script.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: the return value is LANG_SUBDIR if the resulting object
+# file should go into a subdirectory when the source file does, LANG_PROCESS
+# if the file is to be dealt with, and LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs
+	      # distribute the generated files
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards),
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code, do not run C compiler
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
+
+# Add output rules to invoke valac and create stamp file as a witness
+# to handle multiple outputs. This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
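+
+# Illustrative sketch (hypothetical names): if a program 'foo' is built
+# from 'parse.y' and 'foo_YFLAGS' (or plain 'YFLAGS') contains '-d', the
+# hook above emits a rule roughly of the form
+#
+#   parse.h: parse.c
+#	@if test ! -f $@; then rm -f parse.c; else :; fi
+#	@if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) parse.c; else :; fi
+#
+# and distributes parse.h unless the .y source was listed in a nodist_
+# variable.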
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
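+
+# For illustration (hypothetical input): given objects linked from both
+# C++ and Fortran 77 sources, resolve_linker (CXXLINK => 1, F77LINK => 1)
+# returns 'CXXLINK', because CXXLINK precedes F77LINK in the precedence
+# list above; with no known linker at all, the fallback is plain 'LINK'.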
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
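+
+# A minimal sketch of a registration call (hypothetical language; only
+# attributes referenced above are shown, a real call passes more):
+#
+#   register_language ('name' => 'foo',
+#                      'linker' => 'FOOLINK',
+#                      'extensions' => ['.foo'],
+#                      'output_extensions' => sub { ('.$(OBJEXT)', '.lo') });
+#
+# This fills %extension_map and %languages, records FOOLINK in
+# %link_languages, and registers the '.foo' -> '.$(OBJEXT)' and
+# '.foo' -> '.lo' suffix rules.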
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
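+
+# For example (hypothetical suffix rule): if a Makefile.am declares a
+# '.zoo.c:' suffix rule, derive_suffix ('.zoo', 'o') follows the chain
+# '.zoo' -> '.c' and stops at '.c', an extension Automake knows how to
+# handle; if no further link exists, the extension reached so far is
+# returned.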
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
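+
+# For illustration (hypothetical condition name):
+#
+#   make_conditional_string ('',  'DEBUG');   # gives "DEBUG_TRUE"
+#   make_conditional_string ('!', 'DEBUG');   # gives "DEBUG_FALSE"
+#
+# while the literal conditions TRUE and FALSE are not suffixed.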
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; this should be the empty
+# string to define the variable unconditionally.  The third argument
+# is a list holding the values to use for the variable.  The value is
+# pretty printed in the output file.
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR, whose content is the list of file names composed of
+# each @BASENAME and the $EXTENSION.
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
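+
+# For example (hypothetical values): with @bases = ('foo', 'bar'),
+#
+#   define_files_variable ('STAMPS', @bases, 'stamp', INTERNAL);
+#
+# defines 'STAMPS = foo.stamp bar.stamp' (the \@ prototype passes the
+# named array by reference).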
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution by the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of 'AM_$flag' in the link
+  # command into '${derived}_$flag' if it exists.
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
+  # If the computed command is the same as the generic command, use
+  # the command linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
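+
+# For illustration (hypothetical target): for a program 'foo' built from
+# C++ sources with 'foo_LDFLAGS' set, the '$(AM_LDFLAGS)' in the generic
+# C++ link command is rewritten to '$(foo_LDFLAGS)' and the result is
+# defined as 'foo_LINK'; if no per-target flag variable is set, the
+# generic 'CXXLINK' variable is reused instead.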
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a backslash.
+# Might modify $LINE.
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
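+
+# For example (hypothetical input): a line reading "foo = bar \" followed
+# by a newline makes check_trailing_slash return 1; "foo = bar \ "
+# (whitespace after the backslash) additionally triggers the 'syntax'
+# warning and is rewritten so the backslash is again the last character
+# on the line.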
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
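+	# For example (hypothetical layout): while reading 'lib/Makefile.am'
+	# included from the top-level Makefile.am, '%reldir%/foo.c' becomes
+	# 'lib/foo.c' and '%canon_reldir%_SOURCES' becomes 'lib_SOURCES';
+	# at the top level the prefixes are simply dropped.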
+
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format they were input.  However, POSIX compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover, nobody
+	    # would really write such long lines by hand, since it is
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tool, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
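+
+# For example (hypothetical input): flatten ("a \\\n   b\tc ") returns
+# "a b c" -- escaped newlines are removed, runs of whitespace are
+# collapsed into single spaces, and leading/trailing blanks are stripped.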
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated with $KEY in %PAIRS, as used on $TOKEN
+# (which should be ?KEY? or any of the special %% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($TOKEN, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
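+
+# For illustration (hypothetical %PAIRS): with
+#   %transform = (PRIMARY => 'HEADERS', INSTALL => 1, DIST => 0),
+# the requests expand as follows:
+#   transform ('%PRIMARY%',  %transform)   => 'HEADERS'
+#   transform ('%?DIST%',    %transform)   => 'FALSE'
+#   transform ('?INSTALL?',  %transform)   => ''     (line is kept)
+#   transform ('?!INSTALL?', %transform)   => '##%'  (line is later stripped)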
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., recognition of
+# rule declarations or of make variable definitions).
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides we don't need more than two consecutive new-lines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
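+
+# A small sketch of the substitution (hypothetical fragment): an input
+# line such as
+#
+#   ?!INSTALL?%DIR%dir = $(prefix)/%DIR%
+#
+# processed with DIR => 'foo' and INSTALL => 1 first becomes
+# '##%foodir = $(prefix)/foo' and is then removed by the ##%-stripping
+# above; with INSTALL => 0 the '?!INSTALL?' marker expands to the empty
+# string and the line survives as 'foodir = $(prefix)/foo'.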
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If we are a rule, eat lines for as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If we are a comment, eat as many comment lines as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known. $IS_AM iff the caller is
+# reading an Automake file (as opposed to the user's Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate relationship from optional actions: the first
+	  # 'newline tab' not preceded by a backslash (continuation
+	  # line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several times (e.g., for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A not explicitly valid directory is allowed if Xdir is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
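+
+# For example (hypothetical Makefile.am contents): with
+#
+#   bin_PROGRAMS   = foo
+#   check_PROGRAMS = bar
+#
+# a call like am_primary_prefixes ('PROGRAMS', 0, 'bin', 'sbin',
+# 'libexec', 'noinst', 'check') returns ('bin', 'check'); and if a user
+# defines 'zardir' together with 'zar_DATA', the unusual 'zar' prefix is
+# accepted for the DATA primary as described above.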
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns a list of [$location, $value] pairs, where the $value's are
+# the values of all where_HOW variables, and each $location is the
+# associated location (the place where the parent variable was
+# defined).
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, with install-exec? (or install-data?).
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be configure time determined.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure. (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
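+
+# For illustration (directory names are hypothetical): with no
+# AC_CONFIG_AUX_DIR in configure.ac and an install-sh sitting in '.',
+# the code above ends up with
+#
+#   $config_aux_dir    = '.'
+#   $am_config_aux_dir = '$(top_srcdir)'
+#
+# whereas a configure.ac scan that recorded AC_CONFIG_AUX_DIR([build-aux])
+# would leave $config_aux_dir alone and yield
+#
+#   $am_config_aux_dir = '$(top_srcdir)/build-aux'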
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory as the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Verify that the required file exists in $DIRECTORY, or install it.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
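+
+# For illustration, the flags consulted above combine roughly as follows
+# (the missing file name is hypothetical):
+#
+#   automake                     # 'install-sh' missing => error message
+#   automake --add-missing       # symlink install-sh from $libdir, 'note' only
+#   automake -a --copy           # copy instead of symlinking
+#   automake -a --force-missing  # reinstall even if the file is already present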
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Verify that the required file exists in $DIRECTORY, or install it.
+# $MYSTRICT is the strictness level at which this file becomes required.
+# Worker threads may queue up the action to be serialized by the master
+# if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
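+
+# A sketch of the record consumed above, in the order produced by
+# queue_required_file_check_or_copy():
+#
+#   $dir,
+#   QUEUE_LOCATION, <serialized Location>   (or QUEUE_STRING, $where),
+#   $mystrict,
+#   <file count>, <file>, ...
+#
+# The QUEUE_CONF_FILE key itself has already been dequeued by the
+# dispatch loop in handle_makefiles_threaded().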
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
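+
+# As a sketch, for a (hypothetical) subdirectory 'sub/dir' the fragment
+# appended to $output_rules above reads (recipe lines tab-indented in the
+# real output):
+#
+#   sub/dir/$(am__dirstamp):
+#           @$(MKDIR_P) sub/dir
+#           @: > sub/dir/$(am__dirstamp)
+#
+# and the dirstamp file is scheduled for removal by 'make distclean'.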
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
+
+################################################################
+
+# Push a list of files onto '@dist_common'.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warning issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations.
+  #  * $force_generation is set.
+  #  * any of the output dependencies is younger than the output.
+  #  * the contents of the output is different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # Output's dependencies are split into two sets:
+  #  * dependencies which are also configure dependencies
+  #    These do not change from one Makefile.am to the next.
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
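+
+# Worked example of the layout above: with 10 names and 4 columns,
+# $rows becomes 3 and $rest 2, so the names are written column-major as
+#
+#   name0   name3   name6   name8
+#   name1   name4   name7   name9
+#   name2   name5
+#
+# i.e. the first $rest columns carry $rows entries and the remaining
+# columns carry one entry fewer.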
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning settings are now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than there are makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
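+
+# For example (a sketch; AUTOMAKE_JOBS is the only knob consulted here):
+#
+#   AUTOMAKE_JOBS=4 automake      # at most 4 worker threads
+#   AUTOMAKE_JOBS=many automake   # non-numeric => 0, i.e. serial handling
+#
+# and the value is additionally capped at the number of input Makefiles.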
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles; the message queues
+  # collect all serializations needed for the respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/automake-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/automake-1.14
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/automake-1.14
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns up other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing single-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sort of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN='[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
+
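+# For illustration (fragment names are hypothetical), $INCLUDE_PATTERN
+# accepts lines such as
+#
+#   include $(top_srcdir)/build/common.am
+#   include $(srcdir)/local.am
+#   include fragment.am
+#
+# while absolute paths and any other '$(...)' prefix fall through to the
+# "anything else" handling mentioned above.
+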
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location to a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINK/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps languages names onto objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# Maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# on $KNOWN_EXTENSIONS_PATTERN which is used when parsing
+# the input am file.
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
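+
+# For illustration (the suffixes are hypothetical): a Makefile.am line
+#
+#   SUFFIXES = .idl .c
+#
+# reaches the hook with $value = '.idl .c', so accept_extensions() is
+# handed ('.idl', '.c') at the very moment the assignment is parsed, and
+# the rest of the .am file is read with the updated
+# $KNOWN_EXTENSIONS_PATTERN.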
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries= ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
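+
+# For example, subst ('AMDEP_TRUE') returns 'AMDEP_TRUE' surrounded by
+# '@' characters, which config.status in the generated package later
+# replaces with the substitution's value.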
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with non strictly increasing paths, i.e., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For silent rules, set up the variable for NAME and its dispatcher,
+# expanding to VAL-IF-SILENT when silent and to VAL-IF-VERBOSE
+# (defaulting to empty) otherwise.
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
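+
+# Illustrative sketch: in a typical package, a call such as
+# define_verbose_var ('lt', '--silent') (see define_verbose_libtool below)
+# ends up in the generated Makefile, after configure's substitutions,
+# roughly as
+#   AM_V_lt = $(am__v_lt_$(V))
+#   am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY))
+#   am__v_lt_0 = --silent
+#   am__v_lt_1 =
+# so that 'make V=0' selects the silent value and 'make V=1' the verbose
+# (empty) one.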
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of %SILENT%: variable to expand to '@' when silent.
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
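+
+# For instance, define_verbose_tagvar ('GEN') sets the silent value of
+# AM_V_GEN (i.e. am__v_GEN_0) to
+#   @echo "  GEN     " $@;
+# with the tag name padded to eight columns (after two leading spaces) so
+# that silent-rules output lines up.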
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
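+
+# Illustration (hypothetical variable names): if 'foo_SOURCES' is defined
+# only under conditionals, e.g. as 'bar.c' in one condition and 'baz.c' in
+# another, this defines am__foo_SOURCES_DIST = bar.c baz.c and returns
+# '$(am__foo_SOURCES_DIST)'; otherwise it simply returns '$(foo_SOURCES)'.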
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Make sure each variable VAR in @LIST is not overridden in the Makefile;
+# suggest using AM_VAR instead if it is.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
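+
+# Illustration: if 'CFLAGS' is among the variables being checked and a
+# Makefile.am sets 'CFLAGS = -O2', the 'gnu' warning above fires,
+# suggesting 'AM_CFLAGS' instead.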
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
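+
+# Illustration (hypothetical call): append_exeext { 1 } 'bin_PROGRAMS'
+# would rewrite an entry 'foo' into 'foo$(EXEEXT)', while entries already
+# ending in $(EXEEXT) and pure @substitution@ entries are left untouched.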
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE the source file to transform
+#   %TRANSFORM contains extra arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # Since the next major version of automake (2.0) will
+                  # make the behaviour that is so far only activated by the
+                  # 'subdir-objects' option mandatory, it's better if we
+                  # start warning users who are not using that option.
+                  # As suggested by Peter Johansson, we strive to avoid
+                  # the warning when it would be irrelevant, i.e., if
+                  # all source files sit in "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least one source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+as the corresponding sources.
+You are advised to start using the 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inference rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of four values followed by additional transform flags for
+		# file_contents.  The four values are the derived flag prefix
+		# (e.g. for 'foo_CFLAGS', it is 'foo'), the name of the
+		# source file, the base name of the output file, and
+		# the extension for the object file.
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the source they are
+	    # derived from is not.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  This is more efficient
+                # than listing the object files to be removed individually
+                # (which would cause an 'rm' invocation for each of them --
+                # very inefficient, see bug#10697), and removing them
+                # individually would also leave stale object files in the
+                # subdirectory whenever a source file there is removed or
+                # renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
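+        # For instance, an object 'sub/foo.$(OBJEXT)' is recorded as the
+        # dependency file 'sub/$(DEPDIR)/foo.Po'.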
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# ---------------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated and returned).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# Result is $LINKER, a boolean that is true if a linker is needed to deal
+# with the objects.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.o' or '.lo')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlopreopen
+	       # libraries or programs, for which -dlopen and -dlpreopen
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
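+
+# Illustration (hypothetical names): for a 'foo_LDADD' listing '-lm', the
+# LIBOBJS substitution, and 'bar.o', the generated foo_DEPENDENCIES keeps
+# the LIBOBJS reference and 'bar.o'; '-lm' is silently skipped, any other
+# configure substitution is dropped, and a flag such as '-static' is
+# diagnosed as belonging in 'foo_LDFLAGS'.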
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clean LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
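+
+# Illustration: with 'subdir-objects' enabled, $config_libobj_dir = 'lib'
+# and $relative_dir = 'src', the code above sets LIBOBJDIR to '../lib/'
+# and returns that prefix for the dependency files.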
+
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
+
+# Canonicalize the input parameter.
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
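+
+# Illustration: every character outside [A-Za-z0-9_@] becomes '_', so
+# canonicalize ('libfoo-2.0.la') yields 'libfoo_2_0_la', which is what
+# allows variable names such as 'libfoo_2_0_la_SOURCES'.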
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+   return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use '-I.@am__isrc@'
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
+
+
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we look at this.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true there is only one condition and
+	      # there is no point defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to cleanup .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+# See if any _SOURCES variables were misspelled.
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
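+# Illustration added by the editor (hypothetical fragment, not part of the
+# automake logic): a Makefile.am containing
+#
+#    bin_PROGRAMS = foo
+#    fooo_SOURCES = foo.c
+#
+# defines no program or library with the canonical name 'fooo', so
+# check_typos emits the "possible typo" diagnostic for 'fooo_SOURCES'.
+# Variables that come from configure, and definitions that were actually
+# consumed, are exempt.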
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
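+# Illustration (editor's sketch with hypothetical file names): a source
+# doc/foo.texi containing
+#
+#    @setfilename foo.info
+#    @include version.texi
+#
+# makes scan_texinfo_file return ('foo.info', 'version.texi'); an output
+# name without any suffix gets an 'obsolete' warning, and one with a
+# suffix other than '.info' is rejected.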
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice); while the latter need to be output for
+  # each Texinfo source.
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
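+  # Illustration (hypothetical inputs): 'foo.texi' producing 'foo.info'
+  # in the current directory takes the generic branch above, so the
+  # suffix rules receive '-I $(srcdir)'; 'doc/bar.texi' takes the
+  # specific branch and its rules receive '-I doc -I $(srcdir)/doc'.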
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuses Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files is important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #	  |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files always get built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date source-tree of the .info file existed,
+      #       make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) is triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
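+  #
+  # Illustration (hypothetical fragment):
+  #
+  #    man_MANS = foo.1 bar.3x
+  #    dist_man5_MANS = baz.5
+  #
+  # enables sections 1, 3 and 5; 'baz.5' is also added to the
+  # distribution because of its 'dist_' prefix.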
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
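+# Illustration (editor's note): if Makefile.am defines a 'dist-hook' rule
+# only when some condition COND is true, user_phony_rule marks it .PHONY
+# and emits an empty rule prefixed with something like '@COND_FALSE@' for
+# the remaining condition, so other rules can depend on 'dist-hook'
+# unconditionally.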
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents for no reason.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
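+  # Illustration (hypothetical fragment): given
+  #
+  #    SUBDIRS = src $(MAYBE_TESTS)
+  #
+  # where MAYBE_TESTS is 'tests' under an Automake conditional, the code
+  # above defines DIST_SUBDIRS as 'src tests'; an explicit DIST_SUBDIRS
+  # in Makefile.am is left untouched.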
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check all items in variable $VARIABLE as directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
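+# Illustration (editor's note, assuming 'bindir' is a configure-substituted
+# variable and 'FOO' is not):
+#   substitute_ac_subst_variables ('${bindir}/${FOO}')
+# returns '@bindir@/${FOO}'.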
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
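+# Illustration (hypothetical paths): while processing 'lib/Makefile'
+# ($relative_dir is 'lib'),
+#   prepend_srcdir ('lib/foo.mk', 'common/bar.mk')
+# returns ('$(srcdir)/foo.mk', '$(top_srcdir)/common/bar.mk').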
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUTS[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by other AC_CONFIG_FOOs.
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
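+# Illustration (editor's sketch): for AC_CONFIG_FILES([sub/foo:sub/foo.in]),
+# the rebuild rule for 'sub/foo' emitted in sub/Makefile depends on
+# '$(srcdir)/foo.in', which is also added to the distribution.  An input
+# that is itself the output of another AC_CONFIG_* call is instead referred
+# to via '$(top_builddir)/' when used from a subdirectory Makefile.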
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %MAKE_LIST.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
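+
+# Editor's note: an illustrative sketch, not part of upstream Automake.
+# Assuming a Makefile.am that sets "SUFFIXES = .tex" while '.c' and '.o'
+# rules were registered internally, handle_footer would emit roughly:
+#
+#   .SUFFIXES:
+#   .SUFFIXES: .tex .c .o
+#
+# i.e. the user's $(SUFFIXES) entries come first, unsorted, followed by the
+# internally known suffixes, sorted and de-duplicated.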
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by changing the order of dependencies to the "all" because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If $file is a variable, make sure we don't call 'rm -f' without args.
+      $rm = "test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
+
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
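+
+# Editor's note: illustrative only, not upstream code.  For a Makefile.am
+# defining CLEANFILES, the loop above contributes a guarded removal to the
+# CLEAN_RMS substitution, e.g.:
+#
+#   -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
+#
+# so that 'rm -f' is never invoked without arguments when the variable
+# expands to nothing.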
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
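+
+# Editor's note: a hypothetical example, not upstream code.  Sorting
+# ('uninstall', '.PHONY', 'all-am', '.MAKE') with "sort target_cmp" yields
+# ('all-am', 'uninstall', '.MAKE', '.PHONY'): dot-targets are pushed to the
+# end, and each group stays alphabetical.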
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing about this guy, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
+  # The "test driver" program, responsible for handling the test protocol
+  # used by test scripts.  By default, it's assumed that no protocol is used,
+  # so we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
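+
+# Editor's note: an illustrative example, not upstream code.  For the test
+# suffix '.sh', $pfx above becomes 'SH_', so the rules emitted from check2.am
+# use $(SH_LOG_DRIVER), $(AM_SH_LOG_DRIVER_FLAGS), $(SH_LOG_DRIVER_FLAGS),
+# $(SH_LOG_COMPILER), $(AM_SH_LOG_FLAGS) and $(SH_LOG_FLAGS), while the empty
+# suffix ('') yields the plain $(LOG_DRIVER)/$(LOG_COMPILER) variables used
+# for extension-less tests.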
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
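+
+# Editor's note: illustrative examples, not upstream code:
+#   is_valid_test_extension ('.test')    => 1   # matches the regexp above
+#   is_valid_test_extension ('.sh')      => 1
+#   is_valid_test_extension ('.foo-bar') => 0   # '-' is not allowed
+# An extension equal to subst ('EXEEXT') -- presumably the literal '@EXEEXT@'
+# string -- is also accepted when EXEEXT is AC_SUBST'ed.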
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
+	  my $nhelper=1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
+
+  return ($output, @inputs);
+}
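+
+# Editor's note: illustrative examples, not upstream code:
+#   split_config_file_spec ('Makefile:top.in:bot.in')
+#     => ('Makefile', 'top.in', 'bot.in')
+#   split_config_file_spec ('config.h')
+#     => ('config.h', 'config.h.in')   # inputs default to "$output.in"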
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as Makefile:top.in:mid.in:bot.in.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
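+
+# Editor's note: a hypothetical example, not upstream code.  Given the spec
+# 'Makefile:top.in:mid.in:bot.in', locate_am ('top.in', 'mid.in', 'bot.in')
+# returns 'mid.in' when mid.am is the only existing *.am counterpart, and
+# undef when none of top.am, mid.am or bot.am exist.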
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
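+
+# Editor's note: an illustrative walk-through, not upstream code.  For
+#   AC_CONFIG_FILES([src/Makefile doc/version.texi])
+# 'src/Makefile' lands in %make_list (assuming src/Makefile.am exists, so
+# Automake must generate src/Makefile.in itself), while 'doc/version.texi'
+# has no *.am counterpart and is pushed onto @other_input_files, for which
+# the config.status rebuild rules are emitted earlier in this file.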
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
+  @cond_stack = ();
+  my $where;
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-argument forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 1.6.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate version mismatch to missing.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: Return value is LANG_SUBDIR if the resulting object
+# file should be in a subdir if the source file is, LANG_PROCESS if
+# file is to be dealt with, LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
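+
+# Editor's note: illustrative only, not upstream code.  The tr/y/c/ above
+# maps yacc extensions onto their C/C++ counterparts, e.g.
+#   '.y'   -> '.c'     '.yy'  -> '.cc'
+#   '.yxx' -> '.cxx'   '.ypp' -> '.cpp'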
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs,
+	      # so distribute the generated files.
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards),
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code, do not run C compiler
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
+
+# Add output rules to invoke valac and create a stamp file as a witness
+# to handle multiple outputs.  This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
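+
+# Editor's note: illustrative examples, not upstream code:
+#   resolve_linker (CXXLINK => 1, F77LINK => 1)  => 'CXXLINK'
+#   resolve_linker ()                            => 'LINK'
+# i.e. the first match in the precedence list above wins, and plain C
+# linking is the fallback.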
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
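+
+# Editor's note: a hypothetical example, not upstream code.  For a user
+# suffix '.zoo' with a '.zoo.c:' suffix rule in Makefile.am,
+# derive_suffix ('.zoo', '.o') follows the '.zoo' -> '.c' chain and returns
+# '.c', the first extension that %extension_map knows about; an extension
+# already known to Automake, such as '.c', is returned unchanged.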
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
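+
+# Editor's note: illustrative examples, not upstream code:
+#   make_conditional_string ('',  'DEBUG') => 'DEBUG_TRUE'
+#   make_conditional_string ('!', 'DEBUG') => presumably 'DEBUG_FALSE',
+#                                             via conditional_negate()
+#   make_conditional_string ('',  'TRUE')  => 'TRUE' (left untouched)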
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; this should be the empty
+# string to define the variable unconditionally.  The third argument
+# is a list holding the values to use for the variable.  The value is
+# pretty printed in the output file.
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR whose content is the list of file names composed of
+# the @BASENAME entries and the $EXTENSION.
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution by the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of '$(AM_$flag)' in the link
+  # command into '$(${target}_$flag)' if that per-target variable exists.
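+  # For instance (hypothetical target name): if 'maude_LDFLAGS' has been
+  # seen, a link command containing '$(AM_LDFLAGS)' is rewritten to use
+  # '$(maude_LDFLAGS)' when linking the 'maude' target.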
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
+  # If the computed command is the same as the generic command, use
+  # the command linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a backslash.
+# Might modify $LINE.
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
+
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format they were input.  However, POSIX compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover, nobody
+	    # would really write such long lines by hand, since they are
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tool, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
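+# Remove escaped newlines and collapse any runs of whitespace into
+# single spaces.  A worked example of the substitutions below (input
+# chosen purely for illustration):
+#
+#   flatten ("foo \\\n\tbar   baz\n")  =>  "foo bar baz"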
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated with $KEY in %PAIRS, as used on $TOKEN
+# (which should be ?KEY? or any of the special %...% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($KEY, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
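+#
+# A small illustration (hypothetical %PAIRS contents; 'FOO' is made up,
+# 'ZIP' is one of the keys set up in preprocess_file), matching the
+# branches below:
+#
+#   %transform = (FOO => 'bar', ZIP => 0);
+#   transform ('%FOO%',  %transform)  =>  'bar'
+#   transform ('%?ZIP%', %transform)  =>  'FALSE'
+#   transform ('?ZIP?',  %transform)  =>  '##%'  (line later stripped)
+#   transform ('?!ZIP?', %transform)  =>  ''     (line kept)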
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., no recognition of
+# rule declarations or of make variable definitions).
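+#
+# Illustrative call (hypothetical fragment name 'foo.am'):
+#
+#   my $text = preprocess_file ("$libdir/am/foo.am", 'LIBTOOL' => 1);
+#
+# would return the text of foo.am with its '?LIBTOOL?' / '%?LIBTOOL%'
+# requests resolved; entries passed explicitly override the global
+# defaults merged in below.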
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides we don't need more than two consecutive new-lines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
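+#
+# Paragraph grouping, sketched on made-up content (shown as it would
+# look after preprocess_file): a rule line and its tab-indented recipe
+# lines form one paragraph, and consecutive comment lines form another,
+# e.g.
+#
+#   "all-local:\n\techo hi\n\n# one\n# two\n"
+#
+# yields the paragraphs ("all-local:\n\techo hi", "", "# one\n# two").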
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If this is a rule, keep eating lines as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If this is a comment, eat as many consecutive comment lines as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.  $IS_AM is true iff the caller
+# is reading an Automake-provided file (as opposed to the user's
+# Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate the relationship from the optional actions: split at
+	  # the first "newline tab" sequence not preceded by a backslash
+	  # (a backslash there would mean a continuation line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several times (e.g. for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
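+#
+# Illustrative sketch (hypothetical Makefile.am contents): given
+#
+#   zardir   = $(datadir)/zar
+#   zar_DATA = zardoz.txt
+#
+# a call like am_primary_prefixes ('DATA', 1, 'data', 'sysconf') would
+# include 'zar' in its result (because 'zardir' is defined), alongside
+# whichever of the listed prefixes are actually used as '*_DATA'.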
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A prefix that is not explicitly valid is allowed if ${X}dir
+	  # is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns a list of [$location, $value] pairs, where the $value's are
+# the values of all the where_HOW variables, and $location is their
+# associated location (the place where their parent variables were
+# defined).
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
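+# An illustrative call shape (hypothetical arguments, following the
+# parameter description above):
+#
+#   am_install_var ('-candist', 'data', 'DATA', 'data', 'sysconf');
+#
+# i.e. option flags first, then the fragment to read ('data' here means
+# $libdir/am/data.am, see file_contents), the primary ('DATA'), and the
+# candidate installation prefixes.
+#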
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, should it go with install-exec or install-data?
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be determined at configure time.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure.  (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
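+#
+# For example (hypothetical layout): with AC_CONFIG_AUX_DIR([build-aux])
+# in configure.ac, $config_aux_dir is 'build-aux' and $am_config_aux_dir
+# becomes '$(top_srcdir)/build-aux'; with the default of '.', it is just
+# '$(top_srcdir)'.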
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory as the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Verify that the file exists in $DIRECTORY, or install it.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Verify that the file exists in $DIRECTORY, or install it.
+# $MYSTRICT is the strictness level at which this file becomes required.
+# Worker threads may queue up the action to be serialized by the master
+# if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
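+# The record enqueued below is flat: the key, the directory, a location
+# marker (QUEUE_LOCATION followed by a serialized Automake::Location, or
+# QUEUE_STRING followed by a plain string), the strictness level, the
+# number of files, and finally the file names themselves;
+# require_queued_file_check_or_copy dequeues the fields in this order.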
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
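+#
+# For a hypothetical $DIRECTORY of 'sub/dir', the rule appended to
+# $output_rules looks like (recipe lines tab-indented):
+#
+#   sub/dir/$(am__dirstamp):
+#           @$(MKDIR_P) sub/dir
+#           @: > sub/dir/$(am__dirstamp)
+#
+# and 'sub/dir/$(am__dirstamp)' is returned for use as a dependency.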
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
+
+################################################################
+
+# Push a list of files onto the 'DIST_COMMON' variable.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warnings issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations.
+  #  * $force_generation is set.
+  #  * any of the output dependencies is younger than the output
+  #  * the contents of the output are different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # The output's dependencies are split into two sets:
+  #  * dependencies which are also configure dependencies;
+  #    these do not change from one Makefile.am to the next
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
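+      # (e.g. a bare 'Makefile' argument is treated as 'Makefile:Makefile.in').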
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning setting is now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
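+# The count is taken from the AUTOMAKE_JOBS environment variable (for
+# example AUTOMAKE_JOBS=4); non-numeric values fall back to 0, and the
+# result is capped at the number of input Makefiles.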
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles, the message queues
+  # collect all serializations needed for respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoreconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoreconf
new file mode 100755
index 0000000..46372b5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoreconf
@@ -0,0 +1,718 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoreconf.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoreconf - install the GNU Build System in a directory tree
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David J. MacKenzie.
+# Extended and rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+# Do not use Cwd::chdir, since it might hang.
+use Cwd 'cwd';
+use strict;
+
+## ----------- ##
+## Variables.  ##
+## ----------- ##
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [DIRECTORY]...
+
+Run `autoconf' (and `autoheader', `aclocal', `automake', `autopoint'
+(formerly `gettextize'), and `libtoolize' where appropriate)
+repeatedly to remake the GNU Build System files in specified
+DIRECTORIES and their subdirectories (defaulting to `.').
+
+By default, it only remakes those files that are older than their
+sources.  If you install new versions of the GNU Build System,
+you can make `autoreconf' remake all of the files by giving it the
+`--force' option.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don't remove temporary files
+  -f, --force              consider all files obsolete
+  -i, --install            copy missing auxiliary files
+      --no-recursive       don't rebuild sub-packages
+  -s, --symlink            with -i, install symbolic links instead of copies
+  -m, --make               when applicable, re-run ./configure && make
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY [syntax]
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variable \`WARNINGS\' is honored.  Some subtools might
+support other warning types, using \`all' is encouraged.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+The environment variables AUTOM4TE, AUTOCONF, AUTOHEADER, AUTOMAKE,
+ACLOCAL, AUTOPOINT, LIBTOOLIZE, M4, and MAKE are honored.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version = "autoreconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+# Lib files.
+my $autoconf   = $ENV{'AUTOCONF'}   || '/i686-pc-cygwin/bin/autoconf';
+my $autoheader = $ENV{'AUTOHEADER'} || '/i686-pc-cygwin/bin/autoheader';
+my $autom4te   = $ENV{'AUTOM4TE'}   || '/i686-pc-cygwin/bin/autom4te';
+my $automake   = $ENV{'AUTOMAKE'}   || 'automake';
+my $aclocal    = $ENV{'ACLOCAL'}    || 'aclocal';
+my $libtoolize = $ENV{'LIBTOOLIZE'} || 'libtoolize';
+my $autopoint  = $ENV{'AUTOPOINT'}  || 'autopoint';
+my $make       = $ENV{'MAKE'}       || 'make';
+
+# --install -- as --add-missing in other tools.
+my $install = 0;
+# symlink -- when --install, use symlinks instead.
+my $symlink = 0;
+# Does aclocal support --force?
+my $aclocal_supports_force = 0;
+# Does aclocal support -Wfoo?
+my $aclocal_supports_warnings = 0;
+# Does automake support --force-missing?
+my $automake_supports_force_missing = 0;
+# Does automake support -Wfoo?
+my $automake_supports_warnings = 0;
+
+my @prepend_include;
+my @include;
+
+# List of command line warning requests.
+my @warning;
+
+# Rerun `./configure && make'?
+my $run_make = 0;
+
+# Recurse into subpackages
+my $recursive = 1;
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ("W|warnings=s"         => \@warning,
+	  'I|include=s'          => \@include,
+	  'B|prepend-include=s'	 => \@prepend_include,
+	  'i|install'            => \$install,
+	  's|symlink'            => \$symlink,
+	  'm|make'               => \$run_make,
+	  'recursive!'           => \$recursive);
+
+  # Split the warnings as a list of elements instead of a list of
+  # lists.
+  @warning = map { split /,/ } @warning;
+  parse_WARNINGS;
+  parse_warnings '--warnings', @warning;
+
+  # Even if the user specified a configure.ac, trim to get the
+  # directory, and look for configure.ac again.  Because (i) the code
+  # is simpler, and (ii) we are still able to diagnose simultaneous
+  # presence of configure.ac and configure.in.
+  @ARGV = map { /configure\.(ac|in)$/ ? dirname ($_) : $_ } @ARGV;
+  push @ARGV, '.' unless @ARGV;
+
+  if ($verbose && $debug)
+    {
+      for my $prog ($autoconf, $autoheader,
+		    $automake, $aclocal,
+		    $autopoint,
+		    $libtoolize)
+	{
+	  xsystem ("$prog --version | sed 1q >&2");
+	  print STDERR "\n";
+	}
+    }
+
+  my $aclocal_help = `$aclocal --help 2>/dev/null`;
+  my $automake_help = `$automake --help 2>/dev/null`;
+  $aclocal_supports_force = $aclocal_help =~ /--force/;
+  $aclocal_supports_warnings = $aclocal_help =~ /--warnings/;
+  $automake_supports_force_missing = $automake_help =~ /--force-missing/;
+  $automake_supports_warnings = $automake_help =~ /--warnings/;
+
+  # Dispatch autoreconf's options to the tools.
+  # --include;
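+  # (The leading empty string makes join() put the separator before
+  # every entry; e.g. join (' -I ', '', 'm4') yields ' -I m4'.)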
+  $aclocal    .= join (' -I ', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+  $autoheader .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoheader .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+  # --install and --symlink;
+  if ($install)
+    {
+      $automake   .= ' --add-missing';
+      $automake   .= ' --copy' unless $symlink;
+      $libtoolize .= ' --copy' unless $symlink;
+    }
+  # --force;
+  if ($force)
+    {
+      $aclocal    .= ' --force'
+	if $aclocal_supports_force;
+      $autoconf   .= ' --force';
+      $autoheader .= ' --force';
+      $automake   .= ' --force-missing'
+	if $automake_supports_force_missing;
+      $autopoint  .= ' --force';
+      $libtoolize .= ' --force';
+    }
+  else
+    {
+      # The implementation of --no-force is bogus in all implementations
+      # of Automake up to 1.8, so we avoid it in these cases.  (Automake
+      # 1.8 is the first version where aclocal supports force, hence
+      # the condition.)
+      $automake .= ' --no-force'
+	if $aclocal_supports_force;
+    }
+  # --verbose --verbose or --debug;
+  if ($verbose > 1 || $debug)
+    {
+      $autoconf   .= ' --verbose';
+      $autoheader .= ' --verbose';
+      $automake   .= ' --verbose';
+      $aclocal    .= ' --verbose';
+    }
+  if ($debug)
+    {
+      $autoconf   .= ' --debug';
+      $autoheader .= ' --debug';
+      $libtoolize .= ' --debug';
+    }
+  # --warnings;
+  if (@warning)
+    {
+      my $warn = ' --warnings=' . join (',', @warning);
+      $autoconf   .= $warn;
+      $autoheader .= $warn;
+      $automake   .= $warn
+	if $automake_supports_warnings;
+      $aclocal    .= $warn
+        if $aclocal_supports_warnings;
+    }
+}
+
+
+# &run_aclocal ($ACLOCAL, $FLAGS)
+# -------------------------------
+# Update aclocal.m4 as lazily as possible, as aclocal pre-1.8 always
+# overwrites aclocal.m4, hence triggers autoconf, autoheader, automake
+# etc. uselessly.  aclocal 1.8+ does not need this.
+sub run_aclocal ($$)
+{
+  my ($aclocal, $flags) = @_;
+
+  # aclocal 1.8+ does all this for free.  It can be recognized by its
+  # --force support.
+  if ($aclocal_supports_force)
+    {
+      xsystem ("$aclocal $flags");
+    }
+  else
+    {
+      xsystem ("$aclocal $flags --output=aclocal.m4t");
+      # aclocal may produce no output.
+      if (-f 'aclocal.m4t')
+	{
+	  update_file ('aclocal.m4t', 'aclocal.m4');
+	  # Make sure that the local m4 files are older than
+	  # aclocal.m4.
+	  #
+	  # Why is this not always the case?  Because we already run
+	  # aclocal at first (before tracing), which, for instance,
+	  # can find Gettext's macros in .../share/aclocal, so we may
+	  # have had the right aclocal.m4 already.  Then autopoint is
+	  # run, and installs locally these M4 files.  Then
+	  # autoreconf, via update_file, sees it is the _same_
+	  # aclocal.m4, and doesn't change its timestamp.  But later,
+	  # Automake's Makefile expresses that aclocal.m4 depends on
+	  # these local files, which are newer, so it triggers aclocal
+	  # again.
+	  #
+	  # To make sure aclocal.m4 is no older, we change the
+	  # modification times of the local M4 files to be not newer
+	  # than it.
+	  #
+	  # First, where are the local files?
+	  my $aclocal_local_dir = '.';
+	  if ($flags =~ /-I\s+(\S+)/)
+	    {
+	      $aclocal_local_dir = $1;
+	    }
+	  # All the local files newer than aclocal.m4 are to be
+	  # made not newer than it.
+	  my $aclocal_m4_mtime = mtime ('aclocal.m4');
+	  for my $file (glob ("$aclocal_local_dir/*.m4"), 'acinclude.m4')
+	    {
+	      if ($aclocal_m4_mtime < mtime ($file))
+		{
+		  debug "aging $file to be not newer than aclocal.m4";
+		  utime $aclocal_m4_mtime, $aclocal_m4_mtime, $file;
+		}
+	    }
+	}
+    }
+}
+
+# &autoreconf_current_directory
+# -----------------------------
+sub autoreconf_current_directory ()
+{
+  my $configure_ac = find_configure_ac;
+
+  # ---------------------- #
+  # Is it using Autoconf?  #
+  # ---------------------- #
+
+  my $uses_autoconf;
+  my $uses_gettext;
+  if (-f $configure_ac)
+    {
+      my $configure_ac_file = new Autom4te::XFile "< $configure_ac";
+      while ($_ = $configure_ac_file->getline)
+	{
+	  s/#.*//;
+	  s/dnl.*//;
+	  $uses_autoconf = 1 if /AC_INIT/;
+	  # See below for why we look for gettext here.
+	  $uses_gettext = 1  if /^AM_GNU_GETTEXT_VERSION/;
+	}
+    }
+  if (!$uses_autoconf)
+    {
+      verb "$configure_ac: not using Autoconf";
+      return;
+    }
+
+
+  # ------------------- #
+  # Running autopoint.  #
+  # ------------------- #
+
+  # Gettext is a bit of a problem: its macros are not necessarily
+  # visible to aclocal, so if we start with a completely stripped-down
+  # package (think of a fresh CVS checkout), running `aclocal' first
+  # will fail: the Gettext macros are missing.
+  #
+  # Therefore, we can't use the traces to decide if we use Gettext or
+  # not.  I guess that once Gettext moves to 2.5x we will be able to,
+  # but in the meanwhile forget it.
+  #
+  # We can only grep for AM_GNU_GETTEXT_VERSION in configure.ac.  You
+  # might think this approach is naive, and indeed it is, as it
+  # prevents one from embedding AM_GNU_GETTEXT_VERSION in another *.m4, but
+  # anyway we don't limit the generality, since... that's what
+  # autopoint does.  Actually, it is even more restrictive, as it
+  # greps for `^AM_GNU_GETTEXT_VERSION('.  We did this above, while
+  # scanning configure.ac.
+  if (!$uses_gettext)
+    {
+      verb "$configure_ac: not using Gettext";
+    }
+  elsif (!$install)
+    {
+      verb "$configure_ac: not running autopoint: --install not given";
+    }
+  else
+    {
+      xsystem_hint ("autopoint is needed because this package uses Gettext", "$autopoint");
+    }
+
+
+  # ----------------- #
+  # Running aclocal.  #
+  # ----------------- #
+
+  # Run it first: it might discover new macros to add, e.g.,
+  # AC_PROG_LIBTOOL, which we will trace later to see if Libtool is
+  # used.
+  #
+  # Always run it.  Tracking its sources for up-to-dateness is too
+  # complex and too error prone.  The best we can do is avoid
+  # nuking the time stamp.
+  my $uses_aclocal = 1;
+
+  # Nevertheless, if aclocal.m4 exists and is not made by aclocal,
+  # don't run aclocal.
+
+  if (-f 'aclocal.m4')
+    {
+      my $aclocal_m4 = new Autom4te::XFile 'aclocal.m4';
+      $_ = $aclocal_m4->getline;
+      $uses_aclocal = 0
+	unless defined ($_) && /generated.*by aclocal/;
+    }
+
+  # If there are flags for aclocal in Makefile.am, use them.
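+  # (e.g. a line 'ACLOCAL_AMFLAGS = -I m4' yields the flags '-I m4').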
+  my $aclocal_flags = '';
+  if ($uses_aclocal && -f 'Makefile.am')
+    {
+      my $makefile = new Autom4te::XFile 'Makefile.am';
+      while ($_ = $makefile->getline)
+	{
+	  if (/^ACLOCAL_[A-Z_]*FLAGS\s*=\s*(.*)/)
+	    {
+	      $aclocal_flags = $1;
+	      last;
+	    }
+	}
+    }
+
+  if (!$uses_aclocal)
+    {
+      verb "$configure_ac: not using aclocal";
+    }
+  else
+    {
+      # Some file systems have sub-second time stamps, and if so we may
+      # run into trouble later, after we rerun autoconf and set the
+      # time stamps of input files to be no greater than aclocal.m4,
+      # because the time-stamp-setting operation (utime) has a
+      # resolution of only 1 second.  Work around the problem by
+      # ensuring that there is at least a one-second window before the
+      # time stamp of aclocal.m4t in which no file time stamps can
+      # fall.
+      sleep 1;
+
+      run_aclocal ($aclocal, $aclocal_flags);
+    }
+
+  # We might have to rerun aclocal if Libtool (or others) imports new
+  # macros.
+  my $rerun_aclocal = 0;
+
+
+
+  # ------------------------------- #
+  # See what tools will be needed.  #
+  # ------------------------------- #
+
+  # Perform a single trace reading, to avoid --force forcing a useless
+  # rerun between two --trace invocations.  If there is no AC_INIT, then
+  # we are not interested: it looks like a Cygnus thingy.
+  my $aux_dir;
+  my $uses_gettext_via_traces;
+  my $uses_libtool;
+  my $uses_libltdl;
+  my $uses_autoheader;
+  my $uses_automake;
+  my @subdir;
+  verb "$configure_ac: tracing";
+  my $traces = new Autom4te::XFile
+    ("$autoconf"
+     . join (' ',
+	     map { ' --trace=' . $_ . ':\$n::\${::}%' }
+	     # If you change this list, update the
+	     # `Autoreconf-preselections' section of autom4te.in.
+	     'AC_CONFIG_AUX_DIR',
+	     'AC_CONFIG_HEADERS',
+	     'AC_CONFIG_SUBDIRS',
+	     'AC_INIT',
+	     'AC_PROG_LIBTOOL',
+	     'LT_INIT',
+	     'LT_CONFIG_LTDL_DIR',
+	     'AM_GNU_GETTEXT',
+	     'AM_INIT_AUTOMAKE',
+	    )
+     . ' |');
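+  # Each trace line has the form 'MACRO::arg1::arg2::...'; the loop
+  # below splits on '::' to recover the macro name and its arguments.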
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($macro, @args) = split (/::/);
+      $aux_dir = $args[0]           if $macro eq "AC_CONFIG_AUX_DIR";
+      $uses_autoconf = 1            if $macro eq "AC_INIT";
+      $uses_gettext_via_traces = 1  if $macro eq "AM_GNU_GETTEXT";
+      $uses_libtool = 1             if $macro eq "AC_PROG_LIBTOOL"
+                                       || $macro eq "LT_INIT";
+      $uses_libltdl = 1             if $macro eq "LT_CONFIG_LTDL_DIR";
+      $uses_autoheader = 1          if $macro eq "AC_CONFIG_HEADERS";
+      $uses_automake = 1            if $macro eq "AM_INIT_AUTOMAKE";
+      push @subdir, split (' ', $args[0])
+                                    if $macro eq "AC_CONFIG_SUBDIRS" && $recursive;
+    }
+
+  # The subdirs are *optional*; they may not exist.
+  foreach (@subdir)
+    {
+      if (-d)
+	{
+	  verb "$configure_ac: adding subdirectory $_ to autoreconf";
+	  autoreconf ($_);
+	}
+      else
+	{
+	  verb "$configure_ac: subdirectory $_ not present";
+	}
+    }
+
+  # Gettext consistency checks...
+  error "$configure_ac: AM_GNU_GETTEXT is used, but not AM_GNU_GETTEXT_VERSION"
+    if $uses_gettext_via_traces && ! $uses_gettext;
+  error "$configure_ac: AM_GNU_GETTEXT_VERSION is used, but not AM_GNU_GETTEXT"
+    if $uses_gettext && ! $uses_gettext_via_traces;
+
+
+  # ---------------------------- #
+  # Setting up the source tree.  #
+  # ---------------------------- #
+
+  # libtoolize, automake --add-missing etc. will drop files in the
+  # $AUX_DIR.  But these tools fail to install these files if the
+  # directory itself does not exist, which is valid: just imagine a CVS
+  # repository with hand-written code only (there is not even a need
+  # for a Makefile.am!).
+
+  if (defined $aux_dir && ! -d $aux_dir)
+    {
+      verb "$configure_ac: creating directory $aux_dir";
+      mkdir $aux_dir, 0755
+	or error "cannot create $aux_dir: $!";
+    }
+
+
+  # -------------------- #
+  # Running libtoolize.  #
+  # -------------------- #
+
+  if (!$uses_libtool)
+    {
+      verb "$configure_ac: not using Libtool";
+    }
+  elsif ($install)
+    {
+      if ($uses_libltdl)
+	{
+	  $libtoolize .= " --ltdl";
+	}
+      xsystem_hint ("libtoolize is needed because this package uses Libtool", $libtoolize);
+      $rerun_aclocal = 1;
+    }
+  else
+    {
+      verb "$configure_ac: not running libtoolize: --install not given";
+    }
+
+
+
+  # ------------------- #
+  # Rerunning aclocal.  #
+  # ------------------- #
+
+  # If we re-installed Libtool or Gettext, the macros might have changed.
+  # Automake also needs an up-to-date aclocal.m4.
+  if ($rerun_aclocal)
+    {
+      if (!$uses_aclocal)
+	{
+	  verb "$configure_ac: not using aclocal";
+	}
+      else
+	{
+	  run_aclocal ($aclocal, $aclocal_flags);
+	}
+    }
+
+
+  # ------------------ #
+  # Running autoconf.  #
+  # ------------------ #
+
+  # Don't try to be smarter than `autoconf', which does its own up to
+  # date checks.
+  #
+  # We prefer running autoconf before autoheader, because (i) the
+  # latter runs the former, and (ii) autoconf is stricter than
+  # autoheader.  So all in all, autoconf should give better error
+  # messages.
+  xsystem ($autoconf);
+
+
+  # -------------------- #
+  # Running autoheader.  #
+  # -------------------- #
+
+  # We now consider that if AC_CONFIG_HEADERS is used, then autoheader
+  # is used too.
+  #
+  # Just as for autoconf, the up-to-dateness check is performed by the tool
+  # itself.
+  #
+  # Run it before automake, since the latter checks the presence of
+  # config.h.in when it sees an AC_CONFIG_HEADERS.
+  if (!$uses_autoheader)
+    {
+      verb "$configure_ac: not using Autoheader";
+    }
+  else
+    {
+      xsystem ($autoheader);
+    }
+
+
+  # ------------------ #
+  # Running automake.  #
+  # ------------------ #
+
+  if (!$uses_automake)
+    {
+      verb "$configure_ac: not using Automake";
+    }
+  else
+    {
+      # We should always run automake, and let it decide whether it shall
+      # update the file or not.  In fact, the effect of `$force' is already
+      # included in `$automake' via `--no-force'.
+      xsystem ($automake);
+    }
+
+
+  # -------------- #
+  # Running make.  #
+  # -------------- #
+
+  if ($run_make)
+    {
+      if (!-f "config.status")
+	{
+	  verb "no config.status: cannot re-make";
+	}
+      else
+	{
+	  xsystem ("./config.status --recheck");
+	  xsystem ("./config.status");
+	  if (!-f "Makefile")
+	    {
+	      verb "no Makefile: cannot re-make";
+	    }
+	  else
+	    {
+	      xsystem ("$make");
+	    }
+	}
+    }
+}
+
+
+# &autoreconf ($DIRECTORY)
+# ------------------------
+# Reconf the $DIRECTORY.
+sub autoreconf ($)
+{
+  my ($directory) = @_;
+  my $cwd = cwd;
+
+  # The format for this message is not free: taken from Emacs, itself
+  # using GNU Make's format.
+  verb "Entering directory `$directory'";
+  chdir $directory
+    or error "cannot chdir to $directory: $!";
+
+  autoreconf_current_directory;
+
+  # The format is not free: taken from Emacs, itself using GNU Make's
+  # format.
+  verb "Leaving directory `$directory'";
+  chdir $cwd
+    or error "cannot chdir to $cwd: $!";
+}
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+# When debugging, it is convenient that all the related temporary
+# files be at the same place.
+mktmpdir ('ar');
+$ENV{'TMPDIR'} = $tmp;
+parse_args;
+
+# Autoreconf all the given configure.ac.  Unless `--no-recursive' is passed,
+# AC_CONFIG_SUBDIRS will be traversed in &autoreconf_current_directory.
+$ENV{'AUTOM4TE'} = $autom4te;
+for my $directory (@ARGV)
+  {
+    require_configure_ac ($directory);
+    autoreconf ($directory);
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoscan b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoscan
new file mode 100755
index 0000000..57497fb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoscan
@@ -0,0 +1,679 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoscan.in; do not edit by hand.
+
+# autoscan - Create configure.scan (a preliminary configure.ac) for a package.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Configure_ac;
+use Autom4te::General;
+use Autom4te::FileUtils;
+use Autom4te::XFile;
+use File::Basename;
+use File::Find;
+use strict;
+
+use vars qw(@cfiles @makefiles @shfiles @subdirs %printed);
+
+# The kind of the words we are looking for.
+my @kinds = qw (function header identifier program
+		makevar librarie);
+
+# For each kind, the default macro.
+my %generic_macro =
+  (
+   'function'   => 'AC_CHECK_FUNCS',
+   'header'     => 'AC_CHECK_HEADERS',
+   'identifier' => 'AC_CHECK_TYPES',
+   'program'    => 'AC_CHECK_PROGS',
+   'library'    => 'AC_CHECK_LIB'
+  );
+
+my %kind_comment =
+  (
+   'function'   => 'Checks for library functions.',
+   'header'     => 'Checks for header files.',
+   'identifier' => 'Checks for typedefs, structures, and compiler characteristics.',
+   'program'    => 'Checks for programs.',
+  );
+
+# $USED{KIND}{ITEM} is the list of locations where the ITEM (of KIND) was used
+# in the user package.
+# For instance $USED{function}{alloca} is the list of `file:line' where
+# `alloca (...)' appears.
+my %used = ();
+
+# $MACRO{KIND}{ITEM} is the list of macros to use to test ITEM.
+# Initialized from lib/autoscan/*.  E.g., $MACRO{function}{alloca} contains
+# the singleton AC_FUNC_ALLOCA.  Some require several checks.
+my %macro = ();
+
+# $NEEDED_MACROS{MACRO} is an array of locations requiring MACRO.
+# E.g., $NEEDED_MACROS{AC_FUNC_ALLOCA} is the list of `file:line' containing
+# `alloca (...)'.
+my %needed_macros =
+  (
+   'AC_PREREQ' => [$me],
+  );
+
+my $configure_scan = 'configure.scan';
+my $log;
+
+# Autoconf and lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/i686-pc-cygwin/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+my @prepend_include;
+my @include = ('//share/autoconf');
+
+# $help
+# -----
+$help = "Usage: $0 [OPTION]... [SRCDIR]
+
+Examine source files in the directory tree rooted at SRCDIR, or the
+current directory if none is given.  Search the source files for
+common portability problems, check for incompleteness of
+`configure.ac', and create a file `$configure_scan' which is a
+preliminary `configure.ac' for that package.
+
+  -h, --help          print this help, then exit
+  -V, --version       print version number, then exit
+  -v, --verbose       verbosely report processing
+  -d, --debug         don't remove temporary files
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $version
+# --------
+$version = "autoscan (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+
+
+
+## ------------------------ ##
+## Command line interface.  ##
+## ------------------------ ##
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ('I|include=s' => \@include,
+	  'B|prepend-include=s' => \@prepend_include);
+
+  die "$me: too many arguments
+Try `$me --help' for more information.\n"
+    if @ARGV > 1;
+
+  my $srcdir = $ARGV[0] || ".";
+
+  verb "srcdir = $srcdir";
+  chdir $srcdir || error "cannot cd to $srcdir: $!";
+}
+
+
+# init_tables ()
+# --------------
+# Put values in the tables of what to do with each token.
+sub init_tables ()
+{
+  # The data file format supports only one line of macros per function.
+  # If more than that is required for a common portability problem,
+  # a new Autoconf macro should probably be written for that case,
+  # instead of duplicating the code in lots of configure.ac files.
+  my $file = find_file ("autoscan/autoscan.list",
+			reverse (@prepend_include), @include);
+  my $table = new Autom4te::XFile "< " . open_quote ($file);
+  my $tables_are_consistent = 1;
+
+  while ($_ = $table->getline)
+    {
+      # Ignore blank lines and comments.
+      next
+	if /^\s*$/ || /^\s*\#/;
+
+      # '<kind>: <word> <macro invocation>' or...
+      # '<kind>: <word> warn: <message>'.
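+      # (e.g. a line of the first form might read 'function: alloca AC_FUNC_ALLOCA').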
+      if (/^(\S+):\s+(\S+)\s+(\S.*)$/)
+	{
+	  my ($kind, $word, $macro) = ($1, $2, $3);
+	  error "$file:$.: invalid kind: $_"
+	    unless grep { $_ eq $kind } @kinds;
+	  push @{$macro{$kind}{$word}}, $macro;
+	}
+      else
+	{
+	  error "$file:$.: invalid definition: $_";
+	}
+    }
+
+  if ($debug)
+    {
+      foreach my $kind (@kinds)
+	{
+	  foreach my $word (sort keys %{$macro{$kind}})
+	    {
+	      print "$kind: $word: @{$macro{$kind}{$word}}\n";
+	    }
+	}
+
+    }
+}
+
+
+# used ($KIND, $WORD, [$WHERE])
+# -----------------------------
+# $WORD is used as a $KIND.
+sub used ($$;$)
+{
+  my ($kind, $word, $where) = @_;
+  $where ||= "$File::Find::name:$.";
+  if (
+      # Check for all the libraries.  But `-links' is certainly a
+      # `find' argument, and `-le', a `test' argument.
+      ($kind eq 'library' && $word !~ /^(e|inks)$/)
+      # Other than libraries are to be checked only if listed in
+      # the Autoscan library files.
+      || defined $macro{$kind}{$word}
+     )
+    {
+      push (@{$used{$kind}{$word}}, $where);
+    }
+}
+
+
+
+## ----------------------- ##
+## Scanning source files.  ##
+## ----------------------- ##
+
+
+# scan_c_file ($FILE-NAME)
+# ------------------------
+sub scan_c_file ($)
+{
+  my ($file_name) = @_;
+  push @cfiles, $File::Find::name;
+
+  # Nonzero if in a multiline comment.
+  my $in_comment = 0;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      if ($in_comment && s,^.*?\*/,,)
+	{
+	  $in_comment = 0;
+	}
+      # The whole line is inside a comment.
+      next if $in_comment;
+      # All on one line.
+      s,/\*.*?\*/,,g;
+
+      # Starting on this line.
+      if (s,/\*.*$,,)
+	{
+	  $in_comment = 1;
+	}
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*//)
+	{
+	  if (/^include\s*<([^>]*)>/)
+	    {
+	      used ('header', $1);
+	    }
+	  if (s/^(if|ifdef|ifndef|elif)\s+//)
+	    {
+	      foreach my $word (split (/\W+/))
+		{
+		  used ('identifier', $word)
+		    unless $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+		}
+	    }
+	  # Ignore other preprocessor directives.
+	  next;
+	}
+
+      # Remove string and character constants.
+      s,\"[^\"]*\",,g;
+      s,\'[^\']*\',,g;
+
+      # Tokens in the code.
+      # Maybe we should ignore function definitions (in column 0)?
+      while (s/\b([a-zA-Z_]\w*)\s*\(/ /)
+	{
+	  used ('function', $1);
+	}
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('identifier', $1);
+	}
+    }
+
+  $file->close;
+}
+
+
+# scan_makefile($MAKEFILE-NAME)
+# -----------------------------
+sub scan_makefile ($)
+{
+  my ($file_name) = @_;
+  push @makefiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      s/#.*//;
+
+      # Variable assignments.
+      while (s/\b([a-zA-Z_]\w*)\s*=/ /)
+	{
+	  used ('makevar', $1);
+	}
+      # Be sure to catch a whole word.  For instance `lex$U.$(OBJEXT)'
+      # is a single token.  Otherwise we might believe `lex' is needed.
+      foreach my $word (split (/\s+/))
+	{
+	  # Libraries.
+	  if ($word =~ /^-l([a-zA-Z_]\w*)$/)
+	    {
+	      used ('library', $1);
+	    }
+	  # Tokens in the code.
+	  # We allow some additional characters, e.g., `+', since
+	  # autoscan/programs includes `c++'.
+	  if ($word =~ /^[a-zA-Z_][\w+]*$/)
+	    {
+	      used ('program', $word);
+	    }
+	}
+    }
+
+  $file->close;
+}
+
+
+# scan_sh_file($SHELL-SCRIPT-NAME)
+# --------------------------------
+sub scan_sh_file ($)
+{
+  my ($file_name) = @_;
+  push @shfiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments and variable references.
+      s/#.*//;
+      s/\${[^\}]*}//g;
+      s/@[^@]*@//g;
+
+      # Tokens in the code.
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('program', $1);
+	}
+    }
+
+  $file->close;
+}
+
+
+# scan_file ()
+# ------------
+# Called by &find on each file.  $_ contains the current file name,
+# relative to the directory the walk is currently in.
+sub scan_file ()
+{
+  # Wanted only if there is no corresponding FILE.in.
+  return
+    if -f "$_.in";
+
+  # Save $_ as File::Find requires it to be preserved.
+  local $_ = $_;
+
+  # Strip a useless leading `./'.
+  $File::Find::name =~ s,^\./,,;
+
+  if ($_ ne '.' and -d $_ and
+      -f "$_/configure.in"  ||
+      -f "$_/configure.ac"  ||
+      -f "$_/configure.gnu" ||
+      -f "$_/configure")
+    {
+      $File::Find::prune = 1;
+      push @subdirs, $File::Find::name;
+    }
+  if (/\.[chlym](\.in)?$/)
+    {
+      used 'program', 'cc', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif (/\.(cc|cpp|cxx|CC|C|hh|hpp|hxx|HH|H|yy|ypp|ll|lpp)(\.in)?$/)
+    {
+      used 'program', 'c++', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif ((/^((?:GNUm|M|m)akefile)(\.in)?$/ && ! -f "$1.am")
+	 || /^(?:GNUm|M|m)akefile(\.am)?$/)
+    {
+      scan_makefile ($_);
+    }
+  elsif (/\.sh(\.in)?$/)
+    {
+      scan_sh_file ($_);
+    }
+}
+
+
+# scan_files ()
+# -------------
+# Read through the files and collect lists of tokens in them
+# that might create nonportabilities.
+sub scan_files ()
+{
+  find (\&scan_file, '.');
+
+  if ($verbose)
+    {
+      print "cfiles: @cfiles\n";
+      print "makefiles: @makefiles\n";
+      print "shfiles: @shfiles\n";
+
+      foreach my $kind (@kinds)
+	{
+	  print "\n$kind:\n";
+	  foreach my $word (sort keys %{$used{$kind}})
+	    {
+	      print "$word: @{$used{$kind}{$word}}\n";
+	    }
+	}
+    }
+}
+
+
+## ----------------------- ##
+## Output configure.scan.  ##
+## ----------------------- ##
+
+
+# output_kind ($FILE, $KIND)
+# --------------------------
+sub output_kind ($$)
+{
+  my ($file, $kind) = @_;
+  # Lists of words to be checked with the generic macro.
+  my @have;
+
+  print $file "\n# $kind_comment{$kind}\n"
+    if exists $kind_comment{$kind};
+  foreach my $word (sort keys %{$used{$kind}})
+    {
+      # Output the needed macro invocations in $configure_scan if not
+      # already printed, and remember these macros are needed.
+      foreach my $macro (@{$macro{$kind}{$word}})
+	{
+	  if ($macro =~ /^warn:\s+(.*)/)
+	    {
+	      my $message = $1;
+	      foreach my $location (@{$used{$kind}{$word}})
+		{
+		  warn "$location: warning: $message\n";
+		}
+	    }
+	  elsif (exists $generic_macro{$kind}
+	      && $macro eq $generic_macro{$kind})
+	    {
+	      push (@have, $word);
+	      push (@{$needed_macros{"$generic_macro{$kind}([$word])"}},
+		    @{$used{$kind}{$word}});
+	    }
+	  else
+	    {
+	      if (! $printed{$macro})
+		{
+		  print $file "$macro\n";
+		  $printed{$macro} = 1;
+		}
+	      push (@{$needed_macros{$macro}},
+		    @{$used{$kind}{$word}});
+	    }
+	}
+    }
+  print $file "$generic_macro{$kind}([" . join(' ', sort(@have)) . "])\n"
+    if @have;
+}
+
+
+# output_libraries ($FILE)
+# ------------------------
+sub output_libraries ($)
+{
+  my ($file) = @_;
+
+  print $file "\n# Checks for libraries.\n";
+  foreach my $word (sort keys %{$used{'library'}})
+    {
+      print $file "# FIXME: Replace `main' with a function in `-l$word':\n";
+      print $file "AC_CHECK_LIB([$word], [main])\n";
+    }
+}
+
+
+# output ($CONFIGURE_SCAN)
+# ------------------------
+# Print a proto configure.ac.
+sub output ($)
+{
+  my $configure_scan = shift;
+  my %unique_makefiles;
+
+  my $file = new Autom4te::XFile "> " . open_quote ($configure_scan);
+
+  print $file
+    ("#                                               -*- Autoconf -*-\n" .
+     "# Process this file with autoconf to produce a configure script.\n" .
+     "\n" .
+     "AC_PREREQ([2.68])\n" .
+     "AC_INIT([FULL-PACKAGE-NAME], [VERSION], [BUG-REPORT-ADDRESS])\n");
+  if (defined $cfiles[0])
+    {
+      print $file "AC_CONFIG_SRCDIR([$cfiles[0]])\n";
+      print $file "AC_CONFIG_HEADERS([config.h])\n";
+    }
+
+  output_kind ($file, 'program');
+  output_kind ($file, 'makevar');
+  output_libraries ($file);
+  output_kind ($file, 'header');
+  output_kind ($file, 'identifier');
+  output_kind ($file, 'function');
+
+  print $file "\n";
+  if (@makefiles)
+    {
+      # Change DIR/Makefile.in to DIR/Makefile.
+      foreach my $m (@makefiles)
+	{
+	  $m =~ s/\.(?:in|am)$//;
+	  $unique_makefiles{$m}++;
+	}
+      print $file ("AC_CONFIG_FILES([",
+		   join ("\n                 ",
+			 sort keys %unique_makefiles), "])\n");
+    }
+  if (@subdirs)
+    {
+      print $file ("AC_CONFIG_SUBDIRS([",
+		   join ("\n                   ",
+			 sort @subdirs), "])\n");
+    }
+  print $file "AC_OUTPUT\n";
+
+  $file->close;
+}
+
+
+
+## --------------------------------------- ##
+## Checking the accuracy of configure.ac.  ##
+## --------------------------------------- ##
+
+
+# &check_configure_ac ($CONFIGURE_AC)
+# -----------------------------------
+# Use autoconf to check if all the suggested macros are included
+# in CONFIGURE_AC.
+sub check_configure_ac ($)
+{
+  my ($configure_ac) = @_;
+
+  # Find what needed macros are invoked in CONFIGURE_AC.
+  # I'd be very happy if someone could explain to me why sort (uniq ...)
+  # doesn't work properly: I need `uniq (sort ...)'.  --akim
+  my $trace_option =
+    join (' --trace=', '',
+	  uniq (sort (map { s/\(.*//; $_ } keys %needed_macros)));
+
+  verb "running: $autoconf $trace_option $configure_ac";
+  my $traces =
+    new Autom4te::XFile "$autoconf $trace_option $configure_ac |";
+
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($file, $line, $macro, @args) = split (/:/, $_);
+      if ($macro =~ /^AC_CHECK_(HEADER|FUNC|TYPE|MEMBER)S$/)
+	{
+	  # To be rigorous, we should distinguish between space and comma
+	  # separated macros.  But there is no point.
+	  foreach my $word (split (/\s|,/, $args[0]))
+	    {
+	      # AC_CHECK_MEMBERS wants `struct' or `union'.
+	      if ($macro eq "AC_CHECK_MEMBERS"
+		  && $word =~ /^stat.st_/)
+		{
+		  $word = "struct " . $word;
+		}
+	      delete $needed_macros{"$macro([$word])"};
+	    }
+	}
+      else
+	{
+	  delete $needed_macros{$macro};
+	}
+    }
+
+  $traces->close;
+
+  # Report the missing macros.
+  foreach my $macro (sort keys %needed_macros)
+    {
+      warn ("$configure_ac: warning: missing $macro wanted by: "
+	    . (${$needed_macros{$macro}}[0])
+	    . "\n");
+      print $log "$me: warning: missing $macro wanted by: \n";
+      foreach my $need (@{$needed_macros{$macro}})
+	{
+	  print $log "\t$need\n";
+	}
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$log = new Autom4te::XFile "> " . open_quote ("$me.log");
+
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+my $configure_ac = find_configure_ac;
+init_tables;
+scan_files;
+output ('configure.scan');
+if (-f $configure_ac)
+  {
+    check_configure_ac ($configure_ac);
+  }
+# This close is really needed.  For some reason, probably best described
+# as a bug, it seems that the dtor of $LOG is not called automatically
+# at END.  It results in a truncated file.
+$log->close;
+exit 0;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoupdate b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoupdate
new file mode 100755
index 0000000..b566636
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/autoupdate
@@ -0,0 +1,1064 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoupdate.in; do not edit by hand.
+
+# autoupdate - modernize an Autoconf file.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David MacKenzie <djm@gnu.ai.mit.edu>.
+# Rewritten by Akim Demaille <akim@freefriends.org>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/i686-pc-cygwin/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+# We need to find m4sugar.
+my @prepend_include;
+my @include = ('//share/autoconf');
+my $force = 0;
+# m4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]...
+
+Update each TEMPLATE-FILE if given, or `configure.ac' if present,
+or else `configure.in', to the syntax of the current version of
+Autoconf.  The original files are backed up.
+
+Operation modes:
+  -h, --help                 print this help, then exit
+  -V, --version              print version number, then exit
+  -v, --verbose              verbosely report processing
+  -d, --debug                don't remove temporary files
+  -f, --force                consider all files obsolete
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version = "autoupdate (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'f|force'             => \$force);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+
+# ----------------- #
+# Autoconf macros.  #
+# ----------------- #
+
+my (%ac_macros, %au_macros, %m4_builtins);
+
+# HANDLE_AUTOCONF_MACROS ()
+# -------------------------
+# @M4_BUILTINS -- M4 builtins and a useful comment.
+sub handle_autoconf_macros ()
+{
+  # Get the builtins.
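+  # ('dumpdef' with no arguments makes m4 list every known macro on its
+  # standard error, one 'name:<tab>definition' per line, which is why the
+  # names can be picked up with /^(\w+):/ below.)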
+  xsystem ("echo dumpdef | $m4 2>" . shell_quote ("$tmp/m4.defs") . " >/dev/null");
+  my $m4_defs = new Autom4te::XFile "< " . open_quote ("$tmp/m4.defs");
+  while ($_ = $m4_defs->getline)
+    {
+      $m4_builtins{$1} = 1
+	if /^(\w+):/;
+    }
+  $m4_defs->close;
+
+  my $macros = new Autom4te::XFile ("$autoconf"
+				    . " --trace AU_DEFINE:'AU:\$f:\$1'"
+				    . " --trace define:'AC:\$f:\$1'"
+				    . " --melt /dev/null |");
+  while ($_ = $macros->getline)
+    {
+      chomp;
+      my ($domain, $file, $macro) = /^(AC|AU):(.*):([^:]*)$/ or next;
+      if ($domain eq "AU")
+	{
+	  $au_macros{$macro} = 1;
+	}
+      elsif ($file =~ /(^|\/)m4sugar\/(m4sugar|version)\.m4$/)
+	{
+	  # Add the m4sugar macros to m4_builtins.
+	  $m4_builtins{$macro} = 1;
+	}
+      else
+	{
+	  # Autoconf, aclocal, and m4sh macros.
+	  $ac_macros{$macro} = 1;
+	}
+    }
+  $macros->close;
+
+
+  # Don't keep AU macros in @AC_MACROS.
+  delete $ac_macros{$_}
+    foreach (keys %au_macros);
+  # Don't keep M4sugar macros which are redefined by Autoconf,
+  # such as `builtin', `changequote' etc.  See autoconf/autoconf.m4.
+  delete $ac_macros{$_}
+    foreach (keys %m4_builtins);
+  error "no current Autoconf macros found"
+    unless keys %ac_macros;
+  error "no obsolete Autoconf macros found"
+    unless keys %au_macros;
+
+  if ($debug)
+    {
+      print STDERR "Current Autoconf macros:\n";
+      print STDERR join (' ', sort keys %ac_macros) . "\n\n";
+      print STDERR "Obsolete Autoconf macros:\n";
+      print STDERR join (' ', sort keys %au_macros) . "\n\n";
+    }
+
+  # ac.m4 -- autoquoting definitions of the AC macros (M4sugar excluded).
+  # unac.m4 -- undefine the AC macros.
+  my $ac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/ac.m4");
+  print $ac_m4 "# ac.m4 -- autoquoting definitions of the AC macros.\n";
+  my $unac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unac.m4");
+  print $unac_m4 "# unac.m4 -- undefine the AC macros.\n";
+  foreach (sort keys %ac_macros)
+    {
+      print $ac_m4   "_au_m4_define([$_], [m4_if(\$#, 0, [[\$0]], [[\$0(\$\@)]])])\n";
+      print $unac_m4 "_au_m4_undefine([$_])\n";
+    }
+
+  # m4save.m4 -- save the m4 builtins.
+  # unm4.m4 -- disable the m4 builtins.
+  # m4.m4 -- enable the m4 builtins.
+  my $m4save_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4save.m4");
+  print $m4save_m4 "# m4save.m4 -- save the m4 builtins.\n";
+  my $unm4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unm4.m4");
+  print $unm4_m4 "# unm4.m4 -- disable the m4 builtins.\n";
+  my $m4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4.m4");
+  print $m4_m4 "# m4.m4 -- enable the m4 builtins.\n";
+  foreach (sort keys %m4_builtins)
+    {
+      print $m4save_m4 "_au__save([$_])\n";
+      print $unm4_m4   "_au__undefine([$_])\n";
+      print $m4_m4     "_au__restore([$_])\n";
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --force" if $force;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+mktmpdir ('au');
+handle_autoconf_macros;
+
+# $au_changequote -- enable the quote `[', `]' right before any AU macro.
+my $au_changequote =
+  's/\b(' . join ('|', keys %au_macros) . ')\b/_au_m4_changequote([,])$1/g';
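+# (i.e. every occurrence of an obsolete macro NAME in the input becomes
+# '_au_m4_changequote([,])NAME').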
+
+# au.m4 -- definitions the AU macros.
+xsystem ("$autoconf --trace AU_DEFINE:'_au_defun(\@<:\@\$1\@:>\@,
+\@<:\@\$2\@:>\@)' --melt /dev/null "
+	. ">" . shell_quote ("$tmp/au.m4"));
+
+
+
+## ------------------- ##
+## Process the files.  ##
+## ------------------- ##
+
+foreach my $file (@ARGV)
+  {
+    # We need an actual file.
+    if ($file eq '-')
+      {
+	$file = "$tmp/stdin";
+	system "cat >" . shell_quote ($file);
+      }
+    elsif (! -r "$file")
+      {
+	die "$me: $file: No such file or directory";
+      }
+
+    # input.m4 -- m4 program to produce the updated file.
+    # Load the values, the dispatcher, neutralize m4, and the prepared
+    # input file.
+    my $input_m4 = <<\EOF;
+      divert(-1)                                            -*- Autoconf -*-
+      changequote([,])
+
+      # Define our special macros:
+      define([_au__defn], defn([defn]))
+      define([_au__divert], defn([divert]))
+      define([_au__ifdef], defn([ifdef]))
+      define([_au__include], defn([include]))
+      define([_au___undefine], defn([undefine]))
+      define([_au__undefine], [_au__ifdef([$1], [_au___undefine([$1])])])
+      define([_au__save], [m4_ifdef([$1],
+	[m4_define([_au_$1], _m4_defn([$1]))])])
+      define([_au__restore],
+	[_au_m4_ifdef([_au_$1],
+	  [_au_m4_define([$1], _au__defn([_au_$1]))])])
+
+      # Set up m4sugar.
+      include(m4sugar/m4sugar.m4)
+
+      # Redefine __file__ to make warnings nicer; $file is replaced below.
+      m4_define([__file__], [$file])
+
+      # Redefine m4_location to fix the line number.
+      m4_define([m4_location], [__file__:m4_eval(__line__ - _au__first_line)])
+
+      # Move all the builtins into the `_au_' pseudo namespace
+      m4_include([m4save.m4])
+
+      # _au_defun(NAME, BODY)
+      # ---------------------
+      # Define NAME to BODY, plus AU activation/deactivation.
+      _au_m4_define([_au_defun],
+      [_au_m4_define([$1],
+      [_au_enable()dnl
+      $2[]dnl
+      _au_disable()])])
+
+      # Import the definition of the obsolete macros.
+      _au__include([au.m4])
+
+
+      ## ------------------------ ##
+      ## _au_enable/_au_disable.  ##
+      ## ------------------------ ##
+
+      # They work in pairs: each time an AU macro is activated, it runs
+      # _au_enable, and at its end it runs _au_disable (see _au_defun
+      # above).  AU macros might use AU macros, which should
+      # enable/disable only for the outer AU macros.
+      #
+      # `_au_enabled' is used to this end, determining whether we really
+      # enable/disable.
+
+
+      # __au_enable
+      # -----------
+      # Reenable the builtins, m4sugar, and the autoquoting AC macros.
+      _au_m4_define([__au_enable],
+      [_au__divert(-1)
+      # Enable special characters.
+      _au_m4_changecom([#])
+
+      _au__include([m4.m4])
+      _au__include([ac.m4])
+
+      _au__divert(0)])
+
+      # _au_enable
+      # ----------
+      # Called at the beginning of all the obsolete macros.  If this is the
+      # outermost level, call __au_enable.
+      _au_m4_define([_au_enable],
+      [_au_m4_ifdef([_au_enabled],
+		 [],
+		 [__au_enable()])_au_dnl
+      _au_m4_pushdef([_au_enabled])])
+
+
+      # __au_disable
+      # ------------
+      # Disable the AC autoquoting macros, m4sugar, and m4.
+      _au_m4_define([__au_disable],
+      [_au__divert(-1)
+      _au__include([unac.m4])
+      _au__include([unm4.m4])
+
+      # Disable special characters.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au__divert(0)])
+
+      # _au_disable
+      # -----------
+      # Called at the end of all the obsolete macros.  If we are at the
+      # outermost level, call __au_disable.
+      _au_m4_define([_au_disable],
+      [_au_m4_popdef([_au_enabled])_au_dnl
+      _au_m4_ifdef([_au_enabled],
+		[],
+		[__au_disable()])])
+
+
+      ## ------------------------------- ##
+      ## Disable, and process the file.  ##
+      ## ------------------------------- ##
+      # The AC autoquoting macros are not loaded yet, hence invoking
+      # `_au_disable' would be wrong.
+      _au__include([unm4.m4])
+
+      # Disable special characters, and set the first line number.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au_m4_define(_au__first_line, _au___line__)_au__divert(0)_au_dnl
+EOF
+
+    $input_m4 =~ s/^      //mg;
+    $input_m4 =~ s/\$file/$file/g;
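+    # $input_m4 now holds the m4 prologue with the heredoc's six-space
+    # indentation stripped and the literal token `$file' replaced by the
+    # name of the file being updated.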
+
+    # prepared input -- input, but reenables the quote before each AU macro.
+    open INPUT_M4, "> " . open_quote ("$tmp/input.m4")
+       or error "cannot open: $!";
+    open FILE, "< " . open_quote ($file)
+       or error "cannot open: $!";
+    print INPUT_M4 "$input_m4";
+    while (<FILE>)
+       {
+	 eval $au_changequote;
+	 print INPUT_M4;
+       }
+    close FILE
+       or error "cannot close $file: $!";
+    close INPUT_M4
+       or error "cannot close $tmp/input.m4: $!";
+
+    # Now ask m4 to perform the update.
+    xsystem ("$m4 --include=" . shell_quote ($tmp)
+	     . join (' --include=', '', map { shell_quote ($_) } reverse (@prepend_include))
+	     . join (' --include=', '', map { shell_quote ($_) } @include)
+	     . " " . shell_quote ("$tmp/input.m4") . " > " . shell_quote ("$tmp/updated"));
+    update_file ("$tmp/updated",
+		 "$file" eq "$tmp/stdin" ? '-' : "$file");
+  }
+exit 0;
+
+
+#		  ## ---------------------------- ##
+#		  ## How `autoupdate' functions.  ##
+#		  ## ---------------------------- ##
+#
+# The task of `autoupdate' is not trivial: the biggest difficulty being
+# that you must limit the changes to the parts that really need to be
+# updated.  Finding a satisfying implementation proved to be quite hard,
+# as this is the fifth implementation of `autoupdate'.
+#
+# Below, we will use a simple example of an obsolete macro:
+#
+#     AU_DEFUN([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))])
+#     AC_DEFUN([NEW], [echo "sum($1) = $2"])
+#
+# the input file contains
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Of course the expected output is
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0], [0])
+#
+#
+# # First implementation: sed
+# # =========================
+#
+# The first implementation was only able to change the name of obsolete
+# macros.
+#
+# The file `acoldnames.m4' defined the old names based on the new names.
+# It was simple then to produce a sed script such as:
+#
+#     s/OLD/NEW/g
+#
+# Updating merely consisted in running this script on the file to
+# update.
+#
+# This scheme suffers from an obvious limitation: `autoupdate' was
+# unable to cope with new macros that merely swap some of their arguments
+# compared to the old macro.  Fortunately, that was enough to upgrade
+# from Autoconf 1 to Autoconf 2.  (But I have no idea whether the
+# changes in Autoconf 2 were precisely limited by this constraint.)
+#
+#
+# # Second implementation: hooks
+# # ============================
+#
+# Version 2.15 of Autoconf brought a vast number of changes compared
+# to 2.13, so a solution was needed.  One could think of extending the
+# `sed' scripts with specialized code for complex macros.  However, this
+# approach is of course full of flaws:
+#
+# a. the Autoconf maintainers have to write these snippets, which we
+#    just don't want to do,
+#
+# b. I really don't think you'll ever manage to handle the quoting of
+#    m4 with a sed script.
+#
+# To satisfy a., let's remark that the code which implements the old
+# features in terms of the new features is exactly the code which should
+# replace the old code.
+#
+# To answer point b, as usual in the history of Autoconf, the answer, at
+# least on paper, is simple: m4 is the best tool to parse m4, so
+# let's use m4.
+#
+# Therefore the specification is:
+#
+#     I want to be able to tell Autoconf, well, m4, that the macro I
+#     am currently defining is an obsolete macro (so that the user is
+#     warned), and its code is the code to use when running autoconf,
+#     but that the very same code has to be used when running
+#     autoupdate.  To summarize, the interface I want is
+#     `AU_DEFUN(OLD-NAME, NEW-CODE)'.
+#
+#
+# Now for the technical details.
+#
+# When running autoconf, except for the warning, AU_DEFUN is basically
+# AC_DEFUN.
+#
+# When running autoupdate, we want *only* OLD-NAMEs to be expanded.
+# This obviously means that acgeneral.m4 and acspecific.m4 must not be
+# loaded.  Nonetheless, because we want to use a rich set of m4
+# features, m4sugar.m4 is needed.  Please note that the fact that
+# Autoconf's macros are not loaded is positive on two points:
+#
+# - we do get an updated `configure.ac', not a `configure'!
+#
+# - the old macros are replaced by *calls* to the new macros, not the
+#   bodies of the new macros, since their bodies are not defined!!!
+#   (Whoa, that's really beautiful!).
+#
+# Additionally we need to disable the quotes when reading the input for
+# two reasons: first because otherwise `m4' will swallow the quotes of
+# other macros:
+#
+#     NEW([1, 2], 3)
+#     => NEW(1, 2, 3)
+#
+# and second, because we want to update the macro calls which are
+# quoted, i.e., we want
+#
+#     FOO([OLD(1, 2)])
+#     => FOO([NEW([1, 2], [3])])
+#
+# If we don't disable the quotes, only the macros called at the top
+# level would be updated.
+#
+# So, let's disable the quotes.
+#
+# Well, not quite: m4sugar.m4 still needs to use quotes for some macros.
+# Well, in this case, when running in autoupdate code, each macro first
+# reestablishes the quotes, expands itself, and disables the quotes.
+#
+# Thinking a bit more, you realize that in fact, people may use `define',
+# `ifelse' etc. in their files, and you certainly don't want to process
+# them.  Another example is `dnl': you don't want to remove the
+# comments.  You then realize you don't exactly want to import m4sugar:
+# you want to specify when it is enabled (macros active), and disabled.
+# m4sugar provides m4_disable/m4_enable to this end.
+#
+# You're getting close to it.  Now one task remains: how to handle
+# twofold definitions?
+#
+# Remember that the same AU_DEFUN must be understood in two different
+# ways, the AC way, and the AU way.
+#
+# One first solution is to check whether acgeneral.m4 was loaded.  But
+# that's definitely not cute.  Another is simply to install `hooks',
+# that is to say, to keep in some place m4 knows about, late `define's to be
+# triggered *only* in AU mode.
+#
+# You first think of designing AU_DEFUN like this:
+#
+# 1. AC_DEFUN(OLD-NAME,
+#	      [Warn the user OLD-NAME is obsolete.
+#	       NEW-CODE])
+#
+# 2. Store for late AU binding([define(OLD_NAME,
+#				[Reestablish the quotes.
+#				 NEW-CODE
+#				 Disable the quotes.])])
+#
+# but this will not work: NEW-CODE probably uses $1, $2 etc. and these
+# guys will be replaced with the arguments of `Store for late AU binding'
+# when you call it.
+#
+# I don't think there is a means to avoid this using this technology
+# (remember that $1 etc. are *always* expanded in m4).  You may also try
+# to replace them with $[1] to preserve them for a later evaluation, but
+# if `Store for late AU binding' is properly written, it will remain
+# quoted till the end...
+#
+# You have to change technology.  Since the problem is that `$1'
+# etc. should be `consumed' right away, one solution is to define now a
+# second macro, `AU_OLD-NAME', and to install a hook that binds OLD-NAME
+# to AU_OLD-NAME.  Then, autoupdate.m4 just needs to run the hooks.  By
+# the way, the same method was used in autoheader.
+#
+#
+# # Third implementation: m4 namespaces by m4sugar
+# # ==============================================
+#
+# Actually, this implementation was just a clean up of the previous
+# implementation: instead of defining hooks by hand, m4sugar was equipped
+# with `namespaces'.  What are they?
+#
+# Sometimes we want to disable some *set* of macros, and restore them
+# later.  We provide support for this via namespaces.
+#
+# There are basically three characters playing this scene: defining a
+# macro in a namespace, disabling a namespace, and restoring a namespace
+# (i.e., all the definitions it holds).
+#
+# Technically, to define a MACRO in NAMESPACE means to define the macro
+# named `NAMESPACE::MACRO' to the VALUE.  At the same time, we append
+# `undefine(NAME)' in the macro named `m4_disable(NAMESPACE)', and
+# similarly a binding of NAME to the value of `NAMESPACE::MACRO' in
+# `m4_enable(NAMESPACE)'.  These mechanisms allow us to bind the macros
+# of NAMESPACE and to unbind them at will.
+#
+# Of course this implementation is really inefficient: m4 has to grow
+# strings which can quickly become huge, which slows it significantly.
+#
+# In particular one should avoid using `define' for temporaries as much
+# as possible.  Now that `define' has quite a complex meaning, it is an
+# expensive operation that should be limited to macros.  Use
+# `m4_define' for temporaries.
+#
+# We kept private copies of the macros used when entering / exiting the
+# m4sugar namespace: that is much more convenient than fighting with the
+# renamed versions of `define' etc.
+#
+#
+#
+# Those two implementations suffered from serious problems:
+#
+# - namespaces were really expensive, and incurred a major performance
+#   loss on `autoconf' itself, not only `autoupdate'.  One solution
+#   would have been to limit the use of namespaces to `autoupdate', but
+#   that again adds complications to m4sugar, which really doesn't need
+#   this.  So we wanted to get rid of the namespaces.
+#
+# - since the quotes were disabled, autoupdate was sometimes making
+#   wrong guesses, for instance on:
+#
+#     foo([1, 2])
+#
+#   m4 saw 2 arguments: `[1' and `2]'.  A simple solution, somewhat
+#   fragile, is to reestablish the quotes right before all the obsolete
+#   macros, i.e., to use sed so that the previous text becomes
+#
+#     changequote([, ])foo([1, 2])
+#
+#   To this end, one wants to trace the definition of obsolete macros.
+#
+# It was there that the limitations of the namespace approach became
+# painful: because it was a complex machinery playing a lot with the
+# builtins of m4 (hence, quite fragile), tracing was almost impossible.
+#
+#
+# So this approach was dropped.
+#
+#
+# # The fourth implementation: two steps
+# # ====================================
+#
+# If you drop the use of namespaces, you can no longer compute the
+# updated value and replace the old call with it simultaneously.
+#
+# Obviously you will use m4 to compute the updated values, but you may
+# use some other tool to achieve the replacement.  Personally, I trust
+# nobody but m4 to parse m4, so below, m4 will perform both tasks.
+#
+# How can m4 be used to replace *some* macro calls with newer values?
+# Well, that's dead simple: m4 should learn the definitions of obsolete
+# macros, forget its builtins, disable the quotes, and then run on the
+# input file, which amounts to doing this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# which will result in
+#
+#     dnl The Unbelievable Truth
+#     NEW(1, 2, m4_eval(1 + 2))
+#     NEW([0, 0],
+#	  0)
+#
+# Grpmh.  Two problems.  A minor problem: it would have been much better
+# to have the `m4_eval' computed, and a major problem: you lost the
+# quotation in the result.
+#
+# Let's address the big problem first.  One solution is to define any
+# modern macro to rewrite its calls with the proper quotation, thanks to
+# `$@'.  Again, tracing the `define's makes it possible to know which
+# these macros are, so your input is:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     define([NEW], [[NEW($@)]changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     changequote([, ])NEW([0, 0],
+#	  0)
+#
+# which results in
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2],[m4_eval(1 + 2)])
+#     NEW([0, 0],[0])
+#
+# Our problem is solved, i.e., the first call to `NEW' is properly
+# quoted, but this introduced another problem: we changed the layout of
+# the second call, which can be a disaster in the case of huge macro
+# calls (think of `AC_TRY_RUN' for instance).  This example didn't show
+# it, but we also added parentheses to macros which did not have any:
+#
+#     AC_INIT
+#     => AC_INIT()
+#
+# No big deal for the semantics (unless the macro depends upon $#, which
+# is bad), but the users would not be happy.
+#
+# Additionally, we introduced quotes that were not there before, which is
+# OK in most cases, but could change the semantics of the file.
+#
+# Cruel dilemma: we do want the auto-quoting definition of `NEW' when
+# evaluating `OLD', but we don't when we evaluate the second `NEW'.
+# Back to namespaces?
+#
+# No.
+#
+#
+# # Second step: replacement
+# # ------------------------
+#
+# No, as announced above, we will work in two steps: in a first step we
+# compute the updated values, and in a second step we replace them.  Our
+# goal is something like this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([1, 2], [3])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., the new value of `OLD' is precomputed using the auto-quoting
+# definition of `NEW' and the m4 builtins.  We'll see how afterwards;
+# first, let's finish with the replacement.
+#
+# Of course the solution above is wrong: if there were other calls to
+# `OLD' with different values, we would smash them to the same value.
+# But it is quite easy to generalize the scheme above:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+#     define([OLD], [defn([OLD($@)])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., for each call to an obsolete macro, we build an array `call =>
+# value', and use a macro to dispatch these values.  This results in:
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0],
+#	  0)
+#
+# In French, we say `Youpi !', which you might roughly translate as
+# `Yippee!'.
+#
+#
+# # First step: computation
+# # -----------------------
+#
+# Let's study the anatomy of the file, and name its sections:
+#
+# prologue
+#     divert(-1)dnl
+#     changequote([, ])
+# values
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+# dispatcher
+#     define([OLD], [defn([OLD($@)])changequote()])
+# disabler
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+# input
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+#
+# # Computing the `values' section
+# # ..............................
+#
+# First we need to get the list of all the AU macro uses.  To this end,
+# first get the list of all the AU macro names by tracing `AU_DEFUN' in
+# the initialization of autoconf.  This list is computed in the file
+# `au.txt' below.
+#
+# Then use this list to trace all the AU macro uses in the input.  The
+# goal is to obtain, in the case of our example:
+#
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# This is the file `values.in' below.
+#
+# We want to evaluate this with only the builtins (in fact m4sugar), the
+# auto-quoting definitions of the new macros (`new.m4'), and the
+# definition of the old macros (`old.m4').  Computing these last two
+# files is easy: it's just a matter of using the right `--trace' option.
+#
+# So the content of `values.in' is:
+#
+#     include($autoconf_dir/m4sugar.m4)
+#     m4_include(new.m4)
+#     m4_include(old.m4)
+#     divert(0)dnl
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# We run m4 on it, which yields:
+#
+#     define([OLD([1],[2])],@<<@NEW([1, 2], [3])@>>@)
+#
+# Transform `@<<@' and `@>>@' into quotes and we get
+#
+#     define([OLD([1],[2])],[NEW([1, 2], [3])])
+#
+# This is `values.m4'.
+#
+#
+# # Computing the `dispatcher' section
+# # ..................................
+#
+# The `prologue', and the `disabler' are simple and need no commenting.
+#
+# To compute the `dispatcher' (`dispatch.m4'), again, it is a simple
+# matter of using the right `--trace'.
+#
+# Finally, the input is not exactly the input file; rather, it is the
+# input file with the added `changequote'.  To this end, we build
+# `quote.sed'.
+#
+#
+# # Putting it all together
+# # .......................
+#
+# We build the file `input.m4' which contains:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     include(values.m4)
+#     include(dispatch.m4)
+#     undefine([dnl])
+#     undefine([eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# And we just run m4 on it.  Et voila`, Monsieur !  Mais oui, mais oui.
+#
+# Well, there are a few additional technicalities.  For instance, we
+# rely on `changequote', `ifelse' and `defn', but we don't want to
+# interpret the changequotes of the user, so we simply use another name:
+# `_au_changequote' etc.
+#
+#
+# # Failure of the fourth approach
+# # ------------------------------
+#
+# This approach is heavily based on traces, but then there is an obvious
+# problem: non-expanded code will never be seen.  In particular, the body
+# of a `define' definition is not seen, so on the input
+#
+#	  define([idem], [OLD(0, [$1])])
+#
+# autoupdate would never see the `OLD', and wouldn't have updated it.
+# Worse yet, if `idem(0)' was used later, then autoupdate sees that
+# `OLD' is used, computes the result for `OLD(0, 0)' and sets up a
+# dispatcher for `OLD'.  Since there was no computed value for `OLD(0,
+# [$1])', the dispatcher would have replaced it with... nothing, leading
+# to
+#
+#	  define([idem], [])
+#
+# With some more thinking, you see that the two-step approach is wrong;
+# the namespace approach was much saner.
+#
+# But you learned a lot; in particular, you realized that using traces
+# can make it possible to simulate namespaces!
+#
+#
+#
+# # The fifth implementation: m4 namespaces by files
+# # ================================================
+#
+# The fourth implementation demonstrated something unsurprising: you
+# cannot precompute, i.e., the namespace approach was the right one.
+# Still, we no longer want namespaces; they're too expensive.  Let's have a
+# look at the way it worked.
+#
+# When updating
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# you evaluate `input.m4':
+#
+#     divert(-1)
+#     changequote([, ])
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#     ...
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# where `m4_disable' undefines the m4 builtins and m4sugar, and disables
+# the quotes and comments:
+#
+#     define([m4_disable],
+#     [undefine([__file__])
+#     ...
+#     changecom(#)
+#     changequote()])
+#
+# `m4_enable' does the converse: reestablish quotes and comments
+# --easy--, reestablish m4sugar --easy: just load `m4sugar.m4' again-- and
+# reenable the builtins.  This latter task requires that you first save
+# the builtins.  And BTW, the definition above of `m4_disable' cannot
+# work: you undefined `changequote' before using it!  So you need to use
+# your private copies of the builtins.  Let's introduce three files for
+# this:
+#
+#  `m4save.m4'
+#    moves the m4 builtins into the `_au_' pseudo namespace,
+#  `unm4.m4'
+#    undefines the builtins,
+#  `m4.m4'
+#    restores them.
+#
+# So `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#
+#     # Import AU.
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)])
+#
+#     define([_au_disable],
+#     [# Disable m4sugar.
+#     # Disable the m4 builtins.
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Based on what we learned in the fourth implementation we know that we
+# have to enable the quotes *before* any AU macro, and we know we need
+# to build autoquoting versions of the AC macros.  But the autoquoting
+# AC definitions must be disabled in the rest of the file, and enabled
+# inside AU macros.
+#
+# Using `autoconf --trace' it is easy to build the files
+#
+#   `ac.m4'
+#     define the autoquoting AC fake macros
+#   `disable.m4'
+#     undefine the m4sugar and AC autoquoting macros.
+#   `au.m4'
+#     definitions of the AU macros (such as `OLD' above).
+#
+# Now, `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#     # Import AU.
+#     include([au.m4])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)
+#     _au_include(ac.m4)])
+#
+#     define([_au_disable],
+#     [_au_include([disable.m4])
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     _au_changequote([, ])OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Finally, version V is ready.
+#
+# Well... almost.
+#
+# There is a slight problem that remains: if an AU macro OUTER includes
+# an AU macro INNER, then _au_enable will be run when entering OUTER
+# and when entering INNER (not good, but not too bad yet).  But when
+# getting out of INNER, _au_disable will disable everything while we
+# were still in OUTER.  Badaboom.
+#
+# Therefore _au_enable and _au_disable have to be written to work in
+# pairs: each _au_enable pushdef's _au_enabled, and each _au_disable
+# popdef's _au_enabled.  And of course _au_enable and _au_disable are
+# effective when _au_enabled is *not* defined.
+#
+# Finally, version V' is ready.  And there is much rejoicing.  (And I
+# have free time again.  I think.  Yeah, right.)
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/cygltdl-7.dll b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/cygltdl-7.dll
new file mode 100755
index 0000000..b7ab1dc
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/cygltdl-7.dll
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/ifnames b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/ifnames
new file mode 100755
index 0000000..69bffb2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/ifnames
@@ -0,0 +1,153 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from ifnames.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# ifnames - print the identifiers used in C preprocessor conditionals
+
+# Copyright (C) 1994, 1995, 1999, 2000, 2001, 2002, 2003, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Reads from stdin if no files are given.
+# Writes to stdout.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>
+# and Paul Eggert <eggert@twinsun.com>.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::General;
+use Autom4te::XFile;
+use Autom4te::FileUtils;
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILE]...
+
+Scan all of the C source FILES (or the standard input, if none are
+given) and write to the standard output a sorted list of all the
+identifiers that appear in those files in `#if', `#elif', `#ifdef', or
+`#ifndef' directives.  Print each identifier on a line, followed by a
+space-separated list of the files in which that identifier occurs.
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "ifnames (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Paul Eggert.
+";
+
+
+# &parse_args ()
+# --------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ();
+}
+
+
+# %OCCURRENCE
+# -----------
+my %occurrence;
+
+
+# &scan_file ($FILE-NAME)
+# -----------------------
+sub scan_file ($)
+{
+  my ($file_name) = @_;
+  my $file = new Autom4te::XFile ("< " . open_quote ($file_name));
+  while ($_ = $file->getline)
+    {
+      # Continuation lines.
+      $_ .= $file->getline
+	while (s/\\$//);
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*(if|ifdef|ifndef|elif)\s+//)
+	{
+	  # Remove comments.  Not perfect, but close enough.
+	  s(/\*.*?\*/)();
+	  s(/\*.*)();
+	  s(//.*)();
+	  foreach my $word (split (/\W+/))
+	    {
+	      next
+		if $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+	      $occurrence{$word}{$file_name} = 1;
+	    }
+	}
+    }
+}
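+
+# A sketch of the effect (foo.c is a hypothetical input file): scanning
+#     #if defined(HAVE_UNISTD_H) && !BROKEN_STAT \
+#         || USE_STUB
+# records HAVE_UNISTD_H, BROKEN_STAT and USE_STUB under foo.c in
+# %occurrence ("defined" is skipped), and the main loop below prints
+# lines such as "HAVE_UNISTD_H foo.c".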
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+parse_args();
+foreach (@ARGV)
+  {
+    scan_file ($_);
+  }
+foreach (sort keys %occurrence)
+  {
+    print "$_ ", join (' ', sort keys %{$occurrence{$_}}), "\n";
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/libtoolize b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/libtoolize
new file mode 100755
index 0000000..9613421
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/libtoolize
@@ -0,0 +1,2555 @@
+#! /bin/sh
+# Generated from libtoolize.m4sh by GNU Autoconf 2.68.
+
+# libtoolize (GNU libtool) 2.4.2
+# Written by Gary V. Vaughan <gary@gnu.org>, 2003
+
+# Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# Libtoolize is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Libtoolize is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with libtoolize; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]...
+#
+# Prepare a package to use libtool.
+#
+#   -c, --copy          copy files rather than symlinking them
+#       --debug         enable verbose shell tracing
+#   -n, --dry-run       print commands rather than running them
+#   -f, --force         replace existing files
+#   -i, --install       copy missing auxiliary files
+#       --ltdl[=DIR]    install libltdl sources [default: libltdl]
+#       --no-warn       don't display warning messages
+#       --nonrecursive  prepare ltdl for non-recursive make
+#   -q, --quiet         work silently
+#       --recursive     prepare ltdl for recursive make
+#       --subproject    prepare ltdl to configure and build independently
+#   -v, --verbose       verbosely report processing
+#       --version       print version information and exit
+#   -h, --help          print short or long help message
+#
+# The following space or comma delimited options can be passed to $progname
+# via the environment variable LIBTOOLIZE_OPTIONS; unknown environment
+# options are ignored:
+#
+#   --debug             enable verbose shell tracing
+#   --no-warn           don't display warning messages
+#   --quiet             work silently
+#   --verbose           verbosely report processing
+#
+# You must `cd' to the top directory of your package before you run
+# `$progname'.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#       host-triplet:	i686-pc-cygwin
+#       $progname:	(GNU libtool) 2.4.2
+#       automake:		$automake_version
+#       autoconf:		$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+: ${TAR=tar}
+
+PROGRAM=libtoolize
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${EGREP="/usr/bin/grep -E"}
+: ${FGREP="/usr/bin/grep -F"}
+: ${GREP="/usr/bin/grep"}
+: ${LN_S="ln -s"}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SED="/usr/bin/sed"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+} # func_dirname may be replaced by extended shell implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+} # func_basename may be replaced by extended shell implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    # Extract subdirectory from the argument.
+    func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+    func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+} # func_dirname_and_basename may be replaced by extended shell implementation
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    case ${2} in
+      .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+      *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+    esac
+} # func_stripname may be replaced by extended shell implementation
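+
+# For instance (illustrative values only):
+#     func_stripname 'lib' '.la' 'libltdl.la'
+# leaves "ltdl" in $func_stripname_result.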
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
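+
+# For instance (illustrative path only):
+#     func_normal_abspath '/usr//local/../bin/.'
+# sets func_normal_abspath_result to "/usr/bin".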
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
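+
+# For instance (illustrative directories only):
+#     func_relative_path '/usr/local/lib' '/usr/local/share/doc'
+# sets func_relative_path_result to "../share/doc/".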
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same.  If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$': backslashify that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon delimited
+	# list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes and that avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
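+
+# For illustration only (not from the upstream script): a call such as
+#   my_workdir=`func_mktempdir`
+# prints (and, outside dry-run mode, creates) a directory named roughly
+# ${TMPDIR-/tmp}/$progname-XXXXXXXX, falling back to a $RANDOM/$$ suffix
+# when mktemp(1) is unavailable.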
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
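+
+# Illustrative example (not in the upstream source), assuming
+# $sed_quote_subst (defined earlier in this script) backslashifies
+# the \, `, " and $ characters: after
+#   func_quote_for_eval 'a "b" $c'
+# $func_quote_for_eval_unquoted_result holds the backslashified string,
+# and $func_quote_for_eval_result additionally wraps it in double
+# quotes, ready for a later eval.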
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, unless opt_dry_run is
+# true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
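+
+# For illustration (not part of the upstream script):
+#   func_show_eval '$RM conftest.c' 'exit_status=$EXIT_FAILURE'
+# echoes the command unless $opt_silent is true, runs it unless
+# $opt_dry_run is true, and evaluates the second argument if it fails.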
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, unless opt_dry_run is
+# true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
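+
+# For illustration (not part of the upstream script):
+#   func_tr_sh "2nd-choice.dir"
+# sets $func_tr_sh_result to `_2nd_choice_dir'.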
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    my_sed_short_opt='1s/^\(..\).*$/\1/;q'
+    my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
+
+    func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
+    func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
+} # func_split_short_opt may be replaced by extended shell implementation
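+
+# For illustration (not part of the upstream script):
+#   func_split_short_opt -cfoo
+# sets $func_split_short_opt_name to `-c' and
+# $func_split_short_opt_arg to `foo'.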
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
+    my_sed_long_arg='1s/^--[^=]*=//'
+
+    func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
+    func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
+} # func_split_long_opt may be replaced by extended shell implementation
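+
+# For illustration (not part of the upstream script):
+#   func_split_long_opt --ltdl=libltdl
+# sets $func_split_long_opt_name to `--ltdl' and
+# $func_split_long_opt_arg to `libltdl'.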
+
+exit_cmd=:
+
+
+
+
+
+# ltdl can be installed to be self-contained (subproject, the default);
+# or to be configured by a parent project, either with a recursive or
+# nonrecursive automake driven make:
+ltdl_mode=
+
+# Locations for important files:
+ltdldir=
+
+# Parse environment options
+{
+  my_sed_env_opt='1s/^\([^,:; ]*\).*$/\1/;q'
+  my_sed_env_rest='1s/^[^,:; ]*[,:; ]*\(.*\)$/\1/;q'
+
+  while test -n "$LIBTOOLIZE_OPTIONS"; do
+    opt=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_opt"`
+    LIBTOOLIZE_OPTIONS=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_rest"`
+
+    case $opt in
+      --debug|--no-warn|--quiet|--verbose)
+		envopts="${envopts+$envopts }$opt"			  ;;
+      --*)	env_warning="${env_warning+$env_warning
+}unrecognized environment option \`$opt'" 				  ;;
+      *)	func_fatal_help "garbled LIBTOOLIZE_OPTIONS near \`$opt'" ;;
+    esac
+  done
+
+  test -n "$envopts" && {
+    func_quote_for_eval "$envopts"
+    eval set dummy "$func_quote_for_eval_result" ${1+"$@"}
+    shift
+  }
+}
+
+
+
+# Option defaults:
+opt_debug=:
+opt_copy=false
+opt_force=false
+opt_install=false
+opt_dry_run=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+opt_nonrecursive=false
+opt_recursive=false
+opt_standalone=false
+opt_ltdl="false"
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --copy|-c)
+			opt_copy=:
+			;;
+      --force|-f)
+			opt_force=:
+			;;
+      --install|-i)
+			opt_install=:
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+CP="func_echo_all $CP"
+test -n "$LN_S" && LN_S="func_echo_all $LN_S"
+MKDIR="func_echo_all $MKDIR"
+RM="func_echo_all $RM"
+TAR="func_echo_all $TAR"
+			;;
+      --quiet|--automake|-q)
+			opt_quiet=:
+			;;
+      --verbose|-v)
+			opt_verbose=:
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+			;;
+      --nonrecursive|--non-recursive)
+			opt_nonrecursive=:
+			;;
+      --recursive)
+			opt_recursive=:
+			;;
+      --standalone)
+			opt_standalone=:
+			;;
+      --ltdl)
+			optarg="$1"
+			if test $# -gt 0; then
+			    case $optarg in # ((
+			        -*) ;;
+			        *) opt_ltdl="$optarg"; shift ;;
+			    esac
+			fi
+# This is tricky, since we're overloading $opt_ltdl to be the
+# optarg for --ltdl during option processing, but then stashing
+# the (optional) optarg in $ltdldir and reusing $opt_ltdl to
+# indicate that --ltdl was seen during option processing.  Also,
+# be careful that --ltdl=foo --ltdl=bar results in ltdldir=bar:
+case $opt_ltdl in
+          false|:) ;;  # a bare '--ltdl' followed by another option
+  *)       ltdldir=`$ECHO "$optarg" | $SED 's,/*$,,'` ;;
+esac
+opt_ltdl=:
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-c*|-f*|-i*|-n*|-q*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # show any warnings saved by LIBTOOLIZE_OPTIONS parsing
+  test -n "$env_warning" &&
+    echo "$env_warning" |while read line; do func_warning "$line"; done
+
+  # validate $opt_nonrecursive, $opt_recursive and $opt_standalone
+  if $opt_nonrecursive; then
+    if $opt_recursive || $opt_standalone; then
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    fi
+    ltdl_mode=nonrecursive
+  elif $opt_recursive; then
+    $opt_standalone &&
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    ltdl_mode=recursive
+  elif $opt_standalone; then
+    ltdl_mode=standalone
+  fi
+
+  # any remaining arguments are an error
+  test $# -gt 0 &&
+    func_fatal_help "unknown additional arguments: \`${1+}'"
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+# func_echo_once msg_var
+# Calls func_echo with the value of MSG_VAR, and then sets MSG_VAR="" so
+# that subsequent calls have no effect.
+func_echo_once ()
+{
+    $opt_debug
+    if test -n "$1"; then
+      eval my_msg=\$$1
+
+      if test -n "$my_msg"; then
+        func_echo "$my_msg"
+        eval $1=""
+      fi
+    fi
+}
+
+
+# func_copy srcfile destfile [msg_var]
+# A wrapper for func_copy_cb that accepts arguments in the same order
+# as the cp(1) shell command.
+func_copy ()
+{
+    $opt_debug
+
+    test -f "$1" || \
+      { func_error "\`$1' not copied:  not a regular file"; return 1; }
+
+    func_dirname_and_basename "$1"
+    my_f1=$func_basename_result
+
+    if test -d "$2"; then
+
+      func_copy_cb "$my_f1" \
+	`$ECHO "$1" | $SED "$dirname"` "$2" "$3"
+
+    else
+
+      # Supporting this would mean changing the timestamp:
+      func_dirname_and_basename "$2"
+      my_tname=$func_basename_result
+      test "X$my_f1" = "X$my_tname" \
+        || func_fatal_error "func_copy() cannot change filename on copy"
+
+      func_copy_cb "$my_f1" \
+        `$ECHO "$1" | $SED "$dirname"` \
+        `$ECHO "$2" | $SED "$dirname"` \
+	"$3"
+
+    fi
+
+    return $copy_return_status # set in func_copy_cb
+}
+
+
+# func_copy_cb filename srcdir destdir [msg_var]
+# If option `--copy' was specified, or soft-linking SRCFILE to DESTFILE fails,
+# then try to copy SRCFILE to DESTFILE (without changing the timestamp if
+# possible).
+func_copy_cb ()
+{
+    $opt_debug
+    my_file="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    copy_return_status=1
+
+    # Libtool is probably misinstalled if this happens:
+    test -f "$my_srcdir/$my_file" ||
+        func_fatal_error "\`$my_file' not found in \`$my_srcdir'"
+
+    case $opt_verbose in
+      false) my_copy_msg="file \`$my_destdir/$my_file'"     ;;
+      *)     my_copy_msg="file from \`$my_srcdir/$my_file'" ;;
+    esac
+    func_mkdir_p `$ECHO "$my_destdir/$my_file" | $SED "$dirname"`
+
+    $RM "$my_destdir/$my_file"
+    if $opt_copy; then
+      if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+           | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1; } \
+	 && touch "$my_destdir/$my_file"; then
+	$opt_quiet || func_echo_once "$my_msg_var"
+	$opt_quiet || func_echo "copying $my_copy_msg"
+	copy_return_status=0
+      fi
+    else
+      if test "$my_file" = "aclocal.m4"; then
+	if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+	     | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1 ; }
+	then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "copying $my_copy_msg"
+	  copy_return_status=0
+	fi
+      else
+	if $LN_S "$my_srcdir/$my_file" "$my_destdir/$my_file"; then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "linking $my_copy_msg"
+	  copy_return_status=0
+	fi
+      fi
+    fi
+    if test "$copy_return_status" != 0; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      func_error "can not copy \`$my_srcdir/$my_file' to \`$my_destdir/'"
+      exit_status=$EXIT_FAILURE
+    fi
+}
+
+
+# func_copy_some_files srcfile_spec srcdir destdir [msg_var] [cb=func_copy_cb]
+# Call COPY_CB for each regular file in SRCDIR named by the ':' delimited
+# names in SRCFILE_SPEC.  The odd calling convention is needed to allow
+# spaces in file and directory names.
+func_copy_some_files ()
+{
+    $opt_debug
+    my_srcfile_spec="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_copy_cb="${5-func_copy_cb}"
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for my_filename in $my_srcfile_spec; do
+      IFS="$my_save_IFS"
+      if test -f "$my_srcdir/$my_filename"; then
+        if test "X$my_copy_cb" = Xfunc_copy_cb; then
+	  $opt_force || if test -f "$my_destdir/$my_filename"; then
+	    $opt_quiet || func_echo_once "$my_msg_var"
+	    $opt_quiet \
+	      || func_error "\`$my_destdir/$my_filename' exists: use \`--force' to overwrite"
+	    continue
+	  fi
+        fi
+      else
+	func_echo_once "$my_msg_var"
+	func_fatal_error "\`$my_filename' not found in \`$my_srcdir'"
+      fi
+
+      $my_copy_cb "$my_filename" "$my_srcdir" "$my_destdir" "$my_msg_var"
+    done
+    IFS="$my_save_IFS"
+}
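+
+# For illustration (not part of the upstream script): a call such as
+#   func_copy_some_files "ltdl.c:ltdl.h" "$pkgltdldir/libltdl" "$ltdldir" \
+#     pkgltdl_header
+# links (or, with --copy, copies) each named file into $ltdldir,
+# refusing to overwrite existing files unless --force was given.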
+
+
+# func_fixup_Makefile srcfile srcdir destdir
+func_fixup_Makefile ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_fixup_non_subpackage_script="\
+      s,(LIBOBJS),(ltdl_LIBOBJS),g
+      s,(LTLIBOBJS),(ltdl_LTLIBOBJS),g
+      s,libltdl/configure.ac,,
+      s,libltdl/configure,,
+      s,libltdl/aclocal.m4,,
+      s,libltdl/config-h.in,,
+      s,libltdl/Makefile.am,,
+      s,libltdl/Makefile.in,,
+      /^[	 ]*\\\\\$/d"
+    case $my_filename in
+      Makefile.am)
+	my_fixup_non_subpackage_script=`echo "$my_fixup_non_subpackage_script" | \
+		sed 's,libltdl/,,'`
+	my_fixup_inc_paths_script= ;;
+      Makefile.inc)
+	repl=$ltdldir
+	repl_uscore=`$ECHO "$repl" | $SED 's,[/.+-],_,g'`
+	my_fixup_inc_paths_script="\
+	  s,libltdl_,@repl_uscore@_,
+	  s,libltdl/,@repl@/,
+	  s,: libltdl/,: @repl@/,
+	  s, -Ilibltdl , -I@repl@ ,
+	  s,\\\$(libltdl_,\$(@repl_uscore@_,
+	  s,)/libltdl ,)/@repl@ ,
+	  s,@repl_uscore@,${repl_uscore},g
+	  s,@repl@,${repl},g"
+	;;
+    esac
+
+    $RM "$my_destdir/$my_filename" 2>/dev/null
+    $opt_quiet || func_echo "creating file \`$my_destdir/$my_filename'"
+    if $opt_dry_run; then :;
+    else
+      $SED "$my_fixup_non_subpackage_script
+	    $my_fixup_inc_paths_script" \
+	< "$my_srcdir/$my_filename" > "$my_destdir/$my_filename" ||
+	func_fatal_error "cannot create $my_destdir/$my_filename"
+    fi
+}
+
+# func_scan_files
+# Scan configure.(ac|in) and aclocal.m4 (if present) for use of libltdl
+# and libtool, possibly running some of these tools if necessary.
+# Libtoolize affects the contents of aclocal.m4, and should be run before
+# aclocal, so we can't use configure --trace which relies on a consistent
+# configure.(ac|in) and aclocal.m4.
+func_scan_files ()
+{
+    $opt_debug
+    # Prefer configure.ac to configure.in
+    test -f configure.ac && configure_ac=configure.ac
+    test -f "$configure_ac" || configure_ac=
+
+    # Set local variables to reflect contents of configure.ac
+    my_sed_scan_configure_ac='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,;
+	/AC_INIT/ {
+	    s,^.*$,seen_autoconf=:,
+	    p
+	}
+	d'
+    test -z "$configure_ac" \
+        || eval `$SED "$my_sed_scan_configure_ac" "$configure_ac"`
+
+    $seen_autoconf || {
+	my_configure_ac=
+	test -n "$configure_ac" && my_configure_ac="$configure_ac: "
+        func_verbose "${my_configure_ac}not using Autoconf"
+
+	# Make sure ltdldir and ltdl_mode have sensible defaults
+        # since we return early here:
+	test -n "$ltdldir" || ltdldir=libltdl
+	test -n "$ltdl_mode" || ltdl_mode=subproject
+
+	return
+    }
+
+    # ---------------------------------------------------- #
+    # Probe macro usage in configure.ac and/or aclocal.m4. #
+    # ---------------------------------------------------- #
+
+    my_sed_traces='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,
+        s,^.*AC_REQUIRE(.*$,,; s,^.*m4_require(.*$,,;
+	s,^.*m4_define(.*$,,
+	s,^.*A[CU]_DEFUN(.*$,,; s,^.*m4_defun(.*$,,
+	/AC_CONFIG_AUX_DIR(/ {
+	    s,^.*AC_CONFIG_AUX_DIR([[	 ]*\([^])]*\).*$,ac_auxdir=\1,
+	    p
+        }
+	/AC_CONFIG_MACRO_DIR(/ {
+	    s,^.*AC_CONFIG_MACRO_DIR([[	 ]*\([^])]*\).*$,ac_macrodir=\1,
+	    p
+        }
+	/_LT_CONFIG_LTDL_DIR(/d
+	/LT_CONFIG_LTDL_DIR(/ {
+	    s,^.*LT_CONFIG_LTDL_DIR([[	 ]*\([^])]*\).*$,ac_ltdldir=\1,
+	    p
+	}
+	/\[A[CM]_PROG_LIBTOOL/d
+	/A[CM]_PROG_LIBTOOL/ {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/the.*option into.*LT_INIT.*parameter/d
+	/\[LT_INIT/d
+	/LT_INIT/		 {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/\[LTDL_INIT/d
+	/LTDL_INIT/          {
+	    s,^.*LTDL_INIT([[	 ]*\([^])]*\).*$,ltdl_options="\1",
+	    s,^.*LTDL_INIT[	 ]*$,seen_ltdl=:,
+	    p
+	}
+	/LT_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_LIB_LTDL/        {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	d'
+    eval `cat aclocal.m4 "$configure_ac" 2>/dev/null | $SED "$my_sed_traces"`
+
+
+    # ----------------- #
+    # Validate ltdldir. #
+    # ----------------- #
+
+    ac_ltdldir=`$ECHO "$ac_ltdldir" | $SED 's,/*$,,'`
+
+    # If $configure_ac contains AC_CONFIG_LTDL_DIR, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ac_ltdldir" in
+      *\$*)
+        func_fatal_error "can not handle variables in LT_CONFIG_LTDL_DIR"
+        ;;
+    esac
+
+    # If neither --ltdl nor LT_CONFIG_LTDL_DIR are specified, default to
+    # `libltdl'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given! (If LT_CONFIG_LTDL_DIR is not specified
+    # we suggest adding it later in this code.)
+    case x$ac_ltdldir,x$ltdldir in
+      x,x)	ltdldir=libltdl		;;
+      x*,x)	ltdldir=$ac_ltdldir	;;
+      x,x*)	ltdldir=$ltdldir	;;
+      *)
+        test x"$ac_ltdldir" = x"$ltdldir" || \
+	    func_fatal_error "--ltdl='$ltdldir' does not match LT_CONFIG_LTDL_DIR($ac_ltdldir)"
+	;;
+    esac
+
+
+    # ------------------- #
+    # Validate ltdl_mode. #
+    # ------------------- #
+
+    test -n "$ltdl_options" && seen_ltdl=:
+
+    # If $configure_ac contains LTDL_INIT, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ltdl_options" in
+      *\$*)
+        func_fatal_error "can not handle variables in LTDL_INIT"
+        ;;
+    esac
+
+    # Extract mode name from ltdl_options
+    # FIXME: Diagnose multiple conflicting modes in ltdl_options
+    ac_ltdl_mode=
+    case " $ltdl_options " in
+      *" nonrecursive "*)  ac_ltdl_mode=nonrecursive	;;
+      *" recursive "*)     ac_ltdl_mode=recursive	;;
+      *" subproject "*)    ac_ltdl_mode=subproject	;;
+    esac
+
+    # If neither --ltdl nor an LTDL_INIT mode are specified, default to
+    # `subproject'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given!
+    case x$ac_ltdl_mode,x$ltdl_mode in
+      x,x)	ltdl_mode=subproject	;;
+      x*,x)	ltdl_mode=$ac_ltdl_mode	;;
+      x,x*)	ltdl_mode=$ltdl_mode	;;
+      *)
+        test x"$ac_ltdl_mode" = x"$ltdl_mode" || \
+	    func_fatal_error "--$ltdl_mode does not match LTDL_INIT($ac_ltdl_mode)"
+	;;
+    esac
+
+    # ---------------- #
+    # Validate auxdir. #
+    # ---------------- #
+
+    if test -n "$ac_auxdir"; then
+      # If $configure_ac contains AC_CONFIG_AUX_DIR, check that it was
+      # not given in terms of a shell variable!
+      case "$ac_auxdir" in
+      *\$*)
+        func_fatal_error "can not handle variables in AC_CONFIG_AUX_DIR"
+        ;;
+      *)
+	auxdir=$ac_auxdir
+	;;
+      esac
+    else
+      # Try to discover auxdir the same way it is discovered by configure.
+      # Note that we default to the current directory.
+      for dir in . .. ../..; do
+        if test -f "$dir/install-sh"; then
+          auxdir=$dir
+          break
+        elif test -f "$dir/install.sh"; then
+          auxdir="$dir"
+          break
+        fi
+      done
+    fi
+
+    # Just use the current directory if all else fails.
+    test -n "$auxdir" || auxdir=.
+
+
+    # ------------------------------ #
+    # Find local m4 macro directory. #
+    # ------------------------------ #
+
+    # Hunt for ACLOCAL_AMFLAGS in `Makefile.am' for a `-I' argument.
+
+    my_sed_aclocal_flags='
+        /^[	 ]*ACLOCAL_[A-Z_]*FLAGS[	 ]*=[	 ]*/ {
+	    s,,,
+	    q
+	}
+	d'
+    if test -f Makefile.am; then
+      my_macrodir_is_next=false
+      for arg in `$SED "$my_sed_aclocal_flags" Makefile.am`; do
+        if $my_macrodir_is_next; then
+          am_macrodir="$arg"
+          break
+        else
+	  case $arg in
+	    -I) my_macrodir_is_next=: ;;
+	    -I*)
+	      am_macrodir=`$ECHO "$arg" | sed 's,^-I,,'`
+	      break
+	      ;;
+	    *) my_macrodir_is_next=false ;;
+	  esac
+        fi
+      done
+    fi
+
+    macrodir="$ac_macrodir"
+    test -z "$macrodir" && macrodir="$am_macrodir"
+
+    if test -n "$am_macrodir" && test -n "$ac_macrodir"; then
+      test "$am_macrodir" = "$ac_macrodir" \
+        || func_fatal_error "AC_CONFIG_MACRO_DIR([$ac_macrodir]) conflicts with ACLOCAL_AMFLAGS=-I $am_macrodir."
+    fi
+}
+
+# func_included_files searchfile
+# Output SEARCHFILE followed, recursively, by every file it m4_includes.
+func_included_files ()
+{
+    $opt_debug
+    my_searchfile="$1"
+
+    my_include_regex=
+    my_sed_include='
+        /^m4_include(\[.*\])$/ {
+	    s,^m4_include(\[\(.*\)\])$,\1,
+	    p
+	}
+        d'
+
+    if test -f "$my_searchfile"; then
+      $ECHO "$my_searchfile"
+
+      # Only recurse when we don't care if all the variables we use get
+      # trashed, since they are in global scope.
+      for my_filename in `$SED "$my_sed_include" "$my_searchfile"`; do
+	func_included_files $my_filename
+      done
+    fi
+}
+
+
+# func_serial filename [macro_regex]
+# Output the value of the serial number comment in FILENAME, where the
+# comment line must also match MACRO_REGEX, if given.
+func_serial ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_macro_regex="$2"
+    my_sed_serial='
+	/^# serial [1-9][0-9.]*[	 ]*'"$my_macro_regex"'[	 ]*$/ {
+	    s,^# serial \([1-9][0-9.]*\).*$,\1,
+	    q
+	}
+	d'
+
+    # Search FILENAME and all the files it m4_includes for a serial number
+    # in the file that AC_DEFUNs MACRO_REGEX.
+    my_serial=
+    func_dirname_and_basename "$my_filename"
+    my_filebase=$func_basename_result
+    for my_file in `func_included_files "$my_filename"`; do
+      if test -z "$my_macro_regex" ||
+         test "$my_filename" = aclocal.m4 ||
+         test "X$my_macro_regex" = "X$my_filebase" ||
+         func_grep '^AC_DEFUN(\['"$my_macro_regex" "$my_file"
+      then
+        my_serial=`$SED -e "$my_sed_serial" "$my_file"`
+	break
+      fi
+    done
+
+    # If the file has no serial number, we assume it's ancient.
+    test -n "$my_serial" || my_serial=0
+
+    $ECHO "$my_serial"
+}
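+
+# For illustration (not part of the upstream script): if aclocal.m4
+# contains a line such as
+#   # serial 56 LT_INIT
+# then `func_serial aclocal.m4 LT_INIT' outputs `56'; a file with no
+# serial line yields `0'.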
+
+
+# func_serial_max serial1 serial2
+# Compare (possibly multi-part, '.' delimited) serial numbers, and
+# return the largest in $func_serial_max_result.  If they are the
+# same, func_serial_max_result will be empty.
+func_serial_max ()
+{
+    $opt_debug
+    my_serial1="$1"
+    my_serial2="$2"
+
+    my_sed_dot='s/\..*$//g'
+    my_sed_rest='s/^[0-9][1-9]*\.*//'
+    my_sed_digits='s/[^0-9.]//g'
+
+    # In case they turn out to be the same, we'll set it to empty
+    func_serial_max_result=
+
+    test "X$1$2" = X`$ECHO "$1$2" | $SED "$my_sed_digits"` || {
+      func_error "serial numbers \`$1' or \`$2' contain non-digit chars"
+      return
+    }
+
+    while test -n "$my_serial1$my_serial2"; do
+      my_serial1_part=`$ECHO "$my_serial1" | $SED "$my_sed_dot"`
+      my_serial2_part=`$ECHO "$my_serial2" | $SED "$my_sed_dot"`
+
+      test -z "$my_serial1_part$my_serial2_part" \
+        && break
+
+      test -z "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      test -z "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial1_part" -gt "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial2_part" -gt "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      my_serial1=`$ECHO "$my_serial1" | $SED "$my_sed_rest"`
+      my_serial2=`$ECHO "$my_serial2" | $SED "$my_sed_rest"`
+    done
+}
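+
+# For illustration (not part of the upstream script):
+#   func_serial_max 2.4.2 2.10
+# leaves $func_serial_max_result set to `2.10' (components are compared
+# numerically, one dot-separated part at a time); equal serials leave
+# the result empty.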
+
+
+# func_serial_update_check srcfile src_serial destfile dest_serial
+# Unless SRC_SERIAL is newer than DEST_SERIAL, set
+# $func_serial_update_check_result to 'false'.
+func_serial_update_check ()
+{
+    $opt_debug
+    my_srcfile="$1"
+    my_src_serial="$2"
+    my_destfile="$3"
+    my_dest_serial="$4"
+    my_update_p=:
+
+    if test -f "$my_destfile"; then
+      test "X$my_src_serial" = "X0" && {
+        func_warning "no serial number on \`$my_srcfile', not copying."
+	return
+      }
+
+      # Determine whether the destination has an older serial.
+      func_serial_max "$my_src_serial" "$my_dest_serial"
+      test "X$my_src_serial" = "X$func_serial_max_result" || my_update_p=false
+
+      test "X$my_src_serial" = "X$func_serial_max_result" \
+        && func_verbose "\`$my_srcfile' is serial $my_src_serial, greater than $my_dest_serial in \`$my_destfile'"
+
+      if test "X$my_dest_serial" = "X$func_serial_max_result"; then
+        func_verbose "\`$my_srcfile' is serial $my_src_serial, less than $my_dest_serial in \`$my_destfile'"
+	$opt_force || if test -n "$ac_macrodir$ac_ltdldir"; then
+           func_error "\`$my_destfile' is newer: use \`--force' to overwrite"
+        fi
+      fi
+    fi
+
+    func_serial_update_check_result="$my_update_p"
+}
+
+
+# func_aclocal_update_check filename
+# Unless the serial number of FILENAME is newer than the matching serial
+# number in aclocal.m4, set $func_aclocal_update_check_result to 'false'.
+func_aclocal_update_check ()
+{
+    $opt_debug
+    my_srcfile="$aclocaldir/$1"
+    my_destfile="aclocal.m4"
+
+    case $need in
+      libtool.m4)
+	my_src_serial=`func_serial "$my_srcfile" LT_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LT_INIT`
+
+	# Strictly, this libtoolize ought not to have to deal with ancient
+	# serial formats, but we accept them here to be complete:
+	test "X$my_src_serial" = "X0" &&
+	  my_src_serial=`func_serial "$my_srcfile" 'A[CM]_PROG_LIBTOOL'`
+	test "X$my_dest_serial" = "X0" &&
+	  my_dest_serial=`func_serial "$my_destfile" 'A[CM]_PROG_LIBTOOL'`
+	;;
+      ltdl.m4)
+	my_src_serial=`func_serial "$my_srcfile" LTDL_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LTDL_INIT`
+	;;
+      *)
+	my_src_serial=`func_serial "$my_srcfile" "$need"`
+	my_dest_serial=`func_serial "$my_destfile" "$need"`
+	;;
+    esac
+
+    func_serial_update_check \
+      "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+
+    func_aclocal_update_check_result="$func_serial_update_check_result"
+}
+
+
+# func_serial_update filename srcdir destdir [msg_var] [macro_re] [old_macro_re]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer serial number, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.  If given, MACRO_REGEX or
+# OLD_MACRO_REGEX must match any text after "# serial N" in both files.
+func_serial_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_macro_regex="$5"
+    my_old_macro_regex="$6"
+
+    my_serial_update_p=:
+    my_return_status=1
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`func_serial "$my_srcfile" "$my_macro_regex"`
+      my_dest_serial=`func_serial "$my_destfile" "$my_macro_regex"`
+
+      # Strictly, this libtoolize ought not to have to deal with ancient
+      # serial formats, but we accept them here to be complete:
+      test "X$my_src_serial" = "X0" &&
+        my_src_serial=`func_serial "$my_srcfile" "$my_old_macro_regex"`
+
+      test "X$my_dest_serial" = "X0" &&
+        my_dest_serial=`func_serial "$my_destfile" "$my_old_macro_regex"`
+
+      func_serial_update_check \
+        "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_serial_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_serial_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+      my_return_status=$?
+    elif $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      $opt_quiet \
+        || func_echo "\`$my_destfile' is already up to date."
+    fi
+
+    # Do this after the copy for a hand-maintained `aclocal.m4', in case
+    # it has `m4_include([DESTFILE])', so the copy effectively already
+    # updated `aclocal.m4'.
+    my_included_files=`func_included_files aclocal.m4`
+    case `echo " $my_included_files " | $NL2SP` in
+
+      # Skip included files:
+      *" $my_destfile "*) ;;
+
+      # Otherwise compare to aclocal.m4 serial number (func_serial
+      # returns 0 for older macro serial numbers before we provided
+      # serial tags, so the update message will be correctly given
+      # if aclocal.m4 contains an untagged, i.e. older, macro file):
+      *)
+        if test -f aclocal.m4; then
+          func_serial_max \
+              "$my_src_serial" `func_serial aclocal.m4 "$my_macro_regex"`
+          if test "X$my_src_serial" = "X$func_serial_max_result"; then
+              func_echo_once "$my_msg_var"
+	      func_echo "You should add the contents of \`$my_destfile' to \`aclocal.m4'."
+          fi
+        fi
+        ;;
+    esac
+    return $my_return_status
+}
+
+
+# func_keyword_update filename srcdir destdir sed_script [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision according to the serial number extracted by
+# SED_SCRIPT, or DESTFILE does not yet exist, or the user specified
+# `--force' at the command line.
+func_keyword_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_sed_script="$4"
+    my_msg_var="$5"
+
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    my_keyword_update_p=:
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`$SED -e "$my_sed_script" "$my_srcfile"`
+      test -z "$my_src_serial" && {
+        func_warning "no serial number in \`$my_srcfile', not copying."
+	return
+      }
+
+      my_dest_serial=`$SED -e "$my_sed_script" "$my_destfile"`
+      test -n "$my_dest_serial" || my_dest_serial=0
+
+      func_serial_update_check \
+         "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_keyword_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_keyword_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+    elif $opt_verbose || $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      func_echo_once "$my_msg_var"
+      func_echo "\`$my_destfile' is already up to date."
+    fi
+}
+
+
+# func_ltmain_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_ltmain_update ()
+{
+    $opt_debug
+    my_sed_ltmain='
+	/^package_revision='\''*[0-9][1-9.]*'\''*/ {
+	    s,^package_revision='\''*\([0-9.]*\)'\''*[	 ]*$,\1,
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_ltmain" "$4"
+
+    return $my_return_status
+}
+
+
+# func_config_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_config_update ()
+{
+    $opt_debug
+    my_sed_config='
+	/^timestamp='\''*[0-9][1-9-]*'\''*/ {
+	    s,^timestamp='\''*\([0-9-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_config" "$4"
+
+    return $my_return_status
+}
+
+
+# func_install_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_install_update ()
+{
+    $opt_debug
+    my_sed_install='
+	/^scriptversion='\''*[0-9][1-9.-]*'\''*/ {
+	    s,[#;].*,,
+	    s,^scriptversion='\''*\([0-9.-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_install" "$4"
+
+    return $my_return_status
+}
+
+
+# func_massage_aclocal_DATA [glob_exclude]
+# @aclocal_DATA\@ is substituted as per its value in Makefile.am;
+# this function massages it into a suitable format for func_copy_some_files.
+func_massage_aclocal_DATA ()
+{
+    $opt_debug
+    pkgmacro_files=     # GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgmacro_files from the value used in Makefile.am.
+    for my_filename in m4/argz.m4 m4/libtool.m4 m4/ltdl.m4 m4/ltoptions.m4 m4/ltsugar.m4 m4/ltversion.m4 m4/lt~obsolete.m4; do
+      func_dirname_and_basename "$my_filename"
+      my_filename=$func_basename_result
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      pkgmacro_files="$pkgmacro_files:$my_filename"
+    done
+
+    # strip spurious leading `:'
+    pkgmacro_files=`$ECHO "$pkgmacro_files" | $SED 's,^:*,,'`
+}
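+
+# For illustration (not part of the upstream script): the call made below,
+#   func_massage_aclocal_DATA 'argz.m4|libtool.m4|ltdl.m4'
+# leaves pkgmacro_files set to
+# `ltoptions.m4:ltsugar.m4:ltversion.m4:lt~obsolete.m4'.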
+
+
+# func_install_pkgmacro_subproject
+# Unless --quiet was passed, display a message. Then copy pkgmacro_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgmacro_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$subproject_macrodir/$file" && func_verbose "rm -f '$subproject_macrodir/$file'"
+      rm -f "$subproject_macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test "x$macrodir" != "x$subproject_macrodir"; then
+      pkgmacro_header="putting macros in \`$subproject_macrodir'."
+    elif test -n "$subproject_macrodir"; then
+      pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$subproject_macrodir'."
+    fi
+
+    func_copy_some_files "argz.m4:libtool.m4:ltdl.m4:$pkgmacro_files" \
+      "$aclocaldir" "$subproject_macrodir" pkgmacro_header
+}
+
+
+# func_install_pkgmacro_parent
+# Unless --quiet was passed, or AC_CONFIG_MACRO_DIR was not seen, display
+# a message.  Then update appropriate macros if newer ones are available
+# from the libtool installation tree.
+func_install_pkgmacro_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$macrodir/$file" && func_verbose "rm -f '$macrodir/$file'"
+      rm -f "$macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test -n "$ac_macrodir"; then
+      my_pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$ac_macrodir'."
+    elif test -n "$macrodir"; then
+      my_pkgmacro_header="putting macros in \`$macrodir'."
+    fi
+
+    if $opt_ltdl; then
+      func_serial_update argz.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header argz.m4
+    else
+      func_verbose "Not copying \`$macrodir/argz.m4', libltdl not used."
+    fi
+
+    func_serial_update  libtool.m4 "$aclocaldir" "$macrodir" \
+      my_pkgmacro_header LT_INIT 'A[CM]_PROG_LIBTOOL'
+
+    if $opt_ltdl; then
+      func_serial_update ltdl.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header 'LTDL_INIT'
+    else
+      func_verbose "Not copying \`$macrodir/ltdl.m4', libltdl not used."
+    fi
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for file in $pkgmacro_files; do
+      IFS="$my_save_IFS"
+      func_serial_update "$file" "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header "$file"
+    done
+    IFS="$my_save_IFS"
+}
+
+
+# func_install_pkgmacro_files
+# Install copies of the libtool and libltdl m4 macros into this package.
+func_install_pkgmacro_files ()
+{
+    $opt_debug
+
+    # argz.m4, libtool.m4 and ltdl.m4 are handled specially:
+    func_massage_aclocal_DATA 'argz.m4|libtool.m4|ltdl.m4'
+
+  # 1. Parent has separate macrodir to subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test -n "$macrodir" && test "x$macrodir" != "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_parent
+      func_install_pkgmacro_subproject
+
+  # 2. Parent shares macrodir with subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$macrodir" = "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_subproject
+
+  # 3. Not a subproject, but macrodir was specified in parent:
+    elif test -n "$macrodir"; then
+      func_install_pkgmacro_parent
+
+  # 4. AC_CONFIG_MACRO_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_MACRO_DIR not defined, not copying libtool macros."
+    fi
+}
+
+
+# func_massage_pkgltdl_files [glob_exclude]
+# @pkgltdl_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgltdl_files ()
+{
+    $opt_debug
+    pkgltdl_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgltdl_files from the value used in Makefile.am
+    for my_filename in libltdl/COPYING.LIB libltdl/README libltdl/Makefile.inc libltdl/Makefile.am libltdl/configure.ac libltdl/aclocal.m4 libltdl/Makefile.in libltdl/config-h.in libltdl/configure libltdl/argz_.h libltdl/argz.c libltdl/loaders/dld_link.c libltdl/loaders/dlopen.c libltdl/loaders/dyld.c libltdl/loaders/load_add_on.c libltdl/loaders/loadlibrary.c libltdl/loaders/shl_load.c libltdl/lt__dirent.c libltdl/lt__strl.c libltdl/libltdl/lt__alloc.h libltdl/libltdl/lt__dirent.h libltdl/libltdl/lt__glibc.h libltdl/libltdl/lt__private.h libltdl/libltdl/lt__strl.h libltdl/libltdl/lt_dlloader.h libltdl/libltdl/lt_error.h libltdl/libltdl/lt_system.h libltdl/libltdl/slist.h libltdl/loaders/preopen.c libltdl/lt__alloc.c libltdl/lt_dlloader.c libltdl/lt_error.c libltdl/ltdl.c libltdl/ltdl.h libltdl/slist.c; do
+
+      # Strip surplus leading 'libltdl/':
+      my_filename=`expr "X$my_filename" : 'Xlibltdl/\(.*\)'`
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgltdl_files: in
+        *:$my_filename:*) ;;
+	*) pkgltdl_files="$pkgltdl_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgltdl_files=`$ECHO "$pkgltdl_files" | $SED 's,^:*,,'`
+}
+
+
+# func_install_pkgltdl_files
+# Install copies of the libltdl files into this package.  Any auxiliary
+# or m4 macro files needed in the libltdl tree will also be copied by
+# func_install_pkgconfig_files and func_install_pkgmacro_files, respectively.
+func_install_pkgltdl_files ()
+{
+    $opt_debug
+    $opt_ltdl || return
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgltdl_files; do
+      test -f "$ltdldir/$file" && func_verbose "rm -f '$ltdldir/$file'"
+      rm -f "$ltdldir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified `--ltdl'.
+    $opt_quiet || if test -n "$ac_ltdldir"; then
+      pkgltdl_header="putting libltdl files in LT_CONFIG_LTDL_DIR, \`$ac_ltdldir'."
+    elif test -n "$ltdldir"; then
+      pkgltdl_header="putting libltdl files in \`$ltdldir'."
+    fi
+
+    # These files are handled specially, depending on ltdl_mode:
+    if test "x$ltdl_mode" = "xsubproject"; then
+      func_massage_pkgltdl_files 'Makefile.inc'
+    else
+      func_massage_pkgltdl_files 'Makefile.am|Makefile.in*|aclocal.m4|config*'
+    fi
+
+    func_copy_some_files "$pkgltdl_files" \
+      "$pkgltdldir/libltdl" "$ltdldir" pkgltdl_header
+
+    # For recursive ltdl modes, copy a suitable Makefile.{am,inc}:
+    case $ltdl_mode in
+      recursive)
+        func_fixup_Makefile "Makefile.am" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+      nonrecursive)
+        func_fixup_Makefile "Makefile.inc" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+    esac
+}
+
+
+# func_massage_pkgconfig_files [glob_exclude]
+# @pkgconfig_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgconfig_files ()
+{
+    $opt_debug
+    pkgconfig_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgconfig_files from the value used in Makefile.am
+    for my_filename in config/compile config/config.guess config/config.sub config/depcomp config/install-sh config/missing config/ltmain.sh; do
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgconfig_files: in
+        *:$my_filename:*) ;;
+	*) pkgconfig_files="$pkgconfig_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgconfig_files=`$ECHO "$pkgconfig_files" | $SED 's,^:*,,'`
+}
+
+
+# func_install_pkgconfig_subproject
+# Unless --quiet was passed, display a message. Then copy pkgconfig_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgconfig_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$subproject_auxdir/$file" && func_verbose "rm -f '$subproject_auxdir/$file'"
+      rm -f "$subproject_auxdir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified an auxdir.
+    $opt_quiet || if test "x$ac_auxdir" = "x$subproject_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$subproject_auxdir'."
+    elif test -n "$auxdir"; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    func_copy_some_files "$pkgconfig_files" \
+      "$pkgdatadir" "$ltdldir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_parent
+# Unless --quiet was passed, or AC_CONFIG_AUX_DIR was not seen, display a
+# message.  Then update appropriate auxiliary files if newer ones are
+# available from the libtool installation tree.
+func_install_pkgconfig_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$auxdir/$file" && func_verbose "rm -f '$auxdir/$file'"
+      rm -f "$auxdir/$file"
+    done
+
+    if test -n "$ac_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$ac_auxdir'."
+    elif test -n "$auxdir" || test "x$ltdldir" = "x."; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    if $opt_install; then
+      func_config_update config.guess \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_config_update config.sub \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_install_update install-sh \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+    fi
+    func_ltmain_update ltmain.sh \
+      "$pkgdatadir/config" "$auxdir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_files
+# Install copies of the auxiliary files into this package according to
+# whether libltdl is included as a subproject, and whether the parent
+# shares the AC_CONFIG_AUX_DIR setting.
+func_install_pkgconfig_files ()
+{
+    $opt_debug
+    func_massage_pkgconfig_files
+
+  # 1. Parent shares auxdir with subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test "x$ac_auxdir" = "x$subproject_auxdir"
+    then
+      func_install_pkgconfig_subproject
+
+  # 2. Parent has separate auxdir to subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$auxdir" != "x$subproject_auxdir" is implied
+    then
+      if $seen_autoconf; then
+	func_install_pkgconfig_parent
+      fi
+      func_install_pkgconfig_subproject
+
+  # 3. Not subproject, but AC_CONFIG_AUX_DIR was used in parent:
+    elif test -n "$ac_auxdir" || test "x$auxdir" = "x."; then
+      func_install_pkgconfig_parent
+
+  # 4. AC_CONFIG_AUX_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_AUX_DIR not defined, not copying libtool auxiliary files."
+    fi
+}
+
+
+# func_nonemptydir_p dirvar
+# DIRVAR is the name of a variable to evaluate.  Unless DIRVAR names
+# a directory that exists and is non-empty, abort with a diagnostic.
+func_nonemptydir_p ()
+{
+    $opt_debug
+    my_dirvar="$1"
+    my_dir=`eval echo "\\\$$my_dirvar"`
+
+    # Is it a directory at all?
+    test -d "$my_dir" \
+      || func_fatal_error "\$$my_dirvar is not a directory: \`$my_dir'"
+
+    # check that the directory's contents can be ls'ed
+    test -n "`{ cd $my_dir && ls; } 2>/dev/null`" \
+        || func_fatal_error "can not list files: \`$my_dir'"
+}
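+
+# For illustration (not part of the upstream script):
+#   func_nonemptydir_p aclocaldir
+# aborts with a diagnostic unless the directory named by $aclocaldir
+# exists and contains at least one listable entry.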
+
+
+# func_check_macros
+# Sanity check macros from aclocal.m4 against installed versions.
+func_check_macros ()
+{
+    $opt_debug
+    $opt_quiet && return
+    $seen_autoconf || return
+
+    ac_config_macro_dir_advised=false
+
+    if test -n "$ac_macrodir$ltdldir" && test -z "$macrodir"; then
+      my_ac_config_macro_srcdir="$aclocaldir"
+      if $opt_ltdl && test "$macrodir" != "$subproject_macrodir"; then
+	my_ac_config_macro_srcdir="$subproject_macrodir"
+      fi
+
+      my_needed="libtool.m4 ltoptions.m4 ltversion.m4 ltsugar.m4 lt~obsolete.m4"
+      $opt_ltdl && my_needed="$my_needed argz.m4 ltdl.m4"
+
+      if test -f "aclocal.m4"; then
+	for need in $my_needed; do
+	  func_aclocal_update_check $need
+	  $func_aclocal_update_check_result && my_missing="$my_missing $need"
+	done
+      else
+        my_missing="$my_needed"
+      fi
+
+      if test -n "$my_missing"; then
+        func_echo "You should add the contents of the following files to \`aclocal.m4':"
+        for need in $my_missing; do
+	  func_echo "  \`$my_ac_config_macro_srcdir/$need'"
+        done
+
+        if test "$my_ac_config_macro_srcdir" != "$aclocaldir"; then
+          func_echo "or else add \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' to $configure_ac."
+	  ac_config_macro_dir_advised=:
+        fi
+      fi
+    fi
+
+    ## ---------------------------------------------------------- ##
+    ## Since we return early here when --no-warn was given:       ##
+    ## DO NOT PUT ANYTHING BUT UPGRADE ADVICE MESSAGES BELOW HERE ##
+    ## ---------------------------------------------------------- ##
+
+    $opt_warning || return
+
+    $seen_libtool ||
+      func_echo "Remember to add \`LT_INIT' to $configure_ac."
+
+    # Suggest using LTDL_INIT if appropriate:
+    $opt_ltdl && if test x$seen_ltdl != x:; then
+      case $ltdl_mode in
+	subproject) ltdl_init_args=""               ;;
+	*)          ltdl_init_args="([$ltdl_mode])" ;;
+      esac
+      func_echo "Remember to add \`LTDL_INIT$ltdl_init_args' to $configure_ac."
+    fi
+
+    if $opt_ltdl; then
+      # Remind the user to call LT_CONFIG_LTDL_DIR:
+      test -n "$ac_ltdldir" ||
+        func_echo "Remember to add \`LT_CONFIG_LTDL_DIR([$ltdldir])' to \`$configure_ac'."
+
+      # For subproject mode, offer some suggestions for avoiding duplicate
+      # files in a project that uses libltdl:
+      if test "x$ltdl_mode" = "xsubproject"; then
+        test "$subproject_auxdir" = "$auxdir" ||
+          func_echo "Consider using \`AC_CONFIG_AUX_DIR([$subproject_auxdir])' in $configure_ac."
+        $ac_config_macro_dir_advised || test "$subproject_macrodir" = "$macrodir" ||
+          func_echo "Consider using \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' in $configure_ac."
+	ac_config_macro_dir_advised=:
+      fi
+    fi
+
+    # Suggest modern idioms for storing autoconf macros:
+    $ac_config_macro_dir_advised || if test -z "$ac_macrodir" || test x"$macrodir" = x.; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([m4])' to $configure_ac and"
+      func_echo "rerunning $progname, to keep the correct libtool macros in-tree."
+      ac_config_macro_dir_advised=:
+
+    elif test -z "$ac_macrodir$ltdldir"; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([$macrodir])' to $configure_ac,"
+      func_echo "and rerunning $progname and aclocal."
+      ac_config_macro_dir_advised=:
+    fi
+
+    if test -z "$am_macrodir$macrodir"; then
+      func_echo "Consider adding \`-I m4' to ACLOCAL_AMFLAGS in Makefile.am."
+
+    elif test -z "$am_macrodir"; then
+      if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" && test "$subproject_macrodir" != "$macrodir"; then
+	func_echo "Consider adding \`-I $subproject_macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      else
+        func_echo "Consider adding \`-I $macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      fi
+    fi
+
+    # Don't trace for this; we're just checking that the user didn't
+    # invoke it directly from configure.ac.
+    $SED 's,dnl .*$,,; s,# .*$,,' "$configure_ac" | grep AC_PROG_RANLIB >/dev/null &&
+      func_echo "\`AC_PROG_RANLIB' is rendered obsolete by \`LT_INIT'"
+
+    # FIXME: Ensure ltmain.sh, libtool.m4 and ltdl.m4 are from the same release
+}
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+{
+  # Lists of all files libtoolize has ever installed.  These are removed
+  # before installing the latest files when --force was passed to help
+  # ensure a clean upgrade.
+  # Do not remove config.guess or config.sub: we don't install them
+  # without --install, and the project may not be using Automake.
+  all_pkgconfig_files="ltmain.sh"
+  all_pkgmacro_files="argz.m4 libtool.m4 ltdl.m4 ltoptions.m4 ltsugar.m4 ltversion.in ltversion.m4 lt~obsolete.m4"
+  all_pkgltdl_files="COPYING.LIB Makefile Makefile.in Makefile.inc Makefile.am README acinclude.m4 aclocal.m4 argz_.h argz.c config.h.in config-h.in configure configure.ac configure.in libltdl/lt__alloc.h libltdl/lt__dirent.h libltdl/lt__glibc.h libltdl/lt__private.h libltdl/lt__strl.h libltdl/lt_dlloader.h libltdl/lt_error.h libltdl/lt_system.h libltdl/slist.h loaders/dld_link.c loaders/dlopen.c loaders/dyld.c loaders/load_add_on.c loaders/loadlibrary.c loaders/preopen.c loaders/shl_load.c lt__alloc.c lt__dirent.c lt__strl.c lt_dlloader.c lt_error.c ltdl.c ltdl.h slist.c"
+
+  # Locations for important files:
+  prefix=/
+  datadir=//share
+  pkgdatadir=//share/libtool
+  pkgltdldir=//share/libtool
+  aclocaldir=//share/aclocal
+  auxdir=
+  macrodir=
+  configure_ac=configure.in
+
+  seen_autoconf=false
+  seen_libtool=false
+  seen_ltdl=false
+
+  # test EBCDIC or ASCII
+  case `echo X|tr X '\101'` in
+   A) # ASCII based system
+      # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+    SP2NL='tr \040 \012'
+    NL2SP='tr \015\012 \040\040'
+    ;;
+   *) # EBCDIC based system
+    SP2NL='tr \100 \n'
+    NL2SP='tr \r\n \100\100'
+    ;;
+  esac
+
+  # Allow the user to override the master libtoolize repository:
+  if test -n "$_lt_pkgdatadir"; then
+    pkgltdldir="$_lt_pkgdatadir"
+    pkgdatadir="$_lt_pkgdatadir/libltdl"
+    aclocaldir="$_lt_pkgdatadir/libltdl/m4"
+  fi
+  func_nonemptydir_p pkgltdldir
+  func_nonemptydir_p pkgdatadir
+  func_nonemptydir_p aclocaldir
+
+  func_scan_files
+
+  case $ltdldir in
+  .) ltdlprefix= ;;
+  *) ltdlprefix=$ltdldir/ ;;
+  esac
+  subproject_auxdir=${ltdlprefix}config
+  subproject_macrodir=${ltdlprefix}m4
+
+  # :::BE CAREFUL HERE:::
+  # func_check_macros needs to check whether --ltdl was specified when
+  # LTDL_INIT was not seen, so we can't just use one variable for both
+  # conditions, or that check will be impossible.   No need to clutter the
+  # rest of the code with '$opt_ltdl || $seen_ltdl' though, because we CAN
+  # safely set opt_ltdl to true if LTDL_INIT was seen:
+  $seen_ltdl && opt_ltdl=:
+
+  func_install_pkgconfig_files
+  func_install_pkgmacro_files
+  func_install_pkgltdl_files
+
+  func_check_macros
+}
+
+exit $exit_status
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/m4.exe b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/m4.exe
new file mode 100755
index 0000000..3016558
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/m4.exe
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/make.exe b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/make.exe
new file mode 100755
index 0000000..23ed513
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-cygwin/bin/make.exe
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/aclocal b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/aclocal
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/aclocal
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 files into $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order in which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in the first files we scanned should override those from
+# later files, so they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted, we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
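+# For illustration (AX_FOO and AX_BAR are hypothetical macro names), both
+# of the following forms match $ac_defun_rx:
+#   AC_DEFUN([AX_FOO], [...])   quoted name captured in $1 ('AX_FOO')
+#   AC_DEFUN(AX_BAR, [...])     unquoted name captured in $2 ('AX_BAR')
+# The second, underquoted form is what triggers the "underquoted
+# definition" diagnostic in scan_file below.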
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
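+# For illustration (hypothetical file names):
+#   m4_include([m4/ax_foo.m4])   sets $1 = 'm4_' and $2 = 'm4/ax_foo.m4'
+#   sinclude(local.m4)           sets $1 = 's'   and $3 = 'local.m4'
+# The scanners below skip 'sinclude'd files that do not exist.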
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
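+# For illustration: a header comment such as
+#   # serial 3.2
+# matches $serial_line_rx with $1 = '3.2', which $serial_number_rx accepts
+# and which scan_file splits into the version list (3, 2) for list_compare.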
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directories containing extra m4 files with macro definitions,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # reraise default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However working around this is not
+	      # worth the trouble since nobody runs aclocal on a
+	      # read-only tree anyway.
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
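+# A small usage sketch for list_compare (made-up values):
+#   my @a = (1, 4);  my @b = (1, 10);
+#   list_compare (@a, @b);    # -1: serial 1.4 is older than serial 1.10
+#   my @c = (3, 0);  my @d = (3);
+#   list_compare (@c, @d);    #  1: serial 3.0 is newer than serial 3
+# This ordering is what --install relies on when deciding whether a newly
+# seen third-party macro file should supersede an earlier one.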
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  That would make the 'm4'
+      # directory occur twice here and fail on the second call to
+      # scan_m4_dirs([m4]) when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there be rather a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
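+# For illustration (AX_FOO and AX_BAR are hypothetical): with those two
+# macros in %map, the eval above defines roughly
+#   sub search {
+#     my $found = 0;
+#     if (/\b\QAX_FOO\E(?!\w)/) { add_macro ("AX_FOO"); $found = 1; }
+#     if (/\b\QAX_BAR\E(?!\w)/) { add_macro ("AX_BAR"); $found = 1; }
+#     return $found;
+#   };
+# scan_configure_dep then calls &search on each comment-stripped line.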
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on white lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # File included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
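+# For example (hypothetical file names): if %FILES contains both
+# 'm4/ax_foo.m4' and 'm4/ax_helper.m4', and the former m4_includes the
+# latter, the helper is dropped here and reaches aclocal.m4 only through
+# its including file.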
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code = "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contains harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
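+  # Roughly speaking, each traced call then comes back as a single line
+  # beginning with something like (hypothetical example)
+  #   m4/ax_foo.m4::AC_DEFUN::AX_FOO
+  # which the loop below splits on '::' into ($file, $macro, $arg1).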
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add looks like outside the project, copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
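+      # For illustration (hypothetical paths): '/usr/share/aclocal/pkg.m4'
+      # and 'c:\m4\libtool.m4' match the absolute-path pattern below, while
+      # a project-relative 'm4/ax_foo.m4' does not and is pulled in with
+      # m4_include instead.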
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+  # creates aclocal.m4t and then renames it to aclocal.m4, but the
+  # rebuild rules generated by Automake create aclocal.m4 directly --
+  # this would give two ways to get the same file, with a different
+  # name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in a loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, add any directory listed in the 'dirlist' file.
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
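+# For example (hypothetical directories): with
+#   ACLOCAL_PATH=/opt/m4:/extra/m4
+# and both directories present (and no dirlist additions), the reversed
+# split plus unshift above yields
+#   @system_includes = ('/opt/m4', '/extra/m4', '//share/aclocal')
+# so earlier ACLOCAL_PATH entries take precedence over later ones and over
+# the default system directory.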
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some files have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directory specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options or AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/aclocal-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/aclocal-1.14
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/aclocal-1.14
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 files into $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order in which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in the first files we scanned should override those from
+# later files, so they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted, we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directories containing extra m4 files with macro definitions,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # reraise default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However working around this is not
+	      # worth the trouble since nobody runs aclocal on a
+	      # read-only tree anyway.
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  That would make the 'm4'
+      # directory occur twice here and fail on the second call to
+      # scan_m4_dirs([m4]) when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there be rather a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on white lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
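+	  # Illustration (not from the original sources): serials are
+	  # compared componentwise, so a hypothetical foo.m4 carrying
+	  # '# serial 2.10' supersedes an earlier copy carrying
+	  # '# serial 2.9', whose definitions are then discarded below.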
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
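+#
+# Example (editor's illustration, not from the original sources): if both
+# m4/ax_foo.m4 and m4/ax_bar.m4 are wanted but ax_foo.m4 already
+# m4_include's ax_bar.m4, only ax_foo.m4 is kept; outputting both would
+# include ax_bar.m4 twice.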
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # File included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code = "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contain harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
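+  # Illustration (not from the original sources): with the trace formats
+  # built above, autom4te emits lines such as
+  #   m4/ax_check_foo.m4::AC_DEFUN::AX_CHECK_FOO::<macro body>
+  # (the file name and macro here are hypothetical); the split on '::'
+  # below keeps the file, the traced macro, and its first argument.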
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
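+  # For instance (editor's illustration): a hypothetical AX_MAYBE defined
+  # inside an m4_ifdef of a third-party file is seen by the scanner, but
+  # if autoconf never traces its AC_DEFUN, %map_traced_defs has no entry
+  # for it and its file is not pulled in here.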
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add appears to be outside the project, copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
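+      # For example (not from the original sources): an absolute
+      # '/usr/share/aclocal/pkg.m4' or a 'c:\gnu\pkg.m4' is copied or
+      # installed, while a user file such as 'm4/ax_foo.m4' is pulled in
+      # by reference with m4_include in the else branch.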
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+# creates aclocal.m4t and then renames it to aclocal.m4, but the
+# rebuild rules generated by Automake create aclocal.m4 directly --
+# this would give two ways to get the same file, with a different
+# name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in a loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, add any directory listed in the 'dirlist' file.
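+  # (Illustration, not from the original sources: each non-comment line of
+  # 'dirlist' is treated as a glob, e.g. '/usr/local/share/aclocal*', and
+  # every matching directory is appended to @system_includes.)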
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
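+  # For example (editor's illustration): with ACLOCAL_PATH=/opt/a:/opt/b,
+  # the loop unshifts /opt/b first and /opt/a last, so the final search
+  # order is /opt/a, /opt/b, then the original @system_includes.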
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some files have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directories specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options or AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoconf
new file mode 100755
index 0000000..47d8dcb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoconf
@@ -0,0 +1,500 @@
+#! /bin/sh
+# Generated from autoconf.in; do not edit by hand.
+# autoconf -- create `configure' using m4 macros
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1999, 2000, 2001, 2002, 2003,
+# 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+# Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+
+# as_fn_error STATUS ERROR
+# ------------------------
+# Output "`basename $0`: error: ERROR" to stderr. Then exit the script with
+# STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+usage="\
+Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Generate a configuration script from a TEMPLATE-FILE if given, or
+\`configure.ac' if present, or else \`configure.in'.  Output is sent
+to the standard output if TEMPLATE-FILE is given, else into
+\`configure'.
+
+Operation modes:
+  -h, --help                print this help, then exit
+  -V, --version             print version number, then exit
+  -v, --verbose             verbosely report processing
+  -d, --debug               don't remove temporary files
+  -f, --force               consider all files obsolete
+  -o, --output=FILE         save output in FILE (stdout is the default)
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY [syntax]
+
+Warning categories include:
+  \`cross'         cross compilation issues
+  \`obsolete'      obsolete constructs
+  \`syntax'        dubious syntactic constructs
+  \`all'           all the warnings
+  \`no-CATEGORY'   turn off the warnings on CATEGORY
+  \`none'          turn off all the warnings
+  \`error'         warnings are errors
+
+The environment variables \`M4' and \`WARNINGS' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the list of calls to MACRO
+  -i, --initialization        also trace Autoconf's initialization process
+
+In tracing mode, no configuration script is created.  FORMAT defaults
+to \`\$f:\$l:\$n:\$%'; see \`autom4te --help' for information about FORMAT.
+
+Report bugs to <bug-autoconf@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>."
+
+version="\
+autoconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille."
+
+help="\
+Try \`$as_me --help' for more information."
+
+exit_missing_arg='
+  as_fn_error $? "option \`$1'\'' requires an argument$as_nl$help"'
+# restore font-lock: '
+
+# Variables.
+: ${AUTOM4TE='/i686-pc-linux-gnu/bin/autom4te'}
+autom4te_options=
+outfile=
+verbose=false
+
+# Parse command line.
+while test $# -gt 0 ; do
+  option=`expr "x$1" : 'x\(--[^=]*\)' \| \
+	       "x$1" : 'x\(-.\)'`
+  optarg=`expr "x$1" : 'x--[^=]*=\(.*\)' \| \
+	       "x$1" : 'x-.\(.*\)'`
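+  # Illustration (not part of the original script): for '--output=build/configure'
+  # the expressions above yield option='--output' and optarg='build/configure';
+  # for '-oFILE' they yield option='-o' and optarg='FILE'.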
+  case $1 in
+    --version | -V )
+       echo "$version" ; exit ;;
+    --help | -h )
+       $as_echo "$usage"; exit ;;
+
+    --verbose | -v )
+       verbose=:
+       autom4te_options="$autom4te_options $1"; shift ;;
+
+    # Arguments passed as is to autom4te.
+    --debug      | -d   | \
+    --force      | -f   | \
+    --include=*  | -I?* | \
+    --prepend-include=* | -B?* | \
+    --warnings=* | -W?* )
+       case $1 in
+	 *\'*) arg=`$as_echo "$1" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$1 ;;
+       esac
+       autom4te_options="$autom4te_options '$arg'"; shift ;;
+    # Options with separated arg passed as is to autom4te.
+    --include  | -I | \
+    --prepend-include  | -B | \
+    --warnings | -W )
+       test $# = 1 && eval "$exit_missing_arg"
+       case $2 in
+	 *\'*) arg=`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$2 ;;
+       esac
+       autom4te_options="$autom4te_options $option '$arg'"
+       shift; shift ;;
+
+    --trace=* | -t?* )
+       traces="$traces --trace='"`$as_echo "$optarg" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift ;;
+    --trace | -t )
+       test $# = 1 && eval "$exit_missing_arg"
+       traces="$traces --trace='"`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift; shift ;;
+    --initialization | -i )
+       autom4te_options="$autom4te_options --melt"
+       shift;;
+
+    --output=* | -o?* )
+       outfile=$optarg
+       shift ;;
+    --output | -o )
+       test $# = 1 && eval "$exit_missing_arg"
+       outfile=$2
+       shift; shift ;;
+
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       exec >&2
+       as_fn_error $? "invalid option \`$1'$as_nl$help" ;; #`
+    * )
+       break ;;
+  esac
+done
+
+# Find the input file.
+case $# in
+  0)
+    if test -f configure.ac; then
+      if test -f configure.in; then
+	$as_echo "$as_me: warning: both \`configure.ac' and \`configure.in' are present." >&2
+	$as_echo "$as_me: warning: proceeding with \`configure.ac'." >&2
+      fi
+      infile=configure.ac
+    elif test -f configure.in; then
+      infile=configure.in
+    else
+      as_fn_error $? "no input file"
+    fi
+    test -z "$traces" && test -z "$outfile" && outfile=configure;;
+  1)
+    infile=$1 ;;
+  *) exec >&2
+     as_fn_error $? "invalid number of arguments$as_nl$help" ;;
+esac
+
+# Unless specified, the output is stdout.
+test -z "$outfile" && outfile=-
+
+# Run autom4te with expansion.
+eval set x "$autom4te_options" \
+  --language=autoconf --output=\"\$outfile\" "$traces" \"\$infile\"
+shift
+$verbose && $as_echo "$as_me: running $AUTOM4TE $*" >&2
+exec "$AUTOM4TE" "$@"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoheader b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoheader
new file mode 100755
index 0000000..5f5d0c5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoheader
@@ -0,0 +1,304 @@
+#! /usr/bin/perl
+# -*- Perl -*-
+# Generated from autoheader.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoheader -- create `config.h.in' from `configure.ac'
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Roland McGrath.
+# Rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, "$pkgdatadir";
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use strict;
+
+# Using `do FILE', we need `local' vars.
+use vars qw ($config_h %verbatim %symbol);
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/i686-pc-linux-gnu/bin/autom4te';
+local $config_h;
+my $config_h_in;
+my @prepend_include;
+my @include;
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Create a template file of C \`\#define\' statements for \`configure\' to
+use.  To this end, scan TEMPLATE-FILE, or \`configure.ac\' if present,
+or else \`configure.in\'.
+
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -f, --force              consider all files obsolete
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+" . Autom4te::ChannelDefs::usage () . "
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "autoheader (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Roland McGrath and Akim Demaille.
+";
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  parse_WARNINGS;
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'W|warnings=s'        => \&parse_warnings);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('ah');
+switch_warning 'obsolete';
+parse_args;
+
+# Preach.
+my $config_h_top = find_file ("config.h.top?",
+			      reverse (@prepend_include), @include);
+my $config_h_bot = find_file ("config.h.bot?",
+			      reverse (@prepend_include), @include);
+my $acconfig_h = find_file ("acconfig.h?",
+			    reverse (@prepend_include), @include);
+if ($config_h_top || $config_h_bot || $acconfig_h)
+  {
+    my $msg = << "END";
+    Using auxiliary files such as \`acconfig.h\', \`config.h.bot\'
+    and \`config.h.top\', to define templates for \`config.h.in\'
+    is deprecated and discouraged.
+
+    Using the third argument of \`AC_DEFINE\' and
+    \`AC_DEFINE_UNQUOTED\' allows one to define a template without
+    \`acconfig.h\':
+
+      AC_DEFINE([NEED_FUNC_MAIN], 1,
+		[Define if a function \`main\' is needed.])
+
+    More sophisticated templates can also be produced, see the
+    documentation.
+END
+    $msg =~ s/^    /WARNING: /gm;
+    msg 'obsolete', $msg;
+  }
+
+# Set up autoconf.
+my $autoconf = "'$autom4te' --language=autoconf ";
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+$autoconf .= ' --debug' if $debug;
+$autoconf .= ' --force' if $force;
+$autoconf .= ' --verbose' if $verbose;
+
+# ----------------------- #
+# Real work starts here.  #
+# ----------------------- #
+
+# Source what the traces are trying to tell us.
+verb "$me: running $autoconf to trace from $ARGV[0]";
+my $quoted_tmp = shell_quote ($tmp);
+xsystem ("$autoconf"
+	 # If you change this list, update the
+	 # `Autoheader-preselections' section of autom4te.in.
+	 . ' --trace AC_CONFIG_HEADERS:\'$$config_h ||= \'"\'"\'$1\'"\'"\';\''
+	 . ' --trace AH_OUTPUT:\'$$verbatim{\'"\'"\'$1\'"\'"\'} = \'"\'"\'$2\'"\'"\';\''
+	 . ' --trace AC_DEFINE_TRACE_LITERAL:\'$$symbol{\'"\'"\'$1\'"\'"\'} = 1;\''
+	 . " " . shell_quote ($ARGV[0]) . " >$quoted_tmp/traces.pl");
+
+local (%verbatim, %symbol);
+debug "$me: \`do'ing $tmp/traces.pl:\n" . `sed 's/^/| /' $quoted_tmp/traces.pl`;
+do "$tmp/traces.pl";
+warn "couldn't parse $tmp/traces.pl: $@" if $@;
+unless ($config_h)
+  {
+    error "error: AC_CONFIG_HEADERS not found in $ARGV[0]";
+    exit 1;
+  }
+
+# We template only the first CONFIG_HEADER.
+$config_h =~ s/ .*//;
+# Support "outfile[:infile]", defaulting infile="outfile.in".
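+# For example (editor's illustration): AC_CONFIG_HEADERS([config.h:config.hin])
+# yields $config_h = 'config.h' and $config_h_in = 'config.hin', while a plain
+# AC_CONFIG_HEADERS([config.h]) falls back to 'config.h.in'.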
+($config_h, $config_h_in) = split (':', $config_h, 2);
+$config_h_in ||= "$config_h.in";
+
+# %SYMBOL might contain things like `F77_FUNC(name,NAME)', but we keep
+# only the name of the macro.
+%symbol = map { s/\(.*//; $_ => 1 } keys %symbol;
+
+my $out = new Autom4te::XFile ("> " . open_quote ("$tmp/config.hin"));
+
+# Don't write "do not edit" -- it will get copied into the
+# config.h, which it's ok to edit.
+print $out "/* $config_h_in.  Generated from $ARGV[0] by autoheader.  */\n";
+
+# Dump the top.
+if ($config_h_top)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_top));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+# Dump `acconfig.h', except for its bottom portion.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    while ($_ = $in->getline)
+      {
+	last if /\@BOTTOM\@/;
+	next if /\@TOP\@/;
+	print $out $_;
+      }
+  }
+
+# Dump the templates from `configure.ac'.
+foreach (sort keys %verbatim)
+  {
+    print $out "\n$verbatim{$_}\n";
+  }
+
+# Dump bottom portion of `acconfig.h'.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    my $dump = 0;
+    while ($_ = $in->getline)
+      {
+	print $out $_ if $dump;
+	$dump = 1  if /\@BOTTOM\@/;
+      }
+  }
+
+# Dump the bottom.
+if ($config_h_bot)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_bot));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+$out->close;
+
+# Check that all the symbols have a template.
+{
+  my $in = new Autom4te::XFile ("< " . open_quote ("$tmp/config.hin"));
+  my $suggest_ac_define = 1;
+  while ($_ = $in->getline)
+    {
+      my ($symbol) = /^\#\s*\w+\s+(\w+)/
+	or next;
+      delete $symbol{$symbol};
+    }
+  foreach (sort keys %symbol)
+    {
+      msg 'syntax', "warning: missing template: $_";
+      if ($suggest_ac_define)
+	{
+	  msg 'syntax',  "Use AC_DEFINE([$_], [], [Description])";
+	  $suggest_ac_define = 0;
+	}
+
+    }
+  exit 1
+    if keys %symbol;
+}
+
+update_file ("$tmp/config.hin", "$config_h_in", $force);
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autom4te b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autom4te
new file mode 100755
index 0000000..d29ca29
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autom4te
@@ -0,0 +1,1075 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autom4te.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autom4te - Wrapper around M4 libraries.
+# Copyright (C) 2001, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::C4che;
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Data directory.
+my $pkgdatadir = $ENV{'AC_MACRODIR'} || '//share/autoconf';
+
+# $LANGUAGE{LANGUAGE} -- Automatic options for LANGUAGE.
+my %language;
+
+my $output = '-';
+
+# Mode of the output file except for traces.
+my $mode = "0666";
+
+# If melt, don't use frozen files.
+my $melt = 0;
+
+# Names of the cache directory, cache directory index, trace cache
+# prefix, and output cache prefix.  And the IO object for the index.
+my $cache;
+my $icache;
+my $tcache;
+my $ocache;
+my $icache_file;
+
+my $flock_implemented = 'yes';
+
+# The macros to trace mapped to their format, as specified by the
+# user.
+my %trace;
+
+# The macros the user will want to trace in the future.
+# We need `include' to get the included file, `m4_pattern_forbid' and
+# `m4_pattern_allow' to check the output.
+#
+# FIXME: What about `sinclude'?
+my @preselect = ('include',
+		 'm4_pattern_allow', 'm4_pattern_forbid',
+		 '_m4_warn');
+
+# M4 include path.
+my @include;
+
+# Do we freeze?
+my $freeze = 0;
+
+# $M4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+# Some non-GNU m4's don't reject the --help option, so give them /dev/null.
+fatal "need GNU m4 1.4 or later: $m4"
+  if system "$m4 --help </dev/null 2>&1 | grep reload-state >/dev/null";
+
+# Set some high recursion limit as the default limit, 250, has already
+# been hit with AC_OUTPUT.  Don't override the user's choice.
+$m4 .= ' --nesting-limit=1024'
+  if " $m4 " !~ / (--nesting-limit(=[0-9]+)?|-L[0-9]*) /;
+
+
+# @M4_BUILTIN -- M4 builtins and a useful comment.
+my @m4_builtin = `echo dumpdef | $m4 2>&1 >/dev/null`;
+map { s/:.*//;s/\W// } @m4_builtin;
+
+
+# %M4_BUILTIN_ALTERNATE_NAME
+# --------------------------
+# The builtins are renamed, e.g., `define' is renamed `m4_define'.
+# So map `define' to `m4_define' and conversely.
+# Some macros don't follow this scheme: be sure to properly map to their
+# alternate name too.
+#
+# FIXME: Trace status of renamed builtins was fixed in M4 1.4.5, which
+# we now depend on; do we still need to do this mapping?
+#
+# So we will merge them, i.e., tracing `BUILTIN' or tracing
+# `m4_BUILTIN' will be the same: tracing both, but honoring the
+# *last* trace specification.
+#
+# FIXME: This is not enough: in the output `$0' will be `BUILTIN'
+# sometimes and `m4_BUILTIN' at others.  We should return a unique name,
+# the one specified by the user.
+#
+# FIXME: To be absolutely rigorous, I would say that given that we
+# _redefine_ divert (instead of _copying_ it), divert and the like
+# should not be part of this list.
+my %m4_builtin_alternate_name;
+@m4_builtin_alternate_name{"$_", "m4_$_"} = ("m4_$_", "$_")
+  foreach (grep { !/m4wrap|m4exit|dnl|ifelse|__.*__/ } @m4_builtin);
+@m4_builtin_alternate_name{"ifelse", "m4_if"}   = ("m4_if", "ifelse");
+@m4_builtin_alternate_name{"m4exit", "m4_exit"} = ("m4_exit", "m4exit");
+@m4_builtin_alternate_name{"m4wrap", "m4_wrap"} = ("m4_wrap", "m4wrap");
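+# For example (not from the original sources): tracing 'define' therefore
+# also traces 'm4_define' and vice versa, while 'ifelse' is paired with
+# 'm4_if' rather than a non-existent 'm4_ifelse'.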
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILES]
+
+Run GNU M4 on the FILES, avoiding useless runs.  Output the traces if tracing,
+the frozen file if freezing, otherwise the expansion of the FILES.
+
+If some of the FILES are named \`FILE.m4f\' they are considered to be M4
+frozen files of all the previous files (which are therefore not loaded).
+If \`FILE.m4f\' is not found, then \`FILE.m4\' will be used, together with
+all the previous files.
+
+Some files may be optional, i.e., will only be processed if found in the
+include path, but then must end in \`.m4?\';  the question mark is not part of
+the actual file name.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -o, --output=FILE        save output in FILE (defaults to \`-\', stdout)
+  -f, --force              don\'t rely on cached values
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+  -l, --language=LANG      specify the set of M4 macros to use
+  -C, --cache=DIRECTORY    preserve results for future runs in DIRECTORY
+      --no-cache           disable the cache
+  -m, --mode=OCTAL         change the non trace output file mode (0666)
+  -M, --melt               don\'t use M4 frozen files
+
+Languages include:
+  \`Autoconf\'   create Autoconf configure scripts
+  \`Autotest\'   create Autotest test suites
+  \`M4sh\'       create M4sh shell scripts
+  \`M4sugar\'    create M4sugar output
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variables \`M4\' and \`WARNINGS\' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the MACRO invocations
+  -p, --preselect=MACRO       prepare to trace MACRO in a future run
+
+Freezing:
+  -F, --freeze   produce an M4 frozen state file for FILES
+
+FORMAT defaults to \`\$f:\$l:\$n:\$%\', and can use the following escapes:
+  \$\$     literal \$
+  \$f     file where macro was called
+  \$l     line where macro was called
+  \$d     nesting depth of macro call
+  \$n     name of the macro
+  \$NUM   argument NUM, unquoted and with newlines
+  \$SEP\@  all arguments, with newlines, quoted, and separated by SEP
+  \$SEP*  all arguments, with newlines, unquoted, and separated by SEP
+  \$SEP%  all arguments, without newlines, unquoted, and separated by SEP
+SEP can be empty for the default (comma for \@ and *, colon for %),
+a single character for that character, or {STRING} to use a string.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version =  <<"EOF";
+autom4te (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Akim Demaille.
+EOF
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# $OPTION
+# files_to_options (@FILE)
+# ------------------------
+# Transform Autom4te conventions (e.g., using foo.m4f to designate a frozen
+# file) into a suitable command line for M4 (e.g., using --reload-state).
+# parse_args guarantees that we will see at most one frozen file, and that
+# if a frozen file is present, it is the first argument.
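+#
+# Example (editor's illustration, not from the original sources):
+#   files_to_options ('autoconf/autoconf.m4f', 'aclocal.m4', 'configure.ac')
+# yields something like
+#   --reload-state='autoconf/autoconf.m4f' 'aclocal.m4' 'configure.ac'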
+sub files_to_options (@)
+{
+  my (@file) = @_;
+  my @res;
+  foreach my $file (@file)
+    {
+      my $arg = shell_quote ($file);
+      if ($file =~ /\.m4f$/)
+	{
+	  $arg = "--reload-state=$arg";
+	  # If the user downgraded M4 from 1.6 to 1.4.x after freezing
+	  # the file, then we ensure the frozen __m4_version__ will
+	  # not cause m4_init to make the wrong decision about the
+	  # current M4 version.
+	  $arg .= " --undefine=__m4_version__"
+	    unless grep {/__m4_version__/} @m4_builtin;
+	}
+      push @res, $arg;
+    }
+  return join ' ', @res;
+}
+
+
+# load_configuration ($FILE)
+# --------------------------
+# Load the configuration $FILE.
+sub load_configuration ($)
+{
+  my ($file) = @_;
+  use Text::ParseWords;
+
+  my $cfg = new Autom4te::XFile ("< " . open_quote ($file));
+  my $lang;
+  while ($_ = $cfg->getline)
+    {
+      chomp;
+      # Comments.
+      next
+	if /^\s*(\#.*)?$/;
+
+      my @words = shellwords ($_);
+      my $type = shift @words;
+      if ($type eq 'begin-language:')
+	{
+	  fatal "$file:$.: end-language missing for: $lang"
+	    if defined $lang;
+	  $lang = lc $words[0];
+	}
+      elsif ($type eq 'end-language:')
+	{
+	  error "$file:$.: end-language mismatch: $lang"
+	    if $lang ne lc $words[0];
+	  $lang = undef;
+	}
+      elsif ($type eq 'args:')
+	{
+	  fatal "$file:$.: no current language"
+	    unless defined $lang;
+	  push @{$language{$lang}}, @words;
+	}
+      else
+	{
+	  error "$file:$.: unknown directive: $type";
+	}
+    }
+}
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  # We want to look for the early options, which should not be found
+  # in the configuration file.  Prepend to the user arguments.
+  # Perform this repeatedly so that we can use --language in language
+  # definitions.  Beware that there can be several --language
+  # invocations.
+  my @language;
+  do {
+    @language = ();
+    use Getopt::Long;
+    Getopt::Long::Configure ("pass_through", "permute");
+    GetOptions ("l|language=s" => \@language);
+
+    foreach (@language)
+      {
+	error "unknown language: $_"
+	  unless exists $language{lc $_};
+	unshift @ARGV, @{$language{lc $_}};
+      }
+  } while @language;
+
+  # --debug is useless: it is parsed below.
+  if (exists $ENV{'AUTOM4TE_DEBUG'})
+    {
+      print STDERR "$me: concrete arguments:\n";
+      foreach my $arg (@ARGV)
+	{
+	  print STDERR "| $arg\n";
+	}
+    }
+
+  # Process the arguments for real this time.
+  my @trace;
+  my @prepend_include;
+  parse_WARNINGS;
+  getopt
+    (
+     # Operation modes:
+     "o|output=s"   => \$output,
+     "W|warnings=s" => \&parse_warnings,
+     "m|mode=s"     => \$mode,
+     "M|melt"       => \$melt,
+
+     # Library directories:
+     "B|prepend-include=s" => \@prepend_include,
+     "I|include=s"         => \@include,
+
+     # Tracing:
+     # Using a hash for traces is tempting.  Unfortunately, upon `-t FOO',
+     # instead of mapping `FOO' to undef, Getopt maps it to `1', preventing
+     # us from distinguishing `-t FOO' from `-t FOO=1'.  So let's do it
+     # by hand.
+     "t|trace=s"     => \@trace,
+     "p|preselect=s" => \@preselect,
+
+     # Freezing.
+     "F|freeze" => \$freeze,
+
+     # Caching.
+     "C|cache=s" => \$cache,
+     "no-cache"  => sub { $cache = undef; },
+    );
+
+  fatal "too few arguments
+Try `$me --help' for more information."
+    unless @ARGV;
+
+  # Freezing:
+  # We cannot trace at the same time (well, we can, but it sounds insane).
+  # And it implies melting: there is a risk of not updating properly
+  # when using old frozen files, and worse yet: we could load a frozen
+  # file and refreeze it!  A sort of caching :)
+  fatal "cannot freeze and trace"
+    if $freeze && @trace;
+  $melt = 1
+    if $freeze;
+
+  # Names of the cache directory, cache directory index, trace cache
+  # prefix, and output cache prefix.  If the cache is not to be
+  # preserved, default to a temporary directory (automatically removed
+  # on exit).
+  $cache = $tmp
+    unless $cache;
+  $icache = "$cache/requests";
+  $tcache = "$cache/traces.";
+  $ocache = "$cache/output.";
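+  # For instance, with `--cache=autom4te.cache' and a request whose id
+  # is 0, these are autom4te.cache/requests, autom4te.cache/traces.0
+  # and autom4te.cache/output.0.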
+
+  # Normalize the includes: the first occurrence is enough; several are
+  # a pain since they introduce a useless difference in the path which
+  # invalidates the cache.  And strip `.' which is implicit and always
+  # first.
+  @include = grep { !/^\.$/ } uniq (reverse(@prepend_include), @include);
+
+  # Convert @trace to %trace, and work around the M4 builtins tracing
+  # problem.
+  # The default format is `$f:$l:$n:$%'.
+  foreach (@trace)
+    {
+      /^([^:]+)(?::(.*))?$/ms;
+      $trace{$1} = defined $2 ? $2 : '$f:$l:$n:$%';
+      $trace{$m4_builtin_alternate_name{$1}} = $trace{$1}
+	if exists $m4_builtin_alternate_name{$1};
+    }
+
+  # Work around the M4 builtins tracing problem for @PRESELECT.
+  # FIXME: Is this still needed, now that we rely on M4 1.4.5?
+  push (@preselect,
+	map { $m4_builtin_alternate_name{$_} }
+	grep { exists $m4_builtin_alternate_name{$_} } @preselect);
+
+  # If we find a frozen file, then all the files before it are
+  # discarded: the frozen file is supposed to include them all.
+  #
+  # We don't want to depend upon m4's --include to find the top level
+  # files, so we use `find_file' here.  Try to get a canonical name,
+  # as it's part of the key for caching.  And some files are optional
+  # (also handled by `find_file').
+  my @argv;
+  foreach (@ARGV)
+    {
+      if ($_ eq '-')
+	{
+	  push @argv, $_;
+	}
+      elsif (/\.m4f$/)
+	{
+	  # Frozen files are optional => pass a `?' to `find_file'.
+	  my $file = find_file ("$_?", @include);
+	  if (!$melt && $file)
+	    {
+	      @argv = ($file);
+	    }
+	  else
+	    {
+	      s/\.m4f$/.m4/;
+	      push @argv, find_file ($_, @include);
+	    }
+	}
+      else
+	{
+	  my $file = find_file ($_, @include);
+	  push @argv, $file
+	    if $file;
+	}
+    }
+  @ARGV = @argv;
+}
+
+
+# handle_m4 ($REQ, @MACRO)
+# ------------------------
+# Run m4 on the input files, and save the traces on the @MACRO.
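+#
+# The assembled command looks roughly like:
+#   m4 --gnu --include=DIR... --debug=aflq --fatal-warning \
+#      --debugfile=CACHE/traces.IDt --trace=MACRO... FILES > CACHE/output.IDt
+# (--fatal-warning is skipped when AUTOM4TE_NO_FATAL is set); the `...t'
+# files are renamed to their final names only on success.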
+sub handle_m4 ($@)
+{
+  my ($req, @macro) = @_;
+
+  # GNU m4 appends when using --debugfile/--error-output.
+  unlink ($tcache . $req->id . "t");
+
+  # Run m4.
+  #
+  # We don't output directly to the cache files, to avoid problems
+  # when we are interrupted (that leaves corrupted files).
+  xsystem ("$m4 --gnu"
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . ' --debug=aflq'
+	   . (!exists $ENV{'AUTOM4TE_NO_FATAL'} ? ' --fatal-warning' : '')
+	   . " --debugfile=" . shell_quote ("$tcache" . $req->id . "t")
+	   . join (' --trace=', '', map { shell_quote ($_) } sort @macro)
+	   . " " . files_to_options (@ARGV)
+	   . " > " . shell_quote ("$ocache" . $req->id . "t"));
+
+  # Everything went ok: preserve the outputs.
+  foreach my $file (map { $_ . $req->id } ($tcache, $ocache))
+    {
+      use File::Copy;
+      move ("${file}t", "$file")
+	or fatal "cannot rename ${file}t as $file: $!";
+    }
+}
+
+
+# warn_forbidden ($WHERE, $WORD, %FORBIDDEN)
+# ------------------------------------------
+# $WORD is forbidden.  Warn with a dedicated error message if in
+# %FORBIDDEN, otherwise a simple `error: possibly undefined macro'
+# will do.
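+#
+# A typical report looks like (illustrative):
+#   configure.ac:5: error: possibly undefined macro: AC_FOO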
+my $first_warn_forbidden = 1;
+sub warn_forbidden ($$%)
+{
+  my ($where, $word, %forbidden) = @_;
+  my $message;
+
+  for my $re (sort keys %forbidden)
+    {
+      if ($word =~ $re)
+	{
+	  $message = $forbidden{$re};
+	  last;
+	}
+    }
+  $message ||= "possibly undefined macro: $word";
+  warn "$where: error: $message\n";
+  if ($first_warn_forbidden)
+    {
+      warn <<EOF;
+If this token and others are legitimate, please use m4_pattern_allow.
+See the Autoconf documentation.
+EOF
+      $first_warn_forbidden = 0;
+    }
+}
+
+
+# handle_output ($REQ, $OUTPUT)
+# -----------------------------
+# Run m4 on the input files, perform quadrigraph substitution, check for
+# forbidden tokens, and save into $OUTPUT.
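+#
+# Quadrigraphs are rewritten on the way out, e.g. `@<:@' -> `[',
+# `@:>@' -> `]', `@%:@' -> `#'; the empty quadrigraph `@&t@' is
+# removed last.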
+sub handle_output ($$)
+{
+  my ($req, $output) = @_;
+
+  verb "creating $output";
+
+  # Load the forbidden/allowed patterns.
+  handle_traces ($req, "$tmp/patterns",
+		 ('m4_pattern_forbid' => 'forbid:$1:$2',
+		  'm4_pattern_allow'  => 'allow:$1'));
+  my @patterns = new Autom4te::XFile ("< " . open_quote ("$tmp/patterns"))->getlines;
+  chomp @patterns;
+  my %forbidden =
+    map { /^forbid:([^:]+):.+$/ => /^forbid:[^:]+:(.+)$/ } @patterns;
+  my $forbidden = join ('|', map { /^forbid:([^:]+)/ } @patterns) || "^\$";
+  my $allowed   = join ('|', map { /^allow:([^:]+)/  } @patterns) || "^\$";
+
+  verb "forbidden tokens: $forbidden";
+  verb "forbidden token : $_ => $forbidden{$_}"
+    foreach (sort keys %forbidden);
+  verb "allowed   tokens: $allowed";
+
+  # Read the (cached) raw M4 output, produce the actual result.  We
+  # have to use the 2nd arg to have Autom4te::XFile honor the third, but then
+  # stdout is to be handled by hand :(.  Don't use fdopen as it means
+  # we will close STDOUT, which we already do in END.
+  my $out = new Autom4te::XFile;
+  if ($output eq '-')
+    {
+      $out->open (">$output");
+    }
+  else
+    {
+      $out->open($output, O_CREAT | O_WRONLY | O_TRUNC, oct ($mode));
+    }
+  fatal "cannot create $output: $!"
+    unless $out;
+  my $in = new Autom4te::XFile ("< " . open_quote ($ocache . $req->id));
+
+  my %prohibited;
+  my $res;
+  while ($_ = $in->getline)
+    {
+      s/\s+$//;
+      s/__oline__/$./g;
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+
+      $res = $_;
+
+      # Don't complain in comments.  Well, until we have something
+      # better, don't consider `#include' etc. to be comments.
+      s/\#.*//
+	unless /^\#\s*(if|include|endif|ifdef|ifndef|define)\b/;
+      foreach (split (/\W+/))
+	{
+	  $prohibited{$_} = $.
+	    if !/^$/ && /$forbidden/o && !/$allowed/o && ! exists $prohibited{$_};
+	}
+
+      # Performed *last*: the empty quadrigraph.
+      $res =~ s/\@&t\@//g;
+
+      print $out "$res\n";
+    }
+
+  $out->close();
+
+  # If no forbidden words, we're done.
+  return
+    if ! %prohibited;
+
+  # Locate the forbidden words in the last input file.
+  # This is unsatisfying but...
+  $exit_code = 1;
+  if ($ARGV[$#ARGV] ne '-')
+    {
+      my $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+      my $file = new Autom4te::XFile ("< " . open_quote ($ARGV[$#ARGV]));
+
+      while ($_ = $file->getline)
+	{
+	  # Don't complain in comments.  Well, until we have something
+	  # better, don't consider `#include' etc. to be comments.
+	  s/\#.*//
+	    unless /^\#(if|include|endif|ifdef|ifndef|define)\b/;
+
+	  # Complain once per word, but possibly several times per line.
+	  while (/$prohibited/)
+	    {
+	      my $word = $1;
+	      warn_forbidden ("$ARGV[$#ARGV]:$.", $word, %forbidden);
+	      delete $prohibited{$word};
+	      # If we're done, exit.
+	      return
+		if ! %prohibited;
+	      $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+	    }
+	}
+    }
+  warn_forbidden ("$output:$prohibited{$_}", $_, %forbidden)
+    foreach (sort { $prohibited{$a} <=> $prohibited{$b} } keys %prohibited);
+}
+
+
+## --------------------- ##
+## Handling the traces.  ##
+## --------------------- ##
+
+
+# $M4_MACRO
+# trace_format_to_m4 ($FORMAT)
+# ----------------------------
+# Convert a trace $FORMAT into a M4 trace processing macro's body.
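+#
+# For instance, the default format `$f:$l:$n:$%' translates to roughly:
+#   [[$1:$2:$4:]at_percent([:], $@)[]]
+# i.e. file, line and macro name come from the first trace arguments,
+# and the remaining effective arguments are flattened with `:'.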
+sub trace_format_to_m4 ($)
+{
+  my ($format) = @_;
+  my $underscore = $_;
+  my %escape = (# File name.
+		'f' => '$1',
+		# Line number.
+		'l' => '$2',
+		# Depth.
+		'd' => '$3',
+		# Name (also available as $0).
+		'n' => '$4',
+		# Escaped dollar.
+		'$' => '$');
+
+  my $res = '';
+  $_ = $format;
+  while ($_)
+    {
+      # $n -> $(n + 4)
+      if (s/^\$(\d+)//)
+	{
+	  $res .= "\$" . ($1 + 4);
+	}
+      # $x, no separator given.
+      elsif (s/^\$([fldn\$])//)
+	{
+	  $res .= $escape{$1};
+	}
+      # $.x or ${sep}x.
+      elsif (s/^\$\{([^}]*)\}([@*%])//
+	    || s/^\$(.?)([@*%])//)
+	{
+	  # $@, list of quoted effective arguments.
+	  if ($2 eq '@')
+	    {
+	      $res .= ']at_at([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $*, list of unquoted effective arguments.
+	  elsif ($2 eq '*')
+	    {
+	      $res .= ']at_star([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $%, list of flattened unquoted effective arguments.
+	  elsif ($2 eq '%')
+	    {
+	      $res .= ']at_percent([' . ($1 ? $1 : ':') . '], $@)[';
+	    }
+	}
+      elsif (/^(\$.)/)
+	{
+	  error "invalid escape: $1";
+	}
+      else
+	{
+	  s/^([^\$]+)//;
+	  $res .= $1;
+	}
+    }
+
+  $_ = $underscore;
+  return '[[' . $res . ']]';
+}
+
+
+# handle_traces($REQ, $OUTPUT, %TRACE)
+# ------------------------------------
+# We use M4 itself to process the traces.  But to avoid name clashes when
+# processing the traces, the builtins are disabled, and moved into `at_'.
+# Actually, all the low level processing macros are in `at_' (and `_at_').
+# To avoid clashes between user macros and `at_' macros, the macros which
+# implement tracing are in `AT_'.
+#
+# Having $REQ is needed to neutralize the macros which have been traced,
+# but are not wanted now.
+sub handle_traces ($$%)
+{
+  my ($req, $output, %trace) = @_;
+
+  verb "formatting traces for `$output': " . join (', ', sort keys %trace);
+
+  # Processing the traces.
+  my $trace_m4 = new Autom4te::XFile ("> " . open_quote ("$tmp/traces.m4"));
+
+  $_ = <<'EOF';
+  divert(-1)
+  changequote([, ])
+  # _at_MODE(SEPARATOR, ELT1, ELT2...)
+  # ----------------------------------
+  # List the elements, separating them with SEPARATOR.
+  # MODE can be:
+  #  `at'       -- the elements are enclosed in brackets.
+  #  `star'     -- the elements are listed as they are.
+  #  `percent'  -- the elements are `flattened': runs of whitespace are
+  #                collapsed to single spaces, and no newline remains.
+  define([_at_at],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[[$2]]],
+	     [[[$2]][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_percent],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [at_flatten([$2])],
+	     [at_flatten([$2])[$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_star],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[$2]],
+	     [[$2][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  # FLATTEN quotes its result.
+  # Note that the second pattern is `newline, tab or space'.  Don't lose
+  # the tab!
+  define([at_flatten],
+  [at_patsubst(at_patsubst([[[$1]]], [\\\n]), [[\n\t ]+], [ ])])
+
+  define([at_args],    [at_shift(at_shift(at_shift(at_shift(at_shift($@)))))])
+  define([at_at],      [_$0([$1], at_args($@))])
+  define([at_percent], [_$0([$1], at_args($@))])
+  define([at_star],    [_$0([$1], at_args($@))])
+
+EOF
+  s/^  //mg; s/\\t/\t/mg; s/\\n/\n/mg;
+  print $trace_m4 $_;
+
+  # If you trace `define', then on `define([m4_exit], defn([m4exit]))' you
+  # will produce
+  #
+  #    AT_define([m4sugar.m4], [115], [1], [define], [m4_exit], <m4exit>)
+  #
+  # Since `<m4exit>' is not quoted, the outer m4, when processing
+  # `trace.m4' will exit prematurely.  Hence, move all the builtins to
+  # the `at_' name space.
+
+  print $trace_m4 "# Copy the builtins.\n";
+  map { print $trace_m4 "define([at_$_], defn([$_]))\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+  print $trace_m4 "# Disable them.\n";
+  map { print $trace_m4 "at_undefine([$_])\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+
+  # Neutralize traces: we don't want traces of cached requests (%REQUEST).
+  print $trace_m4
+   "## -------------------------------------- ##\n",
+   "## By default neutralize all the traces.  ##\n",
+   "## -------------------------------------- ##\n",
+   "\n";
+  print $trace_m4 "at_define([AT_$_], [at_dnl])\n"
+    foreach (sort keys %{$req->macro});
+  print $trace_m4 "\n";
+
+  # Implement traces for current requests (%TRACE).
+  print $trace_m4
+    "## ------------------------- ##\n",
+    "## Trace processing macros.  ##\n",
+    "## ------------------------- ##\n",
+    "\n";
+  foreach (sort keys %trace)
+    {
+      # Trace requests can embed \n.
+      (my $comment = "Trace $_:$trace{$_}") =~ s/^/\# /;
+      print $trace_m4 "$comment\n";
+      print $trace_m4 "at_define([AT_$_],\n";
+      print $trace_m4 trace_format_to_m4 ($trace{$_}) . ")\n\n";
+    }
+  print $trace_m4 "\n";
+
+  # Reenable output.
+  print $trace_m4 "at_divert(0)at_dnl\n";
+
+  # Transform the traces from m4 into an m4 input file.
+  # Typically, transform:
+  #
+  # | m4trace:configure.ac:3: -1- AC_SUBST([exec_prefix], [NONE])
+  #
+  # into
+  #
+  # | AT_AC_SUBST([configure.ac], [3], [1], [AC_SUBST], [exec_prefix], [NONE])
+  #
+  # Beware that the file name might include colons (under DOS, for
+  # instance), so we don't use `[^:]+'.
+  my $traces = new Autom4te::XFile ("< " . open_quote ($tcache . $req->id));
+  while ($_ = $traces->getline)
+    {
+      # Trace with arguments, as the example above.  We don't try
+      # to match the trailing parenthesis as it might be on a
+      # separate line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^(]+)\((.*)$}
+       {AT_$4([$1], [$2], [$3], [$4], $5};
+      # Traces without arguments, always on a single line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^)]*)\n$}
+       {AT_$4([$1], [$2], [$3], [$4])\n};
+      print $trace_m4 "$_";
+    }
+  $trace_m4->close;
+
+  my $in = new Autom4te::XFile ("$m4 " . shell_quote ("$tmp/traces.m4") . " |");
+  my $out = new Autom4te::XFile ("> " . open_quote ($output));
+
+  # This is dubious: should we really transform the quadrigraphs in
+  # traces?  It might break balanced [ ] etc. in the output.  The
+  # consensus seems to be that traces are more useful this way.
+  while ($_ = $in->getline)
+    {
+      # It makes no sense to try to transform __oline__.
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+      s/\@&t\@//g;
+      print $out $_;
+    }
+}
+
+
+# $BOOL
+# up_to_date ($REQ)
+# -----------------
+# Are the cache files of $REQ up to date?
+# $REQ is `valid' if it corresponds to the request and exists, which
+# does not mean it is up to date.  It is up to date if, in addition,
+# its files are younger than its dependencies.
+sub up_to_date ($)
+{
+  my ($req) = @_;
+
+  return 0
+    if ! $req->valid;
+
+  my $tfile = $tcache . $req->id;
+  my $ofile = $ocache . $req->id;
+
+  # We can't answer properly if the traces are not computed since we
+  # need to know what other files were included.  Actually, if any of
+  # the cache files is missing, we are not up to date.
+  return 0
+    if ! -f $tfile || ! -f $ofile;
+
+  # The cache files must be newer than the dependencies; check the
+  # younger of the two cache files against them.
+  my $tmtime = mtime ($tfile);
+  my $omtime = mtime ($ofile);
+  my ($file, $mtime) = ($tmtime < $omtime
+			? ($ofile, $omtime) : ($tfile, $tmtime));
+
+  # We depend at least upon the arguments.
+  my @dep = @ARGV;
+
+  # stdin is always out of date.
+  if (grep { $_ eq '-' } @dep)
+    { return 0 }
+
+  # Files may include others.  We can use traces since we just checked
+  # if they are available.
+  handle_traces ($req, "$tmp/dependencies",
+		 ('include'    => '$1',
+		  'm4_include' => '$1'));
+  my $deps = new Autom4te::XFile ("< " . open_quote ("$tmp/dependencies"));
+  while ($_ = $deps->getline)
+    {
+      chomp;
+      my $file = find_file ("$_?", @include);
+      # If a file which used to be included is no longer there, then
+      # don't say it's missing (it might no longer be included).  But
+      # of course, that causes the output to be outdated (as if the
+      # time stamp of that missing file was newer).
+      return 0
+	if ! $file;
+      push @dep, $file;
+    }
+
+  # If $FILE is older than one of its dependencies, it is outdated.
+  return up_to_date_p ($file, @dep);
+}
+
+
+## ---------- ##
+## Freezing.  ##
+## ---------- ##
+
+# freeze ($OUTPUT)
+# ----------------
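+# Freeze the state resulting from loading the input files into $OUTPUT
+# (via m4's --freeze-state), so that a later run can `--reload-state'
+# it instead of re-reading all of those files.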
+sub freeze ($)
+{
+  my ($output) = @_;
+
+  # When processing the file with diversion disabled, there must be no
+  # output but comments and empty lines.
+  my $result = xqx ("$m4"
+		    . ' --fatal-warning'
+		    . join (' --include=', '', map { shell_quote ($_) } @include)
+		    . ' --define=divert'
+		    . " " . files_to_options (@ARGV)
+		    . ' </dev/null');
+  $result =~ s/#.*\n//g;
+  $result =~ s/^\n//mg;
+
+  fatal "freezing produced output:\n$result"
+    if $result;
+
+  # If freezing produces output, something went wrong: a bad `divert',
+  # or an improper paren etc.
+  xsystem ("$m4"
+	   . ' --fatal-warning'
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . " --freeze-state=" . shell_quote ($output)
+	   . " " . files_to_options (@ARGV)
+	   . ' </dev/null');
+}
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('am4t');
+load_configuration ($ENV{'AUTOM4TE_CFG'} || "$pkgdatadir/autom4te.cfg");
+load_configuration ("$ENV{'HOME'}/.autom4te.cfg")
+  if exists $ENV{'HOME'} && -f "$ENV{'HOME'}/.autom4te.cfg";
+load_configuration (".autom4te.cfg")
+  if -f ".autom4te.cfg";
+parse_args;
+
+# Freezing does not involve the cache.
+if ($freeze)
+  {
+    freeze ($output);
+    exit $exit_code;
+  }
+
+# We need our cache directory.  Don't fail with parallel creation.
+if (! -d "$cache")
+  {
+    mkdir "$cache", 0755
+      or -d "$cache"
+      or fatal "cannot create $cache: $!";
+  }
+
+# Open the index for update, and lock it.  autom4te handles several
+# files, but the index is the first and last file to be updated, so
+# locking it is sufficient.
+$icache_file = new Autom4te::XFile $icache, O_RDWR|O_CREAT;
+$icache_file->lock (LOCK_EX)
+  if ($flock_implemented eq "yes");
+
+# Read the cache index if available and newer than autom4te itself.
+# If autom4te is younger, then some structures such as C4che might
+# have changed, which would corrupt its processing.
+Autom4te::C4che->load ($icache_file)
+  if -f $icache && mtime ($icache) > mtime ($0);
+
+# Add the new trace requests.
+my $req = Autom4te::C4che->request ('input' => \@ARGV,
+				    'path'  => \@include,
+				    'macro' => [keys %trace, @preselect]);
+
+# If $REQ's cache files are not up to date, or simply if the user
+# discarded them (-f), declare it invalid.
+$req->valid (0)
+  if $force || ! up_to_date ($req);
+
+# We now know whether we can trust the Request object.  Say it.
+verb "the trace request object is:\n" . $req->marshall;
+
+# We need to run M4 if (i) the user wants it (--force), or (ii) $REQ
+# is invalid.
+handle_m4 ($req, keys %{$req->macro})
+  if $force || ! $req->valid;
+
+# Issue the warnings each time autom4te was run.
+my $separator = "\n" . ('-' x 25) . " END OF WARNING " . ('-' x 25) . "\n\n";
+handle_traces ($req, "$tmp/warnings",
+	       ('_m4_warn' => "\$1::\$f:\$l::\$2::\$3$separator"));
+# Swallow excessive newlines.
+for (split (/\n*$separator\n*/o, contents ("$tmp/warnings")))
+{
+  # The message looks like:
+  # | syntax::input.as:5::ouch
+  # | ::input.as:4: baz is expanded from...
+  # | input.as:2: bar is expanded from...
+  # | input.as:3: foo is expanded from...
+  # | input.as:5: the top level
+  # In particular, m4_warn guarantees that either $stackdump is empty, or
+  # it consists of lines where only the last line ends in "top level".
+  my ($cat, $loc, $msg, $stacktrace) = split ('::', $_, 4);
+  msg $cat, $loc, "warning: $msg",
+    partial => ($stacktrace =~ /top level$/) + 0;
+  for (split /\n/, $stacktrace)
+    {
+      my ($loc, $trace) = split (': ', $_, 2);
+      msg $cat, $loc, $trace, partial => ($trace !~ /top level$/) + 0;
+    }
+}
+
+# Now output...
+if (%trace)
+  {
+    # Always produce traces, since even if the output is young enough,
+    # there is no guarantee that the traces use the same *format*
+    # (e.g., `-t FOO:foo' and `-t FOO:bar' are both using the same M4
+    # traces, hence the M4 traces cache is usable, but its formatting
+    # will yield different results).
+    handle_traces ($req, $output, %trace);
+  }
+else
+  {
+    # Actual M4 expansion, if the user wants it, or if $output is old
+    # (STDOUT is pretty old).
+    handle_output ($req, $output)
+      if $force || mtime ($output) < mtime ($ocache . $req->id);
+  }
+
+# If we ran up to here, the cache is valid.
+$req->valid (1);
+Autom4te::C4che->save ($icache_file);
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/automake b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/automake
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/automake
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns out that other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing single-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sorts of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN = '[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
+
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location with a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINK/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps languages names onto objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# on $KNOWN_EXTENSIONS_PATTERN which is used when parsing
+# the input am file.
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries= ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with non strictly increasing paths, i.e., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For silent rules, set up VAR and dispatcher, to expand to
+# VAL-IF-SILENT if silent, to VAL-IF-VERBOSE (defaulting to
+# empty) if not.
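+#
+# For example (illustrative), define_verbose_var ('GEN', '@echo "  GEN  " $@;')
+# makes $(AM_V_GEN) expand to that echo when running silently (V=0) and
+# to nothing when running verbosely (V=1).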
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of '%SILENT%' variable, to expand to '@' when silent.
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
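+
+# As an illustration of the machinery above (a sketch of the intended
+# effect, not code executed here): the call made for archive rules,
+#
+#     define_verbose_tagvar ('AR');
+#
+# goes through define_verbose_var and yields Makefile definitions along
+# these lines once configure has substituted @AM_V@ and @AM_DEFAULT_V@:
+#
+#     AM_V_AR = $(am__v_AR_$(V))
+#     am__v_AR_ = $(am__v_AR_$(AM_DEFAULT_VERBOSITY))
+#     am__v_AR_0 = @echo "  AR      " $@;
+#     am__v_AR_1 =
+#
+# so a recipe prefixed with $(AM_V_AR) prints an abbreviated
+# "  AR       libfoo.a" line in silent mode and nothing extra in
+# verbose mode.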
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
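+
+# For illustration (variable names invented for the example): with a
+# conditional definition such as
+#
+#     if WANT_FOO
+#     frob_SOURCES = foo.c
+#     else
+#     frob_SOURCES = bar.c
+#     endif
+#
+# shadow_unconditionally ('frob_SOURCES', $where) defines
+#
+#     am__frob_SOURCES_DIST = foo.c bar.c
+#
+# and returns the string '$(am__frob_SOURCES_DIST)', so callers can refer
+# to every value the variable may take; for an unconditional definition
+# it simply returns '$(frob_SOURCES)'.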
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Check that the user variables VAR in @LIST are not overridden in the
+# Makefile.am; suggest using AM_VAR instead when they are.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
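+
+# A self-contained sketch of the filename test used above (illustrative
+# only; the real rewrite is driven by transform_variable_recursively and
+# the caller-supplied PREDICATE; '@helper_progs@' is an invented name):
+#
+#     my $wants_exeext = sub {
+#       my ($val) = @_;
+#       return $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/;
+#     };
+#
+#     for my $val ('frob', 'frob$(EXEEXT)', '@helper_progs@')
+#       {
+#         print "$val: ", $wants_exeext->($val) ? "append" : "keep", "\n";
+#       }
+#     # frob: append
+#     # frob$(EXEEXT): keep
+#     # @helper_progs@: keep
+#
+# i.e. 'frob' becomes 'frob$(EXEEXT)' while values already carrying the
+# suffix, or plain @substitutions@, are left untouched.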
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE is the source file to transform
+#   %TRANSFORM contains extra arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # Since the next major version of automake (2.0) will
+                  # make mandatory the behaviour that is so far only
+                  # activated by the 'subdir-objects' option, it's better
+                  # if we start warning users who are not using that option.
+                  # As suggested by Peter Johansson, we strive to avoid
+                  # the warning when it would be irrelevant, i.e., if
+                  # all source files sit in "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least a source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+of the corresponding sources.
+You are advised to start using the 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inference rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of five values followed by additional transform flags for
+		# file_contents.  The five values are the derived flag prefix
+		# (e.g. for 'foo_CFLAGS', it is 'foo'), the name of the
+		# source file, the base name of the output file, the
+		# extension for the object file, and the extension of the
+		# source file.
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the sources they are
+	    # derived from are not distributed themselves.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  Not only is this more
+                # efficient than listing the object files to be removed
+                # individually (which would cause an 'rm' invocation for
+                # each of them -- very inefficient, see bug#10697);
+                # removing them individually would also leave stale object
+                # files in the subdirectory whenever a source file there
+                # is removed or renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
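+
+# A standalone sketch of the depfile-name computation above (assumes
+# File::Basename; 'depfile_sketch' is a made-up helper, not a function
+# automake defines):
+#
+#     use File::Basename;
+#
+#     sub depfile_sketch
+#     {
+#       my ($object) = @_;
+#       (my $depfile = $object) =~ s/\.([^.]*)$/.P$1/;
+#       $depfile =~ s/\$\(OBJEXT\)$/o/;
+#       my $lead = ($depfile =~ m,^//[^/],) ? '/' : '';
+#       $depfile =~ s,/+,/,g;
+#       (my $dir = dirname ($depfile)) =~ s/\/*$//;
+#       return $lead . $dir . '/$(DEPDIR)/' . basename ($depfile);
+#     }
+#
+#     # depfile_sketch ('sub//foo.$(OBJEXT)')  => 'sub/$(DEPDIR)/foo.Po'
+#     # depfile_sketch ('bar.lo')              => './$(DEPDIR)/bar.Plo'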
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# ---------------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated and returned).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# Result is $LINKER, which is true if a linker is needed to deal with
+# the objects.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.$(OBJEXT)' or '.lo')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
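+
+# Illustration of the default-source fallback above (file and target
+# names are made up for the example): for a target declared as
+#
+#     bin_PROGRAMS = frob
+#
+# with no frob_SOURCES variable, the code assumes 'frob.c' and defines
+# frob_SOURCES (and frob_OBJECTS) accordingly.  With
+#
+#     AM_DEFAULT_SOURCE_EXT = .cpp
+#
+# the assumed default becomes 'frob.cpp' instead, and for a Libtool
+# library 'libfrob.la' the default is 'libfrob.c' (falling back to the
+# old 'libfrob_la.c' spelling, with an 'obsolete' warning, when such a
+# file or rule already exists).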
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlpreopen
+	       # are pure nonsense.  Diagnosing this doesn't seem very
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
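+
+# Illustration (variable and file names invented for the example): given
+#
+#     frob_LDADD = -lm sub/libaux.a @LIBOBJS@
+#
+# the transformation above drops '-lm' (plain -l/-L flags never become
+# dependencies), keeps 'sub/libaux.a', and keeps '@LIBOBJS@' while
+# triggering handle_LIBOBJS, so the generated frob_DEPENDENCIES ends up
+# roughly as
+#
+#     frob_DEPENDENCIES = sub/libaux.a @LIBOBJS@
+#
+# A flag such as '-static-libgcc' in frob_LDADD would instead be
+# diagnosed with "linker flags such as '-static-libgcc' belong in
+# 'frob_LDFLAGS'".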
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clean LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
+
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
+
+# Canonicalize the input parameter.
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
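+
+# Illustration of the canonicalization rules above ('libfrob.la' is an
+# invented example name): canonicalize maps every character outside
+# [A-Za-z0-9_@] to '_', so
+#
+#     canonicalize ('libfrob.la')   # => 'libfrob_la'
+#     canonicalize ('sub/frob-x')   # => 'sub_frob_x'
+#
+# and check_canonical_spelling ('libfrob.la', '_SOURCES') returns
+# 'libfrob_la' while rejecting a 'libfrob.la_SOURCES' definition with
+# "use 'libfrob_la_SOURCES', not 'libfrob.la_SOURCES'".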
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+   return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use '-I.@am__isrc@'
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
+
+
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we look at this.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true, there is only one condition, and
+	      # there is no point defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to cleanup .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+# See if any _SOURCES variables were misspelled.
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
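+	      # (For example, 'dist_foo_SOURCES' leaves the canonical
+	      # name 'foo' in $1.)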
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
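+# For instance (illustrative): a 'foo.texi' containing
+#   @setfilename foo.info
+#   @include version.texi
+# yields ('foo.info', 'version.texi'); a manual lacking any @setfilename
+# is diagnosed and nothing is returned.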
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
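+#
+# Illustrative sketch: for SOURCE 'doc/foo.texi' and DEST 'doc/foo.info',
+# the prefix/suffix split below gives ('doc/foo', '.texi') and
+# ('doc/foo', '.info'); because the files live in a subdirectory, "specific"
+# (non-suffix) rules are emitted with '-I doc -I $(srcdir)/doc' passed to
+# makeinfo, and the cleanable files returned are 'doc/foo.dvi',
+# 'doc/foo.pdf', 'doc/foo.ps' and 'doc/foo.html'.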
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice), while the latter need to be output for
+  # each Texinfo source.
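+  # (Roughly: 'foo.texi' next to the Makefile can share a single suffix
+  # rule, whereas 'doc/foo.texi' needs its own explicit
+  # 'doc/foo.info: doc/foo.texi'-style rule.)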
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuses Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
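+  # (Illustrative: "CLEANFILES = foo.info *.tmp" produces the pattern
+  # '^(?:foo\.info|[^/]*\.tmp)$'.)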
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files are important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #   |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files always get built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date copy of the .info file existed in the
+      #       source tree, make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) is triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
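+#
+# Illustrative sketch: if Makefile.am defines 'mytarget:' only inside
+# 'if FOO', this also emits an (approximately) '@FOO_FALSE@mytarget:' empty
+# rule, so other rules can safely depend on 'mytarget' in every condition.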
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents for no reason.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check that all items in variable $VARIABLE are directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
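+# For example, "${prefix}/share" becomes "@prefix@/share" when 'prefix' is
+# AC_SUBSTed, and stays "${prefix}/share" otherwise.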
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
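+# For example (sketch): while processing 'sub/Makefile.am' (so $relative_dir
+# is 'sub'), 'sub/foo.in' becomes '$(srcdir)/foo.in', whereas 'other/foo.in'
+# becomes '$(top_srcdir)/other/foo.in'.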
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUTS[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by another AC_CONFIG_FOO.
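+#
+# Illustrative sketch: for AC_CONFIG_FILES([sub/foo.mk:sub/foo.mk.in]) handled
+# from 'sub/Makefile.am', the input 'sub/foo.mk.in' is not itself a configure
+# output, so it is rewritten to '$(srcdir)/foo.mk.in' and distributed; an
+# input that is itself generated by config.status would instead be referenced
+# as '$(top_builddir)/...' (or by basename in its own directory) and not
+# distributed here.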
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %MAKE_LIST.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by reordering the dependencies of "all", because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If $file is a variable, make sure we don't call 'rm -f' without args.
+      $rm = "test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
+
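+  # For instance (illustrative), a '$(CLEANFILES)' entry tagged CLEAN ends up
+  # in @{$rms{&CLEAN}} as the recipe fragment
+  #   "\t-test -z \"$(CLEANFILES)\" || rm -f $(CLEANFILES)\n"
+  # which is then substituted into the 'clean' fragment below.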
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
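+# For example (illustrative), sorting ('all-am', '.PHONY', 'check-am') with
+# target_cmp yields ('all-am', 'check-am', '.PHONY').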
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing to output for this target, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
+  # The "test driver" program, deputed to handle tests protocol used by
+  # test scripts.  By default, it's assumed that no protocol is used, so
+  # we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
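+# For example (illustrative): '.test' and '.sh' are valid, '.foo-bar' is not
+# (the dash is rejected), and the literal '@EXEEXT@' substitution is accepted
+# only when configure AC_SUBSTs EXEEXT.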
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
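+	  # For illustration (assuming TEST_EXTENSIONS = .sh .test and no
+	  # EXEEXT), the loop below chains the definitions roughly as:
+	  #   am__test_logs1 = $(TESTS:=.log)
+	  #   am__test_logs2 = $(am__test_logs1:.sh.log=.log)
+	  #   TEST_LOGS     = $(am__test_logs2:.test.log=.log)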
+	  my $nhelper = 1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
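+#
+# For illustration (derived from the code below):
+#   split_config_file_spec ('Makefile:top.in:bot.in')  => ('Makefile', 'top.in', 'bot.in')
+#   split_config_file_spec ('config.h')                => ('config.h', 'config.h.in')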
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
+
+  return ($output, @inputs);
+}
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as Makefile:top.in:mid.in:bot.in.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
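+# For instance (illustrative), given 'Makefile:top.in:mid.in:bot.in' where only
+# mid.am exists, locate_am ('top.in', 'mid.in', 'bot.in') returns 'mid.in'.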
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
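+  # Each trace line then looks roughly like (illustrative):
+  #   configure.ac:12::1::AM_CONDITIONAL::DEBUG::test "x$debug" = xyes
+  # i.e. "$file:$line::$depth::$macro::$arg1::...", which is split on '::' below.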
+  @cond_stack = ();
+  my $where;
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-argument forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 1.6.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate version mismatch to missing.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: Return value is LANG_SUBDIR if the resulting object
+# file should be in a subdir when the source file is, LANG_PROCESS if the
+# file is to be dealt with, LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs, so
+	      # distribute the generated files.
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards),
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code, do not run C compiler
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
+
+# Add output rules to invoke valac and create a stamp file as a witness
+# to handle multiple outputs. This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
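+	# For instance (illustrative), 'parse.y' compiled with '-d' into
+	# 'parse.c' also yields 'parse.h'; the rule emitted below makes
+	# 'parse.h' depend on 'parse.c' so that a removed header is rebuilt.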
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
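+#
+# For example (illustrative), resolve_linker (CXXLINK => 1, F77LINK => 1)
+# returns 'CXXLINK', and resolve_linker () falls back to 'LINK'.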
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
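+#
+# For example (illustrative), with a user-supplied '.zoo.cc' suffix rule,
+# derive_suffix ('.zoo', '.$(OBJEXT)') follows the suffix chain and
+# returns '.cc', an extension we know how to compile.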
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
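+# For illustration (assuming AM_CONDITIONAL([DEBUG], ...) in configure.ac), a
+# Makefile.am fragment such as
+#   if DEBUG
+#   ...
+#   else
+#   ...
+#   endif
+# pushes 'DEBUG_TRUE' onto @cond_stack, flips it to 'DEBUG_FALSE' at 'else',
+# and pops it at 'endif' (see cond_stack_if/else/endif below).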
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
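+# For instance (illustrative): make_conditional_string ('', 'DEBUG') gives
+# 'DEBUG_TRUE', while make_conditional_string ('!', 'DEBUG') gives 'DEBUG_FALSE'.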
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; this should be the empty
+# string to define the variable unconditionally.  The third argument
+# is a list holding the values to use for the variable.  The value is
+# pretty printed in the output file.
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR, whose content is the list of file names composed of
+# the @BASENAME entries and the $EXTENSION.
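+#
+# For instance (illustrative, hypothetical variable name):
+#   define_files_variable ('am__objects', @bases, 'lo', INTERNAL)
+# with @bases = ('foo', 'bar') defines "am__objects = foo.lo bar.lo".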
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution by the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of 'AM_$flag' in the link
+  # command into '${derived}_$flag' if it exists.
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
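+  # For instance (illustrative), if the user defined foo_LDFLAGS, the generic
+  # '$(AM_LDFLAGS)' in the link command becomes '$(foo_LDFLAGS)', and the
+  # resulting command is emitted below as foo_LINK.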
+  # If the computed command is the same as the generic command, use
+  # the command linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a slash.
+# Might modify $LINE.
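+#
+# For example (illustrative), a Makefile.am line "foo_SOURCES = a.c \ "
+# (stray whitespace after the backslash) is diagnosed, rewritten to end in
+# a bare backslash, and the function returns 1.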
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
+
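+	# For illustration (assuming this fragment is included from subdir
+	# 'sub'): '%reldir%/foo.c' becomes 'sub/foo.c' and '%canon_reldir%_foo'
+	# becomes 'sub_foo'; at the top level both prefixes are simply dropped.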
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format they were input.  However, POSIX-compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover, nobody
+	    # would really write such long lines by hand since they are
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tool, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
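+# For illustration, assuming an input with escaped newlines and extra
+# blanks:
+#   flatten ("foo \\\n\tbar   baz ")  =>  "foo bar baz"
+# i.e. backslash-newline pairs are deleted, runs of whitespace collapse
+# to a single space, and leading/trailing blanks are stripped.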
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated with $KEY in %PAIRS, as used on $TOKEN
+# (which should be ?KEY? or any of the special %% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($TOKEN, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
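+# For illustration, with a hypothetical
+#   %transform = (FOO => 'bar', COND => 1, OTHER => 0)
+# the requests are rewritten as follows:
+#   %FOO%     =>  'bar'
+#   %?COND%   =>  'TRUE'        %?OTHER%  =>  'FALSE'
+#   ?COND?    =>  ''            (the line is kept)
+#   ?!COND?   =>  '##%'         (the line is later stripped by
+#                                preprocess_file)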
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., no recognition of
+# rule declarations or of make variable definitions).
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides, we don't need more than two consecutive newlines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If this paragraph is a rule, eat lines as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If this paragraph is a comment, eat as many comment lines as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
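+# For illustration, a preprocessed file containing the three lines
+#   foo: bar
+#   <TAB>do-something
+#   VAR = value
+# is returned as two paragraphs: the rule "foo: bar" together with its
+# tab-indented action, and the assignment "VAR = value".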
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.  $IS_AM is true iff the caller
+# is reading an Automake file (as opposed to the user's Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick an empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate the relationship from the optional actions: the first
+	  # "newline tab" not preceded by a backslash (i.e., not a
+	  # continuation line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several times (e.g. for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
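+# For illustration, assuming 'bin' is passed in @PREFIXES and the
+# Makefile.am contains
+#   zardir       = $(datadir)/zar
+#   zar_PROGRAMS = foo
+#   bin_PROGRAMS = bar
+# both 'bin' and 'zar' are returned, whereas a lone 'bni_PROGRAMS'
+# (without a 'bnidir' variable) would be reported as an error.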
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A directory that is not explicitly valid is allowed if Xdir is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns a list of [$location, $value] pairs, where the
+# $value's are the values of all where_HOW variables, and $location is
+# their associated location (the place where their parent variables were
+# defined).
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, is it installed with install-exec (or install-data)?
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be determined at configure time.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure.  (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory as the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Verify that $FILE exists in $DIRECTORY, or install it.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Verify that each of @FILES exists in $DIRECTORY, or install it.
+# $MYSTRICT is the strictness level at which these files become required.
+# Worker threads may queue up the action to be serialized by the master,
+# if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in the configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
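+# For illustration, require_build_directory ('sub/dir') returns
+# 'sub/dir/$(am__dirstamp)' and emits (only once) a rule of the form:
+#   sub/dir/$(am__dirstamp):
+#           @$(MKDIR_P) sub/dir
+#           @: > sub/dir/$(am__dirstamp)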
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
+
+################################################################
+
+# Push a list of files onto '@dist_common'.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warnings issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
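+  # For illustration, an AC_CONFIG_FILES entry written as
+  # 'sub/Makefile:sub/Makefile.in:fragment.in' would be encoded so that
+  # $makefile is 'sub/Makefile' and @inputs holds 'sub/Makefile.in' and
+  # 'fragment.in'.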
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations:
+  #  * $force_generation is set;
+  #  * any of the output dependencies is younger than the output;
+  #  * the contents of the output are different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # The output's dependencies are split into two sets:
+  #  * dependencies which are also configure dependencies
+  #    (these do not change from one Makefile.am to the next);
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
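+  # For illustration, with 10 names and 4 columns, $rows is 3 and $rest
+  # is 2, so the names are laid out column-major as
+  #   0  3  6  8
+  #   1  4  7  9
+  #   2  5
+  # i.e. the first $rest columns hold $rows entries and the remaining
+  # columns hold one entry fewer.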
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning settings are now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
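+# For example, running "AUTOMAKE_JOBS=4 automake" requests up to four
+# worker threads; the count is further capped by the number of input
+# files, and falls back to 0 (no threading) for non-numeric values.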
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than there are makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles, the message queues
+  # collect all serializations needed for respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/automake-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/automake-1.14
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/automake-1.14
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns out that other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing single-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sorts of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN = '[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
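+# For illustration, $ASSIGNMENT_PATTERN matches assignments such as
+# 'foo = bar', 'foo += bar', or 'foo := bar' (the names here are just
+# examples).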
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
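+# For illustration, $INCLUDE_PATTERN recognizes lines such as
+# "include $(top_srcdir)/common.mk", "include $(srcdir)/extra.mk", or
+# "include fragment.mk" (hypothetical file names); anything else is
+# passed through untouched.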
+
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location to a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINKS/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR.
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps language names onto objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# Maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# $KNOWN_EXTENSIONS_PATTERN, which is used when parsing
+# the input am file.
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
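+# For illustration (hypothetical Makefile.am content): a definition such as
+# "SUFFIXES = .idl .x" makes the hook above call
+# accept_extensions ('.idl', '.x') while the file is being parsed.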
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries= ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with paths that are not strictly increasing, e.g., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For silent rules, set up VAR and its dispatcher, expanding to
+# VAL-IF-SILENT when silent and to VAL-IF-VERBOSE (defaulting to
+# empty) when not.
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of %SILENT%: variable to expand to '@' when silent.
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
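+# For illustration, define_verbose_tagvar ('GEN') results (roughly) in the
+# following Makefile.in definitions:
+#   AM_V_GEN = $(am__v_GEN_@AM_V@)
+#   am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+#   am__v_GEN_0 = @echo "  GEN     " $@;
+#   am__v_GEN_1 =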
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
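+# For illustration: generated recipes are prefixed with expansions such as
+# $(AM_V_GEN) or $(AM_V_at); in silent mode these become an
+# '@echo "  GEN     " $@;' (or a plain '@') prefix, while in verbose mode
+# they expand to nothing so the full command is echoed.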
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
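+# For illustration: shadow_unconditionally ('SOURCES', $where) yields
+# '$(SOURCES)' when SOURCES has no conditional contents; otherwise it
+# defines am__SOURCES_DIST holding every possible value and yields
+# '$(am__SOURCES_DIST)'.  ('SOURCES' is only an illustrative name here.)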
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Make sure each variable VAR in @LIST does not exist; suggest using AM_VAR
+# otherwise.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
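+# For illustration: a Makefile.am that assigns CFLAGS directly gets the
+# warning above, which suggests using AM_CFLAGS instead.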
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
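+	    # For illustration, each generated line looks (roughly) like
+	    #   @AMDEP_TRUE@@am__include@ @am__quote@sub/$(DEPDIR)/foo.Po@am__quote@
+	    # where 'sub/$(DEPDIR)/foo.Po' stands in for one entry of @deplist.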
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so that transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
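+	    # For illustration, a hypothetical source 'sub/foo.c' built with
+	    # subdir-objects tracks its dependencies in 'sub/$(DEPDIR)/foo.Po'
+	    # (computed via the $depbase shell variable); without subdir-objects
+	    # the generic rule hard-codes '$(DEPDIR)/$*' for sources in the
+	    # current directory.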
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
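+# For illustration, a call such as
+#   append_exeext { 1 } 'bin_PROGRAMS';
+# turns every listed program (say, a hypothetical 'foo') into 'foo$(EXEEXT)',
+# leaving entries that already end in $(EXEEXT) or are @substitutions@ alone.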
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE the source file to transform
+#   %TRANSFORM contains extra arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # Since the next major version of automake (2.0) will make
+                  # mandatory the behaviour that is currently only activated
+                  # by the 'subdir-objects' option, it's better if we start
+                  # warning users who are not using that option.
+                  # As suggested by Peter Johansson, we strive to avoid the
+                  # warning when it would be irrelevant, i.e., when all
+                  # source files sit in the "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least a source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+of the corresponding sources.
+You are advised to start using 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inferences rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of five values followed by additional transform flags for
+		# file_contents.  The five values are the flag prefix (the
+		# derived name, e.g. 'foo' for 'foo_CFLAGS', or 'AM'
+		# otherwise), the name of the source file, the base name of
+		# the output file, the extension for the object file, and
+		# the extension of the source file.
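+		# A hypothetical entry might thus look like
+		#   ['foo', 'bar.c', 'foo-bar', '.o', '.c',
+		#    DIST_SOURCE => 1, ...]
+		# for an object renamed because of per-target 'foo_CFLAGS'.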
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the source they are
+	    # derived from is not.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  Not only is this more
+                # efficient than listing the object files to be removed
+                # individually (which would cause an 'rm' invocation for
+                # each of them -- very inefficient, see bug#10697), it
+                # also avoids leaving stale object files in the subdirectory
+                # whenever a source file there is removed or renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# --------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated and returned).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# Result is $LINKER: a true value if a linker is needed to deal with the
+# objects, or a false value (the empty string) otherwise.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.$(OBJEXT)' or '.lo')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
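+#
+# Hypothetical example: for 'bin_PROGRAMS = foo' this is called as
+# handle_source_transform ('foo', 'foo', '.$(OBJEXT)', ...); a C++
+# file in foo_SOURCES would make it return a language-specific linker
+# variable name such as 'CXXLINK', whereas an empty return falls back
+# to the default 'LINK'.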
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
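+	#
+	# Hypothetical example: for 'foo_SOURCES' and 'dist_foo_SOURCES'
+	# we would define 'am_foo_OBJECTS' and 'dist_foo_OBJECTS', and
+	# later glom them together as
+	#   foo_OBJECTS = $(am_foo_OBJECTS) $(dist_foo_OBJECTS)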
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
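+	# Hypothetical example: 'bin_PROGRAMS = mumble' with no
+	# mumble_SOURCES defaults to 'mumble.c' (or to
+	# 'mumble$(AM_DEFAULT_SOURCE_EXT)' when that variable is set),
+	# while 'lib_LTLIBRARIES = libfoo.la' defaults to 'libfoo.c'
+	# unless a legacy 'libfoo_la.c' is around.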
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
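+#
+# Hypothetical example: with
+#   foo_LDADD = @LIBOBJS@ libaux.a -lm
+# the generated 'foo_DEPENDENCIES' would list '@LIBOBJS@' and
+# 'libaux.a', the '-lm' flag would be silently skipped (it is not a
+# file), and the function would return 1 because LIBOBJS was seen.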
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlpreopen
+	       # are pure nonsense.  Diagnosing this doesn't seem very
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clear LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
+
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
+
+# Canonicalize the input parameter.
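+# E.g., 'libfoo-bar.la' would become 'libfoo_bar_la' and 'sub/prog'
+# would become 'sub_prog': every character outside [A-Za-z0-9_@] is
+# mapped to '_'.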
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+    return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use `-I.@am__isrc@`
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
+
+
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we look at this.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true there is only one condition and
+	      # there is no point defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to cleanup .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+# See if any _SOURCES variable was misspelled.
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
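+#
+# Hypothetical example: a 'foo.texi' containing
+#   @setfilename foo.info
+#   @include version.texi
+# would make this return ('foo.info', 'version.texi').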
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice); while the latter need to be output for
+  # each Texinfo source.
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuses Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
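+  # Hypothetical example: 'DISTCLEANFILES = *.info foo-*' would yield
+  # the pattern '^(?:[^/]*\.info|foo-[^/]*)$'.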
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files is important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #	  |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files always get built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date source-tree copy of the .info file existed,
+      #       make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) is triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
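+  #
+  # Hypothetical example: 'man_MANS = foo.1 bar.3x' enables sections
+  # '1' and '3', while 'dist_man5_MANS = baz.5' enables section '5'
+  # and additionally distributes 'baz.5'.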
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents needlessly.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check all items in variable $VARIABLE as directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
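+
+# For example, assuming 'bindir' names a known (AC_SUBSTed) variable while
+# 'foo' does not:
+#   substitute_ac_subst_variables ('${bindir}/${foo}')
+# yields '@bindir@/${foo}': known substitutions are rewritten, unknown
+# ones are left as shell-style references.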
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
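+
+# For example, assuming $relative_dir is 'sub':
+#   prepend_srcdir ('sub/foo.in', 'other/bar.in')
+# yields ('$(srcdir)/foo.in', '$(top_srcdir)/other/bar.in').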
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUTS[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by other AC_CONFIG_FOOs.
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
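+
+# For example, in the top-level Makefile (assuming 'Makefile.in' is a plain
+# input and not itself an AC_CONFIG_FILES output):
+#   rewrite_inputs_into_dependencies ('Makefile', 'Makefile.in')
+# yields ('$(srcdir)/Makefile.in') and marks that input for distribution.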
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %MAKE_LIST.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by changing the order of dependencies to the "all" because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If $file is a variable, make sure we don't call 'rm -f' without args.
+      $rm ="test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
+
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
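+
+# For example, sorting ('all-am', '.PHONY', 'check-am') with target_cmp
+# yields ('all-am', 'check-am', '.PHONY'): dot targets always come last.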
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing about this guy, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
+  # The "test driver" program, deputed to handle tests protocol used by
+  # test scripts.  By default, it's assumed that no protocol is used, so
+  # we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
+	  my $nhelper=1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
+
+  return ($output, @inputs);
+}
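+
+# For example:
+#   split_config_file_spec ('Makefile')           yields ('Makefile', 'Makefile.in')
+#   split_config_file_spec ('out:in1.in:in2.in')  yields ('out', 'in1.in', 'in2.in')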
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as Makefile:top.in:mid.in:bot.in.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
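+
+# For example, given ('top.in', 'mid.in', 'bot.in'), locate_am returns the
+# first input whose '*.am' counterpart exists on disk ('mid.in' if only
+# 'mid.am' exists), or undef when none of them has one.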
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
+  @cond_stack = ();
+  my $where;
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-argument forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 1.6.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate version mismatch to missing.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: return LANG_SUBDIR if the resulting object file
+# should go into a subdirectory whenever the source file does,
+# LANG_PROCESS if the file is to be processed normally, and
+# LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
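+#
+# As an illustration (hypothetical file names): lang_vala_rewrite
+# ('src', 'foo', 'vala') below returns (LANG_SUBDIR, 'c'), i.e. the
+# generated C file keeps the source's subdirectory and gets a '.c'
+# extension, while lang_header_rewrite always returns LANG_IGNORE.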
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs,
+	      # so distribute the generated files.
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards),
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code, do not run C compiler
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
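+
+# For a hypothetical program 'foo' built from foo.vala, the stamp rule
+# emitted above is expected to look roughly like:
+#
+#   $(srcdir)/foo_vala.stamp: foo.vala
+#           $(AM_V_at)rm -f $@ && echo stamp > $@-t
+#           $(AM_V_VALAC)$(am__cd) $(srcdir) && $(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS) -C foo.vala
+#           $(AM_V_at)mv -f $@-t $@
+#
+# (Illustrative only: the exact silent-rules variables come from the
+# verbose_flag and silent_flag helpers used above.)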
+
+# Add output rules to invoke valac and create stamp file as a witness
+# to handle multiple outputs. This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
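+	# For example, an output of 'parse.c' gives $output_ext '.c',
+	# $header_ext '.h' and $header 'parse.h'; 'parse.cc' would give
+	# 'parse.hh'.  (Illustrative file names.)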
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
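+
+# For instance, a %linkers hash containing both CXXLINK and F77LINK
+# resolves to 'CXXLINK', because it comes first in the precedence list
+# above; an empty hash falls through to plain 'LINK'.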
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
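+#
+# For example (hypothetical user suffix): deriving '.zoo' against '.o'
+# follows the registered suffix chain '.zoo' -> '.c' and stops at '.c',
+# because C is a language known to %extension_map.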
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
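+
+# As an illustration, with a hypothetical AM_CONDITIONAL named 'DEBUG':
+# make_conditional_string ('', 'DEBUG') gives 'DEBUG_TRUE', while
+# make_conditional_string ('!', 'DEBUG') gives the negated 'DEBUG_FALSE'.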
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; this should be the empty
+# string to define the variable unconditionally.  The third argument
+# is a list holding the values to use for the variable.  The value is
+# pretty printed in the output file.
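+#
+# For instance (hypothetical values), define_pretty_variable
+# ('noinst_HEADERS', TRUE, INTERNAL, 'foo.h', 'bar.h') records a
+# wrapped, unconditional "noinst_HEADERS = foo.h bar.h" definition,
+# unless the user already defined that variable in the same condition.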
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR, whose content is the list of file names formed from
+# each @BASENAME entry and the $EXTENSION.
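+#
+# For example (hypothetical arguments): with @bases = ('foo', 'bar'),
+# define_files_variable ('am__objects', @bases, 'lo', INTERNAL)
+# defines "am__objects = foo.lo bar.lo".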
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution by the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of 'AM_$flag' in the link
+  # command into '${derived}_$flag' if it exists.
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
+  # If the computed command is the same as the generic command, use
+  # the command linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a slash.
+# Might modify $LINE.
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
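+
+# For example, a $LINE of "foo = bar \" (trailing backslash) yields 1;
+# the same line with stray blanks after the backslash triggers the
+# 'syntax' warning above, is normalized in place, and still yields 1;
+# '##' comment lines always yield 0.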
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
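+	# E.g., with a hypothetical $reldir of 'sub/dir', "%reldir%/foo.c"
+	# becomes "sub/dir/foo.c" and "%canon_reldir%_foo" becomes
+	# "sub_dir_foo" (canonicalize maps '/' to '_'); at the top level
+	# ($reldir eq '.') the prefixes are stripped entirely above.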
+
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format they were input.  However, POSIX compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover nobody
+	    # would really write such long lines by hand since it is
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tools, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
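+# For example, a value that was continued across lines with
+# backslash-newlines and uneven spacing, such as "foo", a
+# backslash-newline, then "  bar   baz", flattens to "foo bar baz":
+# escaped newlines are removed, whitespace runs collapse to single
+# spaces, and leading/trailing blanks are trimmed.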
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated to $KEY in %PAIRS, as used on $TOKEN
+# (which should be ?KEY? or any of the special %% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($TOKEN, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
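+#
+# As an illustration, with \%PAIRS = (FOO => 'bar', COND => 0)
+# (hypothetical keys): '%FOO%' yields 'bar', '%?COND%' yields 'FALSE',
+# '?COND?' yields the '##%' marker (so preprocess_file later strips the
+# line), and '?!COND?' yields '' (the line is kept).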
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., recognition of
+# rules declaration or of make variables definitions).
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides we don't need more than two consecutive new-lines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If we are a rule, keep eating lines as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If we are a comment, eat as many comment lines as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known. $IS_AM iff the caller is
+# reading an Automake file (as opposed to the user's Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate relationship from optional actions: the first
+	  # "newline tab" not preceded by a backslash (continuation
+	  # line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several times (e.g. for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A prefix that is not explicitly valid is allowed if ${X}dir is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns a list of [$location, $value] pairs, where the $value's are
+# the values of all where_HOW variables, and $location is their
+# associated location (the place where their parent variables were
+# defined).
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
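+# A typical call (illustrative arguments) is
+#   am_install_var ('-candist', 'data', 'DATA', 'data', 'pkgdata'),
+# which reads data.am, handles variables such as data_DATA or
+# dist_pkgdata_DATA, and returns their [location, value] pairs.
+#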
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, with install-exec? (or install-data?).
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be determined at configure time.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
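+
+# An illustrative sketch of the shape of the list returned above (the
+# Makefile.am content is hypothetical): given "bin_PROGRAMS = foo bar",
+# the caller would receive roughly
+#
+#   ([$location_of_foo, 'foo'], [$location_of_bar, 'bar'])
+#
+# that is, one [location, object] pair per object, with duplicates
+# removed and the pairs sorted by object name.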
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure.  (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory as the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Ensure that $FILE exists in $DIRECTORY, or install it.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Ensure that each file in @FILES exists in $DIRECTORY, or install it.
+# $MYSTRICT is the strictness level at which this file becomes required.
+# Worker threads may queue up the action to be serialized by the master,
+# if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
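+
+# Schematically (an illustrative sketch, not literal code), the record
+# enqueued above and consumed by require_queued_file_check_or_copy below
+# is laid out as
+#
+#   $key, $dir, QUEUE_LOCATION, <serialized location>, $mystrict, $nfiles, @files
+# or
+#   $key, $dir, QUEUE_STRING, $where, $mystrict, $nfiles, @files
+#
+# $key itself is dequeued by the dispatch loop in
+# handle_makefiles_threaded before the consumer below is called.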
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
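+
+# A minimal sketch (for illustration; the directory names are hypothetical)
+# of the canonicalization the map above relies on: File::Spec->canonpath
+# reduces equivalent spellings to a single key, so both directories below
+# would share one dirstamp rule.
+#
+#   use File::Spec;
+#   File::Spec->canonpath ('foo/bar');      # yields 'foo/bar'
+#   File::Spec->canonpath ('./foo//bar');   # also yields 'foo/bar'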
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
+
+################################################################
+
+# Push a list of files onto '@dist_common'.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warning issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations.
+  #  * $force_generation is set.
+  #  * any of the output dependencies is younger than the output
+  #  * the contents of the output are different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # The output's dependencies are split into two sets:
+  #  * dependencies which are also configure dependencies
+  #    (these do not change from one Makefile.am to the next);
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
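+
+# A worked example of the layout above (the numbers are hypothetical): with
+# 10 names and 4 columns, $rows is 3 and $rest is 2, so the columns hold 3,
+# 3, 2 and 2 names respectively, and the rows print the list indices
+#
+#   0  3  6  8
+#   1  4  7  9
+#   2  5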
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning setting now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than there are makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
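+
+# For example (a usage sketch; the value 4 is arbitrary), a user could
+# request up to four worker threads, capped at the number of input
+# Makefiles and honored only when Perl has usable thread support, with:
+#
+#   AUTOMAKE_JOBS=4 automake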
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles, the message queues
+  # collect all serializations needed for respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoreconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoreconf
new file mode 100755
index 0000000..fac800b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoreconf
@@ -0,0 +1,718 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoreconf.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoreconf - install the GNU Build System in a directory tree
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David J. MacKenzie.
+# Extended and rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+# Do not use Cwd::chdir, since it might hang.
+use Cwd 'cwd';
+use strict;
+
+## ----------- ##
+## Variables.  ##
+## ----------- ##
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [DIRECTORY]...
+
+Run `autoconf' (and `autoheader', `aclocal', `automake', `autopoint'
+(formerly `gettextize'), and `libtoolize' where appropriate)
+repeatedly to remake the GNU Build System files in specified
+DIRECTORIES and their subdirectories (defaulting to `.').
+
+By default, it only remakes those files that are older than their
+sources.  If you install new versions of the GNU Build System,
+you can make `autoreconf' remake all of the files by giving it the
+`--force' option.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don't remove temporary files
+  -f, --force              consider all files obsolete
+  -i, --install            copy missing auxiliary files
+      --no-recursive       don't rebuild sub-packages
+  -s, --symlink            with -i, install symbolic links instead of copies
+  -m, --make               when applicable, re-run ./configure && make
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY [syntax]
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variable \`WARNINGS\' is honored.  Some subtools might
+support other warning types; using \`all' is encouraged.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+The environment variables AUTOM4TE, AUTOCONF, AUTOHEADER, AUTOMAKE,
+ACLOCAL, AUTOPOINT, LIBTOOLIZE, M4, and MAKE are honored.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version = "autoreconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+# Lib files.
+my $autoconf   = $ENV{'AUTOCONF'}   || '/i686-pc-linux-gnu/bin/autoconf';
+my $autoheader = $ENV{'AUTOHEADER'} || '/i686-pc-linux-gnu/bin/autoheader';
+my $autom4te   = $ENV{'AUTOM4TE'}   || '/i686-pc-linux-gnu/bin/autom4te';
+my $automake   = $ENV{'AUTOMAKE'}   || 'automake';
+my $aclocal    = $ENV{'ACLOCAL'}    || 'aclocal';
+my $libtoolize = $ENV{'LIBTOOLIZE'} || 'libtoolize';
+my $autopoint  = $ENV{'AUTOPOINT'}  || 'autopoint';
+my $make       = $ENV{'MAKE'}       || 'make';
+
+# --install -- as --add-missing in other tools.
+my $install = 0;
+# symlink -- when --install, use symlinks instead.
+my $symlink = 0;
+# Does aclocal support --force?
+my $aclocal_supports_force = 0;
+# Does aclocal support -Wfoo?
+my $aclocal_supports_warnings = 0;
+# Does automake support --force-missing?
+my $automake_supports_force_missing = 0;
+# Does automake support -Wfoo?
+my $automake_supports_warnings = 0;
+
+my @prepend_include;
+my @include;
+
+# List of command line warning requests.
+my @warning;
+
+# Rerun `./configure && make'?
+my $run_make = 0;
+
+# Recurse into subpackages
+my $recursive = 1;
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ("W|warnings=s"         => \@warning,
+	  'I|include=s'          => \@include,
+	  'B|prepend-include=s'	 => \@prepend_include,
+	  'i|install'            => \$install,
+	  's|symlink'            => \$symlink,
+	  'm|make'               => \$run_make,
+	  'recursive!'           => \$recursive);
+
+  # Split the warnings as a list of elements instead of a list of
+  # lists.
+  @warning = map { split /,/ } @warning;
+  parse_WARNINGS;
+  parse_warnings '--warnings', @warning;
+
+  # Even if the user specified a configure.ac, trim to get the
+  # directory, and look for configure.ac again.  Because (i) the code
+  # is simpler, and (ii) we are still able to diagnose simultaneous
+  # presence of configure.ac and configure.in.
+  @ARGV = map { /configure\.(ac|in)$/ ? dirname ($_) : $_ } @ARGV;
+  push @ARGV, '.' unless @ARGV;
+
+  if ($verbose && $debug)
+    {
+      for my $prog ($autoconf, $autoheader,
+		    $automake, $aclocal,
+		    $autopoint,
+		    $libtoolize)
+	{
+	  xsystem ("$prog --version | sed 1q >&2");
+	  print STDERR "\n";
+	}
+    }
+
+  my $aclocal_help = `$aclocal --help 2>/dev/null`;
+  my $automake_help = `$automake --help 2>/dev/null`;
+  $aclocal_supports_force = $aclocal_help =~ /--force/;
+  $aclocal_supports_warnings = $aclocal_help =~ /--warnings/;
+  $automake_supports_force_missing = $automake_help =~ /--force-missing/;
+  $automake_supports_warnings = $automake_help =~ /--warnings/;
+
+  # Dispatch autoreconf's option to the tools.
+  # --include;
+  $aclocal    .= join (' -I ', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+  $autoheader .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoheader .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+  # --install and --symlink;
+  if ($install)
+    {
+      $automake   .= ' --add-missing';
+      $automake   .= ' --copy' unless $symlink;
+      $libtoolize .= ' --copy' unless $symlink;
+    }
+  # --force;
+  if ($force)
+    {
+      $aclocal    .= ' --force'
+	if $aclocal_supports_force;
+      $autoconf   .= ' --force';
+      $autoheader .= ' --force';
+      $automake   .= ' --force-missing'
+	if $automake_supports_force_missing;
+      $autopoint  .= ' --force';
+      $libtoolize .= ' --force';
+    }
+  else
+    {
+      # The implementation of --no-force is bogus in all implementations
+      # of Automake up to 1.8, so we avoid it in these cases.  (Automake
+      # 1.8 is the first version where aclocal supports force, hence
+      # the condition.)
+      $automake .= ' --no-force'
+	if $aclocal_supports_force;
+    }
+  # --verbose --verbose or --debug;
+  if ($verbose > 1 || $debug)
+    {
+      $autoconf   .= ' --verbose';
+      $autoheader .= ' --verbose';
+      $automake   .= ' --verbose';
+      $aclocal    .= ' --verbose';
+    }
+  if ($debug)
+    {
+      $autoconf   .= ' --debug';
+      $autoheader .= ' --debug';
+      $libtoolize .= ' --debug';
+    }
+  # --warnings;
+  if (@warning)
+    {
+      my $warn = ' --warnings=' . join (',', @warning);
+      $autoconf   .= $warn;
+      $autoheader .= $warn;
+      $automake   .= $warn
+	if $automake_supports_warnings;
+      $aclocal    .= $warn
+        if $aclocal_supports_warnings;
+    }
+}
+
+
+# &run_aclocal ($ACLOCAL, $FLAGS)
+# -------------------------------
+# Update aclocal.m4 as lazily as possible, as aclocal pre-1.8 always
+# overwrites aclocal.m4, hence triggers autoconf, autoheader, automake
+# etc. uselessly.  aclocal 1.8+ does not need this.
+sub run_aclocal ($$)
+{
+  my ($aclocal, $flags) = @_;
+
+  # aclocal 1.8+ does all this for free.  It can be recognized by its
+  # --force support.
+  if ($aclocal_supports_force)
+    {
+      xsystem ("$aclocal $flags");
+    }
+  else
+    {
+      xsystem ("$aclocal $flags --output=aclocal.m4t");
+      # aclocal may produce no output.
+      if (-f 'aclocal.m4t')
+	{
+	  update_file ('aclocal.m4t', 'aclocal.m4');
+	  # Make sure that the local m4 files are older than
+	  # aclocal.m4.
+	  #
+	  # Why is this not always the case?  Because we already run
+	  # aclocal at first (before tracing), which, for instance,
+	  # can find Gettext's macros in .../share/aclocal, so we may
+	  # have had the right aclocal.m4 already.  Then autopoint is
+	  # run, and installs locally these M4 files.  Then
+	  # autoreconf, via update_file, sees it is the _same_
+	  # aclocal.m4, and doesn't change its timestamp.  But later,
+	  # Automake's Makefile expresses that aclocal.m4 depends on
+	  # these local files, which are newer, so it triggers aclocal
+	  # again.
+	  #
+	  # To make sure aclocal.m4 is no older, we change the
+	  # modification times of the local M4 files to be not newer
+	  # than it.
+	  #
+	  # First, where are the local files?
+	  my $aclocal_local_dir = '.';
+	  if ($flags =~ /-I\s+(\S+)/)
+	    {
+	      $aclocal_local_dir = $1;
+	    }
+	  # All the local files newer than aclocal.m4 are to be
+	  # made not newer than it.
+	  my $aclocal_m4_mtime = mtime ('aclocal.m4');
+	  for my $file (glob ("$aclocal_local_dir/*.m4"), 'acinclude.m4')
+	    {
+	      if ($aclocal_m4_mtime < mtime ($file))
+		{
+		  debug "aging $file to be not newer than aclocal.m4";
+		  utime $aclocal_m4_mtime, $aclocal_m4_mtime, $file;
+		}
+	    }
+	}
+    }
+}
+
+# &autoreconf_current_directory
+# -----------------------------
+sub autoreconf_current_directory ()
+{
+  my $configure_ac = find_configure_ac;
+
+  # ---------------------- #
+  # Is it using Autoconf?  #
+  # ---------------------- #
+
+  my $uses_autoconf;
+  my $uses_gettext;
+  if (-f $configure_ac)
+    {
+      my $configure_ac_file = new Autom4te::XFile "< $configure_ac";
+      while ($_ = $configure_ac_file->getline)
+	{
+	  s/#.*//;
+	  s/dnl.*//;
+	  $uses_autoconf = 1 if /AC_INIT/;
+	  # See below for why we look for gettext here.
+	  $uses_gettext = 1  if /^AM_GNU_GETTEXT_VERSION/;
+	}
+    }
+  if (!$uses_autoconf)
+    {
+      verb "$configure_ac: not using Autoconf";
+      return;
+    }
+
+
+  # ------------------- #
+  # Running autopoint.  #
+  # ------------------- #
+
+  # Gettext is a bit of a problem: its macros are not necessarily
+  # visible to aclocal, so if we start with a completely stripped-down
+  # package (think of a fresh CVS checkout), running `aclocal' first
+  # will fail: the Gettext macros are missing.
+  #
+  # Therefore, we can't use the traces to decide if we use Gettext or
+  # not.  I guess that once Gettext moves to 2.5x we will be able to,
+  # but in the meantime, forget it.
+  #
+  # We can only grep for AM_GNU_GETTEXT_VERSION in configure.ac.  You
+  # might think this approach is naive, and indeed it is, as it
+  # prevents one from embedding AM_GNU_GETTEXT_VERSION in another *.m4, but
+  # we don't really lose any generality, since... that's what
+  # autopoint does.  Actually, it is even more restrictive, as it
+  # greps for `^AM_GNU_GETTEXT_VERSION('.  We did this above, while
+  # scanning configure.ac.
+  if (!$uses_gettext)
+    {
+      verb "$configure_ac: not using Gettext";
+    }
+  elsif (!$install)
+    {
+      verb "$configure_ac: not running autopoint: --install not given";
+    }
+  else
+    {
+      xsystem_hint ("autopoint is needed because this package uses Gettext", "$autopoint");
+    }
+
+
+  # ----------------- #
+  # Running aclocal.  #
+  # ----------------- #
+
+  # Run it first: it might discover new macros to add, e.g.,
+  # AC_PROG_LIBTOOL, which we will trace later to see if Libtool is
+  # used.
+  #
+  # Always run it.  Tracking its sources for up-to-dateness is too
+  # complex and too error-prone.  The best we can do is to avoid
+  # nuking the time stamp.
+  my $uses_aclocal = 1;
+
+  # Nevertheless, if aclocal.m4 exists and is not made by aclocal,
+  # don't run aclocal.
+
+  if (-f 'aclocal.m4')
+    {
+      my $aclocal_m4 = new Autom4te::XFile 'aclocal.m4';
+      $_ = $aclocal_m4->getline;
+      $uses_aclocal = 0
+	unless defined ($_) && /generated.*by aclocal/;
+    }
+
+  # If there are flags for aclocal in Makefile.am, use them.
+  my $aclocal_flags = '';
+  if ($uses_aclocal && -f 'Makefile.am')
+    {
+      my $makefile = new Autom4te::XFile 'Makefile.am';
+      while ($_ = $makefile->getline)
+	{
+	  if (/^ACLOCAL_[A-Z_]*FLAGS\s*=\s*(.*)/)
+	    {
+	      $aclocal_flags = $1;
+	      last;
+	    }
+	}
+    }
+
+  if (!$uses_aclocal)
+    {
+      verb "$configure_ac: not using aclocal";
+    }
+  else
+    {
+      # Some file systems have sub-second time stamps, and if so we may
+      # run into trouble later, after we rerun autoconf and set the
+      # time stamps of input files to be no greater than aclocal.m4,
+      # because the time-stamp-setting operation (utime) has a
+      # resolution of only 1 second.  Work around the problem by
+      # ensuring that there is at least a one-second window before the
+      # time stamp of aclocal.m4t in which no file time stamps can
+      # fall.
+      sleep 1;
+
+      run_aclocal ($aclocal, $aclocal_flags);
+    }
+
+  # We might have to rerun aclocal if Libtool (or others) imports new
+  # macros.
+  my $rerun_aclocal = 0;
+
+
+
+  # ------------------------------- #
+  # See what tools will be needed.  #
+  # ------------------------------- #
+
+  # Perform a single trace reading, to avoid --force forcing a rerun
+  # between two --trace invocations, which would be useless.  If there is
+  # no AC_INIT, then we are not interested: it looks like a Cygnus thingy.
+  my $aux_dir;
+  my $uses_gettext_via_traces;
+  my $uses_libtool;
+  my $uses_libltdl;
+  my $uses_autoheader;
+  my $uses_automake;
+  my @subdir;
+  verb "$configure_ac: tracing";
+  my $traces = new Autom4te::XFile
+    ("$autoconf"
+     . join (' ',
+	     map { ' --trace=' . $_ . ':\$n::\${::}%' }
+	     # If you change this list, update the
+	     # `Autoreconf-preselections' section of autom4te.in.
+	     'AC_CONFIG_AUX_DIR',
+	     'AC_CONFIG_HEADERS',
+	     'AC_CONFIG_SUBDIRS',
+	     'AC_INIT',
+	     'AC_PROG_LIBTOOL',
+	     'LT_INIT',
+	     'LT_CONFIG_LTDL_DIR',
+	     'AM_GNU_GETTEXT',
+	     'AM_INIT_AUTOMAKE',
+	    )
+     . ' |');
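+
+  # With the trace format above, each line read back looks roughly like
+  # this (hypothetical output):
+  #
+  #   AC_CONFIG_AUX_DIR::build-aux
+  #   AC_INIT::mypackage::1.0
+  #
+  # i.e. the macro name and its arguments joined by '::'.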
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($macro, @args) = split (/::/);
+      $aux_dir = $args[0]           if $macro eq "AC_CONFIG_AUX_DIR";
+      $uses_autoconf = 1            if $macro eq "AC_INIT";
+      $uses_gettext_via_traces = 1  if $macro eq "AM_GNU_GETTEXT";
+      $uses_libtool = 1             if $macro eq "AC_PROG_LIBTOOL"
+                                       || $macro eq "LT_INIT";
+      $uses_libltdl = 1             if $macro eq "LT_CONFIG_LTDL_DIR";
+      $uses_autoheader = 1          if $macro eq "AC_CONFIG_HEADERS";
+      $uses_automake = 1            if $macro eq "AM_INIT_AUTOMAKE";
+      push @subdir, split (' ', $args[0])
+                                    if $macro eq "AC_CONFIG_SUBDIRS" && $recursive;
+    }
+
+  # The subdirs are *optional*, they may not exist.
+  foreach (@subdir)
+    {
+      if (-d)
+	{
+	  verb "$configure_ac: adding subdirectory $_ to autoreconf";
+	  autoreconf ($_);
+	}
+      else
+	{
+	  verb "$configure_ac: subdirectory $_ not present";
+	}
+    }
+
+  # Gettext consistency checks...
+  error "$configure_ac: AM_GNU_GETTEXT is used, but not AM_GNU_GETTEXT_VERSION"
+    if $uses_gettext_via_traces && ! $uses_gettext;
+  error "$configure_ac: AM_GNU_GETTEXT_VERSION is used, but not AM_GNU_GETTEXT"
+    if $uses_gettext && ! $uses_gettext_via_traces;
+
+
+  # ---------------------------- #
+  # Setting up the source tree.  #
+  # ---------------------------- #
+
+  # libtoolize, automake --add-missing etc. will drop files in the
+  # $AUX_DIR.  But these tools fail to install these files if the
+  # directory itself does not exist, which is valid: just imagine a CVS
+  # repository with hand written code only (there is not even a need
+  # for a Makefile.am!).
+
+  if (defined $aux_dir && ! -d $aux_dir)
+    {
+      verb "$configure_ac: creating directory $aux_dir";
+      mkdir $aux_dir, 0755
+	or error "cannot create $aux_dir: $!";
+    }
+
+
+  # -------------------- #
+  # Running libtoolize.  #
+  # -------------------- #
+
+  if (!$uses_libtool)
+    {
+      verb "$configure_ac: not using Libtool";
+    }
+  elsif ($install)
+    {
+      if ($uses_libltdl)
+	{
+	  $libtoolize .= " --ltdl";
+	}
+      xsystem_hint ("libtoolize is needed because this package uses Libtool", $libtoolize);
+      $rerun_aclocal = 1;
+    }
+  else
+    {
+      verb "$configure_ac: not running libtoolize: --install not given";
+    }
+
+
+
+  # ------------------- #
+  # Rerunning aclocal.  #
+  # ------------------- #
+
+  # If we re-installed Libtool or Gettext, the macros might have changed.
+  # Automake also needs an up-to-date aclocal.m4.
+  if ($rerun_aclocal)
+    {
+      if (!$uses_aclocal)
+	{
+	  verb "$configure_ac: not using aclocal";
+	}
+      else
+	{
+	  run_aclocal ($aclocal, $aclocal_flags);
+	}
+    }
+
+
+  # ------------------ #
+  # Running autoconf.  #
+  # ------------------ #
+
+  # Don't try to be smarter than `autoconf', which does its own
+  # up-to-date checks.
+  #
+  # We prefer running autoconf before autoheader, because (i) the
+  # latter runs the former, and (ii) autoconf is stricter than
+  # autoheader.  So all in all, autoconf should give better error
+  # messages.
+  xsystem ($autoconf);
+
+
+  # -------------------- #
+  # Running autoheader.  #
+  # -------------------- #
+
+  # We now consider that if AC_CONFIG_HEADERS is used, then autoheader
+  # is used too.
+  #
+  # Just as for autoconf, up-to-dateness checking is performed by the
+  # tool itself.
+  #
+  # Run it before automake, since the latter checks the presence of
+  # config.h.in when it sees an AC_CONFIG_HEADERS.
+  if (!$uses_autoheader)
+    {
+      verb "$configure_ac: not using Autoheader";
+    }
+  else
+    {
+      xsystem ($autoheader);
+    }
+
+
+  # ------------------ #
+  # Running automake.  #
+  # ------------------ #
+
+  if (!$uses_automake)
+    {
+      verb "$configure_ac: not using Automake";
+    }
+  else
+    {
+      # We should always run automake, and let it decide whether it shall
+      # update the file or not.  In fact, the effect of `$force' is already
+      # included in `$automake' via `--no-force'.
+      xsystem ($automake);
+    }
+
+
+  # -------------- #
+  # Running make.  #
+  # -------------- #
+
+  if ($run_make)
+    {
+      if (!-f "config.status")
+	{
+	  verb "no config.status: cannot re-make";
+	}
+      else
+	{
+	  xsystem ("./config.status --recheck");
+	  xsystem ("./config.status");
+	  if (!-f "Makefile")
+	    {
+	      verb "no Makefile: cannot re-make";
+	    }
+	  else
+	    {
+	      xsystem ("$make");
+	    }
+	}
+    }
+}
+
+
+# &autoreconf ($DIRECTORY)
+# ------------------------
+# Reconf the $DIRECTORY.
+sub autoreconf ($)
+{
+  my ($directory) = @_;
+  my $cwd = cwd;
+
+  # The format for this message is not free: taken from Emacs, itself
+  # using GNU Make's format.
+  verb "Entering directory `$directory'";
+  chdir $directory
+    or error "cannot chdir to $directory: $!";
+
+  autoreconf_current_directory;
+
+  # The format is not free: taken from Emacs, itself using GNU Make's
+  # format.
+  verb "Leaving directory `$directory'";
+  chdir $cwd
+    or error "cannot chdir to $cwd: $!";
+}
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+# When debugging, it is convenient that all the related temporary
+# files be at the same place.
+mktmpdir ('ar');
+$ENV{'TMPDIR'} = $tmp;
+parse_args;
+
+# Autoreconf all the given configure.ac.  Unless `--no-recursive' is passed,
+# AC_CONFIG_SUBDIRS will be traversed in &autoreconf_current_directory.
+$ENV{'AUTOM4TE'} = $autom4te;
+for my $directory (@ARGV)
+  {
+    require_configure_ac ($directory);
+    autoreconf ($directory);
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoscan b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoscan
new file mode 100755
index 0000000..008cda3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoscan
@@ -0,0 +1,679 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoscan.in; do not edit by hand.
+
+# autoscan - Create configure.scan (a preliminary configure.ac) for a package.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Configure_ac;
+use Autom4te::General;
+use Autom4te::FileUtils;
+use Autom4te::XFile;
+use File::Basename;
+use File::Find;
+use strict;
+
+use vars qw(@cfiles @makefiles @shfiles @subdirs %printed);
+
+# The kind of the words we are looking for.
+my @kinds = qw (function header identifier program
+		makevar librarie);
+
+# For each kind, the default macro.
+my %generic_macro =
+  (
+   'function'   => 'AC_CHECK_FUNCS',
+   'header'     => 'AC_CHECK_HEADERS',
+   'identifier' => 'AC_CHECK_TYPES',
+   'program'    => 'AC_CHECK_PROGS',
+   'library'    => 'AC_CHECK_LIB'
+  );
+
+my %kind_comment =
+  (
+   'function'   => 'Checks for library functions.',
+   'header'     => 'Checks for header files.',
+   'identifier' => 'Checks for typedefs, structures, and compiler characteristics.',
+   'program'    => 'Checks for programs.',
+  );
+
+# $USED{KIND}{ITEM} is the list of locations where the ITEM (of KIND) was used
+# in the user package.
+# For instance $USED{function}{alloca} is the list of `file:line' where
+# `alloca (...)' appears.
+my %used = ();
+
+# $MACRO{KIND}{ITEM} is the list of macros to use to test ITEM.
+# Initialized from lib/autoscan/*.  E.g., $MACRO{function}{alloca} contains
+# the singleton AC_FUNC_ALLOCA.  Some require several checks.
+my %macro = ();
+
+# $NEEDED_MACROS{MACRO} is an array of locations requiring MACRO.
+# E.g., $NEEDED_MACROS{AC_FUNC_ALLOCA} is the list of `file:line' containing
+# `alloca (...)'.
+my %needed_macros =
+  (
+   'AC_PREREQ' => [$me],
+  );
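+
+# Illustration only (not part of the upstream autoscan script): assuming
+# a hypothetical scanned file `foo.c' that calls alloca() on line 12,
+# the tables above might end up holding entries shaped roughly like:
+#
+#   $used{function}{alloca}          = ['foo.c:12'];
+#   $macro{function}{alloca}         = ['AC_FUNC_ALLOCA'];
+#   $needed_macros{'AC_FUNC_ALLOCA'} = ['foo.c:12'];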
+
+my $configure_scan = 'configure.scan';
+my $log;
+
+# Autoconf and lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/i686-pc-linux-gnu/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+my @prepend_include;
+my @include = ('//share/autoconf');
+
+# $help
+# -----
+$help = "Usage: $0 [OPTION]... [SRCDIR]
+
+Examine source files in the directory tree rooted at SRCDIR, or the
+current directory if none is given.  Search the source files for
+common portability problems, check for incompleteness of
+`configure.ac', and create a file `$configure_scan' which is a
+preliminary `configure.ac' for that package.
+
+  -h, --help          print this help, then exit
+  -V, --version       print version number, then exit
+  -v, --verbose       verbosely report processing
+  -d, --debug         don't remove temporary files
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $version
+# --------
+$version = "autoscan (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+
+
+
+## ------------------------ ##
+## Command line interface.  ##
+## ------------------------ ##
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ('I|include=s' => \@include,
+	  'B|prepend-include=s' => \@prepend_include);
+
+  die "$me: too many arguments
+Try `$me --help' for more information.\n"
+    if @ARGV > 1;
+
+  my $srcdir = $ARGV[0] || ".";
+
+  verb "srcdir = $srcdir";
+  chdir $srcdir || error "cannot cd to $srcdir: $!";
+}
+
+
+# init_tables ()
+# --------------
+# Put values in the tables of what to do with each token.
+sub init_tables ()
+{
+  # The data file format supports only one line of macros per function.
+  # If more than that is required for a common portability problem,
+  # a new Autoconf macro should probably be written for that case,
+  # instead of duplicating the code in lots of configure.ac files.
+  my $file = find_file ("autoscan/autoscan.list",
+			reverse (@prepend_include), @include);
+  my $table = new Autom4te::XFile "< " . open_quote ($file);
+  my $tables_are_consistent = 1;
+
+  while ($_ = $table->getline)
+    {
+      # Ignore blank lines and comments.
+      next
+	if /^\s*$/ || /^\s*\#/;
+
+      # '<kind>: <word> <macro invocation>' or...
+      # '<kind>: <word> warn: <message>'.
+      if (/^(\S+):\s+(\S+)\s+(\S.*)$/)
+	{
+	  my ($kind, $word, $macro) = ($1, $2, $3);
+	  error "$file:$.: invalid kind: $_"
+	    unless grep { $_ eq $kind } @kinds;
+	  push @{$macro{$kind}{$word}}, $macro;
+	}
+      else
+	{
+	  error "$file:$.: invalid definition: $_";
+	}
+    }
+
+  if ($debug)
+    {
+      foreach my $kind (@kinds)
+	{
+	  foreach my $word (sort keys %{$macro{$kind}})
+	    {
+	      print "$kind: $word: @{$macro{$kind}{$word}}\n";
+	    }
+	}
+
+    }
+}
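+
+# Illustration only (not part of the upstream autoscan script): given the
+# format parsed by &init_tables above, an `autoscan.list' entry looks
+# roughly like
+#
+#   function: alloca AC_FUNC_ALLOCA
+#
+# (the alloca/AC_FUNC_ALLOCA pairing is the example already used in the
+# comments near the top of this script).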
+
+
+# used ($KIND, $WORD, [$WHERE])
+# -----------------------------
+# $WORD is used as a $KIND.
+sub used ($$;$)
+{
+  my ($kind, $word, $where) = @_;
+  $where ||= "$File::Find::name:$.";
+  if (
+      # Check for all the libraries.  But `-links' is certainly a
+      # `find' argument, and `-le', a `test' argument.
+      ($kind eq 'library' && $word !~ /^(e|inks)$/)
+      # Other than libraries are to be checked only if listed in
+      # the Autoscan library files.
+      || defined $macro{$kind}{$word}
+     )
+    {
+      push (@{$used{$kind}{$word}}, $where);
+    }
+}
+
+
+
+## ----------------------- ##
+## Scanning source files.  ##
+## ----------------------- ##
+
+
+# scan_c_file ($FILE-NAME)
+# ------------------------
+sub scan_c_file ($)
+{
+  my ($file_name) = @_;
+  push @cfiles, $File::Find::name;
+
+  # Nonzero if in a multiline comment.
+  my $in_comment = 0;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      if ($in_comment && s,^.*?\*/,,)
+	{
+	  $in_comment = 0;
+	}
+      # The whole line is inside a comment.
+      next if $in_comment;
+      # All on one line.
+      s,/\*.*?\*/,,g;
+
+      # Starting on this line.
+      if (s,/\*.*$,,)
+	{
+	  $in_comment = 1;
+	}
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*//)
+	{
+	  if (/^include\s*<([^>]*)>/)
+	    {
+	      used ('header', $1);
+	    }
+	  if (s/^(if|ifdef|ifndef|elif)\s+//)
+	    {
+	      foreach my $word (split (/\W+/))
+		{
+		  used ('identifier', $word)
+		    unless $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+		}
+	    }
+	  # Ignore other preprocessor directives.
+	  next;
+	}
+
+      # Remove string and character constants.
+      s,\"[^\"]*\",,g;
+      s,\'[^\']*\',,g;
+
+      # Tokens in the code.
+      # Maybe we should ignore function definitions (in column 0)?
+      while (s/\b([a-zA-Z_]\w*)\s*\(/ /)
+	{
+	  used ('function', $1);
+	}
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('identifier', $1);
+	}
+    }
+
+  $file->close;
+}
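+
+# Illustration only (not part of the upstream autoscan script): in the
+# loop above, a hypothetical C line such as
+#
+#     #include <unistd.h>
+#
+# offers `unistd.h' to &used as a `header' candidate, while
+#
+#     #ifdef HAVE_MMAP
+#
+# offers `HAVE_MMAP' as an `identifier' candidate; either token is only
+# recorded if it is listed in the autoscan data files.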
+
+
+# scan_makefile($MAKEFILE-NAME)
+# -----------------------------
+sub scan_makefile ($)
+{
+  my ($file_name) = @_;
+  push @makefiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      s/#.*//;
+
+      # Variable assignments.
+      while (s/\b([a-zA-Z_]\w*)\s*=/ /)
+	{
+	  used ('makevar', $1);
+	}
+      # Be sure to catch a whole word.  For instance `lex$U.$(OBJEXT)'
+      # is a single token.  Otherwise we might believe `lex' is needed.
+      foreach my $word (split (/\s+/))
+	{
+	  # Libraries.
+	  if ($word =~ /^-l([a-zA-Z_]\w*)$/)
+	    {
+	      used ('library', $1);
+	    }
+	  # Tokens in the code.
+	  # We allow some additional characters, e.g., `+', since
+	  # autoscan/programs includes `c++'.
+	  if ($word =~ /^[a-zA-Z_][\w+]*$/)
+	    {
+	      used ('program', $word);
+	    }
+	}
+    }
+
+  $file->close;
+}
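+
+# Illustration only (not part of the upstream autoscan script): a
+# hypothetical makefile line such as
+#
+#     LIBS = -lm
+#
+# makes the loop above offer `LIBS' to &used as a makevar candidate and
+# `m' as a library (libraries are recorded even when not listed in the
+# autoscan data files).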
+
+
+# scan_sh_file($SHELL-SCRIPT-NAME)
+# --------------------------------
+sub scan_sh_file ($)
+{
+  my ($file_name) = @_;
+  push @shfiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments and variable references.
+      s/#.*//;
+      s/\${[^\}]*}//g;
+      s/@[^@]*@//g;
+
+      # Tokens in the code.
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('program', $1);
+	}
+    }
+
+  $file->close;
+}
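+
+# Illustration only (not part of the upstream autoscan script): a
+# hypothetical shell line such as
+#
+#     tar cf - . | gzip
+#
+# offers `tar', `cf' and `gzip' to &used as program candidates; only the
+# words listed in the autoscan data files end up being recorded.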
+
+
+# scan_file ()
+# ------------
+# Called by &find on each file.  $_ contains the current file name,
+# within the current directory of the walk through.
+sub scan_file ()
+{
+  # Wanted only if there is no corresponding FILE.in.
+  return
+    if -f "$_.in";
+
+  # Save $_ as File::Find requires it to be preserved.
+  local $_ = $_;
+
+  # Strip a useless leading `./'.
+  $File::Find::name =~ s,^\./,,;
+
+  if ($_ ne '.' and -d $_ and
+      -f "$_/configure.in"  ||
+      -f "$_/configure.ac"  ||
+      -f "$_/configure.gnu" ||
+      -f "$_/configure")
+    {
+      $File::Find::prune = 1;
+      push @subdirs, $File::Find::name;
+    }
+  if (/\.[chlym](\.in)?$/)
+    {
+      used 'program', 'cc', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif (/\.(cc|cpp|cxx|CC|C|hh|hpp|hxx|HH|H|yy|ypp|ll|lpp)(\.in)?$/)
+    {
+      used 'program', 'c++', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif ((/^((?:GNUm|M|m)akefile)(\.in)?$/ && ! -f "$1.am")
+	 || /^(?:GNUm|M|m)akefile(\.am)?$/)
+    {
+      scan_makefile ($_);
+    }
+  elsif (/\.sh(\.in)?$/)
+    {
+      scan_sh_file ($_);
+    }
+}
+
+
+# scan_files ()
+# -------------
+# Read through the files and collect lists of tokens in them
+# that might create nonportabilities.
+sub scan_files ()
+{
+  find (\&scan_file, '.');
+
+  if ($verbose)
+    {
+      print "cfiles: @cfiles\n";
+      print "makefiles: @makefiles\n";
+      print "shfiles: @shfiles\n";
+
+      foreach my $kind (@kinds)
+	{
+	  print "\n$kind:\n";
+	  foreach my $word (sort keys %{$used{$kind}})
+	    {
+	      print "$word: @{$used{$kind}{$word}}\n";
+	    }
+	}
+    }
+}
+
+
+## ----------------------- ##
+## Output configure.scan.  ##
+## ----------------------- ##
+
+
+# output_kind ($FILE, $KIND)
+# --------------------------
+sub output_kind ($$)
+{
+  my ($file, $kind) = @_;
+  # Lists of words to be checked with the generic macro.
+  my @have;
+
+  print $file "\n# $kind_comment{$kind}\n"
+    if exists $kind_comment{$kind};
+  foreach my $word (sort keys %{$used{$kind}})
+    {
+      # Output the needed macro invocations in $configure_scan if not
+      # already printed, and remember these macros are needed.
+      foreach my $macro (@{$macro{$kind}{$word}})
+	{
+	  if ($macro =~ /^warn:\s+(.*)/)
+	    {
+	      my $message = $1;
+	      foreach my $location (@{$used{$kind}{$word}})
+		{
+		  warn "$location: warning: $message\n";
+		}
+	    }
+	  elsif (exists $generic_macro{$kind}
+	      && $macro eq $generic_macro{$kind})
+	    {
+	      push (@have, $word);
+	      push (@{$needed_macros{"$generic_macro{$kind}([$word])"}},
+		    @{$used{$kind}{$word}});
+	    }
+	  else
+	    {
+	      if (! $printed{$macro})
+		{
+		  print $file "$macro\n";
+		  $printed{$macro} = 1;
+		}
+	      push (@{$needed_macros{$macro}},
+		    @{$used{$kind}{$word}});
+	    }
+	}
+    }
+  print $file "$generic_macro{$kind}([" . join(' ', sort(@have)) . "])\n"
+    if @have;
+}
+
+
+# output_libraries ($FILE)
+# ------------------------
+sub output_libraries ($)
+{
+  my ($file) = @_;
+
+  print $file "\n# Checks for libraries.\n";
+  foreach my $word (sort keys %{$used{'library'}})
+    {
+      print $file "# FIXME: Replace `main' with a function in `-l$word':\n";
+      print $file "AC_CHECK_LIB([$word], [main])\n";
+    }
+}
+
+
+# output ($CONFIGURE_SCAN)
+# ------------------------
+# Print a proto configure.ac.
+sub output ($)
+{
+  my $configure_scan = shift;
+  my %unique_makefiles;
+
+  my $file = new Autom4te::XFile "> " . open_quote ($configure_scan);
+
+  print $file
+    ("#                                               -*- Autoconf -*-\n" .
+     "# Process this file with autoconf to produce a configure script.\n" .
+     "\n" .
+     "AC_PREREQ([2.68])\n" .
+     "AC_INIT([FULL-PACKAGE-NAME], [VERSION], [BUG-REPORT-ADDRESS])\n");
+  if (defined $cfiles[0])
+    {
+      print $file "AC_CONFIG_SRCDIR([$cfiles[0]])\n";
+      print $file "AC_CONFIG_HEADERS([config.h])\n";
+    }
+
+  output_kind ($file, 'program');
+  output_kind ($file, 'makevar');
+  output_libraries ($file);
+  output_kind ($file, 'header');
+  output_kind ($file, 'identifier');
+  output_kind ($file, 'function');
+
+  print $file "\n";
+  if (@makefiles)
+    {
+      # Change DIR/Makefile.in to DIR/Makefile.
+      foreach my $m (@makefiles)
+	{
+	  $m =~ s/\.(?:in|am)$//;
+	  $unique_makefiles{$m}++;
+	}
+      print $file ("AC_CONFIG_FILES([",
+		   join ("\n                 ",
+			 sort keys %unique_makefiles), "])\n");
+    }
+  if (@subdirs)
+    {
+      print $file ("AC_CONFIG_SUBDIRS([",
+		   join ("\n                   ",
+			 sort @subdirs), "])\n");
+    }
+  print $file "AC_OUTPUT\n";
+
+  $file->close;
+}
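+
+# Illustration only (not part of the upstream autoscan script): for a
+# hypothetical package whose only C file is `foo.c' and which ships a
+# `Makefile.in', the configure.scan produced above starts and ends
+# roughly like this (the middle is filled in by &output_kind and
+# &output_libraries):
+#
+#   #                                               -*- Autoconf -*-
+#   # Process this file with autoconf to produce a configure script.
+#
+#   AC_PREREQ([2.68])
+#   AC_INIT([FULL-PACKAGE-NAME], [VERSION], [BUG-REPORT-ADDRESS])
+#   AC_CONFIG_SRCDIR([foo.c])
+#   AC_CONFIG_HEADERS([config.h])
+#   ...
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT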
+
+
+
+## --------------------------------------- ##
+## Checking the accuracy of configure.ac.  ##
+## --------------------------------------- ##
+
+
+# &check_configure_ac ($CONFIGURE_AC)
+# -----------------------------------
+# Use autoconf to check if all the suggested macros are included
+# in CONFIGURE_AC.
+sub check_configure_ac ($)
+{
+  my ($configure_ac) = @_;
+
+  # Find what needed macros are invoked in CONFIGURE_AC.
+  # I'd be very happy if someone could explain to me why sort (uniq ...)
+  # doesn't work properly: I need `uniq (sort ...)'.  --akim
+  my $trace_option =
+    join (' --trace=', '',
+	  uniq (sort (map { s/\(.*//; $_ } keys %needed_macros)));
+
+  verb "running: $autoconf $trace_option $configure_ac";
+  my $traces =
+    new Autom4te::XFile "$autoconf $trace_option $configure_ac |";
+
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($file, $line, $macro, @args) = split (/:/, $_);
+      if ($macro =~ /^AC_CHECK_(HEADER|FUNC|TYPE|MEMBER)S$/)
+	{
+	  # To be rigorous, we should distinguish between space and comma
+	  # separated macros.  But there is no point.
+	  foreach my $word (split (/\s|,/, $args[0]))
+	    {
+	      # AC_CHECK_MEMBERS wants `struct' or `union'.
+	      if ($macro eq "AC_CHECK_MEMBERS"
+		  && $word =~ /^stat.st_/)
+		{
+		  $word = "struct " . $word;
+		}
+	      delete $needed_macros{"$macro([$word])"};
+	    }
+	}
+      else
+	{
+	  delete $needed_macros{$macro};
+	}
+    }
+
+  $traces->close;
+
+  # Report the missing macros.
+  foreach my $macro (sort keys %needed_macros)
+    {
+      warn ("$configure_ac: warning: missing $macro wanted by: "
+	    . (${$needed_macros{$macro}}[0])
+	    . "\n");
+      print $log "$me: warning: missing $macro wanted by: \n";
+      foreach my $need (@{$needed_macros{$macro}})
+	{
+	  print $log "\t$need\n";
+	}
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$log = new Autom4te::XFile "> " . open_quote ("$me.log");
+
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+my $configure_ac = find_configure_ac;
+init_tables;
+scan_files;
+output ('configure.scan');
+if (-f $configure_ac)
+  {
+    check_configure_ac ($configure_ac);
+  }
+# This close is really needed.  For some reason, probably best named
+# a bug, it seems that the dtor of $LOG is not called automatically
+# at END.  It results in a truncated file.
+$log->close;
+exit 0;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoupdate b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoupdate
new file mode 100755
index 0000000..9cdd962
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/autoupdate
@@ -0,0 +1,1064 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoupdate.in; do not edit by hand.
+
+# autoupdate - modernize an Autoconf file.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David MacKenzie <djm@gnu.ai.mit.edu>.
+# Rewritten by Akim Demaille <akim@freefriends.org>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/i686-pc-linux-gnu/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+# We need to find m4sugar.
+my @prepend_include;
+my @include = ('//share/autoconf');
+my $force = 0;
+# m4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]...
+
+Update each TEMPLATE-FILE if given, or `configure.ac' if present,
+or else `configure.in', to the syntax of the current version of
+Autoconf.  The original files are backed up.
+
+Operation modes:
+  -h, --help                 print this help, then exit
+  -V, --version              print version number, then exit
+  -v, --verbose              verbosely report processing
+  -d, --debug                don't remove temporary files
+  -f, --force                consider all files obsolete
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version = "autoupdate (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'f|force'             => \$force);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+
+# ----------------- #
+# Autoconf macros.  #
+# ----------------- #
+
+my (%ac_macros, %au_macros, %m4_builtins);
+
+# HANDLE_AUTOCONF_MACROS ()
+# -------------------------
+# @M4_BUILTINS -- M4 builtins and a useful comment.
+sub handle_autoconf_macros ()
+{
+  # Get the builtins.
+  xsystem ("echo dumpdef | $m4 2>" . shell_quote ("$tmp/m4.defs") . " >/dev/null");
+  my $m4_defs = new Autom4te::XFile "< " . open_quote ("$tmp/m4.defs");
+  while ($_ = $m4_defs->getline)
+    {
+      $m4_builtins{$1} = 1
+	if /^(\w+):/;
+    }
+  $m4_defs->close;
+
+  my $macros = new Autom4te::XFile ("$autoconf"
+				    . " --trace AU_DEFINE:'AU:\$f:\$1'"
+				    . " --trace define:'AC:\$f:\$1'"
+				    . " --melt /dev/null |");
+  while ($_ = $macros->getline)
+    {
+      chomp;
+      my ($domain, $file, $macro) = /^(AC|AU):(.*):([^:]*)$/ or next;
+      if ($domain eq "AU")
+	{
+	  $au_macros{$macro} = 1;
+	}
+      elsif ($file =~ /(^|\/)m4sugar\/(m4sugar|version)\.m4$/)
+	{
+	  # Add the m4sugar macros to m4_builtins.
+	  $m4_builtins{$macro} = 1;
+	}
+      else
+	{
+	  # Autoconf, aclocal, and m4sh macros.
+	  $ac_macros{$macro} = 1;
+	}
+    }
+  $macros->close;
+
+
+  # Don't keep AU macros in @AC_MACROS.
+  delete $ac_macros{$_}
+    foreach (keys %au_macros);
+  # Don't keep M4sugar macros which are redefined by Autoconf,
+  # such as `builtin', `changequote' etc.  See autoconf/autoconf.m4.
+  delete $ac_macros{$_}
+    foreach (keys %m4_builtins);
+  error "no current Autoconf macros found"
+    unless keys %ac_macros;
+  error "no obsolete Autoconf macros found"
+    unless keys %au_macros;
+
+  if ($debug)
+    {
+      print STDERR "Current Autoconf macros:\n";
+      print STDERR join (' ', sort keys %ac_macros) . "\n\n";
+      print STDERR "Obsolete Autoconf macros:\n";
+      print STDERR join (' ', sort keys %au_macros) . "\n\n";
+    }
+
+  # ac.m4 -- autoquoting definitions of the AC macros (M4sugar excluded).
+  # unac.m4 -- undefine the AC macros.
+  my $ac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/ac.m4");
+  print $ac_m4 "# ac.m4 -- autoquoting definitions of the AC macros.\n";
+  my $unac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unac.m4");
+  print $unac_m4 "# unac.m4 -- undefine the AC macros.\n";
+  foreach (sort keys %ac_macros)
+    {
+      print $ac_m4   "_au_m4_define([$_], [m4_if(\$#, 0, [[\$0]], [[\$0(\$\@)]])])\n";
+      print $unac_m4 "_au_m4_undefine([$_])\n";
+    }
+
+  # m4save.m4 -- save the m4 builtins.
+  # unm4.m4 -- disable the m4 builtins.
+  # m4.m4 -- enable the m4 builtins.
+  my $m4save_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4save.m4");
+  print $m4save_m4 "# m4save.m4 -- save the m4 builtins.\n";
+  my $unm4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unm4.m4");
+  print $unm4_m4 "# unm4.m4 -- disable the m4 builtins.\n";
+  my $m4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4.m4");
+  print $m4_m4 "# m4.m4 -- enable the m4 builtins.\n";
+  foreach (sort keys %m4_builtins)
+    {
+      print $m4save_m4 "_au__save([$_])\n";
+      print $unm4_m4   "_au__undefine([$_])\n";
+      print $m4_m4     "_au__restore([$_])\n";
+    }
+}
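+
+# Illustration only (not part of the upstream autoupdate script): for a
+# current macro such as AC_PROG_CC and an m4 builtin such as `define',
+# the loops above emit lines of the form
+#
+#   ac.m4:     _au_m4_define([AC_PROG_CC], [m4_if($#, 0, [[$0]], [[$0($@)]])])
+#   unac.m4:   _au_m4_undefine([AC_PROG_CC])
+#   m4save.m4: _au__save([define])
+#   unm4.m4:   _au__undefine([define])
+#   m4.m4:     _au__restore([define])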
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --force" if $force;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+mktmpdir ('au');
+handle_autoconf_macros;
+
+# $au_changequote -- enable the quote `[', `]' right before any AU macro.
+my $au_changequote =
+  's/\b(' . join ('|', keys %au_macros) . ')\b/_au_m4_changequote([,])$1/g';
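+
+# Illustration only (not part of the upstream autoupdate script): with an
+# obsolete macro `OLD' (as in the commentary at the end of this file),
+# the substitution above turns an input line
+#
+#     OLD(1, 2)
+#
+# into
+#
+#     _au_m4_changequote([,])OLD(1, 2)
+#
+# so the quotes are re-enabled right before each obsolete call.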
+
+# au.m4 -- definitions of the AU macros.
+xsystem ("$autoconf --trace AU_DEFINE:'_au_defun(\@<:\@\$1\@:>\@,
+\@<:\@\$2\@:>\@)' --melt /dev/null "
+	. ">" . shell_quote ("$tmp/au.m4"));
+
+
+
+## ------------------- ##
+## Process the files.  ##
+## ------------------- ##
+
+foreach my $file (@ARGV)
+  {
+    # We need an actual file.
+    if ($file eq '-')
+      {
+	$file = "$tmp/stdin";
+	system "cat >" . shell_quote ($file);
+      }
+    elsif (! -r "$file")
+      {
+	die "$me: $file: No such file or directory";
+      }
+
+    # input.m4 -- m4 program to produce the updated file.
+    # Load the values, the dispatcher, neutralize m4, and the prepared
+    # input file.
+    my $input_m4 = <<\EOF;
+      divert(-1)                                            -*- Autoconf -*-
+      changequote([,])
+
+      # Define our special macros:
+      define([_au__defn], defn([defn]))
+      define([_au__divert], defn([divert]))
+      define([_au__ifdef], defn([ifdef]))
+      define([_au__include], defn([include]))
+      define([_au___undefine], defn([undefine]))
+      define([_au__undefine], [_au__ifdef([$1], [_au___undefine([$1])])])
+      define([_au__save], [m4_ifdef([$1],
+	[m4_define([_au_$1], _m4_defn([$1]))])])
+      define([_au__restore],
+	[_au_m4_ifdef([_au_$1],
+	  [_au_m4_define([$1], _au__defn([_au_$1]))])])
+
+      # Set up m4sugar.
+      include(m4sugar/m4sugar.m4)
+
+      # Redefine __file__ to make warnings nicer; $file is replaced below.
+      m4_define([__file__], [$file])
+
+      # Redefine m4_location to fix the line number.
+      m4_define([m4_location], [__file__:m4_eval(__line__ - _au__first_line)])
+
+      # Move all the builtins into the `_au_' pseudo namespace
+      m4_include([m4save.m4])
+
+      # _au_defun(NAME, BODY)
+      # ---------------------
+      # Define NAME to BODY, plus AU activation/deactivation.
+      _au_m4_define([_au_defun],
+      [_au_m4_define([$1],
+      [_au_enable()dnl
+      $2[]dnl
+      _au_disable()])])
+
+      # Import the definition of the obsolete macros.
+      _au__include([au.m4])
+
+
+      ## ------------------------ ##
+      ## _au_enable/_au_disable.  ##
+      ## ------------------------ ##
+
+      # They work by pair: each time an AU macro is activated, it runs
+      # _au_enable, and at its end it runs _au_disable (see _au_defun
+      # above).  AU macros might use AU macros, which should
+      # enable/disable only for the outer AU macros.
+      #
+      # `_au_enabled' is used to this end, determining whether we really
+      # enable/disable.
+
+
+      # __au_enable
+      # -----------
+      # Reenable the builtins, m4sugar, and the autoquoting AC macros.
+      _au_m4_define([__au_enable],
+      [_au__divert(-1)
+      # Enable special characters.
+      _au_m4_changecom([#])
+
+      _au__include([m4.m4])
+      _au__include([ac.m4])
+
+      _au__divert(0)])
+
+      # _au_enable
+      # ----------
+      # Called at the beginning of all the obsolete macros.  If this is the
+      # outermost level, call __au_enable.
+      _au_m4_define([_au_enable],
+      [_au_m4_ifdef([_au_enabled],
+		 [],
+		 [__au_enable()])_au_dnl
+      _au_m4_pushdef([_au_enabled])])
+
+
+      # __au_disable
+      # ------------
+      # Disable the AC autoquoting macros, m4sugar, and m4.
+      _au_m4_define([__au_disable],
+      [_au__divert(-1)
+      _au__include([unac.m4])
+      _au__include([unm4.m4])
+
+      # Disable special characters.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au__divert(0)])
+
+      # _au_disable
+      # -----------
+      # Called at the end of all the obsolete macros.  If we are at the
+      # outermost level, call __au_disable.
+      _au_m4_define([_au_disable],
+      [_au_m4_popdef([_au_enabled])_au_dnl
+      _au_m4_ifdef([_au_enabled],
+		[],
+		[__au_disable()])])
+
+
+      ## ------------------------------- ##
+      ## Disable, and process the file.  ##
+      ## ------------------------------- ##
+      # The AC autoquoting macros are not loaded yet, hence invoking
+      # `_au_disable' would be wrong.
+      _au__include([unm4.m4])
+
+      # Disable special characters, and set the first line number.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au_m4_define(_au__first_line, _au___line__)_au__divert(0)_au_dnl
+EOF
+
+    $input_m4 =~ s/^      //mg;
+    $input_m4 =~ s/\$file/$file/g;
+
+    # prepared input -- input, but reenables the quote before each AU macro.
+    open INPUT_M4, "> " . open_quote ("$tmp/input.m4")
+       or error "cannot open: $!";
+    open FILE, "< " . open_quote ($file)
+       or error "cannot open: $!";
+    print INPUT_M4 "$input_m4";
+    while (<FILE>)
+       {
+	 eval $au_changequote;
+	 print INPUT_M4;
+       }
+    close FILE
+       or error "cannot close $file: $!";
+    close INPUT_M4
+       or error "cannot close $tmp/input.m4: $!";
+
+    # Now ask m4 to perform the update.
+    xsystem ("$m4 --include=" . shell_quote ($tmp)
+	     . join (' --include=', '', map { shell_quote ($_) } reverse (@prepend_include))
+	     . join (' --include=', '', map { shell_quote ($_) } @include)
+	     . " " . shell_quote ("$tmp/input.m4") . " > " . shell_quote ("$tmp/updated"));
+    update_file ("$tmp/updated",
+		 "$file" eq "$tmp/stdin" ? '-' : "$file");
+  }
+exit 0;
+
+
+#		  ## ---------------------------- ##
+#		  ## How `autoupdate' functions.  ##
+#		  ## ---------------------------- ##
+#
+# The task of `autoupdate' is not trivial: the biggest difficulty being
+# that you must limit the changes to the parts that really need to be
+# updated.  Finding a satisfying implementation proved to be quite hard,
+# as this is the fifth implementation of `autoupdate'.
+#
+# Below, we will use a simple example of an obsolete macro:
+#
+#     AU_DEFUN([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))])
+#     AC_DEFUN([NEW], [echo "sum($1) = $2"])
+#
+# the input file contains
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Of course the expected output is
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0], [0])
+#
+#
+# # First implementation: sed
+# # =========================
+#
+# The first implementation was only able to change the name of obsolete
+# macros.
+#
+# The file `acoldnames.m4' defined the old names based on the new names.
+# It was simple then to produce a sed script such as:
+#
+#     s/OLD/NEW/g
+#
+# Updating merely consisted in running this script on the file to
+# update.
+#
+# This scheme suffers from an obvious limitation: that `autoupdate' was
+# unable to cope with new macros that just swap some of its arguments
+# compared to the old macro.  Fortunately, that was enough to upgrade
+# from Autoconf 1 to Autoconf 2.  (But I have no idea whether the
+# changes in Autoconf 2 were precisely limited by this constraint.)
+#
+#
+# # Second implementation: hooks
+# # ============================
+#
+# The version 2.15 of Autoconf brought a vast number of changes compared
+# to 2.13, so a solution was needed.  One could think of extending the
+# `sed' scripts with specialized code for complex macros.  However, this
+# approach is of course full of flaws:
+#
+# a. the Autoconf maintainers have to write these snippets, which we
+#    just don't want to,
+#
+# b. I really don't think you'll ever manage to handle the quoting of
+#    m4 with a sed script.
+#
+# To satisfy a., let's remark that the code which implements the old
+# features in terms of the new feature is exactly the code which should
+# replace the old code.
+#
+# To answer point b, as usual in the history of Autoconf, the answer, at
+# least on paper, is simple: m4 is the best tool to parse m4, so
+# let's use m4.
+#
+# Therefore the specification is:
+#
+#     I want to be able to tell Autoconf, well, m4, that the macro I
+#     am currently defining is an obsolete macro (so that the user is
+#     warned), and its code is the code to use when running autoconf,
+#     but that the very same code has to be used when running
+#     autoupdate.  To summarize, the interface I want is
+#     `AU_DEFUN(OLD-NAME, NEW-CODE)'.
+#
+#
+# Now for the technical details.
+#
+# When running autoconf, except for the warning, AU_DEFUN is basically
+# AC_DEFUN.
+#
+# When running autoupdate, we want *only* OLD-NAMEs to be expanded.
+# This obviously means that acgeneral.m4 and acspecific.m4 must not be
+# loaded.  Nonetheless, because we want to use a rich set of m4
+# features, m4sugar.m4 is needed.  Please note that the fact that
+# Autoconf's macros are not loaded is positive on two points:
+#
+# - we do get an updated `configure.ac', not a `configure'!
+#
+# - the old macros are replaced by *calls* to the new-macros, not the
+#   body of the new macros, since their body is not defined!!!
+#   (Whoa, that's really beautiful!).
+#
+# Additionally we need to disable the quotes when reading the input for
+# two reasons: first because otherwise `m4' will swallow the quotes of
+# other macros:
+#
+#     NEW([1, 2], 3)
+#     => NEW(1, 2, 3)
+#
+# and second, because we want to update the macro calls which are
+# quoted, i.e., we want
+#
+#     FOO([OLD(1, 2)])
+#     => FOO([NEW([1, 2], [3])])
+#
+# If we don't disable the quotes, only the macros called at the top
+# level would be updated.
+#
+# So, let's disable the quotes.
+#
+# Well, not quite: m4sugar.m4 still needs to use quotes for some macros.
+# Well, in this case, when running in autoupdate code, each macro first
+# reestablishes the quotes, expands itself, and disables the quotes.
+#
+# Thinking a bit more, you realize that in fact, people may use `define',
+# `ifelse' etc. in their files, and you certainly don't want to process
+# them.  Another example is `dnl': you don't want to remove the
+# comments.  You then realize you don't want exactly to import m4sugar:
+# you want to specify when it is enabled (macros active), and disabled.
+# m4sugar provides m4_disable/m4_enable to this end.
+#
+# You're getting close to it.  Now remains one task: how to handle
+# twofold definitions?
+#
+# Remember that the same AU_DEFUN must be understood in two different
+# ways, the AC way, and the AU way.
+#
+# One first solution is to check whether acgeneral.m4 was loaded.  But
+# that's definitely not cute.  Another is simply to install `hooks',
+# that is to say, to keep, in some place m4 knows about, late `define's
+# to be triggered *only* in AU mode.
+#
+# You first think of designing AU_DEFUN like this:
+#
+# 1. AC_DEFUN(OLD-NAME,
+#	      [Warn the user OLD-NAME is obsolete.
+#	       NEW-CODE])
+#
+# 2. Store for late AU binding([define(OLD_NAME,
+#				[Reestablish the quotes.
+#				 NEW-CODE
+#				 Disable the quotes.])])
+#
+# but this will not work: NEW-CODE probably uses $1, $2 etc. and these
+# guys will be replaced with the argument of `Store for late AU binding'
+# when you call it.
+#
+# I don't think there is a means to avoid this using this technology
+# (remember that $1 etc. are *always* expanded in m4).  You may also try
+# to replace them with $[1] to preserve them for a later evaluation, but
+# if `Store for late AU binding' is properly written, it will remain
+# quoted till the end...
+#
+# You have to change technology.  Since the problem is that `$1'
+# etc. should be `consumed' right away, one solution is to define now a
+# second macro, `AU_OLD-NAME', and to install a hook that binds OLD-NAME
+# to AU_OLD-NAME.  Then, autoupdate.m4 just needs to run the hooks.  By
+# the way, the same method was used in autoheader.
+#
+#
+# # Third implementation: m4 namespaces by m4sugar
+# # ==============================================
+#
+# Actually, this implementation was just a clean up of the previous
+# implementation: instead of defining hooks by hand, m4sugar was equipped
+# with `namespaces'.  What are they?
+#
+# Sometimes we want to disable some *set* of macros, and restore them
+# later.  We provide support for this via namespaces.
+#
+# There are basically three characters playing this scene: defining a
+# macro in a namespace, disabling a namespace, and restoring a namespace
+# (i.e., all the definitions it holds).
+#
+# Technically, to define a MACRO in NAMESPACE means to define the macro
+# named `NAMESPACE::MACRO' to the VALUE.  At the same time, we append
+# `undefine(NAME)' in the macro named `m4_disable(NAMESPACE)', and
+# similarly a binding of NAME to the value of `NAMESPACE::MACRO' in
+# `m4_enable(NAMESPACE)'.  These mechanisms allow us to bind the macros
+# of NAMESPACE and to unbind them at will.
+#
+# Of course this implementation is really inefficient: m4 has to grow
+# strings which can quickly become huge, which slows it significantly.
+#
+# In particular one should avoid, as much as possible, using `define' for
+# temporaries.  Now that `define' has quite a complex meaning, it is an
+# expensive operation that should be limited to macros.  Use
+# `m4_define' for temporaries.
+#
+# Private copies of the macros we used in entering / exiting the m4sugar
+# namespace.  It is much more convenient than fighting with the renamed
+# version of define etc.
+#
+#
+#
+# Those two implementations suffered from serious problems:
+#
+# - namespaces were really expensive, and incurred a major performance
+#   loss on `autoconf' itself, not only `autoupdate'.  One solution
+#   would have been to limit the use of namespaces to `autoupdate', but
+#   that again adds complications to m4sugar, which really doesn't need
+#   this.  So we wanted to get rid of the namespaces.
+#
+# - since the quotes were disabled, autoupdate was sometimes making
+#   wrong guesses, for instance on:
+#
+#     foo([1, 2])
+#
+#   m4 saw 2 arguments: `[1' and `2]'.  A simple solution, somewhat
+#   fragile, is to reestablish the quotes right before all the obsolete
+#   macros, i.e., to use sed so that the previous text becomes
+#
+#     changequote([, ])foo([1, 2])
+#
+#   To this end, one wants to trace the definition of obsolete macros.
+#
+# It was there that the limitations of the namespace approach became
+# painful: because it was a complex machinery playing a lot with the
+# builtins of m4 (hence, quite fragile), tracing was almost impossible.
+#
+#
+# So this approach was dropped.
+#
+#
+# # The fourth implementation: two steps
+# # ====================================
+#
+# If you drop the use of namespaces, you can no longer compute the
+# updated value and replace the old call with it simultaneously.
+#
+# Obviously you will use m4 to compute the updated values, but you may
+# use some other tool to achieve the replacement.  Personally, I trust
+# nobody but m4 to parse m4, so below, m4 will perform the two tasks.
+#
+# How can m4 be used to replace *some* macro calls with newer values?
+# Well, that's dead simple: m4 should learn the definitions of obsolete
+# macros, forget its builtins, disable the quotes, and then run on the
+# input file, which amounts to doing this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# which will result in
+#
+#     dnl The Unbelievable Truth
+#     NEW(1, 2, m4_eval(1 + 2))
+#     NEW([0, 0],
+#	  0)
+#
+# Grpmh.  Two problems.  A minor problem: it would have been much better
+# to have the `m4_eval' computed, and a major problem: you lost the
+# quotation in the result.
+#
+# Let's address the big problem first.  One solution is to define any
+# modern macro to rewrite its calls with the proper quotation, thanks to
+# `$@'.  Again, tracing the `define's makes it possible to know which
+# these macros are, so your input is:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     define([NEW], [[NEW($@)]changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     changequote([, ])NEW([0, 0],
+#	  0)
+#
+# which results in
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2],[m4_eval(1 + 2)])
+#     NEW([0, 0],[0])
+#
+# Our problem is solved, i.e., the first call to `NEW' is properly
+# quoted, but this introduced another problem: we changed the layout of
+# the second call, which can be a drama in the case of huge macro calls
+# (think of `AC_TRY_RUN' for instance).  This example didn't show it,
+# but we also introduced parens to macros which did not have some:
+#
+#     AC_INIT
+#     => AC_INIT()
+#
+# No big deal for the semantics (unless the macro depends upon $#, which
+# is bad), but the users would not be happy.
+#
+# Additionally, we introduced quotes that were not there before, which is
+# OK in most cases, but could change the semantics of the file.
+#
+# Cruel dilemma: we do want the auto-quoting definition of `NEW' when
+# evaluating `OLD', but we don't when we evaluate the second `NEW'.
+# Back to namespaces?
+#
+# No.
+#
+#
+# # Second step: replacement
+# # ------------------------
+#
+# No, as announced above, we will work in two steps: in a first step we
+# compute the updated values, and in a second step we replace them.  Our
+# goal is something like this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([1, 2], [3])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., the new value of `OLD' is precomputed using the auto-quoting
+# definition of `NEW' and the m4 builtins.  We'll see how afterwards,
+# let's finish with the replacement.
+#
+# Of course the solution above is wrong: if there were other calls to
+# `OLD' with different values, we would smash them to the same value.
+# But it is quite easy to generalize the scheme above:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+#     define([OLD], [defn([OLD($@)])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., for each call to obsolete macros, we build an array `call =>
+# value', and use a macro to dispatch these values.  This results in:
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0],
+#	  0)
+#
+# In French, we say `Youpi !', which you might roughly translate as
+# `Yippee!'.
+#
+#
+# # First step: computation
+# # -----------------------
+#
+# Let's study the anatomy of the file, and name its sections:
+#
+# prologue
+#     divert(-1)dnl
+#     changequote([, ])
+# values
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+# dispatcher
+#     define([OLD], [defn([OLD($@)])changequote()])
+# disabler
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+# input
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+#
+# # Computing the `values' section
+# # ..............................
+#
+# First we need to get the list of all the AU macro uses.  To this end,
+# first get the list of all the AU macros names by tracing `AU_DEFUN' in
+# the initialization of autoconf.  This list is computed in the file
+# `au.txt' below.
+#
+# Then use this list to trace all the AU macro uses in the input.  The
+# goal is to obtain, in the case of our example:
+#
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# This is the file `values.in' below.
+#
+# We want to evaluate this with only the builtins (in fact m4sugar), the
+# auto-quoting definitions of the new macros (`new.m4'), and the
+# definition of the old macros (`old.m4').  Computing these last two
+# files is easy: it's just a matter of using the right `--trace' option.
+#
+# So the content of `values.in' is:
+#
+#     include($autoconf_dir/m4sugar.m4)
+#     m4_include(new.m4)
+#     m4_include(old.m4)
+#     divert(0)dnl
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# We run m4 on it, which yields:
+#
+#     define([OLD([1],[2])],@<<@NEW([1, 2], [3])@>>@)
+#
+# Transform `@<<@' and `@>>@' into quotes and we get
+#
+#     define([OLD([1],[2])],[NEW([1, 2], [3])])
+#
+# This is `values.m4'.
+#
+#
+# # Computing the `dispatcher' section
+# # ..................................
+#
+# The `prologue', and the `disabler' are simple and need no commenting.
+#
+# To compute the `dispatcher' (`dispatch.m4'), again, it is a simple
+# matter of using the right `--trace'.
+#
+# Finally, the input is not exactly the input file, rather it is the
+# input file with the added `changequote'.  To this end, we build
+# `quote.sed'.
+#
+#
+# # Putting it all together
+# # .......................
+#
+# We build the file `input.m4' which contains:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     include(values.m4)
+#     include(dispatch.m4)
+#     undefine([dnl])
+#     undefine([eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# And we just run m4 on it.  Et voila`, Monsieur !  Mais oui, mais oui.
+#
+# Well, there are a few additional technicalities.  For instance, we
+# rely on `changequote', `ifelse' and `defn', but we don't want to
+# interpret the changequotes of the user, so we simply use another name:
+# `_au_changequote' etc.
+#
+#
+# # Failure of the fourth approach
+# # ------------------------------
+#
+# This approach is heavily based on traces, but then there is an obvious
+# problem: non expanded code will never be seen.  In particular, the body
+# of a `define' definition is not seen, so on the input
+#
+#	  define([idem], [OLD(0, [$1])])
+#
+# autoupdate would never see the `OLD', and wouldn't have updated it.
+# Worse yet, if `idem(0)' was used later, then autoupdate sees that
+# `OLD' is used, computes the result for `OLD(0, 0)' and sets up a
+# dispatcher for `OLD'.  Since there was no computed value for `OLD(0,
+# [$1])', the dispatcher would have replaced it with... nothing, leading
+# to
+#
+#	  define([idem], [])
+#
+# With some more thinking, you see that the two step approach is wrong,
+# the namespace approach was much saner.
+#
+# But you learned a lot, in particular you realized that using traces
+# can make it possible to simulate namespaces!
+#
+#
+#
+# # The fifth implementation: m4 namespaces by files
+# # ================================================
+#
+# The fourth implementation demonstrated something unsurprising: you
+# cannot precompute, i.e., the namespace approach was the right one.
+# Still, we no longer want them, they're too expensive.  Let's have a
+# look at the way it worked.
+#
+# When updating
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# you evaluate `input.m4':
+#
+#     divert(-1)
+#     changequote([, ])
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#     ...
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# where `m4_disable' undefines the m4 and m4sugar, and disables the quotes
+# and comments:
+#
+#     define([m4_disable],
+#     [undefine([__file__])
+#     ...
+#     changecom(#)
+#     changequote()])
+#
+# `m4_enable' does the converse: reestablish quotes and comments
+# --easy--, reestablish m4sugar --easy: just load `m4sugar.m4' again-- and
+# reenable the builtins.  This latter task requires that you first save
+# the builtins.  And BTW, the definition above of `m4_disable' cannot
+# work: you undefined `changequote' before using it!  So you need to use
+# your privates copies of the builtins.  Let's introduce three files for
+# this:
+#
+#  `m4save.m4'
+#    moves the m4 builtins into the `_au_' pseudo namespace,
+#  `unm4.m4'
+#    undefines the builtins,
+#  `m4.m4'
+#    restores them.
+#
+# So `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#
+#     # Import AU.
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)])
+#
+#     define([_au_disable],
+#     [# Disable m4sugar.
+#     # Disable the m4 builtins.
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Based on what we learned in the fourth implementation we know that we
+# have to enable the quotes *before* any AU macro, and we know we need
+# to build autoquoting versions of the AC macros.  But the autoquoting
+# AC definitions must be disabled in the rest of the file, and enabled
+# inside AU macros.
+#
+# Using `autoconf --trace' it is easy to build the files
+#
+#   `ac.m4'
+#     define the autoquoting AC fake macros
+#   `disable.m4'
+#     undefine the m4sugar and AC autoquoting macros.
+#   `au.m4'
+#     definitions of the AU macros (such as `OLD' above).
+#
+# Now, `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#     # Import AU.
+#     include([au.m4])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)
+#     _au_include(ac.m4)])
+#
+#     define([_au_disable],
+#     [_au_include([disable.m4])
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     _au_changequote([, ])OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Finally, version V is ready.
+#
+# Well... almost.
+#
+# There is a slight problem that remains: if an AU macro OUTER includes
+# an AU macro INNER, then _au_enable will be run when entering OUTER
+# and when entering INNER (not good, but not too bad yet).  But when
+# getting out of INNER, _au_disable will disable everything while we
+# were still in OUTER.  Badaboom.
+#
+# Therefore _au_enable and _au_disable have to be written to work by
+# pairs: each _au_enable pushdef's _au_enabled, and each _au_disable
+# popdef's _au_enabled.  And of course _au_enable and _au_disable are
+# effective when _au_enabled is *not* defined.
+#
+# Finally, version V' is ready.  And there is much rejoicing.  (And I
+# have free time again.  I think.  Yeah, right.)
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/ifnames b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/ifnames
new file mode 100755
index 0000000..69bffb2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/ifnames
@@ -0,0 +1,153 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from ifnames.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# ifnames - print the identifiers used in C preprocessor conditionals
+
+# Copyright (C) 1994, 1995, 1999, 2000, 2001, 2002, 2003, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Reads from stdin if no files are given.
+# Writes to stdout.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>
+# and Paul Eggert <eggert@twinsun.com>.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::General;
+use Autom4te::XFile;
+use Autom4te::FileUtils;
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILE]...
+
+Scan all of the C source FILES (or the standard input, if none are
+given) and write to the standard output a sorted list of all the
+identifiers that appear in those files in `#if', `#elif', `#ifdef', or
+`#ifndef' directives.  Print each identifier on a line, followed by a
+space-separated list of the files in which that identifier occurs.
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "ifnames (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Paul Eggert.
+";
+
+
+# &parse_args ()
+# --------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ();
+}
+
+
+# %OCCURRENCE
+# -----------
+my %occurrence;
+
+
+# &scan_file ($FILE-NAME)
+# -----------------------
+sub scan_file ($)
+{
+  my ($file_name) = @_;
+  my $file = new Autom4te::XFile ("< " . open_quote ($file_name));
+  while ($_ = $file->getline)
+    {
+      # Continuation lines.
+      $_ .= $file->getline
+	while (s/\\$//);
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*(if|ifdef|ifndef|elif)\s+//)
+	{
+	  # Remove comments.  Not perfect, but close enough.
+	  s(/\*.*?\*/)();
+	  s(/\*.*)();
+	  s(//.*)();
+	  foreach my $word (split (/\W+/))
+	    {
+	      next
+		if $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+	      $occurrence{$word}{$file_name} = 1;
+	    }
+	}
+    }
+}
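+
+# Illustration only (not part of the upstream ifnames script): given a
+# hypothetical `foo.c' containing `#ifdef HAVE_UNISTD_H' and a `bar.c'
+# containing `#if defined HAVE_UNISTD_H && !defined WIN32', running
+#
+#     ifnames foo.c bar.c
+#
+# prints something like
+#
+#     HAVE_UNISTD_H bar.c foo.c
+#     WIN32 bar.c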
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+parse_args();
+foreach (@ARGV)
+  {
+    scan_file ($_);
+  }
+foreach (sort keys %occurrence)
+  {
+    print "$_ ", join (' ', sort keys %{$occurrence{$_}}), "\n";
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/libtool b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/libtool
new file mode 100755
index 0000000..d1473ff
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/libtool
@@ -0,0 +1,10988 @@
+#! /bin/sh
+
+# libtool - Provide generalized library-building support services.
+# Generated automatically by config.status (libtool) 2.4.2
+# Libtool was configured on host ubuntu-pradip:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+# The names of the tagged configurations supported by this script.
+available_tags="CXX F77 FC GO GCJ RC "
+
+# ### BEGIN LIBTOOL CONFIG
+
+# Which release of libtool.m4 was used?
+macro_version=2.4.2
+macro_revision=1.3337
+
+# Assembler program.
+AS="as"
+
+# DLL creation program.
+DLLTOOL="false"
+
+# Object dumper program.
+OBJDUMP="objdump"
+
+# Whether or not to build shared libraries.
+build_libtool_libs=yes
+
+# Whether or not to build static libraries.
+build_old_libs=yes
+
+# What type of objects to build.
+pic_mode=default
+
+# Whether or not to optimize for fast installation.
+fast_install=yes
+
+# Shell to use when invoking shell scripts.
+SHELL="/bin/sh"
+
+# An echo program that protects backslashes.
+ECHO="printf %s\\n"
+
+# The PATH separator for the build system.
+PATH_SEPARATOR=":"
+
+# The host system.
+host_alias=
+host=i686-pc-linux-gnu
+host_os=linux-gnu
+
+# The build system.
+build_alias=
+build=i686-pc-linux-gnu
+build_os=linux-gnu
+
+# A sed program that does not truncate output.
+SED="/bin/sed"
+
+# Sed that helps us avoid accidentally triggering echo(1) options like -n.
+Xsed="$SED -e 1s/^X//"
+
+# A grep program that handles long lines.
+GREP="/bin/grep"
+
+# An ERE matcher.
+EGREP="/bin/grep -E"
+
+# A literal string matcher.
+FGREP="/bin/grep -F"
+
+# A BSD- or MS-compatible name lister.
+NM="/usr/bin/nm -B"
+
+# Whether we need soft or hard links.
+LN_S="ln -s"
+
+# What is the maximum length of a command?
+max_cmd_len=1572864
+
+# Object file suffix (normally "o").
+objext=o
+
+# Executable file suffix (normally "").
+exeext=
+
+# whether the shell understands "unset".
+lt_unset=unset
+
+# turn spaces into newlines.
+SP2NL="tr \\040 \\012"
+
+# turn newlines into spaces.
+NL2SP="tr \\015\\012 \\040\\040"
+
+# convert $build file names to $host format.
+to_host_file_cmd=func_convert_file_noop
+
+# convert $build files to toolchain format.
+to_tool_file_cmd=func_convert_file_noop
+
+# Method to check whether dependent libraries are shared objects.
+deplibs_check_method="pass_all"
+
+# Command to use when deplibs_check_method = "file_magic".
+file_magic_cmd="\$MAGIC_CMD"
+
+# How to find potential files when deplibs_check_method = "file_magic".
+file_magic_glob=""
+
+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
+want_nocaseglob="no"
+
+# Command to associate shared and link libraries.
+sharedlib_from_linklib_cmd="printf %s\\n"
+
+# The archiver.
+AR="ar"
+
+# Flags to create an archive.
+AR_FLAGS="cru"
+
+# How to feed a file listing to the archiver.
+archiver_list_spec="@"
+
+# A symbol stripping program.
+STRIP="strip"
+
+# Commands used to install an old-style archive.
+RANLIB="ranlib"
+old_postinstall_cmds="chmod 644 \$oldlib~\$RANLIB \$tool_oldlib"
+old_postuninstall_cmds=""
+
+# Whether to use a lock for old archive extraction.
+lock_old_archive_extraction=no
+
+# A C compiler.
+LTCC="gcc"
+
+# LTCC compiler flags.
+LTCFLAGS="-g -O2"
+
+# Take the output of nm and produce a listing of raw symbols and C names.
+global_symbol_pipe="sed -n -e 's/^.*[	 ]\\([ABCDGIRSTW][ABCDGIRSTW]*\\)[	 ][	 ]*\\([_A-Za-z][_A-Za-z0-9]*\\)\$/\\1 \\2 \\2/p' | sed '/ __gnu_lto/d'"
+
+# Transform the output of nm in a proper C declaration.
+global_symbol_to_cdecl="sed -n -e 's/^T .* \\(.*\\)\$/extern int \\1();/p' -e 's/^[ABCDGIRSTW]* .* \\(.*\\)\$/extern char \\1;/p'"
+
+# Transform the output of nm in a C name address pair.
+global_symbol_to_c_name_address="sed -n -e 's/^: \\([^ ]*\\)[ ]*\$/  {\\\"\\1\\\", (void *) 0},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\([^ ]*\\)\$/  {\"\\2\", (void *) \\&\\2},/p'"
+
+# Transform the output of nm in a C name address pair when lib prefix is needed.
+global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \\([^ ]*\\)[ ]*\$/  {\\\"\\1\\\", (void *) 0},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\(lib[^ ]*\\)\$/  {\"\\2\", (void *) \\&\\2},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\([^ ]*\\)\$/  {\"lib\\2\", (void *) \\&\\2},/p'"
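+# Illustration (not consumed by libtool; exact behaviour depends on the local
+# nm and sed): an nm output line such as "0804a010 T main" passes through
+# $global_symbol_pipe as "T main main", which $global_symbol_to_cdecl then
+# rewrites as "extern int main();".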
+
+# Specify filename containing input files for $NM.
+nm_file_list_spec="@"
+
+# The root in which to search for dependent libraries, and in which our libraries should be installed.
+lt_sysroot=
+
+# The name of the directory that contains temporary libtool files.
+objdir=.libs
+
+# Used to examine libraries when file_magic_cmd begins with "file".
+MAGIC_CMD=file
+
+# Must we lock files when doing compilation?
+need_locks="no"
+
+# Manifest tool.
+MANIFEST_TOOL=":"
+
+# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+DSYMUTIL=""
+
+# Tool to change global to local symbols on Mac OS X.
+NMEDIT=""
+
+# Tool to manipulate fat objects and archives on Mac OS X.
+LIPO=""
+
+# ldd/readelf like tool for Mach-O binaries on Mac OS X.
+OTOOL=""
+
+# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
+OTOOL64=""
+
+# Old archive suffix (normally "a").
+libext=a
+
+# Shared library suffix (normally ".so").
+shrext_cmds=".so"
+
+# The commands to extract the exported symbol list from a shared archive.
+extract_expsyms_cmds=""
+
+# Variables whose values should be saved in libtool wrapper scripts and
+# restored at link time.
+variables_saved_for_relink="PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+
+# Do we need the "lib" prefix for modules?
+need_lib_prefix=no
+
+# Do we need a version for libraries?
+need_version=no
+
+# Library versioning type.
+version_type=linux
+
+# Shared library runtime path variable.
+runpath_var=LD_RUN_PATH
+
+# Shared library path variable.
+shlibpath_var=LD_LIBRARY_PATH
+
+# Is shlibpath searched before the hard-coded library search path?
+shlibpath_overrides_runpath=no
+
+# Format of library name prefix.
+libname_spec="lib\$name"
+
+# List of archive names.  First name is the real one, the rest are links.
+# The last name is the one that the linker finds with -lNAME
+library_names_spec="\${libname}\${release}\${shared_ext}\$versuffix \${libname}\${release}\${shared_ext}\$major \$libname\${shared_ext}"
+
+# The coded name of the library, if different from the real name.
+soname_spec="\${libname}\${release}\${shared_ext}\$major"
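+# Illustration (hypothetical library "foo" on this linux host): the names
+# above expand to libfoo.so.$versuffix (the real file), libfoo.so.$major
+# (the soname link), and libfoo.so (the name resolved by -lfoo).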
+
+# Permission mode override for installation of shared libraries.
+install_override_mode=""
+
+# Command to use after installation of a shared archive.
+postinstall_cmds=""
+
+# Command to use after uninstallation of a shared archive.
+postuninstall_cmds=""
+
+# Commands used to finish a libtool library installation in a directory.
+finish_cmds="PATH=\\\"\\\$PATH:/sbin\\\" ldconfig -n \$libdir"
+
+# As "finish_cmds", except a single script fragment to be evaled but
+# not shown.
+finish_eval=""
+
+# Whether we should hardcode library paths into libraries.
+hardcode_into_libs=yes
+
+# Compile-time system search path for libraries.
+sys_lib_search_path_spec="/usr/lib/gcc/i686-linux-gnu/4.6 /usr/lib/i386-linux-gnu /usr/lib /lib/i386-linux-gnu /lib "
+
+# Run-time system search path for libraries.
+sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/lib/i386-linux-gnu/mesa-egl /usr/lib/i386-linux-gnu/mesa /lib/i386-linux-gnu /usr/lib/i386-linux-gnu /lib/i686-linux-gnu /usr/lib/i686-linux-gnu /usr/local/lib /usr/lib/vmware-tools/lib32/libvmGuestLib.so /usr/lib/vmware-tools/lib64/libvmGuestLib.so /usr/lib/vmware-tools/lib32/libvmGuestLibJava.so /usr/lib/vmware-tools/lib64/libvmGuestLibJava.so /usr/lib/vmware-tools/lib32/libDeployPkg.so /usr/lib/vmware-tools/lib64/libDeployPkg.so "
+
+# Whether dlopen is supported.
+dlopen_support=yes
+
+# Whether dlopen of programs is supported.
+dlopen_self=yes
+
+# Whether dlopen of statically linked programs is supported.
+dlopen_self_static=no
+
+# Commands to strip libraries.
+old_striplib="strip --strip-debug"
+striplib="strip --strip-unneeded"
+
+
+# The linker used to build libraries.
+LD="/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC="gcc"
+
+# Is the compiler the GNU compiler?
+with_gcc=yes
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=" -fno-builtin"
+
+# Additional compiler flags for building library objects.
+pic_flag=" -fPIC -DPIC"
+
+# How to pass a linker flag through the compiler.
+wl="-Wl,"
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag="-static"
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec="\${wl}--export-dynamic"
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec="\${wl}--whole-archive\$convenience \${wl}--no-whole-archive"
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object="no"
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds="\$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \${wl}-soname \$wl\$soname -o \$lib"
+archive_expsym_cmds="echo \\\"{ global:\\\" > \$output_objdir/\$libname.ver~
+	    cat \$export_symbols | sed -e \\\"s/\\\\(.*\\\\)/\\\\1;/\\\" >> \$output_objdir/\$libname.ver~
+	    echo \\\"local: *; };\\\" >> \$output_objdir/\$libname.ver~
+	    \$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \${wl}-soname \$wl\$soname \${wl}-version-script \${wl}\$output_objdir/\$libname.ver -o \$lib"
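+# Sketch (illustrative only): if $export_symbols listed the hypothetical
+# symbols foo_init and foo_free, the commands above would write a version
+# script of the form
+#   { global:
+#   foo_init;
+#   foo_free;
+#   local: *; };
+# and pass it to the linker via ${wl}-version-script.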
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld="yes"
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec="\${wl}-rpath \${wl}\$libdir"
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=unsupported
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=immediate
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL CONFIG
+
+
+# libtool (GNU libtool) 2.4.2
+# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]... [MODE-ARG]...
+#
+# Provide generalized library-building support services.
+#
+#       --config             show all configuration variables
+#       --debug              enable verbose shell tracing
+#   -n, --dry-run            display commands without modifying any files
+#       --features           display basic configuration information and exit
+#       --mode=MODE          use operation mode MODE
+#       --preserve-dup-deps  don't remove duplicate dependency libraries
+#       --quiet, --silent    don't print informational messages
+#       --no-quiet, --no-silent
+#                            print informational messages (default)
+#       --no-warn            don't display warning messages
+#       --tag=TAG            use configuration variables from tag TAG
+#   -v, --verbose            print more informational messages than default
+#       --no-verbose         don't print the extra informational messages
+#       --version            print version information
+#   -h, --help, --help-all   print short, long, or detailed help message
+#
+# MODE must be one of the following:
+#
+#         clean              remove files from the build directory
+#         compile            compile a source file into a libtool object
+#         execute            automatically set library path, then run a program
+#         finish             complete the installation of libtool libraries
+#         install            install libraries or executables
+#         link               create a library or an executable
+#         uninstall          remove libraries from an installed directory
+#
+# MODE-ARGS vary depending on the MODE.  When passed as first option,
+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#         host-triplet:	$host
+#         shell:		$SHELL
+#         compiler:		$LTCC
+#         compiler flags:		$LTCFLAGS
+#         linker:		$LD (gnu? $with_gnu_ld)
+#         $progname:	(GNU libtool) 2.4.2
+#         automake:	$automake_version
+#         autoconf:	$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.4.2
+TIMESTAMP=""
+package_revision=1.3337
+
+# Be Bourne compatible
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# NLS nuisances: We save the old values to restore during execute mode.
+lt_user_locale=
+lt_safe_locale=
+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+  eval "if test \"\${$lt_var+set}\" = set; then
+          save_$lt_var=\$$lt_var
+          $lt_var=C
+	  export $lt_var
+	  lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
+	  lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+	fi"
+done
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+$lt_unset CDPATH
+
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+} # Extended-shell func_dirname implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result="${1##*/}"
+} # Extended-shell func_basename implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+    func_basename_result="${1##*/}"
+} # Extended-shell func_dirname_and_basename implementation
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+    # positional parameters, so assign one to ordinary parameter first.
+    func_stripname_result=${3}
+    func_stripname_result=${func_stripname_result#"${1}"}
+    func_stripname_result=${func_stripname_result%"${2}"}
+} # Extended-shell func_stripname implementation
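+# Example (illustrative only): func_stripname 'lib' '.la' 'libfoo.la' leaves
+# "foo" in $func_stripname_result; func_stripname '' '/' 'path/' strips just
+# the trailing slash.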
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
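+# Example (illustrative only): func_normal_abspath '/usr//lib/../include/./x.h'
+# leaves "/usr/include/x.h" in $func_normal_abspath_result; a relative argument
+# is first made absolute by prepending `pwd`.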
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
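+# Example (illustrative only): func_relative_path '/usr/local/lib' '/usr/local/bin'
+# leaves "../bin/" in $func_relative_path_result, ready to have a file name
+# appended without a separator.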
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\'-escape parameter expansions in the output of double_quote_subst that
+# were `\'-ed in its input.  If an odd number of `\' preceded a '$' in the
+# input to double_quote_subst, that '$' was protected from expansion.  Since
+# each input `\' is now two `\'s, look for any number of runs of four `\'s
+# followed by two `\'s and then a '$', and `\'-escape that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name, if it has been set.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon delimited
+	# list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution, and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
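+# Example (illustrative only): func_quote_for_eval 'a b$c' sets
+#   func_quote_for_eval_unquoted_result  to  a b\$c
+#   func_quote_for_eval_result           to  "a b\$c"
+# so the value survives a later eval intact.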
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, unless opt_dry_run is true,
+# evaluate CMD.  If the evaluation of CMD fails and FAIL_EXP is given,
+# evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
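+# Example (illustrative only): func_show_eval 'rm -f foo.lo' 'exit 1' prints
+# the command unless --silent was given, runs it unless --dry-run was given,
+# and evaluates "exit 1" if the removal fails.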
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, unless opt_dry_run is true,
+# evaluate CMD.  If the evaluation of CMD fails and FAIL_EXP is given,
+# evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
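+# Example (illustrative only): func_tr_sh '1foo-bar' leaves "_1foo_bar" in
+# $func_tr_sh_result, which is then safe to embed in a shell variable name.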
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    func_split_short_opt_arg=${1#??}
+    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}
+} # Extended-shell func_split_short_opt implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    func_split_long_opt_name=${1%%=*}
+    func_split_long_opt_arg=${1#*=}
+} # Extended-shell func_split_long_opt implementation
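+# Examples (illustrative only): func_split_long_opt '--mode=link' sets
+# func_split_long_opt_name='--mode' and func_split_long_opt_arg='link';
+# func_split_short_opt '-vfoo' sets func_split_short_opt_name='-v' and
+# func_split_short_opt_arg='foo'.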
+
+exit_cmd=:
+
+
+
+
+
+magic="%%%MAGIC variable%%%"
+magic_exe="%%%MAGIC EXE variable%%%"
+
+# Global variables.
+nonopt=
+preserve_args=
+lo2o="s/\\.lo\$/.${objext}/"
+o2lo="s/\\.${objext}\$/.lo/"
+extracted_archives=
+extracted_serial=0
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end.  This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+    eval "${1}+=\${2}"
+} # Extended-shell func_append implementation
+
+# func_append_quoted var value
+# Quote VALUE and append to the end of shell variable VAR, separated
+# by a space.
+func_append_quoted ()
+{
+    func_quote_for_eval "${2}"
+    eval "${1}+=\\ \$func_quote_for_eval_result"
+} # Extended-shell func_append_quoted implementation
+
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+    func_arith_result=$(( $* ))
+} # Extended-shell func_arith implementation
+
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+    func_len_result=${#1}
+} # Extended-shell func_len implementation
+
+
+# func_lo2o object
+func_lo2o ()
+{
+    case ${1} in
+      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+      *)    func_lo2o_result=${1} ;;
+    esac
+} # Extended-shell func_lo2o implementation
+
+
+# func_xform libobj-or-source
+func_xform ()
+{
+    func_xform_result=${1%.*}.lo
+} # Extended-shell func_xform implementation
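+# Examples (illustrative only): with objext=o, func_lo2o 'foo.lo' leaves
+# "foo.o" in $func_lo2o_result, and func_xform 'src/bar.c' leaves "src/bar.lo"
+# in $func_xform_result.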
+
+
+# func_fatal_configuration arg...
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+    func_error ${1+"$@"}
+    func_error "See the $PACKAGE documentation for more information."
+    func_fatal_error "Fatal configuration error."
+}
+
+
+# func_config
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+    re_begincf='^# ### BEGIN LIBTOOL'
+    re_endcf='^# ### END LIBTOOL'
+
+    # Default configuration.
+    $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+    # Now print the configurations for the tags.
+    for tagname in $taglist; do
+      $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+    done
+
+    exit $?
+}
+
+# func_features
+# Display the features supported by this script.
+func_features ()
+{
+    echo "host: $host"
+    if test "$build_libtool_libs" = yes; then
+      echo "enable shared libraries"
+    else
+      echo "disable shared libraries"
+    fi
+    if test "$build_old_libs" = yes; then
+      echo "enable static libraries"
+    else
+      echo "disable static libraries"
+    fi
+
+    exit $?
+}
+
+# func_enable_tag tagname
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag.  We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+  # Global variable:
+  tagname="$1"
+
+  re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+  re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+  sed_extractcf="/$re_begincf/,/$re_endcf/p"
+
+  # Validate tagname.
+  case $tagname in
+    *[!-_A-Za-z0-9,/]*)
+      func_fatal_error "invalid tag name: $tagname"
+      ;;
+  esac
+
+  # Don't test for the "default" C tag, as we know it's
+  # there but not specially marked.
+  case $tagname in
+    CC) ;;
+    *)
+      if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+	taglist="$taglist $tagname"
+
+	# Evaluate the configuration.  Be careful to quote the path
+	# and the sed script, to avoid splitting on whitespace, but
+	# also don't use non-portable quotes within backquotes within
+	# quotes; we have to do it in 2 steps:
+	extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+	eval "$extractedcf"
+      else
+	func_error "ignoring unknown tag $tagname"
+      fi
+      ;;
+  esac
+}
+
+# func_check_version_match
+# Ensure that we are using m4 macros, and libtool script from the same
+# release of libtool.
+func_check_version_match ()
+{
+  if test "$package_revision" != "$macro_revision"; then
+    if test "$VERSION" != "$macro_version"; then
+      if test -z "$macro_version"; then
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      else
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      fi
+    else
+      cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+    fi
+
+    exit $EXIT_MISMATCH
+  fi
+}
+
+
+# Shorthand for --mode=foo, only valid as the first argument
+case $1 in
+clean|clea|cle|cl)
+  shift; set dummy --mode clean ${1+"$@"}; shift
+  ;;
+compile|compil|compi|comp|com|co|c)
+  shift; set dummy --mode compile ${1+"$@"}; shift
+  ;;
+execute|execut|execu|exec|exe|ex|e)
+  shift; set dummy --mode execute ${1+"$@"}; shift
+  ;;
+finish|finis|fini|fin|fi|f)
+  shift; set dummy --mode finish ${1+"$@"}; shift
+  ;;
+install|instal|insta|inst|ins|in|i)
+  shift; set dummy --mode install ${1+"$@"}; shift
+  ;;
+link|lin|li|l)
+  shift; set dummy --mode link ${1+"$@"}; shift
+  ;;
+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+  shift; set dummy --mode uninstall ${1+"$@"}; shift
+  ;;
+esac
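+# In other words (illustrative only), an invocation such as
+#   libtool compile gcc -g -O2 -c foo.c
+# is rewritten here to
+#   libtool --mode compile gcc -g -O2 -c foo.c
+# before the option parser below runs.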
+
+
+
+# Option defaults:
+opt_debug=:
+opt_dry_run=false
+opt_config=false
+opt_preserve_dup_deps=false
+opt_features=false
+opt_finish=false
+opt_help=false
+opt_help_all=false
+opt_silent=:
+opt_warning=:
+opt_verbose=:
+opt_silent=false
+opt_verbose=false
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+			;;
+      --config)
+			opt_config=:
+func_config
+			;;
+      --dlopen|-dlopen)
+			optarg="$1"
+			opt_dlopen="${opt_dlopen+$opt_dlopen
+}$optarg"
+			shift
+			;;
+      --preserve-dup-deps)
+			opt_preserve_dup_deps=:
+			;;
+      --features)
+			opt_features=:
+func_features
+			;;
+      --finish)
+			opt_finish=:
+set dummy --mode finish ${1+"$@"}; shift
+			;;
+      --help)
+			opt_help=:
+			;;
+      --help-all)
+			opt_help_all=:
+opt_help=': help-all'
+			;;
+      --mode)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_mode="$optarg"
+case $optarg in
+  # Valid mode arguments:
+  clean|compile|execute|finish|install|link|relink|uninstall) ;;
+
+  # Catch anything else as an error
+  *) func_error "invalid argument for $opt"
+     exit_cmd=exit
+     break
+     ;;
+esac
+			shift
+			;;
+      --no-silent|--no-quiet)
+			opt_silent=false
+preserve_args+=" $opt"
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+preserve_args+=" $opt"
+			;;
+      --no-verbose)
+			opt_verbose=false
+preserve_args+=" $opt"
+			;;
+      --silent|--quiet)
+			opt_silent=:
+preserve_args+=" $opt"
+        opt_verbose=false
+			;;
+      --verbose|-v)
+			opt_verbose=:
+preserve_args+=" $opt"
+opt_silent=false
+			;;
+      --tag)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_tag="$optarg"
+preserve_args+=" $opt $optarg"
+func_enable_tag "$optarg"
+			shift
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-n*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # save first non-option argument
+  if test "$#" -gt 0; then
+    nonopt="$opt"
+    shift
+  fi
+
+  # preserve --debug
+  test "$opt_debug" = : || preserve_args+=" --debug"
+
+  case $host in
+    *cygwin* | *mingw* | *pw32* | *cegcc*)
+      # don't eliminate duplications in $postdeps and $predeps
+      opt_duplicate_compiler_generated_deps=:
+      ;;
+    *)
+      opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+      ;;
+  esac
+
+  $opt_help || {
+    # Sanity checks first:
+    func_check_version_match
+
+    if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+      func_fatal_configuration "not configured to build any kind of library"
+    fi
+
+    # Darwin sucks
+    eval std_shrext=\"$shrext_cmds\"
+
+    # Only execute mode is allowed to have -dlopen flags.
+    if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
+      func_error "unrecognized option \`-dlopen'"
+      $ECHO "$help" 1>&2
+      exit $EXIT_FAILURE
+    fi
+
+    # Change the help message to a mode-specific one.
+    generic_help="$help"
+    help="Try \`$progname --help --mode=$opt_mode' for more information."
+  }
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+# func_lalib_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+    test -f "$1" &&
+      $SED -e 4q "$1" 2>/dev/null \
+        | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs.  To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway.  Works if `file' does not exist.
+func_lalib_unsafe_p ()
+{
+    lalib_p=no
+    if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+	for lalib_p_l in 1 2 3 4
+	do
+	    read lalib_p_line
+	    case "$lalib_p_line" in
+		\#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+	    esac
+	done
+	exec 0<&5 5<&-
+    fi
+    test "$lalib_p" = yes
+}
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+    func_lalib_p "$1"
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+    func_ltwrapper_exec_suffix=
+    case $1 in
+    *.exe) ;;
+    *) func_ltwrapper_exec_suffix=.exe ;;
+    esac
+    $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes FILE is an ltwrapper_executable; uses $file to determine the
+# appropriate filename for a temporary ltwrapper_script.
+func_ltwrapper_scriptname ()
+{
+    func_dirname_and_basename "$1" "" "."
+    func_stripname '' '.exe' "$func_basename_result"
+    func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+}
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+    func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+    $opt_debug
+    save_ifs=$IFS; IFS='~'
+    for cmd in $1; do
+      IFS=$save_ifs
+      eval cmd=\"$cmd\"
+      func_show_eval "$cmd" "${2-:}"
+    done
+    IFS=$save_ifs
+}
+
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)!  Also, sourcing
+# `FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+    $opt_debug
+    case $1 in
+    */* | *\\*)	. "$1" ;;
+    *)		. "./$1" ;;
+    esac
+}
+
+
+# func_resolve_sysroot PATH
+# Replace a leading = in PATH with a sysroot.  Store the result into
+# func_resolve_sysroot_result
+func_resolve_sysroot ()
+{
+  func_resolve_sysroot_result=$1
+  case $func_resolve_sysroot_result in
+  =*)
+    func_stripname '=' '' "$func_resolve_sysroot_result"
+    func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
+    ;;
+  esac
+}
+
+# func_replace_sysroot PATH
+# If PATH begins with the sysroot, replace it with = and
+# store the result into func_replace_sysroot_result.
+func_replace_sysroot ()
+{
+  case "$lt_sysroot:$1" in
+  ?*:"$lt_sysroot"*)
+    func_stripname "$lt_sysroot" '' "$1"
+    func_replace_sysroot_result="=$func_stripname_result"
+    ;;
+  *)
+    # Including no sysroot.
+    func_replace_sysroot_result=$1
+    ;;
+  esac
+}
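+# Example (illustrative only, assuming a hypothetical lt_sysroot=/opt/sysroot):
+#   func_resolve_sysroot '=/usr/lib'            -> /opt/sysroot/usr/lib
+#   func_replace_sysroot '/opt/sysroot/usr/lib' -> =/usr/lib
+# With the empty lt_sysroot configured above, both functions leave ordinary
+# paths unchanged.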
+
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+    $opt_debug
+    if test -n "$available_tags" && test -z "$tagname"; then
+      CC_quoted=
+      for arg in $CC; do
+	func_append_quoted CC_quoted "$arg"
+      done
+      CC_expanded=`func_echo_all $CC`
+      CC_quoted_expanded=`func_echo_all $CC_quoted`
+      case $@ in
+      # Blanks in the command may have been stripped by the calling shell,
+      # but not from the CC environment variable when configure was run.
+      " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+      " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+      # Blanks at the start of $base_compile will cause this to fail
+      # if we don't check for them as well.
+      *)
+	for z in $available_tags; do
+	  if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+	    # Evaluate the configuration.
+	    eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+	    CC_quoted=
+	    for arg in $CC; do
+	      # Double-quote args containing other shell metacharacters.
+	      func_append_quoted CC_quoted "$arg"
+	    done
+	    CC_expanded=`func_echo_all $CC`
+	    CC_quoted_expanded=`func_echo_all $CC_quoted`
+	    case "$@ " in
+	    " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+	    " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+	      # The compiler in the base compile command matches
+	      # the one in the tagged configuration.
+	      # Assume this is the tagged configuration we want.
+	      tagname=$z
+	      break
+	      ;;
+	    esac
+	  fi
+	done
+	# If $tagname still isn't set, then no tagged configuration
+	# was found; let the user know that the "--tag" command
+	# line option must be used.
+	if test -z "$tagname"; then
+	  func_echo "unable to infer tagged configuration"
+	  func_fatal_error "specify a tag with \`--tag'"
+#	else
+#	  func_verbose "using $tagname tagged configuration"
+	fi
+	;;
+      esac
+    fi
+}
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+    write_libobj=${1}
+    if test "$build_libtool_libs" = yes; then
+      write_lobj=\'${2}\'
+    else
+      write_lobj=none
+    fi
+
+    if test "$build_old_libs" = yes; then
+      write_oldobj=\'${3}\'
+    else
+      write_oldobj=none
+    fi
+
+    $opt_dry_run || {
+      cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+      $MV "${write_libobj}T" "${write_libobj}"
+    }
+}
+
+
+##################################################
+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
+##################################################
+
+# func_convert_core_file_wine_to_w32 ARG
+# Helper function used by file name conversion functions when $build is *nix,
+# and $host is mingw, cygwin, or some other w32 environment. Relies on a
+# correctly configured wine environment being available, with the winepath
+# program in $build's $PATH.

+#
+# ARG is the $build file name to be converted to w32 format.
+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
+# be empty on error (or when ARG is empty)
+func_convert_core_file_wine_to_w32 ()
+{
+  $opt_debug
+  func_convert_core_file_wine_to_w32_result="$1"
+  if test -n "$1"; then
+    # Unfortunately, winepath does not exit with a non-zero error code, so we
+    # are forced to check the contents of stdout. On the other hand, if the
+    # command is not found, the shell will set an exit code of 127 and print
+    # *an error message* to stdout. So we must check for both error code of
+    # zero AND non-empty stdout, which explains the odd construction:
+    func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
+    if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
+      func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
+        $SED -e "$lt_sed_naive_backslashify"`
+    else
+      func_convert_core_file_wine_to_w32_result=
+    fi
+  fi
+}
+# end: func_convert_core_file_wine_to_w32
+
+
+# func_convert_core_path_wine_to_w32 ARG
+# Helper function used by path conversion functions when $build is *nix, and
+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
+# configured wine environment being available, with the winepath program in
+# $build's $PATH. Assumes ARG has no leading or trailing path separator
+# characters.
+#
+# ARG is path to be converted from $build format to win32.
+# Result is available in $func_convert_core_path_wine_to_w32_result.
+# Unconvertible file (directory) names in ARG are skipped; if no directory names
+# are convertible, then the result may be empty.
+func_convert_core_path_wine_to_w32 ()
+{
+  $opt_debug
+  # unfortunately, winepath doesn't convert paths, only file names
+  func_convert_core_path_wine_to_w32_result=""
+  if test -n "$1"; then
+    oldIFS=$IFS
+    IFS=:
+    for func_convert_core_path_wine_to_w32_f in $1; do
+      IFS=$oldIFS
+      func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
+      if test -n "$func_convert_core_file_wine_to_w32_result" ; then
+        if test -z "$func_convert_core_path_wine_to_w32_result"; then
+          func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
+        else
+          func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
+        fi
+      fi
+    done
+    IFS=$oldIFS
+  fi
+}
+# end: func_convert_core_path_wine_to_w32
+
+
+# func_cygpath ARGS...
+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used
+# when (1) $build is *nix and Cygwin is hosted via a wine environment; (2)
+# $build is MSYS and $host is Cygwin; or (3) $build is Cygwin. In case (1) or
+# (2), returns the Cygwin file name or path in func_cygpath_result (input
+# file name or path is assumed to be in w32 format, as previously converted
+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
+# or path in func_cygpath_result (input file name or path is assumed to be in
+# Cygwin format). Returns an empty string on error.
+#
+# ARGS are passed to cygpath, with the last one being the file name or path to
+# be converted.
+#
+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
+# environment variable; do not put it in $PATH.
+func_cygpath ()
+{
+  $opt_debug
+  if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
+    func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
+    if test "$?" -ne 0; then
+      # on failure, ensure result is empty
+      func_cygpath_result=
+    fi
+  else
+    func_cygpath_result=
+    func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
+  fi
+}
+#end: func_cygpath
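+
+# Illustrative example (assumes a hypothetical Cygwin root of C:/cygwin and
+# LT_CYGPATH pointing at its cygpath binary):
+#
+#   LT_CYGPATH=C:/cygwin/bin/cygpath
+#   func_cygpath -u 'C:/cygwin/home/user'
+#   # -> func_cygpath_result=/home/user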
+
+
+# func_convert_core_msys_to_w32 ARG
+# Convert file name or path ARG from MSYS format to w32 format.  Return
+# result in func_convert_core_msys_to_w32_result.
+func_convert_core_msys_to_w32 ()
+{
+  $opt_debug
+  # awkward: cmd appends spaces to result
+  func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
+    $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+}
+#end: func_convert_core_msys_to_w32
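+
+# Illustrative example (run from an MSYS shell; the path is hypothetical):
+# MSYS rewrites the /c/... argument for the native cmd, and the sed above
+# backslashifies the result:
+#
+#   func_convert_core_msys_to_w32 '/c/Users/build'
+#   # -> func_convert_core_msys_to_w32_result is roughly c:\Users\build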
+
+
+# func_convert_file_check ARG1 ARG2
+# Verify that ARG1 (a file name in $build format) was converted to $host
+# format in ARG2. Otherwise, emit an error message, but continue (resetting
+# func_to_host_file_result to ARG1).
+func_convert_file_check ()
+{
+  $opt_debug
+  if test -z "$2" && test -n "$1" ; then
+    func_error "Could not determine host file name corresponding to"
+    func_error "  \`$1'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback:
+    func_to_host_file_result="$1"
+  fi
+}
+# end func_convert_file_check
+
+
+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
+# Verify that FROM_PATH (a path in $build format) was converted to $host
+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
+# func_to_host_file_result to a simplistic fallback value (see below).
+func_convert_path_check ()
+{
+  $opt_debug
+  if test -z "$4" && test -n "$3"; then
+    func_error "Could not determine the host path corresponding to"
+    func_error "  \`$3'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback.  This is a deliberately simplistic "conversion" and
+    # should not be "improved".  See libtool.info.
+    if test "x$1" != "x$2"; then
+      lt_replace_pathsep_chars="s|$1|$2|g"
+      func_to_host_path_result=`echo "$3" |
+        $SED -e "$lt_replace_pathsep_chars"`
+    else
+      func_to_host_path_result="$3"
+    fi
+  fi
+}
+# end func_convert_path_check
+
+
+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
+# and appending REPL if ORIG matches BACKPAT.
+func_convert_path_front_back_pathsep ()
+{
+  $opt_debug
+  case $4 in
+  $1 ) func_to_host_path_result="$3$func_to_host_path_result"
+    ;;
+  esac
+  case $4 in
+  $2 ) func_to_host_path_result+="$3"
+    ;;
+  esac
+}
+# end func_convert_path_front_back_pathsep
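+
+# Illustrative example (hypothetical values): a leading or trailing ':' in the
+# original path is preserved as a ';' in the converted result:
+#
+#   func_to_host_path_result='C:\foo;C:\bar'
+#   func_convert_path_front_back_pathsep ":*" "*:" ";" "/foo:/bar:"
+#   # -> func_to_host_path_result=C:\foo;C:\bar;   (trailing ':' became ';')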
+
+
+##################################################
+# $build to $host FILE NAME CONVERSION FUNCTIONS #
+##################################################
+# invoked via `$to_host_file_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# Result will be available in $func_to_host_file_result.
+
+
+# func_to_host_file ARG
+# Converts the file name ARG from $build format to $host format. Return result
+# in func_to_host_file_result.
+func_to_host_file ()
+{
+  $opt_debug
+  $to_host_file_cmd "$1"
+}
+# end func_to_host_file
+
+
+# func_to_tool_file ARG LAZY
+# converts the file name ARG from $build format to toolchain format. Return
+# result in func_to_tool_file_result.  If the conversion in use is listed
+# in (the comma separated) LAZY, no conversion takes place.
+func_to_tool_file ()
+{
+  $opt_debug
+  case ,$2, in
+    *,"$to_tool_file_cmd",*)
+      func_to_tool_file_result=$1
+      ;;
+    *)
+      $to_tool_file_cmd "$1"
+      func_to_tool_file_result=$func_to_host_file_result
+      ;;
+  esac
+}
+# end func_to_tool_file
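+
+# Illustrative example (hypothetical file name): when the active conversion,
+# e.g. func_convert_file_msys_to_w32, is named in LAZY, the file name is
+# passed through unchanged; otherwise it is converted for the toolchain:
+#
+#   func_to_tool_file "$srcfile" func_convert_file_msys_to_w32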
+
+
+# func_convert_file_noop ARG
+# Copy ARG to func_to_host_file_result.
+func_convert_file_noop ()
+{
+  func_to_host_file_result="$1"
+}
+# end func_convert_file_noop
+
+
+# func_convert_file_msys_to_w32 ARG
+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_file_result.
+func_convert_file_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_msys_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_w32
+
+
+# func_convert_file_cygwin_to_w32 ARG
+# Convert file name ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_file_result.
+func_convert_file_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
+    # LT_CYGPATH in this case.
+    func_to_host_file_result=`cygpath -m "$1"`
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_cygwin_to_w32
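+
+# Illustrative example (assumes a hypothetical Cygwin root of C:/cygwin):
+#
+#   func_convert_file_cygwin_to_w32 '/usr/lib/libfoo.la'
+#   # -> func_to_host_file_result=C:/cygwin/usr/lib/libfoo.la  (cygpath -m form)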
+
+
+# func_convert_file_nix_to_w32 ARG
+# Convert file name ARG from *nix to w32 format.  Requires a wine environment
+# and a working winepath. Returns result in func_to_host_file_result.
+func_convert_file_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_file_wine_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_w32
+
+
+# func_convert_file_msys_to_cygwin ARG
+# Convert file name ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_file_result.
+func_convert_file_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_msys_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_cygwin
+
+
+# func_convert_file_nix_to_cygwin ARG
+# Convert file name ARG from *nix to Cygwin format.  Requires Cygwin installed
+# in a wine environment, working winepath, and LT_CYGPATH set.  Returns result
+# in func_to_host_file_result.
+func_convert_file_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
+    func_convert_core_file_wine_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_cygwin
+
+
+#############################################
+# $build to $host PATH CONVERSION FUNCTIONS #
+#############################################
+# invoked via `$to_host_path_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# The result will be available in $func_to_host_path_result.
+#
+# Path separators are also converted from $build format to $host format.  If
+# ARG begins or ends with a path separator character, it is preserved (but
+# converted to $host format) on output.
+#
+# All path conversion functions are named using the following convention:
+#   file name conversion function    : func_convert_file_X_to_Y ()
+#   path conversion function         : func_convert_path_X_to_Y ()
+# where, for any given $build/$host combination the 'X_to_Y' value is the
+# same.  If conversion functions are added for new $build/$host combinations,
+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
+# will break.
+
+
+# func_init_to_host_path_cmd
+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
+# appropriate value, based on the value of $to_host_file_cmd.
+to_host_path_cmd=
+func_init_to_host_path_cmd ()
+{
+  $opt_debug
+  if test -z "$to_host_path_cmd"; then
+    func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
+    to_host_path_cmd="func_convert_path_${func_stripname_result}"
+  fi
+}
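+
+# Illustrative example (hypothetical configuration): the path converter is
+# derived from the file converter by swapping the function-name prefix:
+#
+#   to_host_file_cmd=func_convert_file_cygwin_to_w32
+#   func_init_to_host_path_cmd
+#   # -> to_host_path_cmd=func_convert_path_cygwin_to_w32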
+
+
+# func_to_host_path ARG
+# Converts the path ARG from $build format to $host format. Return result
+# in func_to_host_path_result.
+func_to_host_path ()
+{
+  $opt_debug
+  func_init_to_host_path_cmd
+  $to_host_path_cmd "$1"
+}
+# end func_to_host_path
+
+
+# func_convert_path_noop ARG
+# Copy ARG to func_to_host_path_result.
+func_convert_path_noop ()
+{
+  func_to_host_path_result="$1"
+}
+# end func_convert_path_noop
+
+
+# func_convert_path_msys_to_w32 ARG
+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_path_result.
+func_convert_path_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from ARG.  MSYS
+    # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
+    # and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_msys_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_msys_to_w32
+
+
+# func_convert_path_cygwin_to_w32 ARG
+# Convert path ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_path_result.
+func_convert_path_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_cygwin_to_w32
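+
+# Illustrative example (assumes a hypothetical Cygwin root of C:/cygwin):
+#
+#   func_convert_path_cygwin_to_w32 '/usr/bin:/usr/local/bin'
+#   # -> func_to_host_path_result=C:/cygwin/usr/bin;C:/cygwin/usr/local/bin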
+
+
+# func_convert_path_nix_to_w32 ARG
+# Convert path ARG from *nix to w32 format.  Requires a wine environment and
+# a working winepath.  Returns result in func_to_host_path_result.
+func_convert_path_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_nix_to_w32
+
+
+# func_convert_path_msys_to_cygwin ARG
+# Convert path ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_path_result.
+func_convert_path_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_msys_to_cygwin
+
+
+# func_convert_path_nix_to_cygwin ARG
+# Convert path ARG from *nix to Cygwin format.  Requires Cygwin installed in
+# a wine environment, a working winepath, and LT_CYGPATH set.  Returns result
+# in func_to_host_path_result.
+func_convert_path_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from
+    # ARG. msys behavior is inconsistent here, cygpath turns them
+    # into '.;' and ';.', and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_nix_to_cygwin
+
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+    $opt_debug
+    # Get the compilation command and the source file.
+    base_compile=
+    srcfile="$nonopt"  #  always keep a non-empty value in "srcfile"
+    suppress_opt=yes
+    suppress_output=
+    arg_mode=normal
+    libobj=
+    later=
+    pie_flag=
+
+    for arg
+    do
+      case $arg_mode in
+      arg  )
+	# do not "continue".  Instead, add this to base_compile
+	lastarg="$arg"
+	arg_mode=normal
+	;;
+
+      target )
+	libobj="$arg"
+	arg_mode=normal
+	continue
+	;;
+
+      normal )
+	# Accept any command-line options.
+	case $arg in
+	-o)
+	  test -n "$libobj" && \
+	    func_fatal_error "you cannot specify \`-o' more than once"
+	  arg_mode=target
+	  continue
+	  ;;
+
+	-pie | -fpie | -fPIE)
+          pie_flag+=" $arg"
+	  continue
+	  ;;
+
+	-shared | -static | -prefer-pic | -prefer-non-pic)
+	  later+=" $arg"
+	  continue
+	  ;;
+
+	-no-suppress)
+	  suppress_opt=no
+	  continue
+	  ;;
+
+	-Xcompiler)
+	  arg_mode=arg  #  the next one goes into the "base_compile" arg list
+	  continue      #  The current "srcfile" will either be retained or
+	  ;;            #  replaced later.  I would guess that would be a bug.
+
+	-Wc,*)
+	  func_stripname '-Wc,' '' "$arg"
+	  args=$func_stripname_result
+	  lastarg=
+	  save_ifs="$IFS"; IFS=','
+	  for arg in $args; do
+	    IFS="$save_ifs"
+	    func_append_quoted lastarg "$arg"
+	  done
+	  IFS="$save_ifs"
+	  func_stripname ' ' '' "$lastarg"
+	  lastarg=$func_stripname_result
+
+	  # Add the arguments to base_compile.
+	  base_compile+=" $lastarg"
+	  continue
+	  ;;
+
+	*)
+	  # Accept the current argument as the source file.
+	  # The previous "srcfile" becomes the current argument.
+	  #
+	  lastarg="$srcfile"
+	  srcfile="$arg"
+	  ;;
+	esac  #  case $arg
+	;;
+      esac    #  case $arg_mode
+
+      # Aesthetically quote the previous argument.
+      func_append_quoted base_compile "$lastarg"
+    done # for arg
+
+    case $arg_mode in
+    arg)
+      func_fatal_error "you must specify an argument for -Xcompile"
+      ;;
+    target)
+      func_fatal_error "you must specify a target with \`-o'"
+      ;;
+    *)
+      # Get the name of the library object.
+      test -z "$libobj" && {
+	func_basename "$srcfile"
+	libobj="$func_basename_result"
+      }
+      ;;
+    esac
+
+    # Recognize several different file suffixes.
+    # If the user specifies -o file.o, it is replaced with file.lo
+    case $libobj in
+    *.[cCFSifmso] | \
+    *.ada | *.adb | *.ads | *.asm | \
+    *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+    *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
+      func_xform "$libobj"
+      libobj=$func_xform_result
+      ;;
+    esac
+
+    case $libobj in
+    *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+    *)
+      func_fatal_error "cannot determine name of library object from \`$libobj'"
+      ;;
+    esac
+
+    func_infer_tag $base_compile
+
+    for arg in $later; do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	continue
+	;;
+
+      -static)
+	build_libtool_libs=no
+	build_old_libs=yes
+	continue
+	;;
+
+      -prefer-pic)
+	pic_mode=yes
+	continue
+	;;
+
+      -prefer-non-pic)
+	pic_mode=no
+	continue
+	;;
+      esac
+    done
+
+    func_quote_for_eval "$libobj"
+    test "X$libobj" != "X$func_quote_for_eval_result" \
+      && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"'	 &()|`$[]' \
+      && func_warning "libobj name \`$libobj' may not contain shell special characters."
+    func_dirname_and_basename "$obj" "/" ""
+    objname="$func_basename_result"
+    xdir="$func_dirname_result"
+    lobj=${xdir}$objdir/$objname
+
+    test -z "$base_compile" && \
+      func_fatal_help "you must specify a compilation command"
+
+    # Delete any leftover library objects.
+    if test "$build_old_libs" = yes; then
+      removelist="$obj $lobj $libobj ${libobj}T"
+    else
+      removelist="$lobj $libobj ${libobj}T"
+    fi
+
+    # On Cygwin there's no "real" PIC flag so we must build both object types
+    case $host_os in
+    cygwin* | mingw* | pw32* | os2* | cegcc*)
+      pic_mode=default
+      ;;
+    esac
+    if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+      # non-PIC code in shared libraries is not supported
+      pic_mode=default
+    fi
+
+    # Calculate the filename of the output object if compiler does
+    # not support -o with -c
+    if test "$compiler_c_o" = no; then
+      output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext}
+      lockfile="$output_obj.lock"
+    else
+      output_obj=
+      need_locks=no
+      lockfile=
+    fi
+
+    # Lock this critical section if it is needed
+    # We use this script file to make the link, it avoids creating a new file
+    if test "$need_locks" = yes; then
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    elif test "$need_locks" = warn; then
+      if test -f "$lockfile"; then
+	$ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+      removelist+=" $output_obj"
+      $ECHO "$srcfile" > "$lockfile"
+    fi
+
+    $opt_dry_run || $RM $removelist
+    removelist+=" $lockfile"
+    trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+    func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+    srcfile=$func_to_tool_file_result
+    func_quote_for_eval "$srcfile"
+    qsrcfile=$func_quote_for_eval_result
+
+    # Only build a PIC object if we are building libtool libraries.
+    if test "$build_libtool_libs" = yes; then
+      # Without this assignment, base_compile gets emptied.
+      fbsd_hideous_sh_bug=$base_compile
+
+      if test "$pic_mode" != no; then
+	command="$base_compile $qsrcfile $pic_flag"
+      else
+	# Don't build PIC code
+	command="$base_compile $qsrcfile"
+      fi
+
+      func_mkdir_p "$xdir$objdir"
+
+      if test -z "$output_obj"; then
+	# Place PIC objects in $objdir
+	command+=" -o $lobj"
+      fi
+
+      func_show_eval_locale "$command"	\
+          'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed, then go on to compile the next one
+      if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+	func_show_eval '$MV "$output_obj" "$lobj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+
+      # Allow error messages only from the first compilation.
+      if test "$suppress_opt" = yes; then
+	suppress_output=' >/dev/null 2>&1'
+      fi
+    fi
+
+    # Only build a position-dependent object if we build old libraries.
+    if test "$build_old_libs" = yes; then
+      if test "$pic_mode" != yes; then
+	# Don't build PIC code
+	command="$base_compile $qsrcfile$pie_flag"
+      else
+	command="$base_compile $qsrcfile $pic_flag"
+      fi
+      if test "$compiler_c_o" = yes; then
+	command+=" -o $obj"
+      fi
+
+      # Suppress compiler output if we already did a PIC compilation.
+      command+="$suppress_output"
+      func_show_eval_locale "$command" \
+        '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed
+      if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+	func_show_eval '$MV "$output_obj" "$obj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+    fi
+
+    $opt_dry_run || {
+      func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+      # Unlock the critical section if it was locked
+      if test "$need_locks" != no; then
+	removelist=$lockfile
+        $RM "$lockfile"
+      fi
+    }
+
+    exit $EXIT_SUCCESS
+}
+
+$opt_help || {
+  test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
+}
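+
+# Illustrative compile-mode invocation (file names hypothetical):
+#
+#   libtool --mode=compile gcc -c foo.c
+#
+# produces foo.lo plus, depending on configuration, a PIC object under
+# $objdir (typically .libs/foo.o) and a non-PIC foo.o.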
+
+func_mode_help ()
+{
+    # We need to display help for each of the modes.
+    case $opt_mode in
+      "")
+        # Generic help is extracted from the usage comments
+        # at the start of this file.
+        func_help
+        ;;
+
+      clean)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      compile)
+      $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+  -o OUTPUT-FILE    set the output file name to OUTPUT-FILE
+  -no-suppress      do not suppress compiler output for multiple passes
+  -prefer-pic       try to build PIC objects only
+  -prefer-non-pic   try to build non-PIC objects only
+  -shared           do not build a \`.o' file suitable for static linking
+  -static           only build a \`.o' file suitable for static linking
+  -Wc,FLAG          pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a \`standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix \`.c' with the
+library object suffix, \`.lo'."
+        ;;
+
+      execute)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+  -dlopen FILE      add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to \`-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+        ;;
+
+      finish)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges.  Use
+the \`--dry-run' option if you just want to see what would be executed."
+        ;;
+
+      install)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command.  The first component should be
+either the \`install' or \`cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+  -inst-prefix-dir PREFIX-DIR  Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+        ;;
+
+      link)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+  -all-static       do not do any dynamic linking at all
+  -avoid-version    do not add a version suffix if possible
+  -bindir BINDIR    specify path to binaries directory (for systems where
+                    libraries must be found in the PATH setting at runtime)
+  -dlopen FILE      \`-dlpreopen' FILE if it cannot be dlopened at runtime
+  -dlpreopen FILE   link in FILE and add its symbols to lt_preloaded_symbols
+  -export-dynamic   allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+  -export-symbols SYMFILE
+                    try to export only the symbols listed in SYMFILE
+  -export-symbols-regex REGEX
+                    try to export only the symbols matching REGEX
+  -LLIBDIR          search LIBDIR for required installed libraries
+  -lNAME            OUTPUT-FILE requires the installed library libNAME
+  -module           build a library that can be dlopened
+  -no-fast-install  disable the fast-install mode
+  -no-install       link a not-installable executable
+  -no-undefined     declare that a library does not refer to external symbols
+  -o OUTPUT-FILE    create OUTPUT-FILE from the specified objects
+  -objectlist FILE  use a list of object files found in FILE to specify objects
+  -precious-files-regex REGEX
+                    don't remove output files matching REGEX
+  -release RELEASE  specify package release information
+  -rpath LIBDIR     the created library will eventually be installed in LIBDIR
+  -R[ ]LIBDIR       add LIBDIR to the runtime path of programs and libraries
+  -shared           only do dynamic linking of libtool libraries
+  -shrext SUFFIX    override the standard shared library file extension
+  -static           do not do any dynamic linking of uninstalled libtool libraries
+  -static-libtool-libs
+                    do not do any dynamic linking of libtool libraries
+  -version-info CURRENT[:REVISION[:AGE]]
+                    specify library version info [each variable defaults to 0]
+  -weak LIBNAME     declare that the target provides the LIBNAME interface
+  -Wc,FLAG
+  -Xcompiler FLAG   pass compiler-specific FLAG directly to the compiler
+  -Wl,FLAG
+  -Xlinker FLAG     pass linker-specific FLAG directly to the linker
+  -XCClinker FLAG   pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with \`-') are ignored.
+
+Every other argument is treated as a filename.  Files ending in \`.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
+only library objects (\`.lo' files) may be specified, and \`-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
+using \`ar' and \`ranlib', or on Windows using \`lib'.
+
+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
+is created, otherwise an executable program is created."
+        ;;
+
+      uninstall)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      *)
+        func_fatal_help "invalid operation mode \`$opt_mode'"
+        ;;
+    esac
+
+    echo
+    $ECHO "Try \`$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+  if test "$opt_help" = :; then
+    func_mode_help
+  else
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	func_mode_help
+      done
+    } | sed -n '1p; 2,$s/^Usage:/  or: /p'
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	echo
+	func_mode_help
+      done
+    } |
+    sed '1d
+      /^When reporting/,/^Report/{
+	H
+	d
+      }
+      $x
+      /information about other modes/d
+      /more detailed .*MODE/d
+      s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+  fi
+  exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+    $opt_debug
+    # The first argument is the command name.
+    cmd="$nonopt"
+    test -z "$cmd" && \
+      func_fatal_help "you must specify a COMMAND"
+
+    # Handle -dlopen flags immediately.
+    for file in $opt_dlopen; do
+      test -f "$file" \
+	|| func_fatal_help "\`$file' is not a file"
+
+      dir=
+      case $file in
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$file' is not a valid libtool archive"
+
+	# Read the libtool library.
+	dlname=
+	library_names=
+	func_source "$file"
+
+	# Skip this library if it cannot be dlopened.
+	if test -z "$dlname"; then
+	  # Warn if it was a shared library.
+	  test -n "$library_names" && \
+	    func_warning "\`$file' was not linked with \`-export-dynamic'"
+	  continue
+	fi
+
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+
+	if test -f "$dir/$objdir/$dlname"; then
+	  dir+="/$objdir"
+	else
+	  if test ! -f "$dir/$dlname"; then
+	    func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+	  fi
+	fi
+	;;
+
+      *.lo)
+	# Just add the directory containing the .lo file.
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+	;;
+
+      *)
+	func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
+	continue
+	;;
+      esac
+
+      # Get the absolute pathname.
+      absdir=`cd "$dir" && pwd`
+      test -n "$absdir" && dir="$absdir"
+
+      # Now add the directory to shlibpath_var.
+      if eval "test -z \"\$$shlibpath_var\""; then
+	eval "$shlibpath_var=\"\$dir\""
+      else
+	eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+      fi
+    done
+
+    # This variable tells wrapper scripts just to set shlibpath_var
+    # rather than running their programs.
+    libtool_execute_magic="$magic"
+
+    # Check if any of the arguments is a wrapper script.
+    args=
+    for file
+    do
+      case $file in
+      -* | *.la | *.lo ) ;;
+      *)
+	# Do a test to see if this is really a libtool program.
+	if func_ltwrapper_script_p "$file"; then
+	  func_source "$file"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	elif func_ltwrapper_executable_p "$file"; then
+	  func_ltwrapper_scriptname "$file"
+	  func_source "$func_ltwrapper_scriptname_result"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	fi
+	;;
+      esac
+      # Quote arguments (to preserve shell metacharacters).
+      func_append_quoted args "$file"
+    done
+
+    if test "X$opt_dry_run" = Xfalse; then
+      if test -n "$shlibpath_var"; then
+	# Export the shlibpath_var.
+	eval "export $shlibpath_var"
+      fi
+
+      # Restore saved environment variables
+      for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+      do
+	eval "if test \"\${save_$lt_var+set}\" = set; then
+                $lt_var=\$save_$lt_var; export $lt_var
+	      else
+		$lt_unset $lt_var
+	      fi"
+      done
+
+      # Now prepare to actually exec the command.
+      exec_cmd="\$cmd$args"
+    else
+      # Display what would be done.
+      if test -n "$shlibpath_var"; then
+	eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+	echo "export $shlibpath_var"
+      fi
+      $ECHO "$cmd$args"
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
+
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+    $opt_debug
+    libs=
+    libdirs=
+    admincmds=
+
+    for opt in "$nonopt" ${1+"$@"}
+    do
+      if test -d "$opt"; then
+	libdirs+=" $opt"
+
+      elif test -f "$opt"; then
+	if func_lalib_unsafe_p "$opt"; then
+	  libs+=" $opt"
+	else
+	  func_warning "\`$opt' is not a valid libtool archive"
+	fi
+
+      else
+	func_fatal_error "invalid argument \`$opt'"
+      fi
+    done
+
+    if test -n "$libs"; then
+      if test -n "$lt_sysroot"; then
+        sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
+        sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
+      else
+        sysroot_cmd=
+      fi
+
+      # Remove sysroot references
+      if $opt_dry_run; then
+        for lib in $libs; do
+          echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
+        done
+      else
+        tmpdir=`func_mktempdir`
+        for lib in $libs; do
+	  sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
+	    > $tmpdir/tmp-la
+	  mv -f $tmpdir/tmp-la $lib
+	done
+        ${RM}r "$tmpdir"
+      fi
+    fi
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      for libdir in $libdirs; do
+	if test -n "$finish_cmds"; then
+	  # Do each command in the finish commands.
+	  func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+	fi
+	if test -n "$finish_eval"; then
+	  # Do the single finish_eval.
+	  eval cmds=\"$finish_eval\"
+	  $opt_dry_run || eval "$cmds" || admincmds+="
+       $cmds"
+	fi
+      done
+    fi
+
+    # Exit here if they wanted silent mode.
+    $opt_silent && exit $EXIT_SUCCESS
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      echo "----------------------------------------------------------------------"
+      echo "Libraries have been installed in:"
+      for libdir in $libdirs; do
+	$ECHO "   $libdir"
+      done
+      echo
+      echo "If you ever happen to want to link against installed libraries"
+      echo "in a given directory, LIBDIR, you must either use libtool, and"
+      echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+      echo "flag during linking and do at least one of the following:"
+      if test -n "$shlibpath_var"; then
+	echo "   - add LIBDIR to the \`$shlibpath_var' environment variable"
+	echo "     during execution"
+      fi
+      if test -n "$runpath_var"; then
+	echo "   - add LIBDIR to the \`$runpath_var' environment variable"
+	echo "     during linking"
+      fi
+      if test -n "$hardcode_libdir_flag_spec"; then
+	libdir=LIBDIR
+	eval flag=\"$hardcode_libdir_flag_spec\"
+
+	$ECHO "   - use the \`$flag' linker flag"
+      fi
+      if test -n "$admincmds"; then
+	$ECHO "   - have your system administrator run these commands:$admincmds"
+      fi
+      if test -f /etc/ld.so.conf; then
+	echo "   - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+      fi
+      echo
+
+      echo "See any operating system documentation about shared libraries for"
+      case $host in
+	solaris2.[6789]|solaris2.1[0-9])
+	  echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+	  echo "pages."
+	  ;;
+	*)
+	  echo "more information, such as the ld(1) and ld.so(8) manual pages."
+	  ;;
+      esac
+      echo "----------------------------------------------------------------------"
+    fi
+    exit $EXIT_SUCCESS
+}
+
+test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
+
+
+# func_mode_install arg...
+func_mode_install ()
+{
+    $opt_debug
+    # There may be an optional sh(1) argument at the beginning of
+    # install_prog (especially on Windows NT).
+    if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
+       # Allow the use of GNU shtool's install command.
+       case $nonopt in *shtool*) :;; *) false;; esac; then
+      # Aesthetically quote it.
+      func_quote_for_eval "$nonopt"
+      install_prog="$func_quote_for_eval_result "
+      arg=$1
+      shift
+    else
+      install_prog=
+      arg=$nonopt
+    fi
+
+    # The real first argument should be the name of the installation program.
+    # Aesthetically quote it.
+    func_quote_for_eval "$arg"
+    install_prog+="$func_quote_for_eval_result"
+    install_shared_prog=$install_prog
+    case " $install_prog " in
+      *[\\\ /]cp\ *) install_cp=: ;;
+      *) install_cp=false ;;
+    esac
+
+    # We need to accept at least all the BSD install flags.
+    dest=
+    files=
+    opts=
+    prev=
+    install_type=
+    isdir=no
+    stripme=
+    no_mode=:
+    for arg
+    do
+      arg2=
+      if test -n "$dest"; then
+	files+=" $dest"
+	dest=$arg
+	continue
+      fi
+
+      case $arg in
+      -d) isdir=yes ;;
+      -f)
+	if $install_cp; then :; else
+	  prev=$arg
+	fi
+	;;
+      -g | -m | -o)
+	prev=$arg
+	;;
+      -s)
+	stripme=" -s"
+	continue
+	;;
+      -*)
+	;;
+      *)
+	# If the previous option needed an argument, then skip it.
+	if test -n "$prev"; then
+	  if test "x$prev" = x-m && test -n "$install_override_mode"; then
+	    arg2=$install_override_mode
+	    no_mode=false
+	  fi
+	  prev=
+	else
+	  dest=$arg
+	  continue
+	fi
+	;;
+      esac
+
+      # Aesthetically quote the argument.
+      func_quote_for_eval "$arg"
+      install_prog+=" $func_quote_for_eval_result"
+      if test -n "$arg2"; then
+	func_quote_for_eval "$arg2"
+      fi
+      install_shared_prog+=" $func_quote_for_eval_result"
+    done
+
+    test -z "$install_prog" && \
+      func_fatal_help "you must specify an install program"
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prev' option requires an argument"
+
+    if test -n "$install_override_mode" && $no_mode; then
+      if $install_cp; then :; else
+	func_quote_for_eval "$install_override_mode"
+	install_shared_prog+=" -m $func_quote_for_eval_result"
+      fi
+    fi
+
+    if test -z "$files"; then
+      if test -z "$dest"; then
+	func_fatal_help "no file or destination specified"
+      else
+	func_fatal_help "you must specify a destination"
+      fi
+    fi
+
+    # Strip any trailing slash from the destination.
+    func_stripname '' '/' "$dest"
+    dest=$func_stripname_result
+
+    # Check to see that the destination is a directory.
+    test -d "$dest" && isdir=yes
+    if test "$isdir" = yes; then
+      destdir="$dest"
+      destname=
+    else
+      func_dirname_and_basename "$dest" "" "."
+      destdir="$func_dirname_result"
+      destname="$func_basename_result"
+
+      # Not a directory, so check to see that there is only one file specified.
+      set dummy $files; shift
+      test "$#" -gt 1 && \
+	func_fatal_help "\`$dest' is not a directory"
+    fi
+    case $destdir in
+    [\\/]* | [A-Za-z]:[\\/]*) ;;
+    *)
+      for file in $files; do
+	case $file in
+	*.lo) ;;
+	*)
+	  func_fatal_help "\`$destdir' must be an absolute directory name"
+	  ;;
+	esac
+      done
+      ;;
+    esac
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    staticlibs=
+    future_libdirs=
+    current_libdirs=
+    for file in $files; do
+
+      # Do each installation.
+      case $file in
+      *.$libext)
+	# Do the static libraries later.
+	staticlibs+=" $file"
+	;;
+
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$file' is not a valid libtool archive"
+
+	library_names=
+	old_library=
+	relink_command=
+	func_source "$file"
+
+	# Add the libdir to current_libdirs if it is the destination.
+	if test "X$destdir" = "X$libdir"; then
+	  case "$current_libdirs " in
+	  *" $libdir "*) ;;
+	  *) current_libdirs+=" $libdir" ;;
+	  esac
+	else
+	  # Note the libdir as a future libdir.
+	  case "$future_libdirs " in
+	  *" $libdir "*) ;;
+	  *) future_libdirs+=" $libdir" ;;
+	  esac
+	fi
+
+	func_dirname "$file" "/" ""
+	dir="$func_dirname_result"
+	dir+="$objdir"
+
+	if test -n "$relink_command"; then
+	  # Determine the prefix the user has applied to our future dir.
+	  inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+	  # Don't allow the user to place us outside of our expected
+	  # location b/c this prevents finding dependent libraries that
+	  # are installed to the same prefix.
+	  # At present, this check doesn't affect windows .dll's that
+	  # are installed into $libdir/../bin (currently, that works fine)
+	  # but it's something to keep an eye on.
+	  test "$inst_prefix_dir" = "$destdir" && \
+	    func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
+
+	  if test -n "$inst_prefix_dir"; then
+	    # Stick the inst_prefix_dir data into the link command.
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+	  else
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+	  fi
+
+	  func_warning "relinking \`$file'"
+	  func_show_eval "$relink_command" \
+	    'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
+	fi
+
+	# See the names of the shared library.
+	set dummy $library_names; shift
+	if test -n "$1"; then
+	  realname="$1"
+	  shift
+
+	  srcname="$realname"
+	  test -n "$relink_command" && srcname="$realname"T
+
+	  # Install the shared library and build the symlinks.
+	  func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+	      'exit $?'
+	  tstripme="$stripme"
+	  case $host_os in
+	  cygwin* | mingw* | pw32* | cegcc*)
+	    case $realname in
+	    *.dll.a)
+	      tstripme=""
+	      ;;
+	    esac
+	    ;;
+	  esac
+	  if test -n "$tstripme" && test -n "$striplib"; then
+	    func_show_eval "$striplib $destdir/$realname" 'exit $?'
+	  fi
+
+	  if test "$#" -gt 0; then
+	    # Delete the old symlinks, and create new ones.
+	    # Try `ln -sf' first, because the `ln' binary might depend on
+	    # the symlink we replace!  Solaris /bin/ln does not understand -f,
+	    # so we also need to try rm && ln -s.
+	    for linkname
+	    do
+	      test "$linkname" != "$realname" \
+		&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+	    done
+	  fi
+
+	  # Do each command in the postinstall commands.
+	  lib="$destdir/$realname"
+	  func_execute_cmds "$postinstall_cmds" 'exit $?'
+	fi
+
+	# Install the pseudo-library for information purposes.
+	func_basename "$file"
+	name="$func_basename_result"
+	instname="$dir/$name"i
+	func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+	# Maybe install the static library, too.
+	test -n "$old_library" && staticlibs+=" $dir/$old_library"
+	;;
+
+      *.lo)
+	# Install (i.e. copy) a libtool object.
+
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# Deduce the name of the destination old-style object file.
+	case $destfile in
+	*.lo)
+	  func_lo2o "$destfile"
+	  staticdest=$func_lo2o_result
+	  ;;
+	*.$objext)
+	  staticdest="$destfile"
+	  destfile=
+	  ;;
+	*)
+	  func_fatal_help "cannot copy a libtool object to \`$destfile'"
+	  ;;
+	esac
+
+	# Install the libtool object if requested.
+	test -n "$destfile" && \
+	  func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+	# Install the old object if enabled.
+	if test "$build_old_libs" = yes; then
+	  # Deduce the name of the old-style object file.
+	  func_lo2o "$file"
+	  staticobj=$func_lo2o_result
+	  func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+	fi
+	exit $EXIT_SUCCESS
+	;;
+
+      *)
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# If the file is missing, and there is a .exe on the end, strip it
+	# because it is most likely a libtool script we actually want to
+	# install
+	stripped_ext=""
+	case $file in
+	  *.exe)
+	    if test ! -f "$file"; then
+	      func_stripname '' '.exe' "$file"
+	      file=$func_stripname_result
+	      stripped_ext=".exe"
+	    fi
+	    ;;
+	esac
+
+	# Do a test to see if this is really a libtool program.
+	case $host in
+	*cygwin* | *mingw*)
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      wrapper=$func_ltwrapper_scriptname_result
+	    else
+	      func_stripname '' '.exe' "$file"
+	      wrapper=$func_stripname_result
+	    fi
+	    ;;
+	*)
+	    wrapper=$file
+	    ;;
+	esac
+	if func_ltwrapper_script_p "$wrapper"; then
+	  notinst_deplibs=
+	  relink_command=
+
+	  func_source "$wrapper"
+
+	  # Check the variables that should have been set.
+	  test -z "$generated_by_libtool_version" && \
+	    func_fatal_error "invalid libtool wrapper script \`$wrapper'"
+
+	  finalize=yes
+	  for lib in $notinst_deplibs; do
+	    # Check to see that each library is installed.
+	    libdir=
+	    if test -f "$lib"; then
+	      func_source "$lib"
+	    fi
+	    libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test
+	    if test -n "$libdir" && test ! -f "$libfile"; then
+	      func_warning "\`$lib' has not been installed in \`$libdir'"
+	      finalize=no
+	    fi
+	  done
+
+	  relink_command=
+	  func_source "$wrapper"
+
+	  outputname=
+	  if test "$fast_install" = no && test -n "$relink_command"; then
+	    $opt_dry_run || {
+	      if test "$finalize" = yes; then
+	        tmpdir=`func_mktempdir`
+		func_basename "$file$stripped_ext"
+		file="$func_basename_result"
+	        outputname="$tmpdir/$file"
+	        # Replace the output file specification.
+	        relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+	        $opt_silent || {
+	          func_quote_for_expand "$relink_command"
+		  eval "func_echo $func_quote_for_expand_result"
+	        }
+	        if eval "$relink_command"; then :
+	          else
+		  func_error "error: relink \`$file' with the above command before installing it"
+		  $opt_dry_run || ${RM}r "$tmpdir"
+		  continue
+	        fi
+	        file="$outputname"
+	      else
+	        func_warning "cannot relink \`$file'"
+	      fi
+	    }
+	  else
+	    # Install the binary that we compiled earlier.
+	    file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+	  fi
+	fi
+
+	# remove .exe since cygwin /usr/bin/install will append another
+	# one anyway
+	case $install_prog,$host in
+	*/usr/bin/install*,*cygwin*)
+	  case $file:$destfile in
+	  *.exe:*.exe)
+	    # this is ok
+	    ;;
+	  *.exe:*)
+	    destfile=$destfile.exe
+	    ;;
+	  *:*.exe)
+	    func_stripname '' '.exe' "$destfile"
+	    destfile=$func_stripname_result
+	    ;;
+	  esac
+	  ;;
+	esac
+	func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+	$opt_dry_run || if test -n "$outputname"; then
+	  ${RM}r "$tmpdir"
+	fi
+	;;
+      esac
+    done
+
+    for file in $staticlibs; do
+      func_basename "$file"
+      name="$func_basename_result"
+
+      # Set up the ranlib parameters.
+      oldlib="$destdir/$name"
+      func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+      tool_oldlib=$func_to_tool_file_result
+
+      func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+      if test -n "$stripme" && test -n "$old_striplib"; then
+	func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
+      fi
+
+      # Do each command in the postinstall commands.
+      func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+    done
+
+    test -n "$future_libdirs" && \
+      func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+    if test -n "$current_libdirs"; then
+      # Maybe just do a dry run.
+      $opt_dry_run && current_libdirs=" -n$current_libdirs"
+      exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+    else
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = install && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+    $opt_debug
+    my_outputname="$1"
+    my_originator="$2"
+    my_pic_p="${3-no}"
+    my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
+    my_dlsyms=
+
+    if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+      if test -n "$NM" && test -n "$global_symbol_pipe"; then
+	my_dlsyms="${my_outputname}S.c"
+      else
+	func_error "not configured to extract global symbols from dlpreopened files"
+      fi
+    fi
+
+    if test -n "$my_dlsyms"; then
+      case $my_dlsyms in
+      "") ;;
+      *.c)
+	# Discover the nlist of each of the dlfiles.
+	nlist="$output_objdir/${my_outputname}.nm"
+
+	func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+	# Parse the name list into a source file.
+	func_verbose "creating $output_objdir/$my_dlsyms"
+
+	$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 con't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* External symbol declarations for the compiler. */\
+"
+
+	if test "$dlself" = yes; then
+	  func_verbose "generating symbol list for \`$output'"
+
+	  $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
+
+	  # Add our own program objects to the symbol list.
+	  progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	  for progfile in $progfiles; do
+	    func_to_tool_file "$progfile" func_convert_file_msys_to_w32
+	    func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
+	    $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+	  done
+
+	  if test -n "$exclude_expsyms"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  if test -n "$export_symbols_regex"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  # Prepare the list of exported symbols
+	  if test -z "$export_symbols"; then
+	    export_symbols="$output_objdir/$outputname.exp"
+	    $opt_dry_run || {
+	      $RM $export_symbols
+	      eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+	      case $host in
+	      *cygwin* | *mingw* | *cegcc* )
+                eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+                eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+	        ;;
+	      esac
+	    }
+	  else
+	    $opt_dry_run || {
+	      eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+	      eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	      case $host in
+	        *cygwin* | *mingw* | *cegcc* )
+	          eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+	          eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+	          ;;
+	      esac
+	    }
+	  fi
+	fi
+
+	for dlprefile in $dlprefiles; do
+	  func_verbose "extracting global C symbols from \`$dlprefile'"
+	  func_basename "$dlprefile"
+	  name="$func_basename_result"
+          case $host in
+	    *cygwin* | *mingw* | *cegcc* )
+	      # if an import library, we need to obtain dlname
+	      if func_win32_import_lib_p "$dlprefile"; then
+	        func_tr_sh "$dlprefile"
+	        eval "curr_lafile=\$libfile_$func_tr_sh_result"
+	        dlprefile_dlbasename=""
+	        if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
+	          # Use subshell, to avoid clobbering current variable values
+	          dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
+	          if test -n "$dlprefile_dlname" ; then
+	            func_basename "$dlprefile_dlname"
+	            dlprefile_dlbasename="$func_basename_result"
+	          else
+	            # no lafile. user explicitly requested -dlpreopen <import library>.
+	            $sharedlib_from_linklib_cmd "$dlprefile"
+	            dlprefile_dlbasename=$sharedlib_from_linklib_result
+	          fi
+	        fi
+	        $opt_dry_run || {
+	          if test -n "$dlprefile_dlbasename" ; then
+	            eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
+	          else
+	            func_warning "Could not compute DLL name from $name"
+	            eval '$ECHO ": $name " >> "$nlist"'
+	          fi
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
+	            $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
+	        }
+	      else # not an import lib
+	        $opt_dry_run || {
+	          eval '$ECHO ": $name " >> "$nlist"'
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	        }
+	      fi
+	    ;;
+	    *)
+	      $opt_dry_run || {
+	        eval '$ECHO ": $name " >> "$nlist"'
+	        func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	        eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	      }
+	    ;;
+          esac
+	done
+
+	$opt_dry_run || {
+	  # Make sure we have at least an empty file.
+	  test -f "$nlist" || : > "$nlist"
+
+	  if test -n "$exclude_expsyms"; then
+	    $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+	    $MV "$nlist"T "$nlist"
+	  fi
+
+	  # Try sorting and uniquifying the output.
+	  if $GREP -v "^: " < "$nlist" |
+	      if sort -k 3 </dev/null >/dev/null 2>&1; then
+		sort -k 3
+	      else
+		sort +2
+	      fi |
+	      uniq > "$nlist"S; then
+	    :
+	  else
+	    $GREP -v "^: " < "$nlist" > "$nlist"S
+	  fi
+
+	  if test -f "$nlist"S; then
+	    eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+	  else
+	    echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+	  fi
+
+	  echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols.  */
+typedef struct {
+  const char *name;
+  void *address;
+} lt_dlsymlist;
+extern LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];
+LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{\
+  { \"$my_originator\", (void *) 0 },"
+
+	  case $need_lib_prefix in
+	  no)
+	    eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  *)
+	    eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  esac
+	  echo >> "$output_objdir/$my_dlsyms" "\
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker. */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+	} # !$opt_dry_run
+
+	pic_flag_for_symtable=
+	case "$compile_command " in
+	*" -static "*) ;;
+	*)
+	  case $host in
+	  # compiling the symbol table file with pic_flag works around
+	  # a FreeBSD bug that causes programs to crash when -lm is
+	  # linked before any other PIC object.  But we must not use
+	  # pic_flag when linking with -static.  The problem exists in
+	  # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+	  *-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+	    pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+	  *-*-hpux*)
+	    pic_flag_for_symtable=" $pic_flag"  ;;
+	  *)
+	    if test "X$my_pic_p" != Xno; then
+	      pic_flag_for_symtable=" $pic_flag"
+	    fi
+	    ;;
+	  esac
+	  ;;
+	esac
+	symtab_cflags=
+	for arg in $LTCFLAGS; do
+	  case $arg in
+	  -pie | -fpie | -fPIE) ;;
+	  *) symtab_cflags+=" $arg" ;;
+	  esac
+	done
+
+	# Now compile the dynamic symbol file.
+	func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+	# Clean up the generated files.
+	func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
+
+	# Transform the symbol file into the correct name.
+	symfileobj="$output_objdir/${my_outputname}S.$objext"
+	case $host in
+	*cygwin* | *mingw* | *cegcc* )
+	  if test -f "$output_objdir/$my_outputname.def"; then
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	  else
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  fi
+	  ;;
+	*)
+	  compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  ;;
+	esac
+	;;
+      *)
+	func_fatal_error "unknown suffix for \`$my_dlsyms'"
+	;;
+      esac
+    else
+      # We keep going just in case the user didn't refer to
+      # lt_preloaded_symbols.  The linker will fail if global_symbol_pipe
+      # really was required.
+
+      # Nullify the symbol file.
+      compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+      finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+    fi
+}
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, it also handles 64-bit binaries.
+func_win32_libid ()
+{
+  $opt_debug
+  win32_libid_type="unknown"
+  win32_fileres=`file -L $1 2>/dev/null`
+  case $win32_fileres in
+  *ar\ archive\ import\ library*) # definitely import
+    win32_libid_type="x86 archive import"
+    ;;
+  *ar\ archive*) # could be an import, or static
+    # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+    if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+       $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+      func_to_tool_file "$1" func_convert_file_msys_to_w32
+      win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+	$SED -n -e '
+	    1,100{
+		/ I /{
+		    s,.*,import,
+		    p
+		    q
+		}
+	    }'`
+      case $win32_nmres in
+      import*)  win32_libid_type="x86 archive import";;
+      *)        win32_libid_type="x86 archive static";;
+      esac
+    fi
+    ;;
+  *DLL*)
+    win32_libid_type="x86 DLL"
+    ;;
+  *executable*) # but shell scripts are "executable" too...
+    case $win32_fileres in
+    *MS\ Windows\ PE\ Intel*)
+      win32_libid_type="x86 DLL"
+      ;;
+    esac
+    ;;
+  esac
+  $ECHO "$win32_libid_type"
+}
+
+# func_cygming_dll_for_implib ARG
+#
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib ()
+{
+  $opt_debug
+  sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
+}
+
+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
+#
+# This is the core of a fallback implementation of a
+# platform-specific function to extract the name of the
+# DLL associated with the specified import library LIBNAME.
+#
+# SECTION_NAME is either .idata$6 or .idata$7, depending
+# on the platform and compiler that created the implib.
+#
+# Echoes the name of the DLL associated with the
+# specified import library.
+func_cygming_dll_for_implib_fallback_core ()
+{
+  $opt_debug
+  match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
+  $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
+    $SED '/^Contents of section '"$match_literal"':/{
+      # Place marker at beginning of archive member dllname section
+      s/.*/====MARK====/
+      p
+      d
+    }
+    # These lines can sometimes be longer than 43 characters, but
+    # are always uninteresting
+    /:[	 ]*file format pe[i]\{,1\}-/d
+    /^In archive [^:]*:/d
+    # Ensure marker is printed
+    /^====MARK====/p
+    # Remove all lines with less than 43 characters
+    /^.\{43\}/!d
+    # From remaining lines, remove first 43 characters
+    s/^.\{43\}//' |
+    $SED -n '
+      # Join marker and all lines until next marker into a single line
+      /^====MARK====/ b para
+      H
+      $ b para
+      b
+      :para
+      x
+      s/\n//g
+      # Remove the marker
+      s/^====MARK====//
+      # Remove trailing dots and whitespace
+      s/[\. \t]*$//
+      # Print
+      /./p' |
+    # we now have a list, one entry per line, of the stringified
+    # contents of the appropriate section of all members of the
+    # archive which possess that section. Heuristic: eliminate
+    # all those which have a first or second character that is
+    # a '.' (that is, objdump's representation of an unprintable
+    # character). This should work for all archives with fewer than
+    # 0x302f exports -- but will fail for DLLs whose name actually
+    # begins with a literal '.' or a single character followed by
+    # a '.'.
+    #
+    # Of those that remain, print the first one.
+    $SED -e '/^\./d;/^.\./d;q'
+}
+
+# func_cygming_gnu_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is a GNU/binutils-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_gnu_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
+  test -n "$func_cygming_gnu_implib_tmp"
+}
+
+# func_cygming_ms_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is an MS-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_ms_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
+  test -n "$func_cygming_ms_implib_tmp"
+}
+
+# func_cygming_dll_for_implib_fallback ARG
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+#
+# This fallback implementation is for use when $DLLTOOL
+# does not support the --identify-strict option.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib_fallback ()
+{
+  $opt_debug
+  if func_cygming_gnu_implib_p "$1" ; then
+    # binutils import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
+  elif func_cygming_ms_implib_p "$1" ; then
+    # ms-generated import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
+  else
+    # unknown
+    sharedlib_from_linklib_result=""
+  fi
+}
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+    $opt_debug
+    f_ex_an_ar_dir="$1"; shift
+    f_ex_an_ar_oldlib="$1"
+    if test "$lock_old_archive_extraction" = yes; then
+      lockfile=$f_ex_an_ar_oldlib.lock
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    fi
+    func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+		   'stat=$?; rm -f "$lockfile"; exit $stat'
+    if test "$lock_old_archive_extraction" = yes; then
+      $opt_dry_run || rm -f "$lockfile"
+    fi
+    if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+     :
+    else
+      func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+    fi
+}
+
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+    $opt_debug
+    my_gentop="$1"; shift
+    my_oldlibs=${1+"$@"}
+    my_oldobjs=""
+    my_xlib=""
+    my_xabs=""
+    my_xdir=""
+
+    for my_xlib in $my_oldlibs; do
+      # Extract the objects.
+      case $my_xlib in
+	[\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
+	*) my_xabs=`pwd`"/$my_xlib" ;;
+      esac
+      func_basename "$my_xlib"
+      my_xlib="$func_basename_result"
+      my_xlib_u=$my_xlib
+      while :; do
+        case " $extracted_archives " in
+	*" $my_xlib_u "*)
+	  func_arith $extracted_serial + 1
+	  extracted_serial=$func_arith_result
+	  my_xlib_u=lt$extracted_serial-$my_xlib ;;
+	*) break ;;
+	esac
+      done
+      extracted_archives="$extracted_archives $my_xlib_u"
+      my_xdir="$my_gentop/$my_xlib_u"
+
+      func_mkdir_p "$my_xdir"
+
+      case $host in
+      *-darwin*)
+	func_verbose "Extracting $my_xabs"
+	# Do not bother doing anything if just a dry run
+	$opt_dry_run || {
+	  darwin_orig_dir=`pwd`
+	  cd $my_xdir || exit $?
+	  darwin_archive=$my_xabs
+	  darwin_curdir=`pwd`
+	  darwin_base_archive=`basename "$darwin_archive"`
+	  darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+	  if test -n "$darwin_arches"; then
+	    darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+	    darwin_arch=
+	    func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+	    for darwin_arch in  $darwin_arches ; do
+	      func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
+	      cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      func_extract_an_archive "`pwd`" "${darwin_base_archive}"
+	      cd "$darwin_curdir"
+	      $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
+	    done # $darwin_arches
+            ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+	    darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
+	    darwin_file=
+	    darwin_files=
+	    for darwin_file in $darwin_filelist; do
+	      darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+	      $LIPO -create -output "$darwin_file" $darwin_files
+	    done # $darwin_filelist
+	    $RM -rf unfat-$$
+	    cd "$darwin_orig_dir"
+	  else
+	    cd $darwin_orig_dir
+	    func_extract_an_archive "$my_xdir" "$my_xabs"
+	  fi # $darwin_arches
+	} # !$opt_dry_run
+	;;
+      *)
+        func_extract_an_archive "$my_xdir" "$my_xabs"
+	;;
+      esac
+      my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+    done
+
+    func_extract_archives_result="$my_oldobjs"
+}
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw
+# wrapper executable.  Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take.  If 'yes', then the emitted script
+# will assume that the directory in which it is stored is
+# the $objdir directory.  This is a cygwin/mingw-specific
+# behavior.
+func_emit_wrapper ()
+{
+	func_emit_wrapper_arg1=${1-no}
+
+	$ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+  # install mode needs the following variables:
+  generated_by_libtool_version='$macro_version'
+  notinst_deplibs='$notinst_deplibs'
+else
+  # When we are sourced in execute mode, \$file and \$ECHO are already set.
+  if test \"\$libtool_execute_magic\" != \"$magic\"; then
+    file=\"\$0\""
+
+    qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
+    $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+    ECHO=\"$qECHO\"
+  fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) are identical between the wrapper
+# /script/ and the wrapper /executable/, which is used only on
+# Windows platforms, and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options which match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
+lt_option_debug=
+func_parse_lt_options ()
+{
+  lt_script_arg0=\$0
+  shift
+  for lt_opt
+  do
+    case \"\$lt_opt\" in
+    --lt-debug) lt_option_debug=1 ;;
+    --lt-dump-script)
+        lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+        test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+        lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+        cat \"\$lt_dump_D/\$lt_dump_F\"
+        exit 0
+      ;;
+    --lt-*)
+        \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+        exit 1
+      ;;
+    esac
+  done
+
+  # Print the debug banner immediately:
+  if test -n \"\$lt_option_debug\"; then
+    echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
+  fi
+}
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+  lt_dump_args_N=1;
+  for lt_arg
+  do
+    \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
+    lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+  done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+  case $host in
+  # Backslashes separate directories on plain Windows
+  *-*-mingw | *-*-os2* | *-cegcc*)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+    ;;
+
+  *)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+    ;;
+  esac
+  $ECHO "\
+      \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+      exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+  case \" \$* \" in
+  *\\ --lt-*)
+    for lt_wr_arg
+    do
+      case \$lt_wr_arg in
+      --lt-*) ;;
+      *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+      esac
+      shift
+    done ;;
+  esac
+  func_exec_program_core \${1+\"\$@\"}
+}
+
+  # Parse options
+  func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+  # Find the directory that this script lives in.
+  thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+  test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+  # Follow symbolic links until we get to the real thisdir.
+  file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+  while test -n \"\$file\"; do
+    destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+    # If there was a directory component, then change thisdir.
+    if test \"x\$destdir\" != \"x\$file\"; then
+      case \"\$destdir\" in
+      [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+      *) thisdir=\"\$thisdir/\$destdir\" ;;
+      esac
+    fi
+
+    file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+    file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+  done
+
+  # Usually 'no', except on cygwin/mingw when embedded into
+  # the cwrapper.
+  WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+  if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+    # special case for '.'
+    if test \"\$thisdir\" = \".\"; then
+      thisdir=\`pwd\`
+    fi
+    # remove .libs from thisdir
+    case \"\$thisdir\" in
+    *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+    $objdir )   thisdir=. ;;
+    esac
+  fi
+
+  # Try to get the absolute directory name.
+  absdir=\`cd \"\$thisdir\" && pwd\`
+  test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+	if test "$fast_install" = yes; then
+	  $ECHO "\
+  program=lt-'$outputname'$exeext
+  progdir=\"\$thisdir/$objdir\"
+
+  if test ! -f \"\$progdir/\$program\" ||
+     { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
+       test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+    file=\"\$\$-\$program\"
+
+    if test ! -d \"\$progdir\"; then
+      $MKDIR \"\$progdir\"
+    else
+      $RM \"\$progdir/\$file\"
+    fi"
+
+	  $ECHO "\
+
+    # relink executable if necessary
+    if test -n \"\$relink_command\"; then
+      if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+      else
+	$ECHO \"\$relink_command_output\" >&2
+	$RM \"\$progdir/\$file\"
+	exit 1
+      fi
+    fi
+
+    $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+    { $RM \"\$progdir/\$program\";
+      $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+    $RM \"\$progdir/\$file\"
+  fi"
+	else
+	  $ECHO "\
+  program='$outputname'
+  progdir=\"\$thisdir/$objdir\"
+"
+	fi
+
+	$ECHO "\
+
+  if test -f \"\$progdir/\$program\"; then"
+
+	# Fix the DLL searchpath if we need to.  Do this before prepending
+	# to shlibpath, because on Windows, both are PATH and uninstalled
+	# libraries must come first.
+	if test -n "$dllsearchpath"; then
+	  $ECHO "\
+    # Add the dll search path components to the executable PATH
+    PATH=$dllsearchpath:\$PATH
+"
+	fi
+
+	# Export our shlibpath_var if we have one.
+	if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+	  $ECHO "\
+    # Add our own library path to $shlibpath_var
+    $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+    # Some systems cannot cope with colon-terminated $shlibpath_var
+    # The second colon is a workaround for a bug in BeOS R4 sed
+    $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+    export $shlibpath_var
+"
+	fi
+
+	$ECHO "\
+    if test \"\$libtool_execute_magic\" != \"$magic\"; then
+      # Run the actual program with our arguments.
+      func_exec_program \${1+\"\$@\"}
+    fi
+  else
+    # The program doesn't exist.
+    \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
+    \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+    \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+    exit 1
+  fi
+fi\
+"
+}
+
+
+# func_emit_cwrapperexe_src
+# Emit the source code for a wrapper executable on stdout.
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+	cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+   Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+
+   The $output program cannot be directly executed until all the libtool
+   libraries that it depends on are installed.
+
+   This wrapper executable should never be moved out of the build directory.
+   If it is, it will not operate correctly.
+*/
+EOF
+	    cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _MSC_VER
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+#  include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+/* declarations of non-ANSI functions */
+#if defined(__MINGW32__)
+# ifdef __STRICT_ANSI__
+int _putenv (const char *);
+# endif
+#elif defined(__CYGWIN__)
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined (other platforms) ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined(_MSC_VER)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+# define S_IXUSR _S_IEXEC
+# ifndef _INTPTR_T_DEFINED
+#  define _INTPTR_T_DEFINED
+#  define intptr_t int
+# endif
+#elif defined(__MINGW32__)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+#elif defined(__CYGWIN__)
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined (other platforms) ... */
+#endif
+
+#if defined(PATH_MAX)
+# define LT_PATHMAX PATH_MAX
+#elif defined(MAXPATHLEN)
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
+  defined (__OS2__)
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+#  define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+#  define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+	(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num)      ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+  if (stale) { free ((void *) stale); stale = 0; } \
+} while (0)
+
+#if defined(LT_DEBUGWRAPPER)
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+	    cat <<EOF
+volatile const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+	    if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+              func_to_host_path "$temp_rpath"
+	      cat <<EOF
+const char * LIB_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * LIB_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test -n "$dllsearchpath"; then
+              func_to_host_path "$dllsearchpath:"
+	      cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test "$fast_install" = yes; then
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+	    else
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+	    fi
+
+
+	    cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX         "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt       = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt            = LTWRAPPER_OPTION_PREFIX "debug";
+
+int
+main (int argc, char *argv[])
+{
+  char **newargz;
+  int  newargc;
+  char *tmp_pathspec;
+  char *actual_cwrapper_path;
+  char *actual_cwrapper_name;
+  char *target_name;
+  char *lt_argv_zero;
+  intptr_t rval = 127;
+
+  int i;
+
+  program_name = (char *) xstrdup (base_name (argv[0]));
+  newargz = XMALLOC (char *, argc + 1);
+
+  /* very simple arg parsing; don't want to rely on getopt.
+   * also, copy all non-cwrapper options to newargz, except
+   * argv[0], which is handled differently
+   */
+  newargc=0;
+  for (i = 1; i < argc; i++)
+    {
+      if (strcmp (argv[i], dumpscript_opt) == 0)
+	{
+EOF
+	    case "$host" in
+	      *mingw* | *cygwin* )
+		# make stdout use "unix" line endings
+		echo "          setmode(1,_O_BINARY);"
+		;;
+	      esac
+
+	    cat <<"EOF"
+	  lt_dump_script (stdout);
+	  return 0;
+	}
+      if (strcmp (argv[i], debug_opt) == 0)
+	{
+          lt_debug = 1;
+          continue;
+	}
+      if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
+        {
+          /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+             namespace, but it is not one of the ones we know about and
+             have already dealt with, above (including dump-script), then
+             report an error. Otherwise, targets might begin to believe
+             they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+             namespace. The first time any user complains about this, we'll
+             need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+             or a configure.ac-settable value.
+           */
+          lt_fatal (__FILE__, __LINE__,
+		    "unrecognized %s option: '%s'",
+                    ltwrapper_option_prefix, argv[i]);
+        }
+      /* otherwise ... */
+      newargz[++newargc] = xstrdup (argv[i]);
+    }
+  newargz[++newargc] = NULL;
+
+EOF
+	    cat <<EOF
+  /* The GNU banner must be the first non-error debug message */
+  lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
+EOF
+	    cat <<"EOF"
+  lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+  tmp_pathspec = find_executable (argv[0]);
+  if (tmp_pathspec == NULL)
+    lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (before symlink chase) at: %s\n",
+		  tmp_pathspec);
+
+  actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (after symlink chase) at: %s\n",
+		  actual_cwrapper_path);
+  XFREE (tmp_pathspec);
+
+  actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+  strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+  /* wrapper name transforms */
+  strendzap (actual_cwrapper_name, ".exe");
+  tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+  XFREE (actual_cwrapper_name);
+  actual_cwrapper_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  /* target_name transforms -- use actual target program name; might have lt- prefix */
+  target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+  strendzap (target_name, ".exe");
+  tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+  XFREE (target_name);
+  target_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(main) libtool target name: %s\n",
+		  target_name);
+EOF
+
+	    cat <<EOF
+  newargz[0] =
+    XMALLOC (char, (strlen (actual_cwrapper_path) +
+		    strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+  strcpy (newargz[0], actual_cwrapper_path);
+  strcat (newargz[0], "$objdir");
+  strcat (newargz[0], "/");
+EOF
+
+	    cat <<"EOF"
+  /* stop here, and copy so we don't have to do this twice */
+  tmp_pathspec = xstrdup (newargz[0]);
+
+  /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+  strcat (newargz[0], actual_cwrapper_name);
+
+  /* DO want the lt- prefix here if it exists, so use target_name */
+  lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+  XFREE (tmp_pathspec);
+  tmp_pathspec = NULL;
+EOF
+
+	    case $host_os in
+	      mingw*)
+	    cat <<"EOF"
+  {
+    char* p;
+    while ((p = strchr (newargz[0], '\\')) != NULL)
+      {
+	*p = '/';
+      }
+    while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+      {
+	*p = '/';
+      }
+  }
+EOF
+	    ;;
+	    esac
+
+	    cat <<"EOF"
+  XFREE (target_name);
+  XFREE (actual_cwrapper_path);
+  XFREE (actual_cwrapper_name);
+
+  lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+  lt_setenv ("DUALCASE", "1");  /* for MSK sh */
+  /* Update the DLL searchpath.  EXE_PATH_VALUE ($dllsearchpath) must
+     be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
+     because on Windows, both *_VARNAMEs are PATH but uninstalled
+     libraries must come first. */
+  lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+  lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+  lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+		  nonnull (lt_argv_zero));
+  for (i = 0; i < newargc; i++)
+    {
+      lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+		      i, nonnull (newargz[i]));
+    }
+
+EOF
+
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+  /* execv doesn't actually work on mingw as expected on unix */
+  newargz = prepare_spawn (newargz);
+  rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+  if (rval == -1)
+    {
+      /* failed to start process */
+      lt_debugprintf (__FILE__, __LINE__,
+		      "(main) failed to launch target \"%s\": %s\n",
+		      lt_argv_zero, nonnull (strerror (errno)));
+      return 127;
+    }
+  return rval;
+EOF
+		;;
+	      *)
+		cat <<"EOF"
+  execv (lt_argv_zero, newargz);
+  return rval; /* =127, but avoids unused variable warning */
+EOF
+		;;
+	    esac
+
+	    cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+  void *p = (void *) malloc (num);
+  if (!p)
+    lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+  return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+  return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+			  string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+  const char *base;
+
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  /* Skip over the disk name in MSDOS pathnames. */
+  if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+    name += 2;
+#endif
+
+  for (base = name; *name; name++)
+    if (IS_DIR_SEPARATOR (*name))
+      base = name + 1;
+  return base;
+}
+
+int
+check_executable (const char *path)
+{
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if ((stat (path, &st) >= 0)
+      && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+    return 1;
+  else
+    return 0;
+}
+
+int
+make_executable (const char *path)
+{
+  int rval = 0;
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if (stat (path, &st) >= 0)
+    {
+      rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+    }
+  return rval;
+}
+
+/* Searches for the full path of the wrapper.  Returns
+   newly allocated full path name if found, NULL otherwise.
+   Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+  int has_slash = 0;
+  const char *p;
+  const char *p_next;
+  /* static buffer for getcwd */
+  char tmp[LT_PATHMAX + 1];
+  int tmp_len;
+  char *concat_name;
+
+  lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+                  nonempty (wrapper));
+
+  if ((wrapper == NULL) || (*wrapper == '\0'))
+    return NULL;
+
+  /* Absolute path? */
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+    {
+      concat_name = xstrdup (wrapper);
+      if (check_executable (concat_name))
+	return concat_name;
+      XFREE (concat_name);
+    }
+  else
+    {
+#endif
+      if (IS_DIR_SEPARATOR (wrapper[0]))
+	{
+	  concat_name = xstrdup (wrapper);
+	  if (check_executable (concat_name))
+	    return concat_name;
+	  XFREE (concat_name);
+	}
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+    }
+#endif
+
+  for (p = wrapper; *p; p++)
+    if (*p == '/')
+      {
+	has_slash = 1;
+	break;
+      }
+  if (!has_slash)
+    {
+      /* no slashes; search PATH */
+      const char *path = getenv ("PATH");
+      if (path != NULL)
+	{
+	  for (p = path; *p; p = p_next)
+	    {
+	      const char *q;
+	      size_t p_len;
+	      for (q = p; *q; q++)
+		if (IS_PATH_SEPARATOR (*q))
+		  break;
+	      p_len = q - p;
+	      p_next = (*q == '\0' ? q : q + 1);
+	      if (p_len == 0)
+		{
+		  /* empty path: current directory */
+		  if (getcwd (tmp, LT_PATHMAX) == NULL)
+		    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+                              nonnull (strerror (errno)));
+		  tmp_len = strlen (tmp);
+		  concat_name =
+		    XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, tmp, tmp_len);
+		  concat_name[tmp_len] = '/';
+		  strcpy (concat_name + tmp_len + 1, wrapper);
+		}
+	      else
+		{
+		  concat_name =
+		    XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, p, p_len);
+		  concat_name[p_len] = '/';
+		  strcpy (concat_name + p_len + 1, wrapper);
+		}
+	      if (check_executable (concat_name))
+		return concat_name;
+	      XFREE (concat_name);
+	    }
+	}
+      /* not found in PATH; assume curdir */
+    }
+  /* Relative path | not found in path: prepend cwd */
+  if (getcwd (tmp, LT_PATHMAX) == NULL)
+    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+              nonnull (strerror (errno)));
+  tmp_len = strlen (tmp);
+  concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+  memcpy (concat_name, tmp, tmp_len);
+  concat_name[tmp_len] = '/';
+  strcpy (concat_name + tmp_len + 1, wrapper);
+
+  if (check_executable (concat_name))
+    return concat_name;
+  XFREE (concat_name);
+  return NULL;
+}
+
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+  return xstrdup (pathspec);
+#else
+  char buf[LT_PATHMAX];
+  struct stat s;
+  char *tmp_pathspec = xstrdup (pathspec);
+  char *p;
+  int has_symlinks = 0;
+  while (strlen (tmp_pathspec) && !has_symlinks)
+    {
+      lt_debugprintf (__FILE__, __LINE__,
+		      "checking path component for symlinks: %s\n",
+		      tmp_pathspec);
+      if (lstat (tmp_pathspec, &s) == 0)
+	{
+	  if (S_ISLNK (s.st_mode) != 0)
+	    {
+	      has_symlinks = 1;
+	      break;
+	    }
+
+	  /* search backwards for last DIR_SEPARATOR */
+	  p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+	  while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    p--;
+	  if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    {
+	      /* no more DIR_SEPARATORS left */
+	      break;
+	    }
+	  *p = '\0';
+	}
+      else
+	{
+	  lt_fatal (__FILE__, __LINE__,
+		    "error accessing file \"%s\": %s",
+		    tmp_pathspec, nonnull (strerror (errno)));
+	}
+    }
+  XFREE (tmp_pathspec);
+
+  if (!has_symlinks)
+    {
+      return xstrdup (pathspec);
+    }
+
+  tmp_pathspec = realpath (pathspec, buf);
+  if (tmp_pathspec == 0)
+    {
+      lt_fatal (__FILE__, __LINE__,
+		"could not follow symlinks for %s", pathspec);
+    }
+  return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+  size_t len, patlen;
+
+  assert (str != NULL);
+  assert (pat != NULL);
+
+  len = strlen (str);
+  patlen = strlen (pat);
+
+  if (patlen <= len)
+    {
+      str += len - patlen;
+      if (strcmp (str, pat) == 0)
+	*str = '\0';
+    }
+  return str;
+}
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+  va_list args;
+  if (lt_debug)
+    {
+      (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+      va_start (args, fmt);
+      (void) vfprintf (stderr, fmt, args);
+      va_end (args);
+    }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+	       int line, const char *mode,
+	       const char *message, va_list ap)
+{
+  fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+  vfprintf (stderr, message, ap);
+  fprintf (stderr, ".\n");
+
+  if (exit_status >= 0)
+    exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+  va_list ap;
+  va_start (ap, message);
+  lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+  va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+  return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+  return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_setenv) setting '%s' to '%s'\n",
+                  nonnull (name), nonnull (value));
+  {
+#ifdef HAVE_SETENV
+    /* always make a copy, for consistency with !HAVE_SETENV */
+    char *str = xstrdup (value);
+    setenv (name, str, 1);
+#else
+    int len = strlen (name) + 1 + strlen (value) + 1;
+    char *str = XMALLOC (char, len);
+    sprintf (str, "%s=%s", name, value);
+    if (putenv (str) != EXIT_SUCCESS)
+      {
+        XFREE (str);
+      }
+#endif
+  }
+}
+
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+  char *new_value;
+  if (orig_value && *orig_value)
+    {
+      int orig_value_len = strlen (orig_value);
+      int add_len = strlen (add);
+      new_value = XMALLOC (char, add_len + orig_value_len + 1);
+      if (to_end)
+        {
+          strcpy (new_value, orig_value);
+          strcpy (new_value + orig_value_len, add);
+        }
+      else
+        {
+          strcpy (new_value, add);
+          strcpy (new_value + add_len, orig_value);
+        }
+    }
+  else
+    {
+      new_value = xstrdup (add);
+    }
+  return new_value;
+}
+
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      /* some systems can't cope with a ':'-terminated path #' */
+      int len = strlen (new_value);
+      while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+        {
+          new_value[len-1] = '\0';
+        }
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+EOF
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+   Note that spawn() does not by itself call the command interpreter
+     (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+      ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+         GetVersionEx(&v);
+         v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+      }) ? "cmd.exe" : "command.com").
+   Instead it simply concatenates the arguments, separated by ' ', and calls
+   CreateProcess().  We must quote the arguments since Win32 CreateProcess()
+   interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+   special way:
+   - Space and tab are interpreted as delimiters. They are not treated as
+     delimiters if they are surrounded by double quotes: "...".
+   - Unescaped double quotes are removed from the input. Their only effect is
+     that within double quotes, space and tab are treated like normal
+     characters.
+   - Backslashes not followed by double quotes are not special.
+   - But 2*n+1 backslashes followed by a double quote become
+     n backslashes followed by a double quote (n >= 0):
+       \" -> "
+       \\\" -> \"
+       \\\\\" -> \\"
+ */
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+  size_t argc;
+  char **new_argv;
+  size_t i;
+
+  /* Count number of arguments.  */
+  for (argc = 0; argv[argc] != NULL; argc++)
+    ;
+
+  /* Allocate new argument vector.  */
+  new_argv = XMALLOC (char *, argc + 1);
+
+  /* Put quoted arguments into the new argument vector.  */
+  for (i = 0; i < argc; i++)
+    {
+      const char *string = argv[i];
+
+      if (string[0] == '\0')
+	new_argv[i] = xstrdup ("\"\"");
+      else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+	{
+	  int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+	  size_t length;
+	  unsigned int backslashes;
+	  const char *s;
+	  char *quoted_string;
+	  char *p;
+
+	  length = 0;
+	  backslashes = 0;
+	  if (quote_around)
+	    length++;
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		length += backslashes + 1;
+	      length++;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    length += backslashes + 1;
+
+	  quoted_string = XMALLOC (char, length + 1);
+
+	  p = quoted_string;
+	  backslashes = 0;
+	  if (quote_around)
+	    *p++ = '"';
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		{
+		  unsigned int j;
+		  for (j = backslashes + 1; j > 0; j--)
+		    *p++ = '\\';
+		}
+	      *p++ = c;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    {
+	      unsigned int j;
+	      for (j = backslashes; j > 0; j--)
+		*p++ = '\\';
+	      *p++ = '"';
+	    }
+	  *p = '\0';
+
+	  new_argv[i] = quoted_string;
+	}
+      else
+	new_argv[i] = (char *) string;
+    }
+  new_argv[argc] = NULL;
+
+  return new_argv;
+}
+EOF
+		;;
+	    esac
+
+            cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
+	    func_emit_wrapper yes |
+	      $SED -n -e '
+s/^\(.\{79\}\)\(..*\)/\1\
+\2/
+h
+s/\([\\"]\)/\\\1/g
+s/$/\\n/
+s/\([^\n]*\).*/  fputs ("\1", f);/p
+g
+D'
+            cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+    $opt_debug
+    case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+    *import*) : ;;
+    *) false ;;
+    esac
+}
+
+# func_mode_link arg...
+func_mode_link ()
+{
+    $opt_debug
+    case $host in
+    *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+      # It is impossible to link a dll without this setting, and
+      # we shouldn't force the makefile maintainer to figure out
+      # which system we are compiling for in order to pass an extra
+      # flag for every libtool invocation.
+      # allow_undefined=no
+
+      # FIXME: Unfortunately, there are problems with the above when trying
+      # to make a dll which has undefined symbols, in which case not
+      # even a static library is built.  For now, we need to specify
+      # -no-undefined on the libtool link line when we can be certain
+      # that all symbols are satisfied, otherwise we get a static library.
+      allow_undefined=yes
+      ;;
+    *)
+      allow_undefined=yes
+      ;;
+    esac
+    libtool_args=$nonopt
+    base_compile="$nonopt $@"
+    compile_command=$nonopt
+    finalize_command=$nonopt
+
+    compile_rpath=
+    finalize_rpath=
+    compile_shlibpath=
+    finalize_shlibpath=
+    convenience=
+    old_convenience=
+    deplibs=
+    old_deplibs=
+    compiler_flags=
+    linker_flags=
+    dllsearchpath=
+    lib_search_path=`pwd`
+    inst_prefix_dir=
+    new_inherited_linker_flags=
+
+    avoid_version=no
+    bindir=
+    dlfiles=
+    dlprefiles=
+    dlself=no
+    export_dynamic=no
+    export_symbols=
+    export_symbols_regex=
+    generated=
+    libobjs=
+    ltlibs=
+    module=no
+    no_install=no
+    objs=
+    non_pic_objects=
+    precious_files_regex=
+    prefer_static_libs=no
+    preload=no
+    prev=
+    prevarg=
+    release=
+    rpath=
+    xrpath=
+    perm_rpath=
+    temp_rpath=
+    thread_safe=no
+    vinfo=
+    vinfo_number=no
+    weak_libs=
+    single_module="${wl}-single_module"
+    func_infer_tag $base_compile
+
+    # We need to know -static, to get the right output filenames.
+    for arg
+    do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	break
+	;;
+      -all-static | -static | -static-libtool-libs)
+	case $arg in
+	-all-static)
+	  if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
+	    func_warning "complete static linking is impossible in this configuration"
+	  fi
+	  if test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	-static)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=built
+	  ;;
+	-static-libtool-libs)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	esac
+	build_libtool_libs=no
+	build_old_libs=yes
+	break
+	;;
+      esac
+    done
+
+    # See if our shared archives depend on static archives.
+    test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+    # Go through the arguments, transforming them on the way.
+    while test "$#" -gt 0; do
+      arg="$1"
+      shift
+      func_quote_for_eval "$arg"
+      qarg=$func_quote_for_eval_unquoted_result
+      libtool_args+=" $func_quote_for_eval_result"
+
+      # If the previous option needs an argument, assign it.
+      if test -n "$prev"; then
+	case $prev in
+	output)
+	  compile_command+=" @OUTPUT@"
+	  finalize_command+=" @OUTPUT@"
+	  ;;
+	esac
+
+	case $prev in
+	bindir)
+	  bindir="$arg"
+	  prev=
+	  continue
+	  ;;
+	dlfiles|dlprefiles)
+	  if test "$preload" = no; then
+	    # Add the symbol object into the linking commands.
+	    compile_command+=" @SYMFILE@"
+	    finalize_command+=" @SYMFILE@"
+	    preload=yes
+	  fi
+	  case $arg in
+	  *.la | *.lo) ;;  # We handle these cases below.
+	  force)
+	    if test "$dlself" = no; then
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  self)
+	    if test "$prev" = dlprefiles; then
+	      dlself=yes
+	    elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
+	      dlself=yes
+	    else
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  *)
+	    if test "$prev" = dlfiles; then
+	      dlfiles+=" $arg"
+	    else
+	      dlprefiles+=" $arg"
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  esac
+	  ;;
+	expsyms)
+	  export_symbols="$arg"
+	  test -f "$arg" \
+	    || func_fatal_error "symbol file \`$arg' does not exist"
+	  prev=
+	  continue
+	  ;;
+	expsyms_regex)
+	  export_symbols_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	framework)
+	  case $host in
+	    *-*-darwin*)
+	      case "$deplibs " in
+		*" $qarg.ltframework "*) ;;
+		*) deplibs+=" $qarg.ltframework" # this is fixed later
+		   ;;
+	      esac
+	      ;;
+	  esac
+	  prev=
+	  continue
+	  ;;
+	inst_prefix)
+	  inst_prefix_dir="$arg"
+	  prev=
+	  continue
+	  ;;
+	objectlist)
+	  if test -f "$arg"; then
+	    save_arg=$arg
+	    moreargs=
+	    for fil in `cat "$save_arg"`
+	    do
+#	      moreargs+=" $fil"
+	      arg=$fil
+	      # A libtool-controlled object.
+
+	      # Check to see that this really is a libtool object.
+	      if func_lalib_unsafe_p "$arg"; then
+		pic_object=
+		non_pic_object=
+
+		# Read the .lo file
+		func_source "$arg"
+
+		if test -z "$pic_object" ||
+		   test -z "$non_pic_object" ||
+		   test "$pic_object" = none &&
+		   test "$non_pic_object" = none; then
+		  func_fatal_error "cannot find name of object for \`$arg'"
+		fi
+
+		# Extract subdirectory from the argument.
+		func_dirname "$arg" "/" ""
+		xdir="$func_dirname_result"
+
+		if test "$pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  pic_object="$xdir$pic_object"
+
+		  if test "$prev" = dlfiles; then
+		    if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		      dlfiles+=" $pic_object"
+		      prev=
+		      continue
+		    else
+		      # If libtool objects are unsupported, then we need to preload.
+		      prev=dlprefiles
+		    fi
+		  fi
+
+		  # CHECK ME:  I think I busted this.  -Ossama
+		  if test "$prev" = dlprefiles; then
+		    # Preload the old-style object.
+		    dlprefiles+=" $pic_object"
+		    prev=
+		  fi
+
+		  # A PIC object.
+		  libobjs+=" $pic_object"
+		  arg="$pic_object"
+		fi
+
+		# Non-PIC object.
+		if test "$non_pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  non_pic_object="$xdir$non_pic_object"
+
+		  # A standard non-PIC object
+		  non_pic_objects+=" $non_pic_object"
+		  if test -z "$pic_object" || test "$pic_object" = none ; then
+		    arg="$non_pic_object"
+		  fi
+		else
+		  # If the PIC object exists, use it instead.
+		  # $xdir was prepended to $pic_object above.
+		  non_pic_object="$pic_object"
+		  non_pic_objects+=" $non_pic_object"
+		fi
+	      else
+		# Only an error if not doing a dry-run.
+		if $opt_dry_run; then
+		  # Extract subdirectory from the argument.
+		  func_dirname "$arg" "/" ""
+		  xdir="$func_dirname_result"
+
+		  func_lo2o "$arg"
+		  pic_object=$xdir$objdir/$func_lo2o_result
+		  non_pic_object=$xdir$func_lo2o_result
+		  libobjs+=" $pic_object"
+		  non_pic_objects+=" $non_pic_object"
+	        else
+		  func_fatal_error "\`$arg' is not a valid libtool object"
+		fi
+	      fi
+	    done
+	  else
+	    func_fatal_error "link input file \`$arg' does not exist"
+	  fi
+	  arg=$save_arg
+	  prev=
+	  continue
+	  ;;
+	precious_regex)
+	  precious_files_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	release)
+	  release="-$arg"
+	  prev=
+	  continue
+	  ;;
+	rpath | xrpath)
+	  # We need an absolute path.
+	  case $arg in
+	  [\\/]* | [A-Za-z]:[\\/]*) ;;
+	  *)
+	    func_fatal_error "only absolute run-paths are allowed"
+	    ;;
+	  esac
+	  if test "$prev" = rpath; then
+	    case "$rpath " in
+	    *" $arg "*) ;;
+	    *) rpath+=" $arg" ;;
+	    esac
+	  else
+	    case "$xrpath " in
+	    *" $arg "*) ;;
+	    *) xrpath+=" $arg" ;;
+	    esac
+	  fi
+	  prev=
+	  continue
+	  ;;
+	shrext)
+	  shrext_cmds="$arg"
+	  prev=
+	  continue
+	  ;;
+	weak)
+	  weak_libs+=" $arg"
+	  prev=
+	  continue
+	  ;;
+	xcclinker)
+	  linker_flags+=" $qarg"
+	  compiler_flags+=" $qarg"
+	  prev=
+	  compile_command+=" $qarg"
+	  finalize_command+=" $qarg"
+	  continue
+	  ;;
+	xcompiler)
+	  compiler_flags+=" $qarg"
+	  prev=
+	  compile_command+=" $qarg"
+	  finalize_command+=" $qarg"
+	  continue
+	  ;;
+	xlinker)
+	  linker_flags+=" $qarg"
+	  compiler_flags+=" $wl$qarg"
+	  prev=
+	  compile_command+=" $wl$qarg"
+	  finalize_command+=" $wl$qarg"
+	  continue
+	  ;;
+	*)
+	  eval "$prev=\"\$arg\""
+	  prev=
+	  continue
+	  ;;
+	esac
+      fi # test -n "$prev"
+
+      prevarg="$arg"
+
+      case $arg in
+      -all-static)
+	if test -n "$link_static_flag"; then
+	  # See comment for -static flag below, for more details.
+	  compile_command+=" $link_static_flag"
+	  finalize_command+=" $link_static_flag"
+	fi
+	continue
+	;;
+
+      -allow-undefined)
+	# FIXME: remove this flag sometime in the future.
+	func_fatal_error "\`-allow-undefined' must not be used because it is the default"
+	;;
+
+      -avoid-version)
+	avoid_version=yes
+	continue
+	;;
+
+      -bindir)
+	prev=bindir
+	continue
+	;;
+
+      -dlopen)
+	prev=dlfiles
+	continue
+	;;
+
+      -dlpreopen)
+	prev=dlprefiles
+	continue
+	;;
+
+      -export-dynamic)
+	export_dynamic=yes
+	continue
+	;;
+
+      -export-symbols | -export-symbols-regex)
+	if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+	  func_fatal_error "more than one -exported-symbols argument is not allowed"
+	fi
+	if test "X$arg" = "X-export-symbols"; then
+	  prev=expsyms
+	else
+	  prev=expsyms_regex
+	fi
+	continue
+	;;
+
+      -framework)
+	prev=framework
+	continue
+	;;
+
+      -inst-prefix-dir)
+	prev=inst_prefix
+	continue
+	;;
+
+      # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*,
+      # so if we see these flags, be careful not to treat them like -L.
+      -L[A-Z][A-Z]*:*)
+	case $with_gcc/$host in
+	no/*-*-irix* | /*-*-irix*)
+	  compile_command+=" $arg"
+	  finalize_command+=" $arg"
+	  ;;
+	esac
+	continue
+	;;
+
+      -L*)
+	func_stripname "-L" '' "$arg"
+	if test -z "$func_stripname_result"; then
+	  if test "$#" -gt 0; then
+	    func_fatal_error "require no space between \`-L' and \`$1'"
+	  else
+	    func_fatal_error "need path for \`-L' option"
+	  fi
+	fi
+	func_resolve_sysroot "$func_stripname_result"
+	dir=$func_resolve_sysroot_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	*)
+	  absdir=`cd "$dir" && pwd`
+	  test -z "$absdir" && \
+	    func_fatal_error "cannot determine absolute directory name of \`$dir'"
+	  dir="$absdir"
+	  ;;
+	esac
+	case "$deplibs " in
+	*" -L$dir "* | *" $arg "*)
+	  # Will only happen for absolute or sysroot arguments
+	  ;;
+	*)
+	  # Preserve sysroot, but never include relative directories
+	  case $dir in
+	    [\\/]* | [A-Za-z]:[\\/]* | =*) deplibs+=" $arg" ;;
+	    *) deplibs+=" -L$dir" ;;
+	  esac
+	  lib_search_path+=" $dir"
+	  ;;
+	esac
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
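+	  # On these DLL platforms, also remember the sibling ../bin directory
+	  # (a trailing /lib is rewritten to /bin), since that is where the
+	  # corresponding DLLs are commonly installed.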
+	  testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$dir:"*) ;;
+	  ::) dllsearchpath=$dir;;
+	  *) dllsearchpath+=":$dir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath+=":$testbindir";;
+	  esac
+	  ;;
+	esac
+	continue
+	;;
+
+      -l*)
+	if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # These systems don't actually have a C or math library (as such)
+	    continue
+	    ;;
+	  *-*-os2*)
+	    # These systems don't actually have a C library (as such)
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C and math libraries are in the System framework
+	    deplibs+=" System.ltframework"
+	    continue
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  esac
+	elif test "X$arg" = "X-lc_r"; then
+	 case $host in
+	 *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	   # Do not include libc_r directly, use -pthread flag.
+	   continue
+	   ;;
+	 esac
+	fi
+	deplibs+=" $arg"
+	continue
+	;;
+
+      -module)
+	module=yes
+	continue
+	;;
+
+      # Tru64 UNIX uses -model [arg] to determine the layout of C++
+      # classes, name mangling, and exception handling.
+      # Darwin uses the -arch flag to determine output architecture.
+      -model|-arch|-isysroot|--sysroot)
+	compiler_flags+=" $arg"
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+	prev=xcompiler
+	continue
+	;;
+
+      -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+      |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	compiler_flags+=" $arg"
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+	case "$new_inherited_linker_flags " in
+	    *" $arg "*) ;;
+	    * ) new_inherited_linker_flags+=" $arg" ;;
+	esac
+	continue
+	;;
+
+      -multi_module)
+	single_module="${wl}-multi_module"
+	continue
+	;;
+
+      -no-fast-install)
+	fast_install=no
+	continue
+	;;
+
+      -no-install)
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+	  # The PATH hackery in wrapper scripts is required on Windows
+	  # and Darwin in order for the loader to find any dlls it needs.
+	  func_warning "\`-no-install' is ignored for $host"
+	  func_warning "assuming \`-no-fast-install' instead"
+	  fast_install=no
+	  ;;
+	*) no_install=yes ;;
+	esac
+	continue
+	;;
+
+      -no-undefined)
+	allow_undefined=no
+	continue
+	;;
+
+      -objectlist)
+	prev=objectlist
+	continue
+	;;
+
+      -o) prev=output ;;
+
+      -precious-files-regex)
+	prev=precious_regex
+	continue
+	;;
+
+      -release)
+	prev=release
+	continue
+	;;
+
+      -rpath)
+	prev=rpath
+	continue
+	;;
+
+      -R)
+	prev=xrpath
+	continue
+	;;
+
+      -R*)
+	func_stripname '-R' '' "$arg"
+	dir=$func_stripname_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	=*)
+	  func_stripname '=' '' "$dir"
+	  dir=$lt_sysroot$func_stripname_result
+	  ;;
+	*)
+	  func_fatal_error "only absolute run-paths are allowed"
+	  ;;
+	esac
+	case "$xrpath " in
+	*" $dir "*) ;;
+	*) xrpath+=" $dir" ;;
+	esac
+	continue
+	;;
+
+      -shared)
+	# The effects of -shared are defined in a previous loop.
+	continue
+	;;
+
+      -shrext)
+	prev=shrext
+	continue
+	;;
+
+      -static | -static-libtool-libs)
+	# The effects of -static are defined in a previous loop.
+	# We used to do the same as -all-static on platforms that
+	# didn't have a PIC flag, but the assumption that the effects
+	# would be equivalent was wrong.  It would break on at least
+	# Digital Unix and AIX.
+	continue
+	;;
+
+      -thread-safe)
+	thread_safe=yes
+	continue
+	;;
+
+      -version-info)
+	prev=vinfo
+	continue
+	;;
+
+      -version-number)
+	prev=vinfo
+	vinfo_number=yes
+	continue
+	;;
+
+      -weak)
+        prev=weak
+	continue
+	;;
+
+      -Wc,*)
+	func_stripname '-Wc,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg+=" $func_quote_for_eval_result"
+	  compiler_flags+=" $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
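+      # A `-Wl,a,b,c' argument is split at the commas; each piece is quoted and
+      # recorded as a raw linker flag, and also passed to the compiler driver
+      # prefixed with $wl (e.g. `-Wl,' for gcc-style drivers).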
+      -Wl,*)
+	func_stripname '-Wl,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg+=" $wl$func_quote_for_eval_result"
+	  compiler_flags+=" $wl$func_quote_for_eval_result"
+	  linker_flags+=" $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Xcompiler)
+	prev=xcompiler
+	continue
+	;;
+
+      -Xlinker)
+	prev=xlinker
+	continue
+	;;
+
+      -XCClinker)
+	prev=xcclinker
+	continue
+	;;
+
+      # -msg_* for osf cc
+      -msg_*)
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      # Flags to be passed through unchanged, with rationale:
+      # -64, -mips[0-9]      enable 64-bit mode for the SGI compiler
+      # -r[0-9][0-9]*        specify processor for the SGI compiler
+      # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+      # +DA*, +DD*           enable 64-bit mode for the HP compiler
+      # -q*                  compiler args for the IBM compiler
+      # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+      # -F/path              path to uninstalled frameworks, gcc on darwin
+      # -p, -pg, --coverage, -fprofile-*  profiling flags for GCC
+      # @file                GCC response files
+      # -tp=*                Portland pgcc target processor selection
+      # --sysroot=*          for sysroot support
+      # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+      -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+      -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
+      -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+        compile_command+=" $arg"
+        finalize_command+=" $arg"
+        compiler_flags+=" $arg"
+        continue
+        ;;
+
+      # Some other compiler flag.
+      -* | +*)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      *.$objext)
+	# A standard object.
+	objs+=" $arg"
+	;;
+
+      *.lo)
+	# A libtool-controlled object.
+
+	# Check to see that this really is a libtool object.
+	if func_lalib_unsafe_p "$arg"; then
+	  pic_object=
+	  non_pic_object=
+
+	  # Read the .lo file
+	  func_source "$arg"
+
+	  if test -z "$pic_object" ||
+	     test -z "$non_pic_object" ||
+	     test "$pic_object" = none &&
+	     test "$non_pic_object" = none; then
+	    func_fatal_error "cannot find name of object for \`$arg'"
+	  fi
+
+	  # Extract subdirectory from the argument.
+	  func_dirname "$arg" "/" ""
+	  xdir="$func_dirname_result"
+
+	  if test "$pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    pic_object="$xdir$pic_object"
+
+	    if test "$prev" = dlfiles; then
+	      if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		dlfiles+=" $pic_object"
+		prev=
+		continue
+	      else
+		# If libtool objects are unsupported, then we need to preload.
+		prev=dlprefiles
+	      fi
+	    fi
+
+	    # CHECK ME:  I think I busted this.  -Ossama
+	    if test "$prev" = dlprefiles; then
+	      # Preload the old-style object.
+	      dlprefiles+=" $pic_object"
+	      prev=
+	    fi
+
+	    # A PIC object.
+	    libobjs+=" $pic_object"
+	    arg="$pic_object"
+	  fi
+
+	  # Non-PIC object.
+	  if test "$non_pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    non_pic_object="$xdir$non_pic_object"
+
+	    # A standard non-PIC object
+	    non_pic_objects+=" $non_pic_object"
+	    if test -z "$pic_object" || test "$pic_object" = none ; then
+	      arg="$non_pic_object"
+	    fi
+	  else
+	    # If the PIC object exists, use it instead.
+	    # $xdir was prepended to $pic_object above.
+	    non_pic_object="$pic_object"
+	    non_pic_objects+=" $non_pic_object"
+	  fi
+	else
+	  # Only an error if not doing a dry-run.
+	  if $opt_dry_run; then
+	    # Extract subdirectory from the argument.
+	    func_dirname "$arg" "/" ""
+	    xdir="$func_dirname_result"
+
+	    func_lo2o "$arg"
+	    pic_object=$xdir$objdir/$func_lo2o_result
+	    non_pic_object=$xdir$func_lo2o_result
+	    libobjs+=" $pic_object"
+	    non_pic_objects+=" $non_pic_object"
+	  else
+	    func_fatal_error "\`$arg' is not a valid libtool object"
+	  fi
+	fi
+	;;
+
+      *.$libext)
+	# An archive.
+	deplibs+=" $arg"
+	old_deplibs+=" $arg"
+	continue
+	;;
+
+      *.la)
+	# A libtool-controlled library.
+
+	func_resolve_sysroot "$arg"
+	if test "$prev" = dlfiles; then
+	  # This library was specified with -dlopen.
+	  dlfiles+=" $func_resolve_sysroot_result"
+	  prev=
+	elif test "$prev" = dlprefiles; then
+	  # The library was specified with -dlpreopen.
+	  dlprefiles+=" $func_resolve_sysroot_result"
+	  prev=
+	else
+	  deplibs+=" $func_resolve_sysroot_result"
+	fi
+	continue
+	;;
+
+      # Some other compiler argument.
+      *)
+	# Unknown arguments in both finalize_command and compile_command need
+	# to be aesthetically quoted because they are evaled later.
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+      esac # arg
+
+      # Now actually substitute the argument into the commands.
+      if test -n "$arg"; then
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+      fi
+    done # argument parsing loop
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prevarg' option requires an argument"
+
+    if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+      eval arg=\"$export_dynamic_flag_spec\"
+      compile_command+=" $arg"
+      finalize_command+=" $arg"
+    fi
+
+    oldlibs=
+    # calculate the name of the file, without its directory
+    func_basename "$output"
+    outputname="$func_basename_result"
+    libobjs_save="$libobjs"
+
+    if test -n "$shlibpath_var"; then
+      # get the directories listed in $shlibpath_var
+      eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\`
+    else
+      shlib_search_path=
+    fi
+    eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+    eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+    func_dirname "$output" "/" ""
+    output_objdir="$func_dirname_result$objdir"
+    func_to_tool_file "$output_objdir/"
+    tool_output_objdir=$func_to_tool_file_result
+    # Create the object directory.
+    func_mkdir_p "$output_objdir"
+
+    # Determine the type of output
+    case $output in
+    "")
+      func_fatal_help "you must specify an output file"
+      ;;
+    *.$libext) linkmode=oldlib ;;
+    *.lo | *.$objext) linkmode=obj ;;
+    *.la) linkmode=lib ;;
+    *) linkmode=prog ;; # Anything else should be a program.
+    esac
+
+    specialdeplibs=
+
+    libs=
+    # Find all interdependent deplibs by searching for libraries
+    # that are linked more than once (e.g. -la -lb -la)
+    for deplib in $deplibs; do
+      if $opt_preserve_dup_deps ; then
+	case "$libs " in
+	*" $deplib "*) specialdeplibs+=" $deplib" ;;
+	esac
+      fi
+      libs+=" $deplib"
+    done
+
+    if test "$linkmode" = lib; then
+      libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+      # Compute libraries that are listed more than once in $predeps
+      # $postdeps and mark them as special (i.e., whose duplicates are
+      # not to be eliminated).
+      pre_post_deps=
+      if $opt_duplicate_compiler_generated_deps; then
+	for pre_post_dep in $predeps $postdeps; do
+	  case "$pre_post_deps " in
+	  *" $pre_post_dep "*) specialdeplibs+=" $pre_post_deps" ;;
+	  esac
+	  pre_post_deps+=" $pre_post_dep"
+	done
+      fi
+      pre_post_deps=
+    fi
+
+    deplibs=
+    newdependency_libs=
+    newlib_search_path=
+    need_relink=no # whether we're linking any uninstalled libtool libraries
+    notinst_deplibs= # not-installed libtool libraries
+    notinst_path= # paths that contain not-installed libtool libraries
+
+    case $linkmode in
+    lib)
+	passes="conv dlpreopen link"
+	for file in $dlfiles $dlprefiles; do
+	  case $file in
+	  *.la) ;;
+	  *)
+	    func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
+	    ;;
+	  esac
+	done
+	;;
+    prog)
+	compile_deplibs=
+	finalize_deplibs=
+	alldeplibs=no
+	newdlfiles=
+	newdlprefiles=
+	passes="conv scan dlopen dlpreopen link"
+	;;
+    *)  passes="conv"
+	;;
+    esac
+
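+    # Roughly: the `conv' pass expands convenience archives, `scan' collects a
+    # program's dependencies, `dlopen'/`dlpreopen' handle -dlopen/-dlpreopen
+    # files, and `link' builds the final compile/finalize dependency lists.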
+    for pass in $passes; do
+      # The preopen pass in lib mode reverses $deplibs; put it back here
+      # so that, for instance, -L comes before the libs that need it.
+      if test "$linkmode,$pass" = "lib,link"; then
+	## FIXME: Find the place where the list is rebuilt in the wrong
+	##        order, and fix it there properly
+        tmp_deplibs=
+	for deplib in $deplibs; do
+	  tmp_deplibs="$deplib $tmp_deplibs"
+	done
+	deplibs="$tmp_deplibs"
+      fi
+
+      if test "$linkmode,$pass" = "lib,link" ||
+	 test "$linkmode,$pass" = "prog,scan"; then
+	libs="$deplibs"
+	deplibs=
+      fi
+      if test "$linkmode" = prog; then
+	case $pass in
+	dlopen) libs="$dlfiles" ;;
+	dlpreopen) libs="$dlprefiles" ;;
+	link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+	esac
+      fi
+      if test "$linkmode,$pass" = "lib,dlpreopen"; then
+	# Collect and forward deplibs of preopened libtool libs
+	for lib in $dlprefiles; do
+	  # Ignore non-libtool-libs
+	  dependency_libs=
+	  func_resolve_sysroot "$lib"
+	  case $lib in
+	  *.la)	func_source "$func_resolve_sysroot_result" ;;
+	  esac
+
+	  # Collect preopened libtool deplibs, except any this library
+	  # has declared as weak libs
+	  for deplib in $dependency_libs; do
+	    func_basename "$deplib"
+            deplib_base=$func_basename_result
+	    case " $weak_libs " in
+	    *" $deplib_base "*) ;;
+	    *) deplibs+=" $deplib" ;;
+	    esac
+	  done
+	done
+	libs="$dlprefiles"
+      fi
+      if test "$pass" = dlopen; then
+	# Collect dlpreopened libraries
+	save_deplibs="$deplibs"
+	deplibs=
+      fi
+
+      for deplib in $libs; do
+	lib=
+	found=no
+	case $deplib in
+	-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+        |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    compiler_flags+=" $deplib"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags+=" $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-l*)
+	  if test "$linkmode" != lib && test "$linkmode" != prog; then
+	    func_warning "\`-l' is ignored for archives/objects"
+	    continue
+	  fi
+	  func_stripname '-l' '' "$deplib"
+	  name=$func_stripname_result
+	  if test "$linkmode" = lib; then
+	    searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+	  else
+	    searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+	  fi
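+	  # Try each search directory, preferring a libtool .la file; a bare
+	  # shared or static library found here is treated as a non-libtool lib.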
+	  for searchdir in $searchdirs; do
+	    for search_ext in .la $std_shrext .so .a; do
+	      # Search the libtool library
+	      lib="$searchdir/lib${name}${search_ext}"
+	      if test -f "$lib"; then
+		if test "$search_ext" = ".la"; then
+		  found=yes
+		else
+		  found=no
+		fi
+		break 2
+	      fi
+	    done
+	  done
+	  if test "$found" != yes; then
+	    # deplib doesn't seem to be a libtool library
+	    if test "$linkmode,$pass" = "prog,link"; then
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      deplibs="$deplib $deplibs"
+	      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    continue
+	  else # deplib is a libtool library
+	    # If $allow_libtool_libs_with_static_runtimes is set and $deplib is a
+	    # standard library, we need to do some special things here, not later.
+	    if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	      case " $predeps $postdeps " in
+	      *" $deplib "*)
+		if func_lalib_p "$lib"; then
+		  library_names=
+		  old_library=
+		  func_source "$lib"
+		  for l in $old_library $library_names; do
+		    ll="$l"
+		  done
+		  if test "X$ll" = "X$old_library" ; then # only static version available
+		    found=no
+		    func_dirname "$lib" "" "."
+		    ladir="$func_dirname_result"
+		    lib=$ladir/$old_library
+		    if test "$linkmode,$pass" = "prog,link"; then
+		      compile_deplibs="$deplib $compile_deplibs"
+		      finalize_deplibs="$deplib $finalize_deplibs"
+		    else
+		      deplibs="$deplib $deplibs"
+		      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+		    fi
+		    continue
+		  fi
+		fi
+		;;
+	      *) ;;
+	      esac
+	    fi
+	  fi
+	  ;; # -l
+	*.ltframework)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    deplibs="$deplib $deplibs"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags+=" $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-L*)
+	  case $linkmode in
+	  lib)
+	    deplibs="$deplib $deplibs"
+	    test "$pass" = conv && continue
+	    newdependency_libs="$deplib $newdependency_libs"
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    newlib_search_path+=" $func_resolve_sysroot_result"
+	    ;;
+	  prog)
+	    if test "$pass" = conv; then
+	      deplibs="$deplib $deplibs"
+	      continue
+	    fi
+	    if test "$pass" = scan; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    newlib_search_path+=" $func_resolve_sysroot_result"
+	    ;;
+	  *)
+	    func_warning "\`-L' is ignored for archives/objects"
+	    ;;
+	  esac # linkmode
+	  continue
+	  ;; # -L
+	-R*)
+	  if test "$pass" = link; then
+	    func_stripname '-R' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    dir=$func_resolve_sysroot_result
+	    # Make sure the xrpath contains only unique directories.
+	    case "$xrpath " in
+	    *" $dir "*) ;;
+	    *) xrpath+=" $dir" ;;
+	    esac
+	  fi
+	  deplibs="$deplib $deplibs"
+	  continue
+	  ;;
+	*.la)
+	  func_resolve_sysroot "$deplib"
+	  lib=$func_resolve_sysroot_result
+	  ;;
+	*.$libext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	    continue
+	  fi
+	  case $linkmode in
+	  lib)
+	    # Linking convenience modules into shared libraries is allowed,
+	    # but linking other static libraries is non-portable.
+	    case " $dlpreconveniencelibs " in
+	    *" $deplib "*) ;;
+	    *)
+	      valid_a_lib=no
+	      case $deplibs_check_method in
+		match_pattern*)
+		  set dummy $deplibs_check_method; shift
+		  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+		  if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+		    | $EGREP "$match_pattern_regex" > /dev/null; then
+		    valid_a_lib=yes
+		  fi
+		;;
+		pass_all)
+		  valid_a_lib=yes
+		;;
+	      esac
+	      if test "$valid_a_lib" != yes; then
+		echo
+		$ECHO "*** Warning: Trying to link with static lib archive $deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because the file extensions .$libext of this argument makes me believe"
+		echo "*** that it is just a static archive that I should not use here."
+	      else
+		echo
+		$ECHO "*** Warning: Linking the shared library $output against the"
+		$ECHO "*** static library $deplib is not portable!"
+		deplibs="$deplib $deplibs"
+	      fi
+	      ;;
+	    esac
+	    continue
+	    ;;
+	  prog)
+	    if test "$pass" != link; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    continue
+	    ;;
+	  esac # linkmode
+	  ;; # *.$libext
+	*.lo | *.$objext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	  elif test "$linkmode" = prog; then
+	    if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+	      # If there is no dlopen support or we're linking statically,
+	      # we need to preload.
+	      newdlprefiles+=" $deplib"
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      newdlfiles+=" $deplib"
+	    fi
+	  fi
+	  continue
+	  ;;
+	%DEPLIBS%)
+	  alldeplibs=yes
+	  continue
+	  ;;
+	esac # case $deplib
+
+	if test "$found" = yes || test -f "$lib"; then :
+	else
+	  func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
+	fi
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$lib" \
+	  || func_fatal_error "\`$lib' is not a valid libtool archive"
+
+	func_dirname "$lib" "" "."
+	ladir="$func_dirname_result"
+
+	dlname=
+	dlopen=
+	dlpreopen=
+	libdir=
+	library_names=
+	old_library=
+	inherited_linker_flags=
+	# If the library was installed with an old release of libtool,
+	# it will not redefine the variables `installed' or `shouldnotlink'.
+	installed=yes
+	shouldnotlink=no
+	avoidtemprpath=
+
+
+	# Read the .la file
+	func_source "$lib"
+
+	# Convert "-framework foo" to "foo.ltframework"
+	if test -n "$inherited_linker_flags"; then
+	  tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+	  for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+	    case " $new_inherited_linker_flags " in
+	      *" $tmp_inherited_linker_flag "*) ;;
+	      *) new_inherited_linker_flags+=" $tmp_inherited_linker_flag";;
+	    esac
+	  done
+	fi
+	dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	if test "$linkmode,$pass" = "lib,link" ||
+	   test "$linkmode,$pass" = "prog,scan" ||
+	   { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+	  test -n "$dlopen" && dlfiles+=" $dlopen"
+	  test -n "$dlpreopen" && dlprefiles+=" $dlpreopen"
+	fi
+
+	if test "$pass" = conv; then
+	  # Only check for convenience libraries
+	  deplibs="$lib $deplibs"
+	  if test -z "$libdir"; then
+	    if test -z "$old_library"; then
+	      func_fatal_error "cannot find name of link library for \`$lib'"
+	    fi
+	    # It is a libtool convenience library, so add in its objects.
+	    convenience+=" $ladir/$objdir/$old_library"
+	    old_convenience+=" $ladir/$objdir/$old_library"
+	  elif test "$linkmode" != prog && test "$linkmode" != lib; then
+	    func_fatal_error "\`$lib' is not a convenience library"
+	  fi
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    deplibs="$deplib $deplibs"
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs+=" $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $deplib"
+	  done
+	  continue
+	fi # $pass = conv
+
+
+	# Get the name of the library we link against.
+	linklib=
+	if test -n "$old_library" &&
+	   { test "$prefer_static_libs" = yes ||
+	     test "$prefer_static_libs,$installed" = "built,no"; }; then
+	  linklib=$old_library
+	else
+	  for l in $old_library $library_names; do
+	    linklib="$l"
+	  done
+	fi
+	if test -z "$linklib"; then
+	  func_fatal_error "cannot find name of link library for \`$lib'"
+	fi
+
+	# This library was specified with -dlopen.
+	if test "$pass" = dlopen; then
+	  if test -z "$libdir"; then
+	    func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
+	  fi
+	  if test -z "$dlname" ||
+	     test "$dlopen_support" != yes ||
+	     test "$build_libtool_libs" = no; then
+	    # If there is no dlname, no dlopen support or we're linking
+	    # statically, we need to preload.  We also need to preload any
+	    # dependent libraries so libltdl's deplib preloader doesn't
+	    # bomb out in the load deplibs phase.
+	    dlprefiles+=" $lib $dependency_libs"
+	  else
+	    newdlfiles+=" $lib"
+	  fi
+	  continue
+	fi # $pass = dlopen
+
+	# We need an absolute path.
+	case $ladir in
+	[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
+	*)
+	  abs_ladir=`cd "$ladir" && pwd`
+	  if test -z "$abs_ladir"; then
+	    func_warning "cannot determine absolute directory name of \`$ladir'"
+	    func_warning "passing it literally to the linker, although it might fail"
+	    abs_ladir="$ladir"
+	  fi
+	  ;;
+	esac
+	func_basename "$lib"
+	laname="$func_basename_result"
+
+	# Find the relevant object directory and library name.
+	if test "X$installed" = Xyes; then
+	  if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    func_warning "library \`$lib' was moved."
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    libdir="$abs_ladir"
+	  else
+	    dir="$lt_sysroot$libdir"
+	    absdir="$lt_sysroot$libdir"
+	  fi
+	  test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+	else
+	  if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    # Remove this search path later
+	    notinst_path+=" $abs_ladir"
+	  else
+	    dir="$ladir/$objdir"
+	    absdir="$abs_ladir/$objdir"
+	    # Remove this search path later
+	    notinst_path+=" $abs_ladir"
+	  fi
+	fi # $installed = yes
+	func_stripname 'lib' '.la' "$laname"
+	name=$func_stripname_result
+
+	# This library was specified with -dlpreopen.
+	if test "$pass" = dlpreopen; then
+	  if test -z "$libdir" && test "$linkmode" = prog; then
+	    func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+	  fi
+	  case "$host" in
+	    # special handling for platforms with PE-DLLs.
+	    *cygwin* | *mingw* | *cegcc* )
+	      # Linker will automatically link against shared library if both
+	      # static and shared are present.  Therefore, ensure we extract
+	      # symbols from the import library if a shared library is present
+	      # (otherwise, the dlopen module name will be incorrect).  We do
+	      # this by putting the import library name into $newdlprefiles.
+	      # We recover the dlopen module name by 'saving' the la file
+	      # name in a special purpose variable, and (later) extracting the
+	      # dlname from the la file.
+	      if test -n "$dlname"; then
+	        func_tr_sh "$dir/$linklib"
+	        eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
+	        newdlprefiles+=" $dir/$linklib"
+	      else
+	        newdlprefiles+=" $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          dlpreconveniencelibs+=" $dir/$old_library"
+	      fi
+	    ;;
+	    * )
+	      # Prefer using a static library (so that no silly _DYNAMIC symbols
+	      # are required to link).
+	      if test -n "$old_library"; then
+	        newdlprefiles+=" $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          dlpreconveniencelibs+=" $dir/$old_library"
+	      # Otherwise, use the dlname, so that lt_dlopen finds it.
+	      elif test -n "$dlname"; then
+	        newdlprefiles+=" $dir/$dlname"
+	      else
+	        newdlprefiles+=" $dir/$linklib"
+	      fi
+	    ;;
+	  esac
+	fi # $pass = dlpreopen
+
+	if test -z "$libdir"; then
+	  # Link the convenience library
+	  if test "$linkmode" = lib; then
+	    deplibs="$dir/$old_library $deplibs"
+	  elif test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$dir/$old_library $compile_deplibs"
+	    finalize_deplibs="$dir/$old_library $finalize_deplibs"
+	  else
+	    deplibs="$lib $deplibs" # used for prog,scan pass
+	  fi
+	  continue
+	fi
+
+
+	if test "$linkmode" = prog && test "$pass" != link; then
+	  newlib_search_path+=" $ladir"
+	  deplibs="$lib $deplibs"
+
+	  linkalldeplibs=no
+	  if test "$link_all_deplibs" != no || test -z "$library_names" ||
+	     test "$build_libtool_libs" = no; then
+	    linkalldeplibs=yes
+	  fi
+
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    case $deplib in
+	    -L*) func_stripname '-L' '' "$deplib"
+	         func_resolve_sysroot "$func_stripname_result"
+	         newlib_search_path+=" $func_resolve_sysroot_result"
+		 ;;
+	    esac
+	    # Need to link against all dependency_libs?
+	    if test "$linkalldeplibs" = yes; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      # Need to hardcode shared library paths
+	      # or/and link against static libraries
+	      newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs+=" $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $deplib"
+	  done # for deplib
+	  continue
+	fi # $linkmode = prog...
+
+	if test "$linkmode,$pass" = "prog,link"; then
+	  if test -n "$library_names" &&
+	     { { test "$prefer_static_libs" = no ||
+	         test "$prefer_static_libs,$installed" = "built,yes"; } ||
+	       test -z "$old_library"; }; then
+	    # We need to hardcode the library path
+	    if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
+	      # Make sure the rpath contains only unique directories.
+	      case "$temp_rpath:" in
+	      *"$absdir:"*) ;;
+	      *) temp_rpath+="$absdir:" ;;
+	      esac
+	    fi
+
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath+=" $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath+=" $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi # $linkmode,$pass = prog,link...
+
+	  if test "$alldeplibs" = yes &&
+	     { test "$deplibs_check_method" = pass_all ||
+	       { test "$build_libtool_libs" = yes &&
+		 test -n "$library_names"; }; }; then
+	    # We only need to search for static libraries
+	    continue
+	  fi
+	fi
+
+	link_static=no # Whether the deplib will be linked statically
+	use_static_libs=$prefer_static_libs
+	if test "$use_static_libs" = built && test "$installed" = yes; then
+	  use_static_libs=no
+	fi
+	if test -n "$library_names" &&
+	   { test "$use_static_libs" = no || test -z "$old_library"; }; then
+	  case $host in
+	  *cygwin* | *mingw* | *cegcc*)
+	      # No point in relinking DLLs because paths are not encoded
+	      notinst_deplibs+=" $lib"
+	      need_relink=no
+	    ;;
+	  *)
+	    if test "$installed" = no; then
+	      notinst_deplibs+=" $lib"
+	      need_relink=yes
+	    fi
+	    ;;
+	  esac
+	  # This is a shared library
+
+	  # Warn about portability: we can't link against libraries built with
+	  # -module on some systems (darwin).  Don't bleat about dlopened
+	  # modules though!
+	  dlopenmodule=""
+	  for dlpremoduletest in $dlprefiles; do
+	    if test "X$dlpremoduletest" = "X$lib"; then
+	      dlopenmodule="$dlpremoduletest"
+	      break
+	    fi
+	  done
+	  if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
+	    echo
+	    if test "$linkmode" = prog; then
+	      $ECHO "*** Warning: Linking the executable $output against the loadable module"
+	    else
+	      $ECHO "*** Warning: Linking the shared library $output against the loadable module"
+	    fi
+	    $ECHO "*** $linklib is not portable!"
+	  fi
+	  if test "$linkmode" = lib &&
+	     test "$hardcode_into_libs" = yes; then
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath+=" $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath+=" $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi
+
+	  if test -n "$old_archive_from_expsyms_cmds"; then
+	    # figure out the soname
+	    set dummy $library_names
+	    shift
+	    realname="$1"
+	    shift
+	    libname=`eval "\\$ECHO \"$libname_spec\""`
+	    # use dlname if we got it. it's perfectly good, no?
+	    if test -n "$dlname"; then
+	      soname="$dlname"
+	    elif test -n "$soname_spec"; then
+	      # bleh windows
+	      case $host in
+	      *cygwin* | mingw* | *cegcc*)
+	        func_arith $current - $age
+		major=$func_arith_result
+		versuffix="-$major"
+		;;
+	      esac
+	      eval soname=\"$soname_spec\"
+	    else
+	      soname="$realname"
+	    fi
+
+	    # Make a new name for the extract_expsyms_cmds to use
+	    soroot="$soname"
+	    func_basename "$soroot"
+	    soname="$func_basename_result"
+	    func_stripname 'lib' '.dll' "$soname"
+	    newlib=libimp-$func_stripname_result.a
+
+	    # If the library has no export list, then create one now
+	    if test -f "$output_objdir/$soname-def"; then :
+	    else
+	      func_verbose "extracting exported symbol list from \`$soname'"
+	      func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+	    fi
+
+	    # Create $newlib
+	    if test -f "$output_objdir/$newlib"; then :; else
+	      func_verbose "generating import library for \`$soname'"
+	      func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+	    fi
+	    # make sure the library variables are pointing to the new library
+	    dir=$output_objdir
+	    linklib=$newlib
+	  fi # test -n "$old_archive_from_expsyms_cmds"
+
+	  if test "$linkmode" = prog || test "$opt_mode" != relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    lib_linked=yes
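+	    # Decide how to reference $linklib: directly, via -L$dir with -l$name,
+	    # or by extending the runtime shlibpath, depending on hardcode_* settings.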
+	    case $hardcode_action in
+	    immediate | unsupported)
+	      if test "$hardcode_direct" = no; then
+		add="$dir/$linklib"
+		case $host in
+		  *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
+		  *-*-sysv4*uw2*) add_dir="-L$dir" ;;
+		  *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+		    *-*-unixware7*) add_dir="-L$dir" ;;
+		  *-*-darwin* )
+		    # If the lib is a (non-dlopened) module then we cannot link
+		    # against it; someone is ignoring the earlier warnings.
+		    if /usr/bin/file -L $add 2> /dev/null |
+			 $GREP ": [^:]* bundle" >/dev/null ; then
+		      if test "X$dlopenmodule" != "X$lib"; then
+			$ECHO "*** Warning: lib $linklib is a module, not a shared library"
+			if test -z "$old_library" ; then
+			  echo
+			  echo "*** And there doesn't seem to be a static archive available"
+			  echo "*** The link will probably fail, sorry"
+			else
+			  add="$dir/$old_library"
+			fi
+		      elif test -n "$old_library"; then
+			add="$dir/$old_library"
+		      fi
+		    fi
+		esac
+	      elif test "$hardcode_minus_L" = no; then
+		case $host in
+		*-*-sunos*) add_shlibpath="$dir" ;;
+		esac
+		add_dir="-L$dir"
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = no; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    relink)
+	      if test "$hardcode_direct" = yes &&
+	         test "$hardcode_direct_absolute" = no; then
+		add="$dir/$linklib"
+	      elif test "$hardcode_minus_L" = yes; then
+		add_dir="-L$absdir"
+		# Try looking first in the location we're being installed to.
+		if test -n "$inst_prefix_dir"; then
+		  case $libdir in
+		    [\\/]*)
+		      add_dir+=" -L$inst_prefix_dir$libdir"
+		      ;;
+		  esac
+		fi
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = yes; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    *) lib_linked=no ;;
+	    esac
+
+	    if test "$lib_linked" != yes; then
+	      func_fatal_configuration "unsupported hardcode properties"
+	    fi
+
+	    if test -n "$add_shlibpath"; then
+	      case :$compile_shlibpath: in
+	      *":$add_shlibpath:"*) ;;
+	      *) compile_shlibpath+="$add_shlibpath:" ;;
+	      esac
+	    fi
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+	      test -n "$add" && compile_deplibs="$add $compile_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	      if test "$hardcode_direct" != yes &&
+		 test "$hardcode_minus_L" != yes &&
+		 test "$hardcode_shlibpath_var" = yes; then
+		case :$finalize_shlibpath: in
+		*":$libdir:"*) ;;
+		*) finalize_shlibpath+="$libdir:" ;;
+		esac
+	      fi
+	    fi
+	  fi
+
+	  if test "$linkmode" = prog || test "$opt_mode" = relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    # Finalize command for both is simple: just hardcode it.
+	    if test "$hardcode_direct" = yes &&
+	       test "$hardcode_direct_absolute" = no; then
+	      add="$libdir/$linklib"
+	    elif test "$hardcode_minus_L" = yes; then
+	      add_dir="-L$libdir"
+	      add="-l$name"
+	    elif test "$hardcode_shlibpath_var" = yes; then
+	      case :$finalize_shlibpath: in
+	      *":$libdir:"*) ;;
+	      *) finalize_shlibpath+="$libdir:" ;;
+	      esac
+	      add="-l$name"
+	    elif test "$hardcode_automatic" = yes; then
+	      if test -n "$inst_prefix_dir" &&
+		 test -f "$inst_prefix_dir$libdir/$linklib" ; then
+		add="$inst_prefix_dir$libdir/$linklib"
+	      else
+		add="$libdir/$linklib"
+	      fi
+	    else
+	      # We cannot seem to hardcode it, guess we'll fake it.
+	      add_dir="-L$libdir"
+	      # Try looking first in the location we're being installed to.
+	      if test -n "$inst_prefix_dir"; then
+		case $libdir in
+		  [\\/]*)
+		    add_dir+=" -L$inst_prefix_dir$libdir"
+		    ;;
+		esac
+	      fi
+	      add="-l$name"
+	    fi
+
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+	      test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	    fi
+	  fi
+	elif test "$linkmode" = prog; then
+	  # Here we assume that one of hardcode_direct or hardcode_minus_L
+	  # is not unsupported.  This is valid on all known static and
+	  # shared platforms.
+	  if test "$hardcode_direct" != unsupported; then
+	    test -n "$old_library" && linklib="$old_library"
+	    compile_deplibs="$dir/$linklib $compile_deplibs"
+	    finalize_deplibs="$dir/$linklib $finalize_deplibs"
+	  else
+	    compile_deplibs="-l$name -L$dir $compile_deplibs"
+	    finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+	  fi
+	elif test "$build_libtool_libs" = yes; then
+	  # Not a shared library
+	  if test "$deplibs_check_method" != pass_all; then
+	    # We're trying to link a shared library against a static one
+	    # but the system doesn't support it.
+
+	    # Just print a warning and add the library to dependency_libs so
+	    # that the program can be linked against the static library.
+	    echo
+	    $ECHO "*** Warning: This system can not link to static lib archive $lib."
+	    echo "*** I have the capability to make that library automatically link in when"
+	    echo "*** you link to this library.  But I can only do this if you have a"
+	    echo "*** shared version of the library, which you do not appear to have."
+	    if test "$module" = yes; then
+	      echo "*** But as you try to build a module library, libtool will still create "
+	      echo "*** a static module, that should work as long as the dlopening application"
+	      echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
+	      if test -z "$global_symbol_pipe"; then
+		echo
+		echo "*** However, this would only work if libtool was able to extract symbol"
+		echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+		echo "*** not find such a program.  So, this module is probably useless."
+		echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	      fi
+	      if test "$build_old_libs" = no; then
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  else
+	    deplibs="$dir/$old_library $deplibs"
+	    link_static=yes
+	  fi
+	fi # link shared/static library?
+
+	if test "$linkmode" = lib; then
+	  if test -n "$dependency_libs" &&
+	     { test "$hardcode_into_libs" != yes ||
+	       test "$build_old_libs" = yes ||
+	       test "$link_static" = yes; }; then
+	    # Extract -R from dependency_libs
+	    temp_deplibs=
+	    for libdir in $dependency_libs; do
+	      case $libdir in
+	      -R*) func_stripname '-R' '' "$libdir"
+	           temp_xrpath=$func_stripname_result
+		   case " $xrpath " in
+		   *" $temp_xrpath "*) ;;
+		   *) xrpath+=" $temp_xrpath";;
+		   esac;;
+	      *) temp_deplibs+=" $libdir";;
+	      esac
+	    done
+	    dependency_libs="$temp_deplibs"
+	  fi
+
+	  newlib_search_path+=" $absdir"
+	  # Link against this library
+	  test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+	  # ... and its dependency_libs
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    newdependency_libs="$deplib $newdependency_libs"
+	    case $deplib in
+              -L*) func_stripname '-L' '' "$deplib"
+                   func_resolve_sysroot "$func_stripname_result";;
+              *) func_resolve_sysroot "$deplib" ;;
+            esac
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $func_resolve_sysroot_result "*)
+                specialdeplibs+=" $func_resolve_sysroot_result" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $func_resolve_sysroot_result"
+	  done
+
+	  if test "$link_all_deplibs" != no; then
+	    # Add the search paths of all dependency libraries
+	    for deplib in $dependency_libs; do
+	      path=
+	      case $deplib in
+	      -L*) path="$deplib" ;;
+	      *.la)
+	        func_resolve_sysroot "$deplib"
+	        deplib=$func_resolve_sysroot_result
+	        func_dirname "$deplib" "" "."
+		dir=$func_dirname_result
+		# We need an absolute path.
+		case $dir in
+		[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+		*)
+		  absdir=`cd "$dir" && pwd`
+		  if test -z "$absdir"; then
+		    func_warning "cannot determine absolute directory name of \`$dir'"
+		    absdir="$dir"
+		  fi
+		  ;;
+		esac
+		if $GREP "^installed=no" $deplib > /dev/null; then
+		case $host in
+		*-*-darwin*)
+		  depdepl=
+		  eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+		  if test -n "$deplibrary_names" ; then
+		    for tmp in $deplibrary_names ; do
+		      depdepl=$tmp
+		    done
+		    if test -f "$absdir/$objdir/$depdepl" ; then
+		      depdepl="$absdir/$objdir/$depdepl"
+		      darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+                      if test -z "$darwin_install_name"; then
+                          darwin_install_name=`${OTOOL64} -L $depdepl  | awk '{if (NR == 2) {print $1;exit}}'`
+                      fi
+		      compiler_flags+=" ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+		      linker_flags+=" -dylib_file ${darwin_install_name}:${depdepl}"
+		      path=
+		    fi
+		  fi
+		  ;;
+		*)
+		  path="-L$absdir/$objdir"
+		  ;;
+		esac
+		else
+		  eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+		  test -z "$libdir" && \
+		    func_fatal_error "\`$deplib' is not a valid libtool archive"
+		  test "$absdir" != "$libdir" && \
+		    func_warning "\`$deplib' seems to be moved"
+
+		  path="-L$absdir"
+		fi
+		;;
+	      esac
+	      case " $deplibs " in
+	      *" $path "*) ;;
+	      *) deplibs="$path $deplibs" ;;
+	      esac
+	    done
+	  fi # link_all_deplibs != no
+	fi # linkmode = lib
+      done # for deplib in $libs
+      if test "$pass" = link; then
+	if test "$linkmode" = "prog"; then
+	  compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+	  finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+	else
+	  compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	fi
+      fi
+      dependency_libs="$newdependency_libs"
+      if test "$pass" = dlpreopen; then
+	# Link the dlpreopened libraries before other libraries
+	for deplib in $save_deplibs; do
+	  deplibs="$deplib $deplibs"
+	done
+      fi
+      if test "$pass" != dlopen; then
+	if test "$pass" != conv; then
+	  # Make sure lib_search_path contains only unique directories.
+	  lib_search_path=
+	  for dir in $newlib_search_path; do
+	    case "$lib_search_path " in
+	    *" $dir "*) ;;
+	    *) lib_search_path+=" $dir" ;;
+	    esac
+	  done
+	  newlib_search_path=
+	fi
+
+	if test "$linkmode,$pass" != "prog,link"; then
+	  vars="deplibs"
+	else
+	  vars="compile_deplibs finalize_deplibs"
+	fi
+	for var in $vars dependency_libs; do
+	  # Add libraries to $var in reverse order
+	  eval tmp_libs=\"\$$var\"
+	  new_libs=
+	  for deplib in $tmp_libs; do
+	    # FIXME: Pedantically, this is the right thing to do, so
+	    #        that some nasty dependency loop isn't accidentally
+	    #        broken:
+	    #new_libs="$deplib $new_libs"
+	    # Pragmatically, this seems to cause very few problems in
+	    # practice:
+	    case $deplib in
+	    -L*) new_libs="$deplib $new_libs" ;;
+	    -R*) ;;
+	    *)
+	      # And here is the reason: when a library appears more
+	      # than once as an explicit dependence of a library, or
+	      # is implicitly linked in more than once by the
+	      # compiler, it is considered special, and multiple
+	      # occurrences thereof are not removed.  Compare this
+	      # with having the same library being listed as a
+	      # dependency of multiple other libraries: in this case,
+	      # we know (pedantically, we assume) the library does not
+	      # need to be listed more than once, so we keep only the
+	      # last copy.  This is not always right, but it is rare
+	      # enough that we require users that really mean to play
+	      # such unportable linking tricks to link the library
+	      # using -Wl,-lname, so that libtool does not consider it
+	      # for duplicate removal.
+	      case " $specialdeplibs " in
+	      *" $deplib "*) new_libs="$deplib $new_libs" ;;
+	      *)
+		case " $new_libs " in
+		*" $deplib "*) ;;
+		*) new_libs="$deplib $new_libs" ;;
+		esac
+		;;
+	      esac
+	      ;;
+	    esac
+	  done
+	  tmp_libs=
+	  for deplib in $new_libs; do
+	    case $deplib in
+	    -L*)
+	      case " $tmp_libs " in
+	      *" $deplib "*) ;;
+	      *) tmp_libs+=" $deplib" ;;
+	      esac
+	      ;;
+	    *) tmp_libs+=" $deplib" ;;
+	    esac
+	  done
+	  eval $var=\"$tmp_libs\"
+	done # for var
+      fi
+      # Last step: remove runtime libs from dependency_libs
+      # (they stay in deplibs)
+      tmp_libs=
+      for i in $dependency_libs ; do
+	case " $predeps $postdeps $compiler_lib_search_path " in
+	*" $i "*)
+	  i=""
+	  ;;
+	esac
+	if test -n "$i" ; then
+	  tmp_libs+=" $i"
+	fi
+      done
+      dependency_libs=$tmp_libs
+    done # for pass
+    if test "$linkmode" = prog; then
+      dlfiles="$newdlfiles"
+    fi
+    if test "$linkmode" = prog || test "$linkmode" = lib; then
+      dlprefiles="$newdlprefiles"
+    fi
+
+    case $linkmode in
+    oldlib)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for archives"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for archives" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for archives"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for archives"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info/-version-number' is ignored for archives"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for archives"
+
+      test -n "$export_symbols$export_symbols_regex" && \
+	func_warning "\`-export-symbols' is ignored for archives"
+
+      # Now set the variables for building old libraries.
+      build_libtool_libs=no
+      oldlibs="$output"
+      objs+="$old_deplibs"
+      ;;
+
+    lib)
+      # Make sure we only generate libraries of the form `libNAME.la'.
+      case $outputname in
+      lib*)
+	func_stripname 'lib' '.la' "$outputname"
+	name=$func_stripname_result
+	eval shared_ext=\"$shrext_cmds\"
+	eval libname=\"$libname_spec\"
+	;;
+      *)
+	test "$module" = no && \
+	  func_fatal_help "libtool library \`$output' must begin with \`lib'"
+
+	if test "$need_lib_prefix" != no; then
+	  # Add the "lib" prefix for modules if required
+	  func_stripname '' '.la' "$outputname"
+	  name=$func_stripname_result
+	  eval shared_ext=\"$shrext_cmds\"
+	  eval libname=\"$libname_spec\"
+	else
+	  func_stripname '' '.la' "$outputname"
+	  libname=$func_stripname_result
+	fi
+	;;
+      esac
+
+      if test -n "$objs"; then
+	if test "$deplibs_check_method" != pass_all; then
+	  func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
+	else
+	  echo
+	  $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+	  $ECHO "*** objects $objs is not portable!"
+	  libobjs+=" $objs"
+	fi
+      fi
+
+      test "$dlself" != no && \
+	func_warning "\`-dlopen self' is ignored for libtool libraries"
+
+      set dummy $rpath
+      shift
+      test "$#" -gt 1 && \
+	func_warning "ignoring multiple \`-rpath's for a libtool library"
+
+      install_libdir="$1"
+
+      oldlibs=
+      if test -z "$rpath"; then
+	if test "$build_libtool_libs" = yes; then
+	  # Building a libtool convenience library.
+	  # Some compilers have problems with a `.al' extension so
+	  # convenience libraries should have the same extension that an
+	  # archive normally would.
+	  oldlibs="$output_objdir/$libname.$libext $oldlibs"
+	  build_libtool_libs=convenience
+	  build_old_libs=yes
+	fi
+
+	test -n "$vinfo" && \
+	  func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
+
+	test -n "$release" && \
+	  func_warning "\`-release' is ignored for convenience libraries"
+      else
+
+	# Parse the version information argument.
+	save_ifs="$IFS"; IFS=':'
+	set dummy $vinfo 0 0 0
+	shift
+	IFS="$save_ifs"
+
+	test -n "$7" && \
+	  func_fatal_help "too many parameters to \`-version-info'"
+
+	# Convert absolute version numbers to libtool ages.
+	# This retains compatibility with .la files and attempts
+	# to make the code below a bit more comprehensible.
+
+	case $vinfo_number in
+	yes)
+	  number_major="$1"
+	  number_minor="$2"
+	  number_revision="$3"
+	  #
+	  # There are really only two kinds -- those that
+	  # use the current revision as the major version
+	  # and those that subtract age and use age as
+	  # a minor version.  But then there is irix,
+	  # which has an extra 1 added just for fun.
+	  #
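+	  # Illustrative example (not part of upstream libtool): on a
+	  # linux-style host, `-version-number 1:2:3' yields
+	  # current = 1 + 2 = 3, age = 2, revision = 3; the version
+	  # calculation further below then typically produces a library
+	  # named libNAME.so.1.2.3.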
+	  case $version_type in
+	  # correct linux to gnu/linux during the next big refactor
+	  darwin|linux|osf|windows|none)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_revision"
+	    ;;
+	  freebsd-aout|freebsd-elf|qnx|sunos)
+	    current="$number_major"
+	    revision="$number_minor"
+	    age="0"
+	    ;;
+	  irix|nonstopux)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_minor"
+	    lt_irix_increment=no
+	    ;;
+	  esac
+	  ;;
+	no)
+	  current="$1"
+	  revision="$2"
+	  age="$3"
+	  ;;
+	esac
+
+	# Check that each of these is a valid number.
+	case $current in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "CURRENT \`$current' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $revision in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "REVISION \`$revision' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $age in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "AGE \`$age' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	if test "$age" -gt "$current"; then
+	  func_error "AGE \`$age' is greater than the current interface number \`$current'"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	fi
+
+	# Calculate the version variables.
+	major=
+	versuffix=
+	verstring=
+	case $version_type in
+	none) ;;
+
+	darwin)
+	  # Like Linux, but with the current version available in
+	  # verstring for coding it into the library header
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  # Darwin ld doesn't like 0 for these options...
+	  func_arith $current + 1
+	  minor_current=$func_arith_result
+	  xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
+	  verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+	  ;;
+
+	freebsd-aout)
+	  major=".$current"
+	  versuffix=".$current.$revision";
+	  ;;
+
+	freebsd-elf)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	irix | nonstopux)
+	  if test "X$lt_irix_increment" = "Xno"; then
+	    func_arith $current - $age
+	  else
+	    func_arith $current - $age + 1
+	  fi
+	  major=$func_arith_result
+
+	  case $version_type in
+	    nonstopux) verstring_prefix=nonstopux ;;
+	    *)         verstring_prefix=sgi ;;
+	  esac
+	  verstring="$verstring_prefix$major.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$revision
+	  while test "$loop" -ne 0; do
+	    func_arith $revision - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring_prefix$major.$iface:$verstring"
+	  done
+
+	  # Before this point, $major must not contain `.'.
+	  major=.$major
+	  versuffix="$major.$revision"
+	  ;;
+
+	linux) # correct to gnu/linux during the next big refactor
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  ;;
+
+	osf)
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix=".$current.$age.$revision"
+	  verstring="$current.$age.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$age
+	  while test "$loop" -ne 0; do
+	    func_arith $current - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring:${iface}.0"
+	  done
+
+	  # Make executables depend on our current version.
+	  verstring+=":${current}.0"
+	  ;;
+
+	qnx)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	sunos)
+	  major=".$current"
+	  versuffix=".$current.$revision"
+	  ;;
+
+	windows)
+	  # Use '-' rather than '.', since we only want one
+	  # extension on DOS 8.3 filesystems.
+	  func_arith $current - $age
+	  major=$func_arith_result
+	  versuffix="-$major"
+	  ;;
+
+	*)
+	  func_fatal_configuration "unknown library version type \`$version_type'"
+	  ;;
+	esac
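+
+	# Illustrative example (not part of upstream libtool): with
+	# `-version-info 5:3:2' and version_type=linux, current=5,
+	# revision=3 and age=2, so major becomes .3 and versuffix
+	# becomes .3.2.3, i.e. the real library is typically named
+	# libNAME.so.3.2.3 with soname libNAME.so.3.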
+
+	# Clear the version info if we defaulted, and they specified a release.
+	if test -z "$vinfo" && test -n "$release"; then
+	  major=
+	  case $version_type in
+	  darwin)
+	    # we can't check for "0.0" in archive_cmds due to quoting
+	    # problems, so we reset it completely
+	    verstring=
+	    ;;
+	  *)
+	    verstring="0.0"
+	    ;;
+	  esac
+	  if test "$need_version" = no; then
+	    versuffix=
+	  else
+	    versuffix=".0.0"
+	  fi
+	fi
+
+	# Remove version info from name if versioning should be avoided
+	if test "$avoid_version" = yes && test "$need_version" = no; then
+	  major=
+	  versuffix=
+	  verstring=""
+	fi
+
+	# Check to see if the archive will have undefined symbols.
+	if test "$allow_undefined" = yes; then
+	  if test "$allow_undefined_flag" = unsupported; then
+	    func_warning "undefined symbols not allowed in $host shared libraries"
+	    build_libtool_libs=no
+	    build_old_libs=yes
+	  fi
+	else
+	  # Don't allow undefined symbols.
+	  allow_undefined_flag="$no_undefined_flag"
+	fi
+
+      fi
+
+      func_generate_dlsyms "$libname" "$libname" "yes"
+      libobjs+=" $symfileobj"
+      test "X$libobjs" = "X " && libobjs=
+
+      if test "$opt_mode" != relink; then
+	# Remove our outputs, but don't remove object files since they
+	# may have been created when compiling PIC objects.
+	removelist=
+	tempremovelist=`$ECHO "$output_objdir/*"`
+	for p in $tempremovelist; do
+	  case $p in
+	    *.$objext | *.gcno)
+	       ;;
+	    $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
+	       if test "X$precious_files_regex" != "X"; then
+		 if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+		 then
+		   continue
+		 fi
+	       fi
+	       removelist+=" $p"
+	       ;;
+	    *) ;;
+	  esac
+	done
+	test -n "$removelist" && \
+	  func_show_eval "${RM}r \$removelist"
+      fi
+
+      # Now set the variables for building old libraries.
+      if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+	oldlibs+=" $output_objdir/$libname.$libext"
+
+	# Transform .lo files to .o files.
+	oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+      fi
+
+      # Eliminate all temporary directories.
+      #for path in $notinst_path; do
+      #	lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+      #	deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+      #	dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+      #done
+
+      if test -n "$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	temp_xrpath=
+	for libdir in $xrpath; do
+	  func_replace_sysroot "$libdir"
+	  temp_xrpath+=" -R$func_replace_sysroot_result"
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath+=" $libdir" ;;
+	  esac
+	done
+	if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+	  dependency_libs="$temp_xrpath $dependency_libs"
+	fi
+      fi
+
+      # Make sure dlfiles contains only unique files that won't be dlpreopened
+      old_dlfiles="$dlfiles"
+      dlfiles=
+      for lib in $old_dlfiles; do
+	case " $dlprefiles $dlfiles " in
+	*" $lib "*) ;;
+	*) dlfiles+=" $lib" ;;
+	esac
+      done
+
+      # Make sure dlprefiles contains only unique files
+      old_dlprefiles="$dlprefiles"
+      dlprefiles=
+      for lib in $old_dlprefiles; do
+	case "$dlprefiles " in
+	*" $lib "*) ;;
+	*) dlprefiles+=" $lib" ;;
+	esac
+      done
+
+      if test "$build_libtool_libs" = yes; then
+	if test -n "$rpath"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # these systems don't actually have a c library (as such)!
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C library is in the System framework
+	    deplibs+=" System.ltframework"
+	    ;;
+	  *-*-netbsd*)
+	    # Don't link with libc until the a.out ld.so is fixed.
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    ;;
+	  *)
+	    # Add libc to deplibs on all other systems if necessary.
+	    if test "$build_libtool_need_lc" = "yes"; then
+	      deplibs+=" -lc"
+	    fi
+	    ;;
+	  esac
+	fi
+
+	# Transform deplibs into only deplibs that can be linked in shared.
+	name_save=$name
+	libname_save=$libname
+	release_save=$release
+	versuffix_save=$versuffix
+	major_save=$major
+	# I'm not sure if I'm treating the release correctly.  I think
+	# release should show up in the -l (i.e. -lgmp5) so we don't want to
+	# add it in twice.  Is that correct?
+	release=""
+	versuffix=""
+	major=""
+	newdeplibs=
+	droppeddeps=no
+	case $deplibs_check_method in
+	pass_all)
+	  # Don't check for shared/static.  Everything works.
+	  # This might be a little naive.  We might want to check
+	  # whether the library exists or not.  But this is on
+	  # osf3 & osf4 and I'm not really sure... Just
+	  # implementing what was already the behavior.
+	  newdeplibs=$deplibs
+	  ;;
+	test_compile)
+	  # This code stresses the "libraries are programs" paradigm to its
+	  # limits. Maybe even breaks it.  We compile a program, linking it
+	  # against the deplibs as a proxy for the library.  Then we can check
+	  # whether they linked in statically or dynamically with ldd.
+	  $opt_dry_run || $RM conftest.c
+	  cat > conftest.c <<EOF
+	  int main() { return 0; }
+EOF
+	  $opt_dry_run || $RM conftest
+	  if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
+	    ldd_output=`ldd conftest`
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		  case " $predeps $postdeps " in
+		  *" $i "*)
+		    newdeplibs+=" $i"
+		    i=""
+		    ;;
+		  esac
+		fi
+		if test -n "$i" ; then
+		  libname=`eval "\\$ECHO \"$libname_spec\""`
+		  deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		  set dummy $deplib_matches; shift
+		  deplib_match=$1
+		  if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		    newdeplibs+=" $i"
+		  else
+		    droppeddeps=yes
+		    echo
+		    $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		    echo "*** I have the capability to make that library automatically link in when"
+		    echo "*** you link to this library.  But I can only do this if you have a"
+		    echo "*** shared version of the library, which I believe you do not have"
+		    echo "*** because a test_compile did reveal that the linker did not use it for"
+		    echo "*** its dynamic dependency list that programs get resolved with at runtime."
+		  fi
+		fi
+		;;
+	      *)
+		newdeplibs+=" $i"
+		;;
+	      esac
+	    done
+	  else
+	    # Error occurred in the first compile.  Let's try to salvage
+	    # the situation: Compile a separate program for each library.
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		$opt_dry_run || $RM conftest
+		if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
+		  ldd_output=`ldd conftest`
+		  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		    case " $predeps $postdeps " in
+		    *" $i "*)
+		      newdeplibs+=" $i"
+		      i=""
+		      ;;
+		    esac
+		  fi
+		  if test -n "$i" ; then
+		    libname=`eval "\\$ECHO \"$libname_spec\""`
+		    deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		    set dummy $deplib_matches; shift
+		    deplib_match=$1
+		    if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		      newdeplibs+=" $i"
+		    else
+		      droppeddeps=yes
+		      echo
+		      $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		      echo "*** I have the capability to make that library automatically link in when"
+		      echo "*** you link to this library.  But I can only do this if you have a"
+		      echo "*** shared version of the library, which you do not appear to have"
+		      echo "*** because a test_compile did reveal that the linker did not use this one"
+		      echo "*** as a dynamic dependency that programs can get resolved with at runtime."
+		    fi
+		  fi
+		else
+		  droppeddeps=yes
+		  echo
+		  $ECHO "*** Warning!  Library $i is needed by this library but I was not able to"
+		  echo "*** make it link in!  You will probably need to install it or some"
+		  echo "*** library that it depends on before this library will be fully"
+		  echo "*** functional.  Installing it before continuing would be even better."
+		fi
+		;;
+	      *)
+		newdeplibs+=" $i"
+		;;
+	      esac
+	    done
+	  fi
+	  ;;
+	file_magic*)
+	  set dummy $deplibs_check_method; shift
+	  file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs+=" $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		if test -n "$file_magic_glob"; then
+		  libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
+		else
+		  libnameglob=$libname
+		fi
+		test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  if test "$want_nocaseglob" = yes; then
+		    shopt -s nocaseglob
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		    $nocaseglob
+		  else
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		  fi
+		  for potent_lib in $potential_libs; do
+		      # Follow soft links.
+		      if ls -lLd "$potent_lib" 2>/dev/null |
+			 $GREP " -> " >/dev/null; then
+			continue
+		      fi
+		      # The statement above tries to avoid entering an
+		      # endless loop below, in case of cyclic links.
+		      # We might still enter an endless loop, since a link
+		      # loop can be closed while we follow links,
+		      # but so what?
+		      potlib="$potent_lib"
+		      while test -h "$potlib" 2>/dev/null; do
+			potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
+			case $potliblink in
+			[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
+			*) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+			esac
+		      done
+		      if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+			 $SED -e 10q |
+			 $EGREP "$file_magic_regex" > /dev/null; then
+			newdeplibs+=" $a_deplib"
+			a_deplib=""
+			break 2
+		      fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a file magic. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs+=" $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	match_pattern*)
+	  set dummy $deplibs_check_method; shift
+	  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs+=" $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+		  for potent_lib in $potential_libs; do
+		    potlib="$potent_lib" # see symlink-check above in file_magic test
+		    if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+		       $EGREP "$match_pattern_regex" > /dev/null; then
+		      newdeplibs+=" $a_deplib"
+		      a_deplib=""
+		      break 2
+		    fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a regex pattern. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs+=" $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	none | unknown | *)
+	  newdeplibs=""
+	  tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+	  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	    for i in $predeps $postdeps ; do
+	      # can't use Xsed below, because $i might contain '/'
+	      tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"`
+	    done
+	  fi
+	  case $tmp_deplibs in
+	  *[!\	\ ]*)
+	    echo
+	    if test "X$deplibs_check_method" = "Xnone"; then
+	      echo "*** Warning: inter-library dependencies are not supported in this platform."
+	    else
+	      echo "*** Warning: inter-library dependencies are not known to be supported."
+	    fi
+	    echo "*** All declared inter-library dependencies are being dropped."
+	    droppeddeps=yes
+	    ;;
+	  esac
+	  ;;
+	esac
+	versuffix=$versuffix_save
+	major=$major_save
+	release=$release_save
+	libname=$libname_save
+	name=$name_save
+
+	case $host in
+	*-*-rhapsody* | *-*-darwin1.[012])
+	  # On Rhapsody replace the C library with the System framework
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+	  ;;
+	esac
+
+	if test "$droppeddeps" = yes; then
+	  if test "$module" = yes; then
+	    echo
+	    echo "*** Warning: libtool could not satisfy all declared inter-library"
+	    $ECHO "*** dependencies of module $libname.  Therefore, libtool will create"
+	    echo "*** a static module, that should work as long as the dlopening"
+	    echo "*** application is linked with the -dlopen flag."
+	    if test -z "$global_symbol_pipe"; then
+	      echo
+	      echo "*** However, this would only work if libtool was able to extract symbol"
+	      echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+	      echo "*** not find such a program.  So, this module is probably useless."
+	      echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	    fi
+	    if test "$build_old_libs" = no; then
+	      oldlibs="$output_objdir/$libname.$libext"
+	      build_libtool_libs=module
+	      build_old_libs=yes
+	    else
+	      build_libtool_libs=no
+	    fi
+	  else
+	    echo "*** The inter-library dependencies that have been dropped here will be"
+	    echo "*** automatically added whenever a program is linked with this library"
+	    echo "*** or is declared to -dlopen it."
+
+	    if test "$allow_undefined" = no; then
+	      echo
+	      echo "*** Since this library must not contain undefined symbols,"
+	      echo "*** because either the platform does not support them or"
+	      echo "*** it was explicitly requested with -no-undefined,"
+	      echo "*** libtool will only create a static version of it."
+	      if test "$build_old_libs" = no; then
+		oldlibs="$output_objdir/$libname.$libext"
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  fi
+	fi
+	# Done checking deplibs!
+	deplibs=$newdeplibs
+      fi
+      # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+      case $host in
+	*-*-darwin*)
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  ;;
+      esac
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs+=" -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs+=" $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs+=" $deplib" ;;
+	esac
+      done
+      deplibs="$new_libs"
+
+      # All the library-specific variables (install_libdir is set above).
+      library_names=
+      old_library=
+      dlname=
+
+      # Test again, we may have decided not to build it any more
+      if test "$build_libtool_libs" = yes; then
+	# Remove ${wl} instances when linking with ld.
+	# FIXME: should test the right _cmds variable.
+	case $archive_cmds in
+	  *\$LD\ *) wl= ;;
+	esac
+	if test "$hardcode_into_libs" = yes; then
+	  # Hardcode the library paths
+	  hardcode_libdirs=
+	  dep_rpath=
+	  rpath="$finalize_rpath"
+	  test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+	  for libdir in $rpath; do
+	    if test -n "$hardcode_libdir_flag_spec"; then
+	      if test -n "$hardcode_libdir_separator"; then
+		func_replace_sysroot "$libdir"
+		libdir=$func_replace_sysroot_result
+		if test -z "$hardcode_libdirs"; then
+		  hardcode_libdirs="$libdir"
+		else
+		  # Just accumulate the unique libdirs.
+		  case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+		  *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		    ;;
+		  *)
+		    hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		    ;;
+		  esac
+		fi
+	      else
+		eval flag=\"$hardcode_libdir_flag_spec\"
+		dep_rpath+=" $flag"
+	      fi
+	    elif test -n "$runpath_var"; then
+	      case "$perm_rpath " in
+	      *" $libdir "*) ;;
+	      *) perm_rpath+=" $libdir" ;;
+	      esac
+	    fi
+	  done
+	  # Substitute the hardcoded libdirs into the rpath.
+	  if test -n "$hardcode_libdir_separator" &&
+	     test -n "$hardcode_libdirs"; then
+	    libdir="$hardcode_libdirs"
+	    eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
+	  fi
+	  if test -n "$runpath_var" && test -n "$perm_rpath"; then
+	    # We should set the runpath_var.
+	    rpath=
+	    for dir in $perm_rpath; do
+	      rpath+="$dir:"
+	    done
+	    eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+	  fi
+	  test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+	fi
+
+	shlibpath="$finalize_shlibpath"
+	test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+	if test -n "$shlibpath"; then
+	  eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+	fi
+
+	# Get the real and link names of the library.
+	eval shared_ext=\"$shrext_cmds\"
+	eval library_names=\"$library_names_spec\"
+	set dummy $library_names
+	shift
+	realname="$1"
+	shift
+
+	if test -n "$soname_spec"; then
+	  eval soname=\"$soname_spec\"
+	else
+	  soname="$realname"
+	fi
+	if test -z "$dlname"; then
+	  dlname=$soname
+	fi
+
+	lib="$output_objdir/$realname"
+	linknames=
+	for link
+	do
+	  linknames+=" $link"
+	done
+
+	# Use standard objects if they are pic
+	test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	test "X$libobjs" = "X " && libobjs=
+
+	delfiles=
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+	  export_symbols="$output_objdir/$libname.uexp"
+	  delfiles+=" $export_symbols"
+	fi
+
+	orig_export_symbols=
+	case $host_os in
+	cygwin* | mingw* | cegcc*)
+	  if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+	    # exporting using user supplied symfile
+	    if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
+	      # and it's NOT already a .def file. Must figure out
+	      # which of the given symbols are data symbols and tag
+	      # them as such. So, trigger use of export_symbols_cmds.
+	      # export_symbols gets reassigned inside the "prepare
+	      # the list of exported symbols" if statement, so the
+	      # include_expsyms logic still works.
+	      orig_export_symbols="$export_symbols"
+	      export_symbols=
+	      always_export_symbols=yes
+	    fi
+	  fi
+	  ;;
+	esac
+
+	# Prepare the list of exported symbols
+	if test -z "$export_symbols"; then
+	  if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
+	    func_verbose "generating symbol list for \`$libname.la'"
+	    export_symbols="$output_objdir/$libname.exp"
+	    $opt_dry_run || $RM $export_symbols
+	    cmds=$export_symbols_cmds
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd1 in $cmds; do
+	      IFS="$save_ifs"
+	      # Take the normal branch if the nm_file_list_spec branch
+	      # doesn't work or if tool conversion is not needed.
+	      case $nm_file_list_spec~$to_tool_file_cmd in
+		*~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
+		  try_normal_branch=yes
+		  eval cmd=\"$cmd1\"
+		  func_len " $cmd"
+		  len=$func_len_result
+		  ;;
+		*)
+		  try_normal_branch=no
+		  ;;
+	      esac
+	      if test "$try_normal_branch" = yes \
+		 && { test "$len" -lt "$max_cmd_len" \
+		      || test "$max_cmd_len" -le -1; }
+	      then
+		func_show_eval "$cmd" 'exit $?'
+		skipped_export=false
+	      elif test -n "$nm_file_list_spec"; then
+		func_basename "$output"
+		output_la=$func_basename_result
+		save_libobjs=$libobjs
+		save_output=$output
+		output=${output_objdir}/${output_la}.nm
+		func_to_tool_file "$output"
+		libobjs=$nm_file_list_spec$func_to_tool_file_result
+		delfiles+=" $output"
+		func_verbose "creating $NM input file list: $output"
+		for obj in $save_libobjs; do
+		  func_to_tool_file "$obj"
+		  $ECHO "$func_to_tool_file_result"
+		done > "$output"
+		eval cmd=\"$cmd1\"
+		func_show_eval "$cmd" 'exit $?'
+		output=$save_output
+		libobjs=$save_libobjs
+		skipped_export=false
+	      else
+		# The command line is too long to execute in one step.
+		func_verbose "using reloadable object file for export list..."
+		skipped_export=:
+		# Break out early, otherwise skipped_export may be
+		# set to false by a later but shorter cmd.
+		break
+	      fi
+	    done
+	    IFS="$save_ifs"
+	    if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+	fi
+
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  tmp_export_symbols="$export_symbols"
+	  test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	  $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	fi
+
+	if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+	  # The given exports_symbols file has to be filtered, so filter it.
+	  func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	  # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	  # 's' commands which not all seds can handle. GNU sed should be fine
+	  # though. Also, the filter scales superlinearly with the number of
+	  # global variables. join(1) would be nice here, but unfortunately
+	  # isn't a blessed tool.
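+	  # Illustrative example (not part of upstream libtool): an input
+	  # line `my_var DATA' in $export_symbols becomes the sed command
+	  # `s|^my_var$|my_var DATA|' in $libname.filter, so running the
+	  # filter over $orig_export_symbols re-tags that symbol as DATA
+	  # in the generated $libname.def.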
+	  $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	  delfiles+=" $export_symbols $output_objdir/$libname.filter"
+	  export_symbols=$output_objdir/$libname.def
+	  $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	fi
+
+	tmp_deplibs=
+	for test_deplib in $deplibs; do
+	  case " $convenience " in
+	  *" $test_deplib "*) ;;
+	  *)
+	    tmp_deplibs+=" $test_deplib"
+	    ;;
+	  esac
+	done
+	deplibs="$tmp_deplibs"
+
+	if test -n "$convenience"; then
+	  if test -n "$whole_archive_flag_spec" &&
+	    test "$compiler_needs_object" = yes &&
+	    test -z "$libobjs"; then
+	    # extract the archives, so we have objects to list.
+	    # TODO: could optimize this to just extract one archive.
+	    whole_archive_flag_spec=
+	  fi
+	  if test -n "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  else
+	    gentop="$output_objdir/${outputname}x"
+	    generated+=" $gentop"
+
+	    func_extract_archives $gentop $convenience
+	    libobjs+=" $func_extract_archives_result"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	fi
+
+	if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+	  eval flag=\"$thread_safe_flag_spec\"
+	  linker_flags+=" $flag"
+	fi
+
+	# Make a backup of the uninstalled library when relinking
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+	fi
+
+	# Do each of the archive commands.
+	if test "$module" = yes && test -n "$module_cmds" ; then
+	  if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	    eval test_cmds=\"$module_expsym_cmds\"
+	    cmds=$module_expsym_cmds
+	  else
+	    eval test_cmds=\"$module_cmds\"
+	    cmds=$module_cmds
+	  fi
+	else
+	  if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	    eval test_cmds=\"$archive_expsym_cmds\"
+	    cmds=$archive_expsym_cmds
+	  else
+	    eval test_cmds=\"$archive_cmds\"
+	    cmds=$archive_cmds
+	  fi
+	fi
+
+	if test "X$skipped_export" != "X:" &&
+	   func_len " $test_cmds" &&
+	   len=$func_len_result &&
+	   test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  :
+	else
+	  # The command line is too long to link in one step, link piecewise
+	  # or, if using GNU ld and skipped_export is not :, use a linker
+	  # script.
+
+	  # Save the value of $output and $libobjs because we want to
+	  # use them later.  If we have whole_archive_flag_spec, we
+	  # want to use save_libobjs as it was before
+	  # whole_archive_flag_spec was expanded, because we can't
+	  # assume the linker understands whole_archive_flag_spec.
+	  # This may have to be revisited, in case too many
+	  # convenience libraries get linked in and end up exceeding
+	  # the spec.
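+	  # Sketch of the piecewise scheme below (illustrative, not part
+	  # of upstream libtool): objects are grouped into reloadable
+	  # objects $output_la-1.$objext, $output_la-2.$objext, ..., each
+	  # linking in the one created before it, and the final archive
+	  # command is then run against only the last reloadable object
+	  # (or a GNU ld script / linker file list when supported).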
+	  if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	  fi
+	  save_output=$output
+	  func_basename "$output"
+	  output_la=$func_basename_result
+
+	  # Clear the reloadable object creation command queue and
+	  # initialize k to one.
+	  test_cmds=
+	  concat_cmds=
+	  objlist=
+	  last_robj=
+	  k=1
+
+	  if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
+	    output=${output_objdir}/${output_la}.lnkscript
+	    func_verbose "creating GNU ld script: $output"
+	    echo 'INPUT (' > $output
+	    for obj in $save_libobjs
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    echo ')' >> $output
+	    delfiles+=" $output"
+	    func_to_tool_file "$output"
+	    output=$func_to_tool_file_result
+	  elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+	    output=${output_objdir}/${output_la}.lnk
+	    func_verbose "creating linker input file list: $output"
+	    : > $output
+	    set x $save_libobjs
+	    shift
+	    firstobj=
+	    if test "$compiler_needs_object" = yes; then
+	      firstobj="$1 "
+	      shift
+	    fi
+	    for obj
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    delfiles+=" $output"
+	    func_to_tool_file "$output"
+	    output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+	  else
+	    if test -n "$save_libobjs"; then
+	      func_verbose "creating reloadable object files..."
+	      output=$output_objdir/$output_la-${k}.$objext
+	      eval test_cmds=\"$reload_cmds\"
+	      func_len " $test_cmds"
+	      len0=$func_len_result
+	      len=$len0
+
+	      # Loop over the list of objects to be linked.
+	      for obj in $save_libobjs
+	      do
+		func_len " $obj"
+		func_arith $len + $func_len_result
+		len=$func_arith_result
+		if test "X$objlist" = X ||
+		   test "$len" -lt "$max_cmd_len"; then
+		  objlist+=" $obj"
+		else
+		  # The command $test_cmds is almost too long, add a
+		  # command to the queue.
+		  if test "$k" -eq 1 ; then
+		    # The first file doesn't have a previous command to add.
+		    reload_objs=$objlist
+		    eval concat_cmds=\"$reload_cmds\"
+		  else
+		    # All subsequent reloadable object files will link in
+		    # the last one created.
+		    reload_objs="$objlist $last_robj"
+		    eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+		  fi
+		  last_robj=$output_objdir/$output_la-${k}.$objext
+		  func_arith $k + 1
+		  k=$func_arith_result
+		  output=$output_objdir/$output_la-${k}.$objext
+		  objlist=" $obj"
+		  func_len " $last_robj"
+		  func_arith $len0 + $func_len_result
+		  len=$func_arith_result
+		fi
+	      done
+	      # Handle the remaining objects by creating one last
+	      # reloadable object file.  All subsequent reloadable object
+	      # files will link in the last one created.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      reload_objs="$objlist $last_robj"
+	      eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+	      if test -n "$last_robj"; then
+	        eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+	      fi
+	      delfiles+=" $output"
+
+	    else
+	      output=
+	    fi
+
+	    if ${skipped_export-false}; then
+	      func_verbose "generating symbol list for \`$libname.la'"
+	      export_symbols="$output_objdir/$libname.exp"
+	      $opt_dry_run || $RM $export_symbols
+	      libobjs=$output
+	      # Append the command to create the export file.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+	      if test -n "$last_robj"; then
+		eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+	      fi
+	    fi
+
+	    test -n "$save_libobjs" &&
+	      func_verbose "creating a temporary reloadable object file: $output"
+
+	    # Loop through the commands generated above and execute them.
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd in $concat_cmds; do
+	      IFS="$save_ifs"
+	      $opt_silent || {
+		  func_quote_for_expand "$cmd"
+		  eval "func_echo $func_quote_for_expand_result"
+	      }
+	      $opt_dry_run || eval "$cmd" || {
+		lt_exit=$?
+
+		# Restore the uninstalled library and exit
+		if test "$opt_mode" = relink; then
+		  ( cd "$output_objdir" && \
+		    $RM "${realname}T" && \
+		    $MV "${realname}U" "$realname" )
+		fi
+
+		exit $lt_exit
+	      }
+	    done
+	    IFS="$save_ifs"
+
+	    if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+
+	  if ${skipped_export-false}; then
+	    if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	      tmp_export_symbols="$export_symbols"
+	      test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	      $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	    fi
+
+	    if test -n "$orig_export_symbols"; then
+	      # The given exports_symbols file has to be filtered, so filter it.
+	      func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	      # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	      # 's' commands which not all seds can handle. GNU sed should be fine
+	      # though. Also, the filter scales superlinearly with the number of
+	      # global variables. join(1) would be nice here, but unfortunately
+	      # isn't a blessed tool.
+	      $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	      delfiles+=" $export_symbols $output_objdir/$libname.filter"
+	      export_symbols=$output_objdir/$libname.def
+	      $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	    fi
+	  fi
+
+	  libobjs=$output
+	  # Restore the value of output.
+	  output=$save_output
+
+	  if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	  # Expand the library linking commands again to reset the
+	  # value of $libobjs for piecewise linking.
+
+	  # Do each of the archive commands.
+	  if test "$module" = yes && test -n "$module_cmds" ; then
+	    if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	      cmds=$module_expsym_cmds
+	    else
+	      cmds=$module_cmds
+	    fi
+	  else
+	    if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	      cmds=$archive_expsym_cmds
+	    else
+	      cmds=$archive_cmds
+	    fi
+	  fi
+	fi
+
+	if test -n "$delfiles"; then
+	  # Append the command to remove temporary files to $cmds.
+	  eval cmds=\"\$cmds~\$RM $delfiles\"
+	fi
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  libobjs+=" $func_extract_archives_result"
+	  test "X$libobjs" = "X " && libobjs=
+	fi
+
+	save_ifs="$IFS"; IFS='~'
+	for cmd in $cmds; do
+	  IFS="$save_ifs"
+	  eval cmd=\"$cmd\"
+	  $opt_silent || {
+	    func_quote_for_expand "$cmd"
+	    eval "func_echo $func_quote_for_expand_result"
+	  }
+	  $opt_dry_run || eval "$cmd" || {
+	    lt_exit=$?
+
+	    # Restore the uninstalled library and exit
+	    if test "$opt_mode" = relink; then
+	      ( cd "$output_objdir" && \
+	        $RM "${realname}T" && \
+		$MV "${realname}U" "$realname" )
+	    fi
+
+	    exit $lt_exit
+	  }
+	done
+	IFS="$save_ifs"
+
+	# Restore the uninstalled library and exit
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+	  if test -n "$convenience"; then
+	    if test -z "$whole_archive_flag_spec"; then
+	      func_show_eval '${RM}r "$gentop"'
+	    fi
+	  fi
+
+	  exit $EXIT_SUCCESS
+	fi
+
+	# Create links to the real library.
+	for linkname in $linknames; do
+	  if test "$realname" != "$linkname"; then
+	    func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+	  fi
+	done
+
+	# If -module or -export-dynamic was specified, set the dlname.
+	if test "$module" = yes || test "$export_dynamic" = yes; then
+	  # On all known operating systems, these are identical.
+	  dlname="$soname"
+	fi
+      fi
+      ;;
+
+    obj)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for objects"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for objects" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for objects"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for objects"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for objects"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for objects"
+
+      case $output in
+      *.lo)
+	test -n "$objs$old_deplibs" && \
+	  func_fatal_error "cannot build library object \`$output' from non-libtool objects"
+
+	libobj=$output
+	func_lo2o "$libobj"
+	obj=$func_lo2o_result
+	;;
+      *)
+	libobj=
+	obj="$output"
+	;;
+      esac
+
+      # Delete the old objects.
+      $opt_dry_run || $RM $obj $libobj
+
+      # Objects from convenience libraries.  This assumes
+      # single-version convenience libraries.  Whenever we create
+      # different ones for PIC/non-PIC, we'll have to duplicate
+      # the extraction.
+      reload_conv_objs=
+      gentop=
+      # reload_cmds runs $LD directly, so let us get rid of
+      # -Wl from whole_archive_flag_spec and hope we can get by with
+      # turning commas into spaces.
+      wl=
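+      # Illustrative example (not part of upstream libtool, assuming a
+      # GNU-ld style spec): with $wl cleared, something like
+      # `${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+      # expands to plain `--whole-archive ... --no-whole-archive', and
+      # the `s|,| |g' below splits any comma-joined flags that a
+      # -Wl,-style spec would otherwise leave behind.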
+
+      if test -n "$convenience"; then
+	if test -n "$whole_archive_flag_spec"; then
+	  eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+	  reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+	else
+	  gentop="$output_objdir/${obj}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $convenience
+	  reload_conv_objs="$reload_objs $func_extract_archives_result"
+	fi
+      fi
+
+      # If we're not building shared, we need to use non_pic_objs
+      test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
+
+      # Create the old-style object.
+      reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+      output="$obj"
+      func_execute_cmds "$reload_cmds" 'exit $?'
+
+      # Exit if we aren't doing a library object file.
+      if test -z "$libobj"; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$build_libtool_libs" != yes; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	# Create an invalid libtool object if no PIC, so that we don't
+	# accidentally link it into a program.
+	# $show "echo timestamp > $libobj"
+	# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+	exit $EXIT_SUCCESS
+      fi
+
+      if test -n "$pic_flag" || test "$pic_mode" != default; then
+	# Only do commands if we really have different PIC objects.
+	reload_objs="$libobjs $reload_conv_objs"
+	output="$libobj"
+	func_execute_cmds "$reload_cmds" 'exit $?'
+      fi
+
+      if test -n "$gentop"; then
+	func_show_eval '${RM}r "$gentop"'
+      fi
+
+      exit $EXIT_SUCCESS
+      ;;
+
+    prog)
+      case $host in
+	*cygwin*) func_stripname '' '.exe' "$output"
+	          output=$func_stripname_result.exe;;
+      esac
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for programs"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for programs"
+
+      test "$preload" = yes \
+        && test "$dlopen_support" = unknown \
+	&& test "$dlopen_self" = unknown \
+	&& test "$dlopen_self_static" = unknown && \
+	  func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+      case $host in
+      *-*-rhapsody* | *-*-darwin1.[012])
+	# On Rhapsody replace the C library with the System framework
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	;;
+      esac
+
+      case $host in
+      *-*-darwin*)
+	# Don't allow lazy linking: it breaks C++ global constructors,
+	# but it is supposedly fixed on 10.4 or later (yay!).
+	if test "$tagname" = CXX ; then
+	  case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+	    10.[0123])
+	      compile_command+=" ${wl}-bind_at_load"
+	      finalize_command+=" ${wl}-bind_at_load"
+	    ;;
+	  esac
+	fi
+	# Time to change all our "foo.ltframework" stuff back to "-framework foo"
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	;;
+      esac
+
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $compile_deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs+=" -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $compile_deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs+=" $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs+=" $deplib" ;;
+	esac
+      done
+      compile_deplibs="$new_libs"
+
+
+      compile_command+=" $compile_deplibs"
+      finalize_command+=" $finalize_deplibs"
+
+      if test -n "$rpath$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	for libdir in $rpath $xrpath; do
+	  # This is the magic to use -rpath.
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath+=" $libdir" ;;
+	  esac
+	done
+      fi
+
+      # Now hardcode the library paths
+      rpath=
+      hardcode_libdirs=
+      for libdir in $compile_rpath $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath+=" $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) perm_rpath+=" $libdir" ;;
+	  esac
+	fi
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$libdir:"*) ;;
+	  ::) dllsearchpath=$libdir;;
+	  *) dllsearchpath+=":$libdir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath+=":$testbindir";;
+	  esac
+	  ;;
+	esac
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      compile_rpath="$rpath"
+
+      rpath=
+      hardcode_libdirs=
+      for libdir in $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath+=" $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$finalize_perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_perm_rpath+=" $libdir" ;;
+	  esac
+	fi
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      finalize_rpath="$rpath"
+
+      if test -n "$libobjs" && test "$build_old_libs" = yes; then
+	# Transform all the library objects into standard objects.
+	compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+      fi
+
+      func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
+
+      # template prelinking step
+      if test -n "$prelink_cmds"; then
+	func_execute_cmds "$prelink_cmds" 'exit $?'
+      fi
+
+      wrappers_required=yes
+      case $host in
+      *cegcc* | *mingw32ce*)
+        # Disable wrappers for cegcc and mingw32ce hosts; we are cross-compiling anyway.
+        wrappers_required=no
+        ;;
+      *cygwin* | *mingw* )
+        if test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      *)
+        if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      esac
+      if test "$wrappers_required" = no; then
+	# Replace the output file specification.
+	compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	link_command="$compile_command$compile_rpath"
+
+	# We have no uninstalled library dependencies, so finalize right now.
+	exit_status=0
+	func_show_eval "$link_command" 'exit_status=$?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	# Delete the generated files.
+	if test -f "$output_objdir/${outputname}S.${objext}"; then
+	  func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+	fi
+
+	exit $exit_status
+      fi
+
+      if test -n "$compile_shlibpath$finalize_shlibpath"; then
+	compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+      fi
+      if test -n "$finalize_shlibpath"; then
+	finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+      fi
+
+      compile_var=
+      finalize_var=
+      if test -n "$runpath_var"; then
+	if test -n "$perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $perm_rpath; do
+	    rpath+="$dir:"
+	  done
+	  compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+	if test -n "$finalize_perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $finalize_perm_rpath; do
+	    rpath+="$dir:"
+	  done
+	  finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+      fi
+
+      if test "$no_install" = yes; then
+	# We don't need to create a wrapper script.
+	link_command="$compile_var$compile_command$compile_rpath"
+	# Replace the output file specification.
+	link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	# Delete the old output file.
+	$opt_dry_run || $RM $output
+	# Link the executable and exit
+	func_show_eval "$link_command" 'exit $?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$hardcode_action" = relink; then
+	# Fast installation is not supported
+	link_command="$compile_var$compile_command$compile_rpath"
+	relink_command="$finalize_var$finalize_command$finalize_rpath"
+
+	func_warning "this platform does not like uninstalled shared libraries"
+	func_warning "\`$output' will be relinked during installation"
+      else
+	if test "$fast_install" != no; then
+	  link_command="$finalize_var$compile_command$finalize_rpath"
+	  if test "$fast_install" = yes; then
+	    relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+	  else
+	    # fast_install is set to needless
+	    relink_command=
+	  fi
+	else
+	  link_command="$compile_var$compile_command$compile_rpath"
+	  relink_command="$finalize_var$finalize_command$finalize_rpath"
+	fi
+      fi
+
+      # Replace the output file specification.
+      link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+      # Delete the old output files.
+      $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+      func_show_eval "$link_command" 'exit $?'
+
+      if test -n "$postlink_cmds"; then
+	func_to_tool_file "$output_objdir/$outputname"
+	postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	func_execute_cmds "$postlink_cmds" 'exit $?'
+      fi
+
+      # Now create the wrapper script.
+      func_verbose "creating $output"
+
+      # Quote the relink command for shipping.
+      if test -n "$relink_command"; then
+	# Preserve any variables that may affect compiler behavior
+	for var in $variables_saved_for_relink; do
+	  if eval test -z \"\${$var+set}\"; then
+	    relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	  elif eval var_value=\$$var; test -z "$var_value"; then
+	    relink_command="$var=; export $var; $relink_command"
+	  else
+	    func_quote_for_eval "$var_value"
+	    relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	  fi
+	done
+	relink_command="(cd `pwd`; $relink_command)"
+	relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      fi
+
+      # Only actually do things if not in dry run mode.
+      $opt_dry_run || {
+	# win32 will think the script is a binary if it has
+	# a .exe suffix, so we strip it off here.
+	case $output in
+	  *.exe) func_stripname '' '.exe' "$output"
+	         output=$func_stripname_result ;;
+	esac
+	# test for cygwin because mv fails w/o .exe extensions
+	case $host in
+	  *cygwin*)
+	    exeext=.exe
+	    func_stripname '' '.exe' "$outputname"
+	    outputname=$func_stripname_result ;;
+	  *) exeext= ;;
+	esac
+	case $host in
+	  *cygwin* | *mingw* )
+	    func_dirname_and_basename "$output" "" "."
+	    output_name=$func_basename_result
+	    output_path=$func_dirname_result
+	    cwrappersource="$output_path/$objdir/lt-$output_name.c"
+	    cwrapper="$output_path/$output_name.exe"
+	    $RM $cwrappersource $cwrapper
+	    trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_cwrapperexe_src > $cwrappersource
+
+	    # The wrapper executable is built using the $host compiler,
+	    # because it contains $host paths and files. If cross-
+	    # compiling, it, like the target executable, must be
+	    # executed on the $host or under an emulation environment.
+	    $opt_dry_run || {
+	      $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+	      $STRIP $cwrapper
+	    }
+
+	    # Now, create the wrapper script for func_source use:
+	    func_ltwrapper_scriptname $cwrapper
+	    $RM $func_ltwrapper_scriptname_result
+	    trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+	    $opt_dry_run || {
+	      # note: this script will not be executed, so do not chmod.
+	      if test "x$build" = "x$host" ; then
+		$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+	      else
+		func_emit_wrapper no > $func_ltwrapper_scriptname_result
+	      fi
+	    }
+	  ;;
+	  * )
+	    $RM $output
+	    trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_wrapper no > $output
+	    chmod +x $output
+	  ;;
+	esac
+      }
+      exit $EXIT_SUCCESS
+      ;;
+    esac
+
+    # See if we need to build an old-fashioned archive.
+    for oldlib in $oldlibs; do
+
+      if test "$build_libtool_libs" = convenience; then
+	oldobjs="$libobjs_save $symfileobj"
+	addlibs="$convenience"
+	build_libtool_libs=no
+      else
+	if test "$build_libtool_libs" = module; then
+	  oldobjs="$libobjs_save"
+	  build_libtool_libs=no
+	else
+	  oldobjs="$old_deplibs $non_pic_objects"
+	  if test "$preload" = yes && test -f "$symfileobj"; then
+	    oldobjs+=" $symfileobj"
+	  fi
+	fi
+	addlibs="$old_convenience"
+      fi
+
+      if test -n "$addlibs"; then
+	gentop="$output_objdir/${outputname}x"
+	generated+=" $gentop"
+
+	func_extract_archives $gentop $addlibs
+	oldobjs+=" $func_extract_archives_result"
+      fi
+
+      # Do each command in the archive commands.
+      if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
+	cmds=$old_archive_from_new_cmds
+      else
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  oldobjs+=" $func_extract_archives_result"
+	fi
+
+	# POSIX demands no paths to be encoded in archives.  We have
+	# to avoid creating archives with duplicate basenames if we
+	# might have to extract them afterwards, e.g., when creating a
+	# static archive out of a convenience library, or when linking
+	# the entirety of a libtool archive into another (currently
+	# not supported by libtool).
+	if (for obj in $oldobjs
+	    do
+	      func_basename "$obj"
+	      $ECHO "$func_basename_result"
+	    done | sort | sort -uc >/dev/null 2>&1); then
+	  :
+	else
+	  echo "copying selected object files to avoid basename conflicts..."
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+	  func_mkdir_p "$gentop"
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  counter=1
+	  for obj in $save_oldobjs
+	  do
+	    func_basename "$obj"
+	    objbase="$func_basename_result"
+	    case " $oldobjs " in
+	    " ") oldobjs=$obj ;;
+	    *[\ /]"$objbase "*)
+	      while :; do
+		# Make sure we don't pick an alternate name that also
+		# overlaps.
+		newobj=lt$counter-$objbase
+		func_arith $counter + 1
+		counter=$func_arith_result
+		case " $oldobjs " in
+		*[\ /]"$newobj "*) ;;
+		*) if test ! -f "$gentop/$newobj"; then break; fi ;;
+		esac
+	      done
+	      func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+	      oldobjs+=" $gentop/$newobj"
+	      ;;
+	    *) oldobjs+=" $obj" ;;
+	    esac
+	  done
+	fi
+	func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+	tool_oldlib=$func_to_tool_file_result
+	eval cmds=\"$old_archive_cmds\"
+
+	func_len " $cmds"
+	len=$func_len_result
+	if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  cmds=$old_archive_cmds
+	elif test -n "$archiver_list_spec"; then
+	  func_verbose "using command file archive linking..."
+	  for obj in $oldobjs
+	  do
+	    func_to_tool_file "$obj"
+	    $ECHO "$func_to_tool_file_result"
+	  done > $output_objdir/$libname.libcmd
+	  func_to_tool_file "$output_objdir/$libname.libcmd"
+	  oldobjs=" $archiver_list_spec$func_to_tool_file_result"
+	  cmds=$old_archive_cmds
+	else
+	  # the command line is too long to link in one step, link in parts
+	  func_verbose "using piecewise archive linking..."
+	  save_RANLIB=$RANLIB
+	  RANLIB=:
+	  objlist=
+	  concat_cmds=
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  # Is there a better way of finding the last object in the list?
+	  for obj in $save_oldobjs
+	  do
+	    last_oldobj=$obj
+	  done
+	  eval test_cmds=\"$old_archive_cmds\"
+	  func_len " $test_cmds"
+	  len0=$func_len_result
+	  len=$len0
+	  for obj in $save_oldobjs
+	  do
+	    func_len " $obj"
+	    func_arith $len + $func_len_result
+	    len=$func_arith_result
+	    objlist+=" $obj"
+	    if test "$len" -lt "$max_cmd_len"; then
+	      :
+	    else
+	      # the above command should be used before it gets too long
+	      oldobjs=$objlist
+	      if test "$obj" = "$last_oldobj" ; then
+		RANLIB=$save_RANLIB
+	      fi
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+	      objlist=
+	      len=$len0
+	    fi
+	  done
+	  RANLIB=$save_RANLIB
+	  oldobjs=$objlist
+	  if test "X$oldobjs" = "X" ; then
+	    eval cmds=\"\$concat_cmds\"
+	  else
+	    eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+	  fi
+	fi
+      fi
+      func_execute_cmds "$cmds" 'exit $?'
+    done
+
+    test -n "$generated" && \
+      func_show_eval "${RM}r$generated"
+
+    # Now create the libtool archive.
+    case $output in
+    *.la)
+      old_library=
+      test "$build_old_libs" = yes && old_library="$libname.$libext"
+      func_verbose "creating $output"
+
+      # Preserve any variables that may affect compiler behavior
+      for var in $variables_saved_for_relink; do
+	if eval test -z \"\${$var+set}\"; then
+	  relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	elif eval var_value=\$$var; test -z "$var_value"; then
+	  relink_command="$var=; export $var; $relink_command"
+	else
+	  func_quote_for_eval "$var_value"
+	  relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	fi
+      done
+      # Quote the link command for shipping.
+      relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+      relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      if test "$hardcode_automatic" = yes ; then
+	relink_command=
+      fi
+
+      # Only create the output if not a dry run.
+      $opt_dry_run || {
+	for installed in no yes; do
+	  if test "$installed" = yes; then
+	    if test -z "$install_libdir"; then
+	      break
+	    fi
+	    output="$output_objdir/$outputname"i
+	    # Replace all uninstalled libtool libraries with the installed ones
+	    newdependency_libs=
+	    for deplib in $dependency_libs; do
+	      case $deplib in
+	      *.la)
+		func_basename "$deplib"
+		name="$func_basename_result"
+		func_resolve_sysroot "$deplib"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$deplib' is not a valid libtool archive"
+		newdependency_libs+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      -L*)
+		func_stripname -L '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		newdependency_libs+=" -L$func_replace_sysroot_result"
+		;;
+	      -R*)
+		func_stripname -R '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		newdependency_libs+=" -R$func_replace_sysroot_result"
+		;;
+	      *) newdependency_libs+=" $deplib" ;;
+	      esac
+	    done
+	    dependency_libs="$newdependency_libs"
+	    newdlfiles=
+
+	    for lib in $dlfiles; do
+	      case $lib in
+	      *.la)
+	        func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlfiles+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      *) newdlfiles+=" $lib" ;;
+	      esac
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+	      *.la)
+		# Only pass preopened files to the pseudo-archive (for
+		# eventual linking with the app. that links it) if we
+		# didn't already link the preopened objects directly into
+		# the library:
+		func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlprefiles+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      esac
+	    done
+	    dlprefiles="$newdlprefiles"
+	  else
+	    newdlfiles=
+	    for lib in $dlfiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlfiles+=" $abs"
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlprefiles+=" $abs"
+	    done
+	    dlprefiles="$newdlprefiles"
+	  fi
+	  $RM $output
+	  # place dlname in correct position for cygwin
+	  # In fact, it would be nice if we could use this code for all target
+	  # systems that can't hard-code library paths into their executables
+	  # and that have no shared library path variable independent of PATH,
+	  # but it turns out we can't easily determine that from inspecting
+	  # libtool variables, so we have to hard-code the OSs to which it
+	  # applies here; at the moment, that means platforms that use the PE
+	  # object format with DLL files.  See the long comment at the top of
+	  # tests/bindir.at for full details.
+	  tdlname=$dlname
+	  case $host,$output,$installed,$module,$dlname in
+	    *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+	      # If a -bindir argument was supplied, place the dll there.
+	      if test "x$bindir" != x ;
+	      then
+		func_relative_path "$install_libdir" "$bindir"
+		tdlname=$func_relative_path_result$dlname
+	      else
+		# Otherwise fall back on heuristic.
+		tdlname=../bin/$dlname
+	      fi
+	      ;;
+	  esac
+	  $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that cannot go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+	  if test "$installed" = no && test "$need_relink" = yes; then
+	    $ECHO >> $output "\
+relink_command=\"$relink_command\""
+	  fi
+	done
+      }
+
+      # Do a symbolic link so that the libtool archive can be found in
+      # LD_LIBRARY_PATH before the program is installed.
+      func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+      ;;
+    esac
+    exit $EXIT_SUCCESS
+}
+
+{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
+    func_mode_link ${1+"$@"}
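+
+# --- Editorial illustration (not part of upstream libtool 2.4.2) ----------
+# The here-document near the end of func_mode_link writes the .la file as a
+# plain list of shell assignments that later runs re-read via func_source.
+# A hypothetical libfoo.la produced this way might look like:
+#
+#   dlname='libfoo.so.1'
+#   library_names='libfoo.so.1.0.0 libfoo.so.1 libfoo.so'
+#   old_library='libfoo.a'
+#   dependency_libs=' -L/usr/local/lib -lm'
+#   installed=no
+#   libdir='/usr/local/lib'
+# ---------------------------------------------------------------------------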
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+    $opt_debug
+    RM="$nonopt"
+    files=
+    rmforce=
+    exit_status=0
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    for arg
+    do
+      case $arg in
+      -f) RM+=" $arg"; rmforce=yes ;;
+      -*) RM+=" $arg" ;;
+      *) files+=" $arg" ;;
+      esac
+    done
+
+    test -z "$RM" && \
+      func_fatal_help "you must specify an RM program"
+
+    rmdirs=
+
+    for file in $files; do
+      func_dirname "$file" "" "."
+      dir="$func_dirname_result"
+      if test "X$dir" = X.; then
+	odir="$objdir"
+      else
+	odir="$dir/$objdir"
+      fi
+      func_basename "$file"
+      name="$func_basename_result"
+      test "$opt_mode" = uninstall && odir="$dir"
+
+      # Remember odir for removal later, being careful to avoid duplicates
+      if test "$opt_mode" = clean; then
+	case " $rmdirs " in
+	  *" $odir "*) ;;
+	  *) rmdirs+=" $odir" ;;
+	esac
+      fi
+
+      # Don't error if the file doesn't exist and rm -f was used.
+      if { test -L "$file"; } >/dev/null 2>&1 ||
+	 { test -h "$file"; } >/dev/null 2>&1 ||
+	 test -f "$file"; then
+	:
+      elif test -d "$file"; then
+	exit_status=1
+	continue
+      elif test "$rmforce" = yes; then
+	continue
+      fi
+
+      rmfiles="$file"
+
+      case $name in
+      *.la)
+	# Possibly a libtool archive, so verify it.
+	if func_lalib_p "$file"; then
+	  func_source $dir/$name
+
+	  # Delete the libtool libraries and symlinks.
+	  for n in $library_names; do
+	    rmfiles+=" $odir/$n"
+	  done
+	  test -n "$old_library" && rmfiles+=" $odir/$old_library"
+
+	  case "$opt_mode" in
+	  clean)
+	    case " $library_names " in
+	    *" $dlname "*) ;;
+	    *) test -n "$dlname" && rmfiles+=" $odir/$dlname" ;;
+	    esac
+	    test -n "$libdir" && rmfiles+=" $odir/$name $odir/${name}i"
+	    ;;
+	  uninstall)
+	    if test -n "$library_names"; then
+	      # Do each command in the postuninstall commands.
+	      func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+
+	    if test -n "$old_library"; then
+	      # Do each command in the old_postuninstall commands.
+	      func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+	    # FIXME: should reinstall the best remaining shared library.
+	    ;;
+	  esac
+	fi
+	;;
+
+      *.lo)
+	# Possibly a libtool object, so verify it.
+	if func_lalib_p "$file"; then
+
+	  # Read the .lo file
+	  func_source $dir/$name
+
+	  # Add PIC object to the list of files to remove.
+	  if test -n "$pic_object" &&
+	     test "$pic_object" != none; then
+	    rmfiles+=" $dir/$pic_object"
+	  fi
+
+	  # Add non-PIC object to the list of files to remove.
+	  if test -n "$non_pic_object" &&
+	     test "$non_pic_object" != none; then
+	    rmfiles+=" $dir/$non_pic_object"
+	  fi
+	fi
+	;;
+
+      *)
+	if test "$opt_mode" = clean ; then
+	  noexename=$name
+	  case $file in
+	  *.exe)
+	    func_stripname '' '.exe' "$file"
+	    file=$func_stripname_result
+	    func_stripname '' '.exe' "$name"
+	    noexename=$func_stripname_result
+	    # $file with .exe has already been added to rmfiles,
+	    # add $file without .exe
+	    rmfiles+=" $file"
+	    ;;
+	  esac
+	  # Do a test to see if this is a libtool program.
+	  if func_ltwrapper_p "$file"; then
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      relink_command=
+	      func_source $func_ltwrapper_scriptname_result
+	      rmfiles+=" $func_ltwrapper_scriptname_result"
+	    else
+	      relink_command=
+	      func_source $dir/$noexename
+	    fi
+
+	    # note $name still contains .exe if it was in $file originally
+	    # as does the version of $file that was added into $rmfiles
+	    rmfiles+=" $odir/$name $odir/${name}S.${objext}"
+	    if test "$fast_install" = yes && test -n "$relink_command"; then
+	      rmfiles+=" $odir/lt-$name"
+	    fi
+	    if test "X$noexename" != "X$name" ; then
+	      rmfiles+=" $odir/lt-${noexename}.c"
+	    fi
+	  fi
+	fi
+	;;
+      esac
+      func_show_eval "$RM $rmfiles" 'exit_status=1'
+    done
+
+    # Try to remove the ${objdir}s in the directories where we deleted files
+    for dir in $rmdirs; do
+      if test -d "$dir"; then
+	func_show_eval "rmdir $dir >/dev/null 2>&1"
+      fi
+    done
+
+    exit $exit_status
+}
+
+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
+    func_mode_uninstall ${1+"$@"}
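+
+# --- Editorial illustration (not part of upstream libtool 2.4.2) ----------
+# func_mode_uninstall serves both "clean" and "uninstall".  For example, a
+# hypothetical invocation such as
+#
+#   libtool --mode=clean rm -f libfoo.la
+#
+# sources libfoo.la and removes the .la file itself plus the shared and
+# static artifacts recorded in it ($library_names, $old_library) from the
+# .libs object directory, using the RM command given on the command line.
+# ---------------------------------------------------------------------------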
+
+test -z "$opt_mode" && {
+  help="$generic_help"
+  func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+  func_fatal_help "invalid operation mode \`$opt_mode'"
+
+if test -n "$exec_cmd"; then
+  eval exec "$exec_cmd"
+  exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# in which we disable both kinds of libraries.  Given conflicting
+# choices, we go for a static library, which is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them.  This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fall back to a static-only
+# configuration.  But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+# vi:sw=2
+
+
+# ### BEGIN LIBTOOL TAG CONFIG: CXX
+
+# The linker used to build libraries.
+LD="/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC="g++"
+
+# Is the compiler the GNU compiler?
+with_gcc=yes
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=" -fno-builtin"
+
+# Additional compiler flags for building library objects.
+pic_flag=" -fPIC -DPIC"
+
+# How to pass a linker flag through the compiler.
+wl="-Wl,"
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag="-static"
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec="\${wl}--export-dynamic"
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec="\${wl}--whole-archive\$convenience \${wl}--no-whole-archive"
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object="no"
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds="\$CC \$pic_flag -shared -nostdlib \$predep_objects \$libobjs \$deplibs \$postdep_objects \$compiler_flags \${wl}-soname \$wl\$soname -o \$lib"
+archive_expsym_cmds="\$CC \$pic_flag -shared -nostdlib \$predep_objects \$libobjs \$deplibs \$postdep_objects \$compiler_flags \${wl}-soname \$wl\$soname \${wl}-retain-symbols-file \$wl\$export_symbols -o \$lib"
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld="yes"
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec="\${wl}-rpath \${wl}\$libdir"
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e. impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=unsupported
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=immediate
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs="/usr/lib/gcc/i686-linux-gnu/4.6 /usr/lib/gcc/i686-linux-gnu/4.6/../../../i386-linux-gnu /usr/lib/gcc/i686-linux-gnu/4.6/../../../../lib /lib/i386-linux-gnu /lib/../lib /usr/lib/i386-linux-gnu /usr/lib/../lib /usr/lib/gcc/i686-linux-gnu/4.6/../../.."
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects="/usr/lib/gcc/i686-linux-gnu/4.6/../../../i386-linux-gnu/crti.o /usr/lib/gcc/i686-linux-gnu/4.6/crtbeginS.o"
+postdep_objects="/usr/lib/gcc/i686-linux-gnu/4.6/crtendS.o /usr/lib/gcc/i686-linux-gnu/4.6/../../../i386-linux-gnu/crtn.o"
+predeps=""
+postdeps="-lstdc++ -lm -lgcc_s -lc -lgcc_s"
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path="-L/usr/lib/gcc/i686-linux-gnu/4.6 -L/usr/lib/gcc/i686-linux-gnu/4.6/../../../i386-linux-gnu -L/usr/lib/gcc/i686-linux-gnu/4.6/../../../../lib -L/lib/i386-linux-gnu -L/lib/../lib -L/usr/lib/i386-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/i686-linux-gnu/4.6/../../.."
+
+# ### END LIBTOOL TAG CONFIG: CXX
+
+# ### BEGIN LIBTOOL TAG CONFIG: F77
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e. impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: F77
+
+# ### BEGIN LIBTOOL TAG CONFIG: FC
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e. impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: FC
+
+# ### BEGIN LIBTOOL TAG CONFIG: GO
+
+# The linker used to build libraries.
+LD="/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e. impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: GO
+
+# ### BEGIN LIBTOOL TAG CONFIG: GCJ
+
+# The linker used to build libraries.
+LD="/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e. impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: GCJ
+
+# ### BEGIN LIBTOOL TAG CONFIG: RC
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=""
+reload_cmds=""
+
+# Commands used to build an old-style archive.
+old_archive_cmds=""
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e. impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: RC
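+
+# --- Editorial illustration (not part of the generated script) -------------
+# The TAG CONFIG blocks above are never executed when this script runs (it
+# exits earlier); libtool extracts and evaluates the block for the selected
+# tag (e.g. --tag=CXX) on demand, roughly:
+#
+#   tagname=CXX
+#   extractedcf=`$SED -n \
+#     "/^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$/,/^# ### END LIBTOOL TAG CONFIG: $tagname\$/p" \
+#     < "$progpath"`
+#   eval "$extractedcf"
+# ---------------------------------------------------------------------------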
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/libtoolize b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/libtoolize
new file mode 100755
index 0000000..44d8b0e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/libtoolize
@@ -0,0 +1,2555 @@
+#! /bin/sh
+# Generated from libtoolize.m4sh by GNU Autoconf 2.68.
+
+# libtoolize (GNU libtool) 2.4.2
+# Written by Gary V. Vaughan <gary@gnu.org>, 2003
+
+# Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# Libtoolize is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Libtoolize is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with libtoolize; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]...
+#
+# Prepare a package to use libtool.
+#
+#   -c, --copy          copy files rather than symlinking them
+#       --debug         enable verbose shell tracing
+#   -n, --dry-run       print commands rather than running them
+#   -f, --force         replace existing files
+#   -i, --install       copy missing auxiliary files
+#       --ltdl[=DIR]    install libltdl sources [default: libltdl]
+#       --no-warn       don't display warning messages
+#       --nonrecursive  prepare ltdl for non-recursive make
+#   -q, --quiet         work silently
+#       --recursive     prepare ltdl for recursive make
+#       --subproject    prepare ltdl to configure and build independently
+#   -v, --verbose       verbosely report processing
+#       --version       print version information and exit
+#   -h, --help          print short or long help message
+#
+# The following space or comma delimited options can be passed to $progname
+# via the environment variable LIBTOOLIZE_OPTIONS; unknown environment
+# options are ignored:
+#
+#   --debug             enable verbose shell tracing
+#   --no-warn           don't display warning messages
+#   --quiet             work silently
+#   --verbose           verbosely report processing
+#
+# You must `cd' to the top directory of your package before you run
+# `$progname'.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#       host-triplet:	i686-pc-linux-gnu
+#       $progname:	(GNU libtool) 2.4.2
+#       automake:		$automake_version
+#       autoconf:		$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+: ${TAR=tar}
+
+PROGRAM=libtoolize
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${EGREP="/bin/grep -E"}
+: ${FGREP="/bin/grep -F"}
+: ${GREP="/bin/grep"}
+: ${LN_S="ln -s"}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SED="/bin/sed"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+} # func_dirname may be replaced by extended shell implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+} # func_basename may be replaced by extended shell implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    # Extract subdirectory from the argument.
+    func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+    func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+} # func_dirname_and_basename may be replaced by extended shell implementation
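+
+# Illustrative sketch (not part of the upstream script): with the default
+# $dirname/$basename sed expressions above, a single call splits a path:
+#   func_dirname_and_basename "/usr/share/aclocal/libtool.m4"
+#   # func_dirname_result  -> /usr/share/aclocal
+#   # func_basename_result -> libtool.m4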
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    case ${2} in
+      .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+      *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+    esac
+} # func_stripname may be replaced by extended shell implementation
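+
+# Illustrative sketch (not part of the upstream script): strip a known
+# prefix and suffix from a name, for example:
+#   func_stripname 'lib' '.la' 'libltdl.la'
+#   # func_stripname_result -> ltdl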
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
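+
+# Illustrative sketch (not part of the upstream script): doubled slashes,
+# "." components and ".." components are all cancelled out, for example:
+#   func_normal_abspath "/usr//local/./lib/../include"
+#   # func_normal_abspath_result -> /usr/local/include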
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
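+
+# Illustrative sketch (not part of the upstream script): the result carries
+# a trailing slash so a file name can be appended directly, for example:
+#   func_relative_path "/usr/local/lib" "/usr/local/share/man"
+#   # func_relative_path_result -> ../share/man/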
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same.  If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$'; backslash-escape that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon-delimited
+	# list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
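+
+# Illustrative sketch (not part of the upstream script; the path is only an
+# example): every missing component of a nested path is created in order:
+#   func_mkdir_p "libltdl/libltdl/loaders"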
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
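+
+# Illustrative sketch (not part of the upstream script): callers capture the
+# directory name from stdout and remove the directory when they are done:
+#   my_tmpdir=`func_mktempdir`
+#   date > "$my_tmpdir/stamp"
+#   $RM -r "$my_tmpdir"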
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
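+
+# Illustrative sketch (not part of the upstream script): active characters
+# are backslashified, and whitespace triggers the surrounding double quotes:
+#   func_quote_for_eval 'hello $USER'
+#   # func_quote_for_eval_unquoted_result -> hello \$USER
+#   # func_quote_for_eval_result          -> "hello \$USER"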
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, unless opt_dry_run is
+# true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
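+
+# Illustrative sketch (not part of the upstream script): echo the command
+# (unless $opt_silent is true), run it unless in dry-run mode, and evaluate
+# FAIL_EXP if the command fails, for example:
+#   func_show_eval '$RM -r "$my_tmpdir"' 'exit $EXIT_FAILURE'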
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, unless opt_dry_run is
+# true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
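+
+# Illustrative sketch (not part of the upstream script): anything outside
+# a-zA-Z0-9_ becomes '_', and a leading digit gets a '_' prepended:
+#   func_tr_sh "2.4-libltdl"
+#   # func_tr_sh_result -> _2_4_libltdl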
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    my_sed_short_opt='1s/^\(..\).*$/\1/;q'
+    my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
+
+    func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
+    func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
+} # func_split_short_opt may be replaced by extended shell implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
+    my_sed_long_arg='1s/^--[^=]*=//'
+
+    func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
+    func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
+} # func_split_long_opt may be replaced by extended shell implementation
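+
+# Illustrative sketch (not part of the upstream script): these two splitters
+# feed the option parser further below, for example:
+#   func_split_long_opt --ltdl=libltdl
+#   # func_split_long_opt_name -> --ltdl   func_split_long_opt_arg -> libltdl
+#   func_split_short_opt -nq
+#   # func_split_short_opt_name -> -n      func_split_short_opt_arg -> q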
+
+exit_cmd=:
+
+
+
+
+
+# ltdl can be installed to be self-contained (subproject, the default);
+# or to be configured by a parent project, either with a recursive or
+# nonrecursive automake-driven make:
+ltdl_mode=
+
+# Locations for important files:
+ltdldir=
+
+# Parse environment options
+{
+  my_sed_env_opt='1s/^\([^,:; ]*\).*$/\1/;q'
+  my_sed_env_rest='1s/^[^,:; ]*[,:; ]*\(.*\)$/\1/;q'
+
+  while test -n "$LIBTOOLIZE_OPTIONS"; do
+    opt=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_opt"`
+    LIBTOOLIZE_OPTIONS=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_rest"`
+
+    case $opt in
+      --debug|--no-warn|--quiet|--verbose)
+		envopts="${envopts+$envopts }$opt"			  ;;
+      --*)	env_warning="${env_warning+$env_warning
+}unrecognized environment option \`$opt'" 				  ;;
+      *)	func_fatal_help "garbled LIBTOOLIZE_OPTIONS near \`$opt'" ;;
+    esac
+  done
+
+  test -n "$envopts" && {
+    func_quote_for_eval "$envopts"
+    eval set dummy "$func_quote_for_eval_result" ${1+"$@"}
+    shift
+  }
+}
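+
+# Illustrative sketch (not part of the upstream script): entries in
+# LIBTOOLIZE_OPTIONS are split on ',', ':', ';' or spaces, so for example
+#   LIBTOOLIZE_OPTIONS=--quiet:--no-warn
+# behaves as if --quiet and --no-warn had been passed on the command line.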
+
+
+
+# Option defaults:
+opt_debug=:
+opt_copy=false
+opt_force=false
+opt_install=false
+opt_dry_run=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+opt_nonrecursive=false
+opt_recursive=false
+opt_standalone=false
+opt_ltdl="false"
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --copy|-c)
+			opt_copy=:
+			;;
+      --force|-f)
+			opt_force=:
+			;;
+      --install|-i)
+			opt_install=:
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+CP="func_echo_all $CP"
+test -n "$LN_S" && LN_S="func_echo_all $LN_S"
+MKDIR="func_echo_all $MKDIR"
+RM="func_echo_all $RM"
+TAR="func_echo_all $TAR"
+			;;
+      --quiet|--automake|-q)
+			opt_quiet=:
+			;;
+      --verbose|-v)
+			opt_verbose=:
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+			;;
+      --nonrecursive|--non-recursive)
+			opt_nonrecursive=:
+			;;
+      --recursive)
+			opt_recursive=:
+			;;
+      --standalone)
+			opt_standalone=:
+			;;
+      --ltdl)
+			optarg="$1"
+			if test $# -gt 0; then
+			    case $optarg in # ((
+			        -*) ;;
+			        *) opt_ltdl="$optarg"; shift ;;
+			    esac
+			fi
+# This is tricky, since we're overloading $opt_ltdl to be the
+# optarg for --ltdl during option processing, but then stashing
+# the (optional) optarg in $ltdldir and reusing $opt_ltdl to
+# indicate that --ltdl was seen during option processing.  Also,
+# be careful that --ltdl=foo --ltdl=bar results in ltdldir=bar:
+case $opt_ltdl in
+          false|:) ;;  # a bare '--ltdl' followed by another option
+  *)       ltdldir=`$ECHO "$optarg" | $SED 's,/*$,,'` ;;
+esac
+opt_ltdl=:
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-c*|-f*|-i*|-n*|-q*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # show any warnings saved by LIBTOOLIZE_OPTIONS parsing
+  test -n "$env_warning" &&
+    echo "$env_warning" |while read line; do func_warning "$line"; done
+
+  # validate $opt_nonrecursive, $opt_recursive and $opt_standalone
+  if $opt_nonrecursive; then
+    if $opt_recursive || $opt_standalone; then
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    fi
+    ltdl_mode=nonrecursive
+  elif $opt_recursive; then
+    $opt_standalone &&
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    ltdl_mode=recursive
+  elif $opt_standalone; then
+    ltdl_mode=standalone
+  fi
+
+  # any remaining arguments are an error
+  test $# -gt 0 &&
+    func_fatal_help "unknown additional arguments: \`${1+}'"
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+# func_echo_once msg_var
+# Calls func_echo with the value of MSG_VAR, and then sets MSG_VAR="" so
+# that subsequent calls have no effect.
+func_echo_once ()
+{
+    $opt_debug
+    if test -n "$1"; then
+      eval my_msg=\$$1
+
+      if test -n "$my_msg"; then
+        func_echo "$my_msg"
+        eval $1=""
+      fi
+    fi
+}
+
+
+# func_copy srcfile destfile [msg_var]
+# A wrapper for func_copy_cb that accepts arguments in the same order
+# as the cp(1) shell command.
+func_copy ()
+{
+    $opt_debug
+
+    test -f "$1" || \
+      { func_error "\`$1' not copied:  not a regular file"; return 1; }
+
+    func_dirname_and_basename "$1"
+    my_f1=$func_basename_result
+
+    if test -d "$2"; then
+
+      func_copy_cb "$my_f1" \
+	`$ECHO "$1" | $SED "$dirname"` "$2" "$3"
+
+    else
+
+      # Supporting this would mean changing the timestamp:
+      func_dirname_and_basename "$2"
+      my_tname=$func_basename_result
+      test "X$my_f1" = "X$my_tname" \
+        || func_fatal_error "func_copy() cannot change filename on copy"
+
+      func_copy_cb "$my_f1" \
+        `$ECHO "$1" | $SED "$dirname"` \
+        `$ECHO "$2" | $SED "$dirname"` \
+	"$3"
+
+    fi
+
+    return $copy_return_status # set in func_copy_cb
+}
+
+
+# func_copy_cb filename srcdir destdir [msg_var]
+# If option `--copy' was specified, or soft-linking SRCFILE to DESTFILE fails,
+# then try to copy SRCFILE to DESTFILE (without changing the timestamp if
+# possible).
+func_copy_cb ()
+{
+    $opt_debug
+    my_file="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    copy_return_status=1
+
+    # Libtool is probably misinstalled if this happens:
+    test -f "$my_srcdir/$my_file" ||
+        func_fatal_error "\`$my_file' not found in \`$my_srcdir'"
+
+    case $opt_verbose in
+      false) my_copy_msg="file \`$my_destdir/$my_file'"     ;;
+      *)     my_copy_msg="file from \`$my_srcdir/$my_file'" ;;
+    esac
+    func_mkdir_p `$ECHO "$my_destdir/$my_file" | $SED "$dirname"`
+
+    $RM "$my_destdir/$my_file"
+    if $opt_copy; then
+      if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+           | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1; } \
+	 && touch "$my_destdir/$my_file"; then
+	$opt_quiet || func_echo_once "$my_msg_var"
+	$opt_quiet || func_echo "copying $my_copy_msg"
+	copy_return_status=0
+      fi
+    else
+      if test "$my_file" = "aclocal.m4"; then
+	if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+	     | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1 ; }
+	then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "copying $my_copy_msg"
+	  copy_return_status=0
+	fi
+      else
+	if $LN_S "$my_srcdir/$my_file" "$my_destdir/$my_file"; then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "linking $my_copy_msg"
+	  copy_return_status=0
+	fi
+      fi
+    fi
+    if test "$copy_return_status" != 0; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      func_error "can not copy \`$my_srcdir/$my_file' to \`$my_destdir/'"
+      exit_status=$EXIT_FAILURE
+    fi
+}
+
+
+# func_copy_some_files srcfile_spec srcdir destdir [msg_var] [cb=func_copy_cb]
+# Call COPY_CB for each regular file in SRCDIR named by the ':' delimited
+# names in SRCFILE_SPEC.  The odd calling convention is needed to allow
+# spaces in file and directory names.
+func_copy_some_files ()
+{
+    $opt_debug
+    my_srcfile_spec="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_copy_cb="${5-func_copy_cb}"
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for my_filename in $my_srcfile_spec; do
+      IFS="$my_save_IFS"
+      if test -f "$my_srcdir/$my_filename"; then
+        if test "X$my_copy_cb" = Xfunc_copy_cb; then
+	  $opt_force || if test -f "$my_destdir/$my_filename"; then
+	    $opt_quiet || func_echo_once "$my_msg_var"
+	    $opt_quiet \
+	      || func_error "\`$my_destdir/$my_filename' exists: use \`--force' to overwrite"
+	    continue
+	  fi
+        fi
+      else
+	func_echo_once "$my_msg_var"
+	func_fatal_error "\`$my_filename' not found in \`$my_srcdir'"
+      fi
+
+      $my_copy_cb "$my_filename" "$my_srcdir" "$my_destdir" "$my_msg_var"
+    done
+    IFS="$my_save_IFS"
+}
+
+
+# func_fixup_Makefile srcfile srcdir destdir
+func_fixup_Makefile ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_fixup_non_subpackage_script="\
+      s,(LIBOBJS),(ltdl_LIBOBJS),g
+      s,(LTLIBOBJS),(ltdl_LTLIBOBJS),g
+      s,libltdl/configure.ac,,
+      s,libltdl/configure,,
+      s,libltdl/aclocal.m4,,
+      s,libltdl/config-h.in,,
+      s,libltdl/Makefile.am,,
+      s,libltdl/Makefile.in,,
+      /^[	 ]*\\\\\$/d"
+    case $my_filename in
+      Makefile.am)
+	my_fixup_non_subpackage_script=`echo "$my_fixup_non_subpackage_script" | \
+		sed 's,libltdl/,,'`
+	my_fixup_inc_paths_script= ;;
+      Makefile.inc)
+	repl=$ltdldir
+	repl_uscore=`$ECHO "$repl" | $SED 's,[/.+-],_,g'`
+	my_fixup_inc_paths_script="\
+	  s,libltdl_,@repl_uscore@_,
+	  s,libltdl/,@repl@/,
+	  s,: libltdl/,: @repl@/,
+	  s, -Ilibltdl , -I@repl@ ,
+	  s,\\\$(libltdl_,\$(@repl_uscore@_,
+	  s,)/libltdl ,)/@repl@ ,
+	  s,@repl_uscore@,${repl_uscore},g
+	  s,@repl@,${repl},g"
+	;;
+    esac
+
+    $RM "$my_destdir/$my_filename" 2>/dev/null
+    $opt_quiet || func_echo "creating file \`$my_destdir/$my_filename'"
+    if $opt_dry_run; then :;
+    else
+      $SED "$my_fixup_non_subpackage_script
+	    $my_fixup_inc_paths_script" \
+	< "$my_srcdir/$my_filename" > "$my_destdir/$my_filename" ||
+	func_fatal_error "cannot create $my_destdir/$my_filename"
+    fi
+}
+
+# func_scan_files
+# Scan configure.(ac|in) and aclocal.m4 (if present) for use of libltdl
+# and libtool, possibly running some of these tools if necessary.
+# Libtoolize affects the contents of aclocal.m4, and should be run before
+# aclocal, so we can't use configure --trace which relies on a consistent
+# configure.(ac|in) and aclocal.m4.
+func_scan_files ()
+{
+    $opt_debug
+    # Prefer configure.ac to configure.in
+    test -f configure.ac && configure_ac=configure.ac
+    test -f "$configure_ac" || configure_ac=
+
+    # Set local variables to reflect contents of configure.ac
+    my_sed_scan_configure_ac='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,;
+	/AC_INIT/ {
+	    s,^.*$,seen_autoconf=:,
+	    p
+	}
+	d'
+    test -z "$configure_ac" \
+        || eval `$SED "$my_sed_scan_configure_ac" "$configure_ac"`
+
+    $seen_autoconf || {
+	my_configure_ac=
+	test -n "$configure_ac" && my_configure_ac="$configure_ac: "
+        func_verbose "${my_configure_ac}not using Autoconf"
+
+	# Make sure ltdldir and ltdl_mode have sensible defaults
+        # since we return early here:
+	test -n "$ltdldir" || ltdldir=libltdl
+	test -n "$ltdl_mode" || ltdl_mode=subproject
+
+	return
+    }
+
+    # ---------------------------------------------------- #
+    # Probe macro usage in configure.ac and/or aclocal.m4. #
+    # ---------------------------------------------------- #
+
+    my_sed_traces='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,
+        s,^.*AC_REQUIRE(.*$,,; s,^.*m4_require(.*$,,;
+	s,^.*m4_define(.*$,,
+	s,^.*A[CU]_DEFUN(.*$,,; s,^.*m4_defun(.*$,,
+	/AC_CONFIG_AUX_DIR(/ {
+	    s,^.*AC_CONFIG_AUX_DIR([[	 ]*\([^])]*\).*$,ac_auxdir=\1,
+	    p
+        }
+	/AC_CONFIG_MACRO_DIR(/ {
+	    s,^.*AC_CONFIG_MACRO_DIR([[	 ]*\([^])]*\).*$,ac_macrodir=\1,
+	    p
+        }
+	/_LT_CONFIG_LTDL_DIR(/d
+	/LT_CONFIG_LTDL_DIR(/ {
+	    s,^.*LT_CONFIG_LTDL_DIR([[	 ]*\([^])]*\).*$,ac_ltdldir=\1,
+	    p
+	}
+	/\[A[CM]_PROG_LIBTOOL/d
+	/A[CM]_PROG_LIBTOOL/ {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/the.*option into.*LT_INIT.*parameter/d
+	/\[LT_INIT/d
+	/LT_INIT/		 {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/\[LTDL_INIT/d
+	/LTDL_INIT/          {
+	    s,^.*LTDL_INIT([[	 ]*\([^])]*\).*$,ltdl_options="\1",
+	    s,^.*LTDL_INIT[	 ]*$,seen_ltdl=:,
+	    p
+	}
+	/LT_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_LIB_LTDL/        {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	d'
+    eval `cat aclocal.m4 "$configure_ac" 2>/dev/null | $SED "$my_sed_traces"`
+
+
+    # ----------------- #
+    # Validate ltdldir. #
+    # ----------------- #
+
+    ac_ltdldir=`$ECHO "$ac_ltdldir" | $SED 's,/*$,,'`
+
+    # If $configure_ac contains AC_CONFIG_LTDL_DIR, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ac_ltdldir" in
+      *\$*)
+        func_fatal_error "can not handle variables in LT_CONFIG_LTDL_DIR"
+        ;;
+    esac
+
+    # If neither --ltdl nor LT_CONFIG_LTDL_DIR are specified, default to
+    # `libltdl'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given! (If LT_CONFIG_LTDL_DIR is not specified
+    # we suggest adding it later in this code.)
+    case x$ac_ltdldir,x$ltdldir in
+      x,x)	ltdldir=libltdl		;;
+      x*,x)	ltdldir=$ac_ltdldir	;;
+      x,x*)	ltdldir=$ltdldir	;;
+      *)
+        test x"$ac_ltdldir" = x"$ltdldir" || \
+	    func_fatal_error "--ltdl='$ltdldir' does not match LT_CONFIG_LTDL_DIR($ac_ltdldir)"
+	;;
+    esac
+
+
+    # ------------------- #
+    # Validate ltdl_mode. #
+    # ------------------- #
+
+    test -n "$ltdl_options" && seen_ltdl=:
+
+    # If $configure_ac contains LTDL_INIT, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ltdl_options" in
+      *\$*)
+        func_fatal_error "can not handle variables in LTDL_INIT"
+        ;;
+    esac
+
+    # Extract mode name from ltdl_options
+    # FIXME: Diagnose multiple conflicting modes in ltdl_options
+    ac_ltdl_mode=
+    case " $ltdl_options " in
+      *" nonrecursive "*)  ac_ltdl_mode=nonrecursive	;;
+      *" recursive "*)     ac_ltdl_mode=recursive	;;
+      *" subproject "*)    ac_ltdl_mode=subproject	;;
+    esac
+
+    # If neither --ltdl nor an LTDL_INIT mode are specified, default to
+    # `subproject'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given!
+    case x$ac_ltdl_mode,x$ltdl_mode in
+      x,x)	ltdl_mode=subproject	;;
+      x*,x)	ltdl_mode=$ac_ltdl_mode	;;
+      x,x*)	ltdl_mode=$ltdl_mode	;;
+      *)
+        test x"$ac_ltdl_mode" = x"$ltdl_mode" || \
+	    func_fatal_error "--$ltdl_mode does not match LTDL_INIT($ac_ltdl_mode)"
+	;;
+    esac
+
+    # ---------------- #
+    # Validate auxdir. #
+    # ---------------- #
+
+    if test -n "$ac_auxdir"; then
+      # If $configure_ac contains AC_CONFIG_AUX_DIR, check that it was
+      # not given in terms of a shell variable!
+      case "$ac_auxdir" in
+      *\$*)
+        func_fatal_error "can not handle variables in AC_CONFIG_AUX_DIR"
+        ;;
+      *)
+	auxdir=$ac_auxdir
+	;;
+      esac
+    else
+      # Try to discover auxdir the same way it is discovered by configure.
+      # Note that we default to the current directory.
+      for dir in . .. ../..; do
+        if test -f "$dir/install-sh"; then
+          auxdir=$dir
+          break
+        elif test -f "$dir/install.sh"; then
+          auxdir="$dir"
+          break
+        fi
+      done
+    fi
+
+    # Just use the current directory if all else fails.
+    test -n "$auxdir" || auxdir=.
+
+
+    # ------------------------------ #
+    # Find local m4 macro directory. #
+    # ------------------------------ #
+
+    # Hunt for ACLOCAL_AMFLAGS in `Makefile.am' for a `-I' argument.
+
+    my_sed_aclocal_flags='
+        /^[	 ]*ACLOCAL_[A-Z_]*FLAGS[	 ]*=[	 ]*/ {
+	    s,,,
+	    q
+	}
+	d'
+    if test -f Makefile.am; then
+      my_macrodir_is_next=false
+      for arg in `$SED "$my_sed_aclocal_flags" Makefile.am`; do
+        if $my_macrodir_is_next; then
+          am_macrodir="$arg"
+          break
+        else
+	  case $arg in
+	    -I) my_macrodir_is_next=: ;;
+	    -I*)
+	      am_macrodir=`$ECHO "$arg" | sed 's,^-I,,'`
+	      break
+	      ;;
+	    *) my_macrodir_is_next=false ;;
+	  esac
+        fi
+      done
+    fi
+
+    macrodir="$ac_macrodir"
+    test -z "$macrodir" && macrodir="$am_macrodir"
+
+    if test -n "$am_macrodir" && test -n "$ac_macrodir"; then
+      test "$am_macrodir" = "$ac_macrodir" \
+        || func_fatal_error "AC_CONFIG_MACRO_DIR([$ac_macrodir]) conflicts with ACLOCAL_AMFLAGS=-I $am_macrodir."
+    fi
+}
+
+# func_included_files searchfile
+# Output SEARCHFILE and, recursively, every file that it m4_includes.
+func_included_files ()
+{
+    $opt_debug
+    my_searchfile="$1"
+
+    my_include_regex=
+    my_sed_include='
+        /^m4_include(\[.*\])$/ {
+	    s,^m4_include(\[\(.*\)\])$,\1,
+	    p
+	}
+        d'
+
+    if test -f "$my_searchfile"; then
+      $ECHO "$my_searchfile"
+
+      # Only recurse when we don't care if all the variables we use get
+      # trashed, since they are in global scope.
+      for my_filename in `$SED "$my_sed_include" "$my_searchfile"`; do
+	func_included_files $my_filename
+      done
+    fi
+}
+
+
+# func_serial filename [macro_regex]
+# Output the value of the serial number comment in FILENAME, where the
+# comment line must also match MACRO_REGEX, if given.
+func_serial ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_macro_regex="$2"
+    my_sed_serial='
+	/^# serial [1-9][0-9.]*[	 ]*'"$my_macro_regex"'[	 ]*$/ {
+	    s,^# serial \([1-9][0-9.]*\).*$,\1,
+	    q
+	}
+	d'
+
+    # Search FILENAME and all the files it m4_includes for a serial number
+    # in the file that AC_DEFUNs MACRO_REGEX.
+    my_serial=
+    func_dirname_and_basename "$my_filename"
+    my_filebase=$func_basename_result
+    for my_file in `func_included_files "$my_filename"`; do
+      if test -z "$my_macro_regex" ||
+         test "$my_filename" = aclocal.m4 ||
+         test "X$my_macro_regex" = "X$my_filebase" ||
+         func_grep '^AC_DEFUN(\['"$my_macro_regex" "$my_file"
+      then
+        my_serial=`$SED -e "$my_sed_serial" "$my_file"`
+	break
+      fi
+    done
+
+    # If the file has no serial number, we assume it's ancient.
+    test -n "$my_serial" || my_serial=0
+
+    $ECHO "$my_serial"
+}
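+
+# Illustrative sketch (not part of the upstream script): for a hypothetical
+# macro file whose header contains a line such as
+#   # serial 6 foo.m4
+# the call `func_serial m4/foo.m4 foo.m4` would output "6".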
+
+
+# func_serial_max serial1 serial2
+# Compare (possibly multi-part, '.' delimited) serial numbers, and
+# return the largest in $func_serial_max_result.  If they are the
+# same, func_serial_max_result will be empty.
+func_serial_max ()
+{
+    $opt_debug
+    my_serial1="$1"
+    my_serial2="$2"
+
+    my_sed_dot='s/\..*$//g'
+    my_sed_rest='s/^[0-9][1-9]*\.*//'
+    my_sed_digits='s/[^0-9.]//g'
+
+    # In case they turn out to be the same, we'll set it to empty
+    func_serial_max_result=
+
+    test "X$1$2" = X`$ECHO "$1$2" | $SED "$my_sed_digits"` || {
+      func_error "serial numbers \`$1' or \`$2' contain non-digit chars"
+      return
+    }
+
+    while test -n "$my_serial1$my_serial2"; do
+      my_serial1_part=`$ECHO "$my_serial1" | $SED "$my_sed_dot"`
+      my_serial2_part=`$ECHO "$my_serial2" | $SED "$my_sed_dot"`
+
+      test -z "$my_serial1_part$my_serial2_part" \
+        && break
+
+      test -z "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      test -z "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial1_part" -gt "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial2_part" -gt "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      my_serial1=`$ECHO "$my_serial1" | $SED "$my_sed_rest"`
+      my_serial2=`$ECHO "$my_serial2" | $SED "$my_sed_rest"`
+    done
+}
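+
+# Illustrative sketch (not part of the upstream script): serials are compared
+# numerically, one dot-separated component at a time, for example:
+#   func_serial_max 2.4.2 2.10
+#   # func_serial_max_result -> 2.10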
+
+
+# func_serial_update_check srcfile src_serial destfile dest_serial
+# Unless SRC_SERIAL is newer than DEST_SERIAL, set
+# $func_serial_update_check_result to 'false'.
+func_serial_update_check ()
+{
+    $opt_debug
+    my_srcfile="$1"
+    my_src_serial="$2"
+    my_destfile="$3"
+    my_dest_serial="$4"
+    my_update_p=:
+
+    if test -f "$my_destfile"; then
+      test "X$my_src_serial" = "X0" && {
+        func_warning "no serial number on \`$my_srcfile', not copying."
+	return
+      }
+
+      # Determine whether the destination has an older serial.
+      func_serial_max "$my_src_serial" "$my_dest_serial"
+      test "X$my_src_serial" = "X$func_serial_max_result" || my_update_p=false
+
+      test "X$my_src_serial" = "X$func_serial_max_result" \
+        && func_verbose "\`$my_srcfile' is serial $my_src_serial, greater than $my_dest_serial in \`$my_destfile'"
+
+      if test "X$my_dest_serial" = "X$func_serial_max_result"; then
+        func_verbose "\`$my_srcfile' is serial $my_src_serial, less than $my_dest_serial in \`$my_destfile'"
+	$opt_force || if test -n "$ac_macrodir$ac_ltdldir"; then
+           func_error "\`$my_destfile' is newer: use \`--force' to overwrite"
+        fi
+      fi
+    fi
+
+    func_serial_update_check_result="$my_update_p"
+}
+
+
+# func_aclocal_update_check filename
+# Unless serial number of FILENAME is newer than the matching serial number
+# in aclocal.m4, set $func_aclocal_update_check_result to 'false'.
+func_aclocal_update_check ()
+{
+    $opt_debug
+    my_srcfile="$aclocaldir/$1"
+    my_destfile="aclocal.m4"
+
+    case $need in
+      libtool.m4)
+	my_src_serial=`func_serial "$my_srcfile" LT_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LT_INIT`
+
+	# Strictly, this libtoolize ought not to have to deal with ancient
+	# serial formats, but we accept them here to be complete:
+	test "X$my_src_serial" = "X0" &&
+	  my_src_serial=`func_serial "$my_srcfile" 'A[CM]_PROG_LIBTOOL'`
+	test "X$my_dest_serial" = "X0" &&
+	  my_dest_serial=`func_serial "$my_destfile" 'A[CM]_PROG_LIBTOOL'`
+	;;
+      ltdl.m4)
+	my_src_serial=`func_serial "$my_srcfile" LTDL_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LTDL_INIT`
+	;;
+      *)
+	my_src_serial=`func_serial "$my_srcfile" "$need"`
+	my_dest_serial=`func_serial "$my_destfile" "$need"`
+	;;
+    esac
+
+    func_serial_update_check \
+      "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+
+    func_aclocal_update_check_result="$func_serial_update_check_result"
+}
+
+
+# func_serial_update filename srcdir destdir [msg_var] [macro_re] [old_macro_re]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer serial number, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.  If given, MACRO_REGEX or
+# OLD_MACRO_REGEX must match any text after "# serial N" in both files.
+func_serial_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_macro_regex="$5"
+    my_old_macro_regex="$6"
+
+    my_serial_update_p=:
+    my_return_status=1
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`func_serial "$my_srcfile" "$my_macro_regex"`
+      my_dest_serial=`func_serial "$my_destfile" "$my_macro_regex"`
+
+      # Strictly, this libtoolize ought not to have to deal with ancient
+      # serial formats, but we accept them here to be complete:
+      test "X$my_src_serial" = "X0" &&
+        my_src_serial=`func_serial "$my_srcfile" "$my_old_macro_regex"`
+
+      test "X$my_dest_serial" = "X0" &&
+        my_dest_serial=`func_serial "$my_destfile" "$my_old_macro_regex"`
+
+      func_serial_update_check \
+        "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_serial_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_serial_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+      my_return_status=$?
+    elif $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      $opt_quiet \
+        || func_echo "\`$my_destfile' is already up to date."
+    fi
+
+    # Do this after the copy for hand-maintained `aclocal.m4', in case
+    # it has `m4_include([DESTFILE])', so the copy effectively already
+    # updated `aclocal.m4'.
+    my_included_files=`func_included_files aclocal.m4`
+    case `echo " $my_included_files " | $NL2SP` in
+
+      # Skip included files:
+      *" $my_destfile "*) ;;
+
+      # Otherwise compare to aclocal.m4 serial number (func_serial
+      # returns 0 for older macro serial numbers before we provided
+      # serial tags, so the update message will be correctly given
+      # if aclocal.m4 contains an untagged --i.e older-- macro file):
+      *)
+        if test -f aclocal.m4; then
+          func_serial_max \
+              "$my_src_serial" `func_serial aclocal.m4 "$my_macro_regex"`
+          if test "X$my_src_serial" = "X$func_serial_max_result"; then
+              func_echo_once "$my_msg_var"
+	      func_echo "You should add the contents of \`$my_destfile' to \`aclocal.m4'."
+          fi
+        fi
+        ;;
+    esac
+    return $my_return_status
+}
+
+
+# func_keyword_update filename srcdir destdir sed_script [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision according to the serial number extracted by
+# SED_SCRIPT, or DESTFILE does not yet exist, or the user specified
+# `--force' at the command line.
+func_keyword_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_sed_script="$4"
+    my_msg_var="$5"
+
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    my_keyword_update_p=:
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`$SED -e "$my_sed_script" "$my_srcfile"`
+      test -z "$my_src_serial" && {
+        func_warning "no serial number in \`$my_srcfile', not copying."
+	return
+      }
+
+      my_dest_serial=`$SED -e "$my_sed_script" "$my_destfile"`
+      test -n "$my_dest_serial" || my_dest_serial=0
+
+      func_serial_update_check \
+         "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_keyword_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_keyword_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+    elif $opt_verbose || $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      func_echo_once "$my_msg_var"
+      func_echo "\`$my_destfile' is already up to date."
+    fi
+}
+
+
+# func_ltmain_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_ltmain_update ()
+{
+    $opt_debug
+    my_sed_ltmain='
+	/^package_revision='\''*[0-9][1-9.]*'\''*/ {
+	    s,^package_revision='\''*\([0-9.]*\)'\''*[	 ]*$,\1,
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_ltmain" "$4"
+
+    return $my_return_status
+}
+
+
+# func_config_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_config_update ()
+{
+    $opt_debug
+    my_sed_config='
+	/^timestamp='\''*[0-9][1-9-]*'\''*/ {
+	    s,^timestamp='\''*\([0-9-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_config" "$4"
+
+    return $my_return_status
+}
+
+
+# func_install_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_install_update ()
+{
+    $opt_debug
+    my_sed_install='
+	/^scriptversion='\''*[0-9][1-9.-]*'\''*/ {
+	    s,[#;].*,,
+	    s,^scriptversion='\''*\([0-9.-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_install" "$4"
+
+    return $my_return_status
+}
+
+
+# func_massage_aclocal_DATA [glob_exclude]
+# @aclocal_DATA\@ is substituted as per its value in Makefile.am;
+# this function massages it into a suitable format for func_copy_some_files.
+func_massage_aclocal_DATA ()
+{
+    $opt_debug
+    pkgmacro_files=     # GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgmacro_files from the value used in Makefile.am.
+    for my_filename in m4/argz.m4 m4/libtool.m4 m4/ltdl.m4 m4/ltoptions.m4 m4/ltsugar.m4 m4/ltversion.m4 m4/lt~obsolete.m4; do
+      func_dirname_and_basename "$my_filename"
+      my_filename=$func_basename_result
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      pkgmacro_files="$pkgmacro_files:$my_filename"
+    done
+
+    # strip spurious leading `:'
+    pkgmacro_files=`$ECHO "$pkgmacro_files" | $SED 's,^:*,,'`
+}
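+
+# Illustrative sketch (not part of the upstream script): with the exclusion
+# pattern used by func_install_pkgmacro_files below,
+#   func_massage_aclocal_DATA 'argz.m4|libtool.m4|ltdl.m4'
+# leaves pkgmacro_files=ltoptions.m4:ltsugar.m4:ltversion.m4:lt~obsolete.m4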
+
+
+# func_install_pkgmacro_subproject
+# Unless --quiet was passed, display a message. Then copy pkgmacro_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgmacro_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$subproject_macrodir/$file" && func_verbose "rm -f '$subproject_macrodir/$file'"
+      rm -f "$subproject_macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test "x$macrodir" != "x$subproject_macrodir"; then
+      pkgmacro_header="putting macros in \`$subproject_macrodir'."
+    elif test -n "$subproject_macrodir"; then
+      pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$subproject_macrodir'."
+    fi
+
+    func_copy_some_files "argz.m4:libtool.m4:ltdl.m4:$pkgmacro_files" \
+      "$aclocaldir" "$subproject_macrodir" pkgmacro_header
+}
+
+
+# func_install_pkgmacro_parent
+# Unless --quiet was passed, or AC_CONFIG_MACRO_DIR was not seen, display
+# a message.  Then update appropriate macros if newer ones are available
+# from the libtool installation tree.
+func_install_pkgmacro_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$macrodir/$file" && func_verbose "rm -f '$macrodir/$file'"
+      rm -f "$macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test -n "$ac_macrodir"; then
+      my_pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$ac_macrodir'."
+    elif test -n "$macrodir"; then
+      my_pkgmacro_header="putting macros in \`$macrodir'."
+    fi
+
+    if $opt_ltdl; then
+      func_serial_update argz.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header argz.m4
+    else
+      func_verbose "Not copying \`$macrodir/argz.m4', libltdl not used."
+    fi
+
+    func_serial_update  libtool.m4 "$aclocaldir" "$macrodir" \
+      my_pkgmacro_header LT_INIT 'A[CM]_PROG_LIBTOOL'
+
+    if $opt_ltdl; then
+      func_serial_update ltdl.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header 'LTDL_INIT'
+    else
+      func_verbose "Not copying \`$macrodir/ltdl.m4', libltdl not used."
+    fi
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for file in $pkgmacro_files; do
+      IFS="$my_save_IFS"
+      func_serial_update "$file" "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header "$file"
+    done
+    IFS="$my_save_IFS"
+}
+
+
+# func_install_pkgmacro_files
+# Install copies of the libtool and libltdl m4 macros into this package.
+func_install_pkgmacro_files ()
+{
+    $opt_debug
+
+    # argz.m4, libtool.m4 and ltdl.m4 are handled specially:
+    func_massage_aclocal_DATA 'argz.m4|libtool.m4|ltdl.m4'
+
+  # 1. Parent has separate macrodir to subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test -n "$macrodir" && test "x$macrodir" != "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_parent
+      func_install_pkgmacro_subproject
+
+  # 2. Parent shares macrodir with subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$macrodir" = "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_subproject
+
+  # 3. Not a subproject, but macrodir was specified in parent:
+    elif test -n "$macrodir"; then
+      func_install_pkgmacro_parent
+
+  # 4. AC_CONFIG_MACRO_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_MACRO_DIR not defined, not copying libtool macros."
+    fi
+}
+
+
+# func_massage_pkgltdl_files [glob_exclude]
+# @pkgltdl_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgltdl_files ()
+{
+    $opt_debug
+    pkgltdl_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgltdl_files from the value used in Makefile.am
+    for my_filename in libltdl/COPYING.LIB libltdl/README libltdl/Makefile.inc libltdl/Makefile.am libltdl/configure.ac libltdl/aclocal.m4 libltdl/Makefile.in libltdl/config-h.in libltdl/configure libltdl/argz_.h libltdl/argz.c libltdl/loaders/dld_link.c libltdl/loaders/dlopen.c libltdl/loaders/dyld.c libltdl/loaders/load_add_on.c libltdl/loaders/loadlibrary.c libltdl/loaders/shl_load.c libltdl/lt__dirent.c libltdl/lt__strl.c libltdl/libltdl/lt__alloc.h libltdl/libltdl/lt__dirent.h libltdl/libltdl/lt__glibc.h libltdl/libltdl/lt__private.h libltdl/libltdl/lt__strl.h libltdl/libltdl/lt_dlloader.h libltdl/libltdl/lt_error.h libltdl/libltdl/lt_system.h libltdl/libltdl/slist.h libltdl/loaders/preopen.c libltdl/lt__alloc.c libltdl/lt_dlloader.c libltdl/lt_error.c libltdl/ltdl.c libltdl/ltdl.h libltdl/slist.c; do
+
+      # Strip surplus leading 'libltdl/':
+      my_filename=`expr "X$my_filename" : 'Xlibltdl/\(.*\)'`
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgltdl_files: in
+        *:$my_filename:*) ;;
+	*) pkgltdl_files="$pkgltdl_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgltdl_files=`$ECHO "$pkgltdl_files" | $SED 's,^:*,,'`
+}
+
+
+# func_install_pkgltdl_files
+# Install copies of the libltdl files into this package.  Any auxiliary
+# or m4 macro files needed in the libltdl tree will also be copied by
+# func_install_pkgconfig_files and func_install_pkgmacro_files, respectively.
+func_install_pkgltdl_files ()
+{
+    $opt_debug
+    $opt_ltdl || return
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgltdl_files; do
+      test -f "$ltdldir/$file" && func_verbose "rm -f '$ltdldir/$file'"
+      rm -f "$ltdldir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified `--ltdl'.
+    $opt_quiet || if test -n "$ac_ltdldir"; then
+      pkgltdl_header="putting libltdl files in LT_CONFIG_LTDL_DIR, \`$ac_ltdldir'."
+    elif test -n "$ltdldir"; then
+      pkgltdl_header="putting libltdl files in \`$ltdldir'."
+    fi
+
+    # These files are handled specially, depending on ltdl_mode:
+    if test "x$ltdl_mode" = "xsubproject"; then
+      func_massage_pkgltdl_files 'Makefile.inc'
+    else
+      func_massage_pkgltdl_files 'Makefile.am|Makefile.in*|aclocal.m4|config*'
+    fi
+
+    func_copy_some_files "$pkgltdl_files" \
+      "$pkgltdldir/libltdl" "$ltdldir" pkgltdl_header
+
+    # For recursive ltdl modes, copy a suitable Makefile.{am,inc}:
+    case $ltdl_mode in
+      recursive)
+        func_fixup_Makefile "Makefile.am" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+      nonrecursive)
+        func_fixup_Makefile "Makefile.inc" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+    esac
+}
+
+
+# func_massage_pkgconfig_files [glob_exclude]
+# @pkgconfig_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgconfig_files ()
+{
+    $opt_debug
+    pkgconfig_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgconfig_files from the value used in Makefile.am
+    for my_filename in config/compile config/config.guess config/config.sub config/depcomp config/install-sh config/missing config/ltmain.sh; do
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgconfig_files: in
+        *:$my_filename:*) ;;
+	*) pkgconfig_files="$pkgconfig_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgconfig_files=`$ECHO "$pkgconfig_files" | $SED 's,^:*,,'`
+}
+
+
+# func_install_pkgconfig_subproject
+# Unless --quiet was passed, display a message. Then copy pkgconfig_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgconfig_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$subproject_auxdir/$file" && func_verbose "rm -f '$subproject_auxdir/$file'"
+      rm -f "$subproject_auxdir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified an auxdir.
+    $opt_quiet || if test "x$ac_auxdir" = "x$subproject_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$subproject_auxdir'."
+    elif test -n "$auxdir"; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    func_copy_some_files "$pkgconfig_files" \
+      "$pkgdatadir" "$ltdldir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_parent
+# Unless --quiet was passed, or AC_CONFIG_AUX_DIR was not seen, display a
+# message.  Then update appropriate auxiliary files if newer ones are
+# available from the libtool installation tree.
+func_install_pkgconfig_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$auxdir/$file" && func_verbose "rm -f '$auxdir/$file'"
+      rm -f "$auxdir/$file"
+    done
+
+    if test -n "$ac_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$ac_auxdir'."
+    elif test -n "$auxdir" || test "x$ltdldir" = "x."; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    if $opt_install; then
+      func_config_update config.guess \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_config_update config.sub \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_install_update install-sh \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+    fi
+    func_ltmain_update ltmain.sh \
+      "$pkgdatadir/config" "$auxdir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_files
+# Install copies of the auxiliary files into this package according to
+# whether libltdl is included as a subproject, and whether the parent
+# shares the AC_CONFIG_AUX_DIR setting.
+func_install_pkgconfig_files ()
+{
+    $opt_debug
+    func_massage_pkgconfig_files
+
+  # 1. Parent shares auxdir with subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test "x$ac_auxdir" = "x$subproject_auxdir"
+    then
+      func_install_pkgconfig_subproject
+
+  # 2. Parent has separate auxdir to subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$auxdir" != "x$subproject_auxdir" is implied
+    then
+      if $seen_autoconf; then
+	func_install_pkgconfig_parent
+      fi
+      func_install_pkgconfig_subproject
+
+  # 3. Not subproject, but AC_CONFIG_AUX_DIR was used in parent:
+    elif test -n "$ac_auxdir" || test "x$auxdir" = "x."; then
+      func_install_pkgconfig_parent
+
+  # 4. AC_CONFIG_AUX_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_AUX_DIR not defined, not copying libtool auxiliary files."
+    fi
+}
+
+
+# func_nonemptydir_p dirvar
+# DIRVAR is the name of a variable to evaluate.  Unless DIRVAR names
+# a directory that exists and is non-empty, abort with a diagnostic.
+func_nonemptydir_p ()
+{
+    $opt_debug
+    my_dirvar="$1"
+    my_dir=`eval echo "\\\$$my_dirvar"`
+
+    # Is it a directory at all?
+    test -d "$my_dir" \
+      || func_fatal_error "\$$my_dirvar is not a directory: \`$my_dir'"
+
+    # check that the directory's contents can be ls'ed
+    test -n "`{ cd $my_dir && ls; } 2>/dev/null`" \
+        || func_fatal_error "can not list files: \`$my_dir'"
+}
+
+
+# func_check_macros
+# Sanity check macros from aclocal.m4 against installed versions.
+func_check_macros ()
+{
+    $opt_debug
+    $opt_quiet && return
+    $seen_autoconf || return
+
+    ac_config_macro_dir_advised=false
+
+    if test -n "$ac_macrodir$ltdldir" && test -z "$macrodir"; then
+      my_ac_config_macro_srcdir="$aclocaldir"
+      if $opt_ltdl && test "$macrodir" != "$subproject_macrodir"; then
+	my_ac_config_macro_srcdir="$subproject_macrodir"
+      fi
+
+      my_needed="libtool.m4 ltoptions.m4 ltversion.m4 ltsugar.m4 lt~obsolete.m4"
+      $opt_ltdl && my_needed="$my_needed argz.m4 ltdl.m4"
+
+      if test -f "aclocal.m4"; then
+	for need in $my_needed; do
+	  func_aclocal_update_check $need
+	  $func_aclocal_update_check_result && my_missing="$my_missing $need"
+	done
+      else
+        my_missing="$my_needed"
+      fi
+
+      if test -n "$my_missing"; then
+        func_echo "You should add the contents of the following files to \`aclocal.m4':"
+        for need in $my_missing; do
+	  func_echo "  \`$my_ac_config_macro_srcdir/$need'"
+        done
+
+        if test "$my_ac_config_macro_srcdir" != "$aclocaldir"; then
+          func_echo "or else add \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' to $configure_ac."
+	  ac_config_macro_dir_advised=:
+        fi
+      fi
+    fi
+
+    ## ---------------------------------------------------------- ##
+    ## Since we return early here when --no-warn was given:       ##
+    ## DO NOT PUT ANYTHING BUT UPGRADE ADVICE MESSAGES BELOW HERE ##
+    ## ---------------------------------------------------------- ##
+
+    $opt_warning || return
+
+    $seen_libtool ||
+      func_echo "Remember to add \`LT_INIT' to $configure_ac."
+
+    # Suggest using LTDL_INIT if appropriate:
+    $opt_ltdl && if test x$seen_ltdl != x:; then
+      case $ltdl_mode in
+	subproject) ltdl_init_args=""               ;;
+	*)          ltdl_init_args="([$ltdl_mode])" ;;
+      esac
+      func_echo "Remember to add \`LTDL_INIT$ltdl_init_args' to $configure_ac."
+    fi
+
+    if $opt_ltdl; then
+      # Remind the user to call LT_CONFIG_LTDL_DIR:
+      test -n "$ac_ltdldir" ||
+        func_echo "Remember to add \`LT_CONFIG_LTDL_DIR([$ltdldir])' to \`$configure_ac'."
+
+      # For subproject mode, offer some suggestions for avoiding duplicate
+      # files in a project that uses libltdl:
+      if test "x$ltdl_mode" = "xsubproject"; then
+        test "$subproject_auxdir" = "$auxdir" ||
+          func_echo "Consider using \`AC_CONFIG_AUX_DIR([$subproject_auxdir])' in $configure_ac."
+        $ac_config_macro_dir_advised || test "$subproject_macrodir" = "$macrodir" ||
+          func_echo "Consider using \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' in $configure_ac."
+	ac_config_macro_dir_advised=:
+      fi
+    fi
+
+    # Suggest modern idioms for storing autoconf macros:
+    $ac_config_macro_dir_advised || if test -z "$ac_macrodir" || test x"$macrodir" = x.; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([m4])' to $configure_ac and"
+      func_echo "rerunning $progname, to keep the correct libtool macros in-tree."
+      ac_config_macro_dir_advised=:
+
+    elif test -z "$ac_macrodir$ltdldir"; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([$macrodir])' to $configure_ac,"
+      func_echo "and rerunning $progname and aclocal."
+      ac_config_macro_dir_advised=:
+    fi
+
+    if test -z "$am_macrodir$macrodir"; then
+      func_echo "Consider adding \`-I m4' to ACLOCAL_AMFLAGS in Makefile.am."
+
+    elif test -z "$am_macrodir"; then
+      if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" && test "$subproject_macrodir" != "$macrodir"; then
+	func_echo "Consider adding \`-I $subproject_macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      else
+        func_echo "Consider adding \`-I $macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      fi
+    fi
+
+    # Don't trace for this, we're just checking the user didn't invoke it
+    # directly from configure.ac.
+    $SED 's,dnl .*$,,; s,# .*$,,' "$configure_ac" | grep AC_PROG_RANLIB >/dev/null &&
+      func_echo "\`AC_PROG_RANLIB' is rendered obsolete by \`LT_INIT'"
+
+    # FIXME: Ensure ltmain.sh, libtool.m4 and ltdl.m4 are from the same release
+}
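The reminders emitted above correspond to a conventional Autotools layout.  A minimal, hypothetical sketch of the configure.ac and Makefile.am fragments that would satisfy them (the project name and directory names are placeholders, not taken from this package) might look like this:

dnl configure.ac (illustrative sketch only)
AC_INIT([myproj], [1.0])
AC_CONFIG_AUX_DIR([config])
AC_CONFIG_MACRO_DIR([m4])
AM_INIT_AUTOMAKE
LT_CONFIG_LTDL_DIR([libltdl])
LT_INIT
LTDL_INIT
AC_CONFIG_FILES([Makefile])
AC_OUTPUT

# Makefile.am (illustrative sketch only)
ACLOCAL_AMFLAGS = -I m4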
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+{
+  # Lists of all files libtoolize has ever installed.  These are removed
+  # before installing the latest files when --force was passed to help
+  # ensure a clean upgrade.
+  # Do not remove config.guess or config.sub; we don't install them
+  # without --install, and the project may not be using Automake.
+  all_pkgconfig_files="ltmain.sh"
+  all_pkgmacro_files="argz.m4 libtool.m4 ltdl.m4 ltoptions.m4 ltsugar.m4 ltversion.in ltversion.m4 lt~obsolete.m4"
+  all_pkgltdl_files="COPYING.LIB Makefile Makefile.in Makefile.inc Makefile.am README acinclude.m4 aclocal.m4 argz_.h argz.c config.h.in config-h.in configure configure.ac configure.in libltdl/lt__alloc.h libltdl/lt__dirent.h libltdl/lt__glibc.h libltdl/lt__private.h libltdl/lt__strl.h libltdl/lt_dlloader.h libltdl/lt_error.h libltdl/lt_system.h libltdl/slist.h loaders/dld_link.c loaders/dlopen.c loaders/dyld.c loaders/load_add_on.c loaders/loadlibrary.c loaders/preopen.c loaders/shl_load.c lt__alloc.c lt__dirent.c lt__strl.c lt_dlloader.c lt_error.c ltdl.c ltdl.h slist.c"
+
+  # Locations for important files:
+  prefix=/
+  datadir=//share
+  pkgdatadir=//share/libtool
+  pkgltdldir=//share/libtool
+  aclocaldir=//share/aclocal
+  auxdir=
+  macrodir=
+  configure_ac=configure.in
+
+  seen_autoconf=false
+  seen_libtool=false
+  seen_ltdl=false
+
+  # test EBCDIC or ASCII
+  case `echo X|tr X '\101'` in
+   A) # ASCII based system
+      # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+    SP2NL='tr \040 \012'
+    NL2SP='tr \015\012 \040\040'
+    ;;
+   *) # EBCDIC based system
+    SP2NL='tr \100 \n'
+    NL2SP='tr \r\n \100\100'
+    ;;
+  esac
+
+  # Allow the user to override the master libtoolize repository:
+  if test -n "$_lt_pkgdatadir"; then
+    pkgltdldir="$_lt_pkgdatadir"
+    pkgdatadir="$_lt_pkgdatadir/libltdl"
+    aclocaldir="$_lt_pkgdatadir/libltdl/m4"
+  fi
+  func_nonemptydir_p pkgltdldir
+  func_nonemptydir_p pkgdatadir
+  func_nonemptydir_p aclocaldir
+
+  func_scan_files
+
+  case $ltdldir in
+  .) ltdlprefix= ;;
+  *) ltdlprefix=$ltdldir/ ;;
+  esac
+  subproject_auxdir=${ltdlprefix}config
+  subproject_macrodir=${ltdlprefix}m4
+
+  # :::BE CAREFUL HERE:::
+  # func_check_macros needs to check whether --ltdl was specified when
+  # LTDL_INIT was not seen, so we can't just use one variable for both
+  # conditions, or that check will be impossible.   No need to clutter the
+  # rest of the code with '$opt_ltdl || $seen_ltdl' though, because we CAN
+  # safely set opt_ltdl to true if LTDL_INIT was seen:
+  $seen_ltdl && opt_ltdl=:
+
+  func_install_pkgconfig_files
+  func_install_pkgmacro_files
+  func_install_pkgltdl_files
+
+  func_check_macros
+}
+
+exit $exit_status
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/m4
new file mode 100755
index 0000000..34a6162
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/m4
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/make b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/make
new file mode 100755
index 0000000..35a9586
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/bin/make
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.a b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.a
new file mode 100644
index 0000000..b8c513f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.a
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.la b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.la
new file mode 100755
index 0000000..4864bdc
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.la
@@ -0,0 +1,41 @@
+# libltdl.la - a libtool library file
+# Generated by libtool (GNU libtool) 2.4.2
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='libltdl.so.7'
+
+# Names of this library.
+library_names='libltdl.so.7.3.0 libltdl.so.7 libltdl.so'
+
+# The name of the static archive.
+old_library='libltdl.a'
+
+# Linker flags that can not go in dependency_libs.
+inherited_linker_flags=''
+
+# Libraries that this one depends upon.
+dependency_libs=' -ldl'
+
+# Names of additional weak libraries provided by this library
+weak_library_names=''
+
+# Version information for libltdl.
+current=10
+age=3
+revision=0
+
+# Is this an already installed library?
+installed=yes
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=no
+
+# Files to dlopen/dlpreopen
+dlopen=''
+dlpreopen=''
+
+# Directory that this library needs to be installed in:
+libdir='/i686-pc-linux-gnu/lib'
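For reference, the shared-object names recorded in this .la file follow libtool's usual mapping from the version-info triple on ELF/GNU-Linux hosts: the major number is current minus age, so

  major        = current - age                  = 10 - 3 = 7
  dlname       = libltdl.so.<major>             = libltdl.so.7
  library name = libltdl.so.<major>.<age>.<rev> = libltdl.so.7.3.0

which matches the libltdl.so.7 and libltdl.so symlinks added in the hunks that follow.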
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so
new file mode 120000
index 0000000..2fac5cb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so
@@ -0,0 +1 @@
+libltdl.so.7.3.0
\ No newline at end of file
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so.7 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so.7
new file mode 120000
index 0000000..2fac5cb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so.7
@@ -0,0 +1 @@
+libltdl.so.7.3.0
\ No newline at end of file
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so.7.3.0 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so.7.3.0
new file mode 100755
index 0000000..3ef80f6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/i686-pc-linux-gnu/lib/libltdl.so.7.3.0
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_dlloader.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_dlloader.h
new file mode 100644
index 0000000..589fd0d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_dlloader.h
@@ -0,0 +1,90 @@
+/* lt_dlloader.h -- dynamic library loader interface
+
+   Copyright (C) 2004, 2007, 2008 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#if !defined(LT_DLLOADER_H)
+#define LT_DLLOADER_H 1
+
+#include <libltdl/lt_system.h>
+
+LT_BEGIN_C_DECLS
+
+typedef	void *	lt_dlloader;
+typedef void *	lt_module;
+typedef void *	lt_user_data;
+typedef struct lt__advise *	lt_dladvise;
+
+/* Function pointer types for module loader vtable entries:  */
+typedef lt_module   lt_module_open	(lt_user_data data,
+					 const char *filename,
+					 lt_dladvise advise);
+typedef int	    lt_module_close	(lt_user_data data,
+					 lt_module module);
+typedef void *	    lt_find_sym		(lt_user_data data, lt_module module,
+					 const char *symbolname);
+typedef int	    lt_dlloader_init	(lt_user_data data);
+typedef int	    lt_dlloader_exit	(lt_user_data data);
+
+/* Default priority is LT_DLLOADER_PREPEND if none is explicitly given.  */
+typedef enum {
+  LT_DLLOADER_PREPEND = 0, LT_DLLOADER_APPEND
+} lt_dlloader_priority;
+
+/* This structure defines a module loader, as populated by the get_vtable
+   entry point of each loader.  */
+typedef struct {
+  const char *		name;
+  const char *		sym_prefix;
+  lt_module_open *	module_open;
+  lt_module_close *	module_close;
+  lt_find_sym *		find_sym;
+  lt_dlloader_init *	dlloader_init;
+  lt_dlloader_exit *	dlloader_exit;
+  lt_user_data		dlloader_data;
+  lt_dlloader_priority	priority;
+} lt_dlvtable;
+
+LT_SCOPE int		lt_dlloader_add	   (const lt_dlvtable *vtable);
+LT_SCOPE lt_dlloader	lt_dlloader_next   (const lt_dlloader loader);
+
+LT_SCOPE lt_dlvtable *	lt_dlloader_remove	(const char *name);
+LT_SCOPE const lt_dlvtable *lt_dlloader_find	(const char *name);
+LT_SCOPE const lt_dlvtable *lt_dlloader_get	(lt_dlloader loader);
+
+
+/* Type of a function to get a loader's vtable:  */
+typedef  const lt_dlvtable *lt_get_vtable	(lt_user_data data);
+
+#ifdef LT_DEBUG_LOADERS
+LT_SCOPE void		lt_dlloader_dump	(void);
+#endif
+
+LT_END_C_DECLS
+
+#endif /*!defined(LT_DLLOADER_H)*/
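As a usage note for the vtable declared above: a custom loader fills an lt_dlvtable and registers it with lt_dlloader_add().  The sketch below is a deliberately inert stand-in (the callbacks load nothing, and every name prefixed with demo_ is invented); it is shown only to illustrate how the slots line up with the typedefs in this header, not as part of libltdl.

#include <libltdl/lt_dlloader.h>

/* Stub callbacks matching lt_module_open, lt_module_close and lt_find_sym.  */
static lt_module
demo_open (lt_user_data data, const char *filename, lt_dladvise advise)
{
  (void) data; (void) filename; (void) advise;
  return NULL;                  /* a real loader would return a module handle */
}

static int
demo_close (lt_user_data data, lt_module module)
{
  (void) data; (void) module;
  return 0;
}

static void *
demo_sym (lt_user_data data, lt_module module, const char *symbolname)
{
  (void) data; (void) module; (void) symbolname;
  return NULL;
}

/* Fields in the order declared in lt_dlvtable.  */
static const lt_dlvtable demo_vtable = {
  "demo",               /* name */
  NULL,                 /* sym_prefix */
  demo_open,            /* module_open */
  demo_close,           /* module_close */
  demo_sym,             /* find_sym */
  NULL,                 /* dlloader_init */
  NULL,                 /* dlloader_exit */
  NULL,                 /* dlloader_data */
  LT_DLLOADER_APPEND    /* priority */
};

int
demo_register_loader (void)
{
  return lt_dlloader_add (&demo_vtable);
}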
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_error.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_error.h
new file mode 100644
index 0000000..e789b3a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_error.h
@@ -0,0 +1,85 @@
+/* lt_error.h -- error propagation interface
+
+   Copyright (C) 1999, 2000, 2001, 2004, 2007 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1999
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+/* Only include this header file once. */
+#if !defined(LT_ERROR_H)
+#define LT_ERROR_H 1
+
+#include <libltdl/lt_system.h>
+
+LT_BEGIN_C_DECLS
+
+/* Defining error strings alongside their symbolic names in a macro in
+   this way allows us to expand the macro in different contexts with
+   confidence that the enumeration of symbolic names will map correctly
+   onto the table of error strings.  \0 is appended to the strings to
+   explicitly initialize the string terminator. */
+#define lt_dlerror_table						\
+    LT_ERROR(UNKNOWN,		    "unknown error\0")			\
+    LT_ERROR(DLOPEN_NOT_SUPPORTED,  "dlopen support not available\0")	\
+    LT_ERROR(INVALID_LOADER,	    "invalid loader\0")			\
+    LT_ERROR(INIT_LOADER,	    "loader initialization failed\0")	\
+    LT_ERROR(REMOVE_LOADER,	    "loader removal failed\0")		\
+    LT_ERROR(FILE_NOT_FOUND,	    "file not found\0")			\
+    LT_ERROR(DEPLIB_NOT_FOUND,	    "dependency library not found\0")	\
+    LT_ERROR(NO_SYMBOLS,	    "no symbols defined\0")		\
+    LT_ERROR(CANNOT_OPEN,	    "can't open the module\0")		\
+    LT_ERROR(CANNOT_CLOSE,	    "can't close the module\0")		\
+    LT_ERROR(SYMBOL_NOT_FOUND,	    "symbol not found\0")		\
+    LT_ERROR(NO_MEMORY,		    "not enough memory\0")		\
+    LT_ERROR(INVALID_HANDLE,	    "invalid module handle\0")		\
+    LT_ERROR(BUFFER_OVERFLOW,	    "internal buffer overflow\0")	\
+    LT_ERROR(INVALID_ERRORCODE,	    "invalid errorcode\0")		\
+    LT_ERROR(SHUTDOWN,		    "library already shutdown\0")	\
+    LT_ERROR(CLOSE_RESIDENT_MODULE, "can't close resident module\0")	\
+    LT_ERROR(INVALID_MUTEX_ARGS,    "internal error (code withdrawn)\0")\
+    LT_ERROR(INVALID_POSITION,	    "invalid search path insert position\0")\
+    LT_ERROR(CONFLICTING_FLAGS,	    "symbol visibility can be global or local\0")
+
+/* Enumerate the symbolic error names. */
+enum {
+#define LT_ERROR(name, diagnostic)	LT_CONC(LT_ERROR_, name),
+	lt_dlerror_table
+#undef LT_ERROR
+
+	LT_ERROR_MAX
+};
+
+/* Should be max of the error string lengths above (plus one for C++) */
+#define LT_ERROR_LEN_MAX (41)
+
+/* These functions are only useful from inside custom module loaders. */
+LT_SCOPE int	lt_dladderror	(const char *diagnostic);
+LT_SCOPE int	lt_dlseterror	(int errorcode);
+
+
+LT_END_C_DECLS
+
+#endif /*!defined(LT_ERROR_H)*/
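The X-macro pattern described in the comment above can also be expanded into a parallel table of diagnostic strings.  The fragment below is a sketch of that complementary expansion (roughly what libltdl does internally); the demo_ name is not part of the installed header.

#include <libltdl/lt_error.h>

static const char *demo_error_strings[] =
  {
#define LT_ERROR(name, diagnostic) diagnostic,
    lt_dlerror_table
#undef LT_ERROR
  };
/* demo_error_strings[LT_ERROR_UNKNOWN] points at the "unknown error"
   diagnostic, and the enum constant LT_ERROR_MAX gives the entry count.  */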
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_system.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_system.h
new file mode 100644
index 0000000..f1545ce
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/libltdl/lt_system.h
@@ -0,0 +1,166 @@
+/* lt_system.h -- system portability abstraction layer
+
+   Copyright (C) 2004, 2007, 2010 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#if !defined(LT_SYSTEM_H)
+#define LT_SYSTEM_H 1
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <sys/types.h>
+
+/* Some systems do not define EXIT_*, even with STDC_HEADERS.  */
+#if !defined(EXIT_SUCCESS)
+# define EXIT_SUCCESS 0
+#endif
+#if !defined(EXIT_FAILURE)
+# define EXIT_FAILURE 1
+#endif
+
+/* Just pick a big number... */
+#define LT_FILENAME_MAX 2048
+
+
+/* Saves on those hard to debug '\0' typos....  */
+#define LT_EOS_CHAR	'\0'
+
+/* LT_BEGIN_C_DECLS should be used at the beginning of your declarations,
+   so that C++ compilers don't mangle their names.  Use LT_END_C_DECLS at
+   the end of C declarations. */
+#if defined(__cplusplus)
+# define LT_BEGIN_C_DECLS	extern "C" {
+# define LT_END_C_DECLS		}
+#else
+# define LT_BEGIN_C_DECLS	/* empty */
+# define LT_END_C_DECLS		/* empty */
+#endif
+
+/* LT_STMT_START/END are used to create macros which expand to a
+   single compound statement in a portable way.  */
+#if defined (__GNUC__) && !defined (__STRICT_ANSI__) && !defined (__cplusplus)
+#  define LT_STMT_START        (void)(
+#  define LT_STMT_END          )
+#else
+#  if (defined (sun) || defined (__sun__))
+#    define LT_STMT_START      if (1)
+#    define LT_STMT_END        else (void)0
+#  else
+#    define LT_STMT_START      do
+#    define LT_STMT_END        while (0)
+#  endif
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* Canonicalise Windows and Cygwin recognition macros.
+   To match the values set by recent Cygwin compilers, make sure that if
+   __CYGWIN__ is defined (after canonicalisation), __WINDOWS__ is NOT!  */
+#if defined(__CYGWIN32__) && !defined(__CYGWIN__)
+# define __CYGWIN__ __CYGWIN32__
+#endif
+#if defined(__CYGWIN__)
+# if defined(__WINDOWS__)
+#   undef __WINDOWS__
+# endif
+#elif defined(_WIN32)
+# define __WINDOWS__ _WIN32
+#elif defined(WIN32)
+# define __WINDOWS__ WIN32
+#endif
+#if defined(__CYGWIN__) && defined(__WINDOWS__)
+# undef __WINDOWS__
+#endif
+
+
+/* DLL building support on win32 hosts;  mostly to workaround their
+   ridiculous implementation of data symbol exporting. */
+#if !defined(LT_SCOPE)
+#  if defined(__WINDOWS__) || defined(__CYGWIN__)
+#    if defined(DLL_EXPORT)		/* defined by libtool (if required) */
+#      define LT_SCOPE	extern __declspec(dllexport)
+#    endif
+#    if defined(LIBLTDL_DLL_IMPORT)	/* define if linking with this dll */
+       /* note: cygwin/mingw compilers can rely instead on auto-import */
+#      define LT_SCOPE	extern __declspec(dllimport)
+#    endif
+#  endif
+#  if !defined(LT_SCOPE)		/* static linking or !__WINDOWS__ */
+#    define LT_SCOPE	extern
+#  endif
+#endif
+
+#if defined(__WINDOWS__)
+/* LT_DIRSEP_CHAR is accepted *in addition* to '/' as a directory
+   separator when it is set. */
+# define LT_DIRSEP_CHAR		'\\'
+# define LT_PATHSEP_CHAR	';'
+#else
+# define LT_PATHSEP_CHAR	':'
+#endif
+
+#if defined(_MSC_VER) /* Visual Studio */
+#  define R_OK 4
+#endif
+
+/* fopen() mode flags for reading a text file */
+#undef	LT_READTEXT_MODE
+#if defined(__WINDOWS__) || defined(__CYGWIN__)
+#  define LT_READTEXT_MODE "rt"
+#else
+#  define LT_READTEXT_MODE "r"
+#endif
+
+/* The extra indirection to the LT__STR and LT__CONC macros is required so
+   that if the arguments to LT_STR() (or LT_CONC()) are themselves macros,
+   they will be expanded before being quoted.   */
+#ifndef LT_STR
+#  define LT__STR(arg)		#arg
+#  define LT_STR(arg)		LT__STR(arg)
+#endif
+
+#ifndef LT_CONC
+#  define LT__CONC(a, b)	a##b
+#  define LT_CONC(a, b)		LT__CONC(a, b)
+#endif
+#ifndef LT_CONC3
+#  define LT__CONC3(a, b, c)	a##b##c
+#  define LT_CONC3(a, b, c)	LT__CONC3(a, b, c)
+#endif
+
+#endif /*!defined(LT_SYSTEM_H)*/
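A small, hypothetical illustration of the statement-wrapper and stringize/paste helpers defined above; none of the demo_/lt_demo_ names exist in libltdl.

#include <stdio.h>
#include <libltdl/lt_system.h>

/* LT_STMT_START/LT_STMT_END make a multi-statement macro behave as one
   statement, even as the unbraced branch of an if/else.  */
#define DEMO_SWAP_INT(a, b)                               \
  LT_STMT_START {                                         \
    int lt_demo_tmp = (a); (a) = (b); (b) = lt_demo_tmp;  \
  } LT_STMT_END

/* LT_STR expands its argument before stringizing; LT_CONC pastes tokens
   after expansion.  */
#define DEMO_MAJOR  2
#define DEMO_PREFIX lt_demo_
static const char *demo_major = LT_STR (DEMO_MAJOR);      /* "2", not "DEMO_MAJOR" */
static int LT_CONC (DEMO_PREFIX, count) = 0;              /* declares lt_demo_count */

static void
demo_swap (void)
{
  int x = 3, y = 5;
  if (x < y)
    DEMO_SWAP_INT (x, y);       /* safe without braces around the branch */
  else
    lt_demo_count++;
  printf ("%d %d %s\n", x, y, demo_major);
}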
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/ltdl.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/ltdl.h
new file mode 100644
index 0000000..749a54d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/include/ltdl.h
@@ -0,0 +1,163 @@
+/* ltdl.h -- generic dlopen functions
+
+   Copyright (C) 1998-2000, 2004, 2005,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+/* Only include this header file once. */
+#if !defined(LTDL_H)
+#define LTDL_H 1
+
+#include <libltdl/lt_system.h>
+#include <libltdl/lt_error.h>
+#include <libltdl/lt_dlloader.h>
+
+LT_BEGIN_C_DECLS
+
+
+/* LT_STRLEN can be used safely on NULL pointers.  */
+#define LT_STRLEN(s)	(((s) && (s)[0]) ? strlen (s) : 0)
+
+
+/* --- DYNAMIC MODULE LOADING API --- */
+
+
+typedef	struct lt__handle *lt_dlhandle;	/* A loaded module.  */
+
+/* Initialisation and finalisation functions for libltdl. */
+LT_SCOPE int	    lt_dlinit		(void);
+LT_SCOPE int	    lt_dlexit		(void);
+
+/* Module search path manipulation.  */
+LT_SCOPE int	    lt_dladdsearchdir	 (const char *search_dir);
+LT_SCOPE int	    lt_dlinsertsearchdir (const char *before,
+						  const char *search_dir);
+LT_SCOPE int 	    lt_dlsetsearchpath	 (const char *search_path);
+LT_SCOPE const char *lt_dlgetsearchpath	 (void);
+LT_SCOPE int	    lt_dlforeachfile	 (
+			const char *search_path,
+			int (*func) (const char *filename, void *data),
+			void *data);
+
+/* User module loading advisors.  */
+LT_SCOPE int	    lt_dladvise_init	 (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_destroy  (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_ext	 (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_resident (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_local	 (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_global   (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_preload	 (lt_dladvise *advise);
+
+/* Portable libltdl versions of the system dlopen() API. */
+LT_SCOPE lt_dlhandle lt_dlopen		(const char *filename);
+LT_SCOPE lt_dlhandle lt_dlopenext	(const char *filename);
+LT_SCOPE lt_dlhandle lt_dlopenadvise	(const char *filename,
+					 lt_dladvise advise);
+LT_SCOPE void *	    lt_dlsym		(lt_dlhandle handle, const char *name);
+LT_SCOPE const char *lt_dlerror		(void);
+LT_SCOPE int	    lt_dlclose		(lt_dlhandle handle);
+
+
+
+/* --- PRELOADED MODULE SUPPORT --- */
+
+
+/* A preopened symbol. Arrays of this type comprise the exported
+   symbols for a dlpreopened module. */
+typedef struct {
+  const char *name;
+  void       *address;
+} lt_dlsymlist;
+
+typedef int lt_dlpreload_callback_func (lt_dlhandle handle);
+
+LT_SCOPE int	lt_dlpreload	     (const lt_dlsymlist *preloaded);
+LT_SCOPE int	lt_dlpreload_default (const lt_dlsymlist *preloaded);
+LT_SCOPE int	lt_dlpreload_open    (const char *originator,
+				      lt_dlpreload_callback_func *func);
+
+#define lt_preloaded_symbols	lt__PROGRAM__LTX_preloaded_symbols
+/* Ensure C linkage.  */
+extern LT_DLSYM_CONST lt_dlsymlist lt__PROGRAM__LTX_preloaded_symbols[];
+
+#define LTDL_SET_PRELOADED_SYMBOLS() \
+	lt_dlpreload_default(lt_preloaded_symbols)
+
+
+
+
+/* --- MODULE INFORMATION --- */
+
+
+/* Associating user data with loaded modules. */
+typedef void * lt_dlinterface_id;
+typedef int lt_dlhandle_interface (lt_dlhandle handle, const char *id_string);
+
+LT_SCOPE lt_dlinterface_id lt_dlinterface_register (const char *id_string,
+					  lt_dlhandle_interface *iface);
+LT_SCOPE void	lt_dlinterface_free (lt_dlinterface_id key);
+LT_SCOPE void *	lt_dlcaller_set_data  (lt_dlinterface_id key,
+					  lt_dlhandle handle, void *data);
+LT_SCOPE void *	lt_dlcaller_get_data  (lt_dlinterface_id key,
+					  lt_dlhandle handle);
+
+
+/* Read only information pertaining to a loaded module. */
+typedef	struct {
+  char *	filename;	/* file name */
+  char *	name;		/* module name */
+  int		ref_count;	/* number of times lt_dlopened minus
+				   number of times lt_dlclosed. */
+  unsigned int	is_resident:1;	/* module can't be unloaded. */
+  unsigned int	is_symglobal:1;	/* module symbols can satisfy
+				   subsequently loaded modules.  */
+  unsigned int	is_symlocal:1;	/* module symbols are only available
+				   locally. */
+} lt_dlinfo;
+
+LT_SCOPE const lt_dlinfo *lt_dlgetinfo	    (lt_dlhandle handle);
+
+LT_SCOPE lt_dlhandle	lt_dlhandle_iterate (lt_dlinterface_id iface,
+					     lt_dlhandle place);
+LT_SCOPE lt_dlhandle	lt_dlhandle_fetch   (lt_dlinterface_id iface,
+					     const char *module_name);
+LT_SCOPE int		lt_dlhandle_map	    (lt_dlinterface_id iface,
+				int (*func) (lt_dlhandle handle, void *data),
+				void *data);
+
+
+
+/* Deprecated module residency management API. */
+LT_SCOPE int	    lt_dlmakeresident	(lt_dlhandle handle);
+LT_SCOPE int	    lt_dlisresident	(lt_dlhandle handle);
+
+#define lt_ptr void *
+
+LT_END_C_DECLS
+
+#endif /*!defined(LTDL_H)*/
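Putting the dynamic-loading API above together, a minimal host program might look like the sketch below.  The search directory, module name, and entry-point symbol are placeholders chosen for illustration only.

#include <stdio.h>
#include <ltdl.h>

int
main (void)
{
  lt_dlhandle handle;
  void (*entry) (void);

  if (lt_dlinit () != 0)        /* returns the number of errors */
    return 1;

  /* Placeholder plugin directory.  */
  lt_dladdsearchdir ("/usr/local/lib/demo-plugins");

  handle = lt_dlopenext ("demo-plugin");   /* tries .la, .so, etc. */
  if (!handle)
    fprintf (stderr, "lt_dlopenext: %s\n", lt_dlerror ());
  else
    {
      entry = (void (*) (void)) lt_dlsym (handle, "demo_plugin_entry");
      if (entry)
        entry ();
      else
        fprintf (stderr, "lt_dlsym: %s\n", lt_dlerror ());
      lt_dlclose (handle);
    }

  return lt_dlexit ();
}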
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/amversion.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/amversion.m4
new file mode 100644
index 0000000..0454321
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/amversion.m4
@@ -0,0 +1,37 @@
+##                                                          -*- Autoconf -*-
+## Generated from amversion.in; do not edit by hand.
+# Copyright (C) 2002-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_AUTOMAKE_VERSION(VERSION)
+# ----------------------------
+# Automake X.Y traces this macro to ensure aclocal.m4 has been
+# generated from the m4 files accompanying Automake X.Y.
+# (This private macro should not be called outside this file.)
+AC_DEFUN([AM_AUTOMAKE_VERSION],
+[am__api_version='1.14'
+dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
+dnl require some minimum version.  Point them to the right macro.
+m4_if([$1], [1.14.1], [],
+      [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
+])
+
+# _AM_AUTOCONF_VERSION(VERSION)
+# -----------------------------
+# aclocal traces this macro to find the Autoconf version.
+# This is a private macro too.  Using m4_define simplifies
+# the logic in aclocal, which can simply ignore this definition.
+m4_define([_AM_AUTOCONF_VERSION], [])
+
+# AM_SET_CURRENT_AUTOMAKE_VERSION
+# -------------------------------
+# Call AM_AUTOMAKE_VERSION and _AM_AUTOCONF_VERSION so they can be traced.
+# This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
+AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
+[AM_AUTOMAKE_VERSION([1.14.1])dnl
+m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/ar-lib.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/ar-lib.m4
new file mode 100644
index 0000000..58726d0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/ar-lib.m4
@@ -0,0 +1,60 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2011-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_AR([ACT-IF-FAIL])
+# -------------------------
+# Try to determine the archiver interface, and trigger the ar-lib wrapper
+# if it is needed.  If detection of the archiver interface fails, run
+# ACT-IF-FAIL (default is to abort configure with a proper error message).
+AC_DEFUN([AM_PROG_AR],
+[AC_BEFORE([$0], [LT_INIT])dnl
+AC_BEFORE([$0], [AC_PROG_LIBTOOL])dnl
+AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+AC_REQUIRE_AUX_FILE([ar-lib])dnl
+AC_CHECK_TOOLS([AR], [ar lib "link -lib"], [false])
+: ${AR=ar}
+
+AC_CACHE_CHECK([the archiver ($AR) interface], [am_cv_ar_interface],
+  [AC_LANG_PUSH([C])
+   am_cv_ar_interface=ar
+   AC_COMPILE_IFELSE([AC_LANG_SOURCE([[int some_variable = 0;]])],
+     [am_ar_try='$AR cru libconftest.a conftest.$ac_objext >&AS_MESSAGE_LOG_FD'
+      AC_TRY_EVAL([am_ar_try])
+      if test "$ac_status" -eq 0; then
+        am_cv_ar_interface=ar
+      else
+        am_ar_try='$AR -NOLOGO -OUT:conftest.lib conftest.$ac_objext >&AS_MESSAGE_LOG_FD'
+        AC_TRY_EVAL([am_ar_try])
+        if test "$ac_status" -eq 0; then
+          am_cv_ar_interface=lib
+        else
+          am_cv_ar_interface=unknown
+        fi
+      fi
+      rm -f conftest.lib libconftest.a
+     ])
+   AC_LANG_POP([C])])
+
+case $am_cv_ar_interface in
+ar)
+  ;;
+lib)
+  # Microsoft lib, so override with the ar-lib wrapper script.
+  # FIXME: It is wrong to rewrite AR.
+  # But if we don't then we get into trouble of one sort or another.
+  # A longer-term fix would be to have automake use am__AR in this case,
+  # and then we could set am__AR="$am_aux_dir/ar-lib \$(AR)" or something
+  # similar.
+  AR="$am_aux_dir/ar-lib $AR"
+  ;;
+unknown)
+  m4_default([$1],
+             [AC_MSG_ERROR([could not determine $AR interface])])
+  ;;
+esac
+AC_SUBST([AR])dnl
+])
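Illustrative configure.ac fragment only (the project name is a placeholder): because of the AC_BEFORE checks above, AM_PROG_AR has to appear before LT_INIT (or AC_PROG_LIBTOOL) so that libtool sees the possibly wrapped archiver.

AC_INIT([myproj], [1.0])
AC_CONFIG_AUX_DIR([config])
AM_INIT_AUTOMAKE
AC_PROG_CC
AM_PROG_AR
LT_INIT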
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/as.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/as.m4
new file mode 100644
index 0000000..46695dd
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/as.m4
@@ -0,0 +1,19 @@
+# Figure out how to run the assembler.                      -*- Autoconf -*-
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_AS
+# ----------
+AC_DEFUN([AM_PROG_AS],
+[# By default we simply use the C compiler to build assembly code.
+AC_REQUIRE([AC_PROG_CC])
+test "${CCAS+set}" = set || CCAS=$CC
+test "${CCASFLAGS+set}" = set || CCASFLAGS=$CFLAGS
+AC_ARG_VAR([CCAS],      [assembler compiler command (defaults to CC)])
+AC_ARG_VAR([CCASFLAGS], [assembler compiler flags (defaults to CFLAGS)])
+_AM_IF_OPTION([no-dependencies],, [_AM_DEPENDENCIES([CCAS])])dnl
+])
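A hypothetical fragment showing the intended use: a project with assembly sources invokes AM_PROG_AS after the C compiler check (since CCAS defaults to $CC) and then lists .S files as ordinary sources; the library name here is invented.

dnl configure.ac
AC_PROG_CC
AM_PROG_AS

# Makefile.am
lib_LTLIBRARIES    = libdemo.la
libdemo_la_SOURCES = start.S demo.c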
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/auxdir.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/auxdir.m4
new file mode 100644
index 0000000..a34f38e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/auxdir.m4
@@ -0,0 +1,52 @@
+# AM_AUX_DIR_EXPAND                                         -*- Autoconf -*-
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets
+# $ac_aux_dir to '$srcdir/foo'.  In other projects, it is set to
+# '$srcdir', '$srcdir/..', or '$srcdir/../..'.
+#
+# Of course, Automake must honor this variable whenever it calls a
+# tool from the auxiliary directory.  The problem is that $srcdir (and
+# therefore $ac_aux_dir as well) can be either absolute or relative,
+# depending on how configure is run.  This is pretty annoying, since
+# it makes $ac_aux_dir quite unusable in subdirectories: in the top
+# source directory, any form will work fine, but in subdirectories a
+# relative path needs to be adjusted first.
+#
+# $ac_aux_dir/missing
+#    fails when called from a subdirectory if $ac_aux_dir is relative
+# $top_srcdir/$ac_aux_dir/missing
+#    fails if $ac_aux_dir is absolute,
+#    fails when called from a subdirectory in a VPATH build with
+#          a relative $ac_aux_dir
+#
+# The reason for the latter failure is that $top_srcdir and $ac_aux_dir
+# are both prefixed by $srcdir.  In an in-source build this is usually
+# harmless because $srcdir is '.', but things will break when you
+# start a VPATH build or use an absolute $srcdir.
+#
+# So we could use something similar to $top_srcdir/$ac_aux_dir/missing,
+# iff we strip the leading $srcdir from $ac_aux_dir.  That would be:
+#   am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"`
+# and then we would define $MISSING as
+#   MISSING="\${SHELL} $am_aux_dir/missing"
+# This will work as long as MISSING is not called from configure, because
+# unfortunately $(top_srcdir) has no meaning in configure.
+# However there are other variables, like CC, which are often used in
+# configure, and could therefore not use this "fixed" $ac_aux_dir.
+#
+# Another solution, used here, is to always expand $ac_aux_dir to an
+# absolute PATH.  The drawback is that using absolute paths prevents a
+# configured tree from being moved without reconfiguration.
+
+AC_DEFUN([AM_AUX_DIR_EXPAND],
+[dnl Rely on autoconf to set up CDPATH properly.
+AC_PREREQ([2.50])dnl
+# expand $ac_aux_dir to an absolute path
+am_aux_dir=`cd $ac_aux_dir && pwd`
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/cond-if.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/cond-if.m4
new file mode 100644
index 0000000..b6b372d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/cond-if.m4
@@ -0,0 +1,36 @@
+# AM_COND_IF                                            -*- Autoconf -*-
+
+# Copyright (C) 2008-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_COND_IF
+# _AM_COND_ELSE
+# _AM_COND_ENDIF
+# --------------
+# These macros are only used for tracing.
+m4_define([_AM_COND_IF])
+m4_define([_AM_COND_ELSE])
+m4_define([_AM_COND_ENDIF])
+
+# AM_COND_IF(COND, [IF-TRUE], [IF-FALSE])
+# ---------------------------------------
+# If the shell condition COND is true, execute IF-TRUE, otherwise execute
+# IF-FALSE.  Allow automake to learn about conditional instantiating macros
+# (the AC_CONFIG_FOOS).
+AC_DEFUN([AM_COND_IF],
+[m4_ifndef([_AM_COND_VALUE_$1],
+	   [m4_fatal([$0: no such condition "$1"])])dnl
+_AM_COND_IF([$1])dnl
+if test -z "$$1_TRUE"; then :
+  m4_n([$2])[]dnl
+m4_ifval([$3],
+[_AM_COND_ELSE([$1])dnl
+else
+  $3
+])dnl
+_AM_COND_ENDIF([$1])dnl
+fi[]dnl
+])
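A hypothetical configure.ac fragment: given a conditional defined with AM_CONDITIONAL (see the next file), AM_COND_IF lets configure-time actions, including AC_CONFIG_FILES, depend on it while keeping automake informed.  The ENABLE_DOCS name and doc/Makefile path are placeholders.

AM_CONDITIONAL([ENABLE_DOCS], [test "x$enable_docs" = xyes])
AM_COND_IF([ENABLE_DOCS],
  [AC_CONFIG_FILES([doc/Makefile])],
  [AC_MSG_NOTICE([documentation disabled])])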
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/cond.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/cond.m4
new file mode 100644
index 0000000..2f91e05
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/cond.m4
@@ -0,0 +1,32 @@
+# AM_CONDITIONAL                                            -*- Autoconf -*-
+
+# Copyright (C) 1997-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_CONDITIONAL(NAME, SHELL-CONDITION)
+# -------------------------------------
+# Define a conditional.
+AC_DEFUN([AM_CONDITIONAL],
+[AC_PREREQ([2.52])dnl
+ m4_if([$1], [TRUE],  [AC_FATAL([$0: invalid condition: $1])],
+       [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl
+AC_SUBST([$1_TRUE])dnl
+AC_SUBST([$1_FALSE])dnl
+_AM_SUBST_NOTMAKE([$1_TRUE])dnl
+_AM_SUBST_NOTMAKE([$1_FALSE])dnl
+m4_define([_AM_COND_VALUE_$1], [$2])dnl
+if $2; then
+  $1_TRUE=
+  $1_FALSE='#'
+else
+  $1_TRUE='#'
+  $1_FALSE=
+fi
+AC_CONFIG_COMMANDS_PRE(
+[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then
+  AC_MSG_ERROR([[conditional "$1" was never defined.
+Usually this means the macro was only invoked conditionally.]])
+fi])])
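A hypothetical end-to-end use of AM_CONDITIONAL: the shell condition is evaluated at configure time, and the resulting DEBUG_TRUE/DEBUG_FALSE substitutions drive an automake conditional in Makefile.am.  The option and macro names below are placeholders.

dnl configure.ac
AC_ARG_ENABLE([debug],
  [AS_HELP_STRING([--enable-debug], [build with extra debug code])])
AM_CONDITIONAL([DEBUG], [test "x$enable_debug" = xyes])

# Makefile.am
if DEBUG
AM_CPPFLAGS = -DDEMO_DEBUG=1
endif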
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/depend.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/depend.m4
new file mode 100644
index 0000000..a34cd5c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/depend.m4
@@ -0,0 +1,189 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+
+# There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be
+# written in clear, in which case automake, when reading aclocal.m4,
+# will think it sees a *use*, and therefore will trigger all its
+# C support machinery.  Also note that it means that autoscan, seeing
+# CC etc. in the Makefile, will ask for an AC_PROG_CC use...
+
+
+# _AM_DEPENDENCIES(NAME)
+# ----------------------
+# See how the compiler implements dependency checking.
+# NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC".
+# We try a few techniques and use that to set a single cache variable.
+#
+# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was
+# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular
+# dependency, and given that the user is not expected to run this macro,
+# just rely on AC_PROG_CC.
+AC_DEFUN([_AM_DEPENDENCIES],
+[AC_REQUIRE([AM_SET_DEPDIR])dnl
+AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl
+AC_REQUIRE([AM_MAKE_INCLUDE])dnl
+AC_REQUIRE([AM_DEP_TRACK])dnl
+
+m4_if([$1], [CC],   [depcc="$CC"   am_compiler_list=],
+      [$1], [CXX],  [depcc="$CXX"  am_compiler_list=],
+      [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'],
+      [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'],
+      [$1], [UPC],  [depcc="$UPC"  am_compiler_list=],
+      [$1], [GCJ],  [depcc="$GCJ"  am_compiler_list='gcc3 gcc'],
+                    [depcc="$$1"   am_compiler_list=])
+
+AC_CACHE_CHECK([dependency style of $depcc],
+               [am_cv_$1_dependencies_compiler_type],
+[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named 'D' -- because '-MD' means "put the output
+  # in D".
+  rm -rf conftest.dir
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_$1_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp`
+  fi
+  am__universal=false
+  m4_case([$1], [CC],
+    [case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac],
+    [CXX],
+    [case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac])
+
+  for depmode in $am_compiler_list; do
+    # Setup a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with
+      # Solaris 10 /bin/sh.
+      echo '/* dummy */' > sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    # We check with '-c' and '-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle '-M -o', and we need to detect this.  Also, some Intel
+    # versions had trouble with output in subdirs.
+    am__obj=sub/conftest.${OBJEXT-o}
+    am__minus_obj="-o $am__obj"
+    case $depmode in
+    gcc)
+      # This depmode causes a compiler race in universal mode.
+      test "$am__universal" = false || continue
+      ;;
+    nosideeffect)
+      # After this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested.
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    msvc7 | msvc7msys | msvisualcpp | msvcmsys)
+      # This compiler won't grok '-c -o', but also, the minuso test has
+      # not run yet.  These depmodes are late enough in the game, and
+      # so weak that their functioning should not be impacted.
+      am__obj=conftest.${OBJEXT-o}
+      am__minus_obj=
+      ;;
+    none) break ;;
+    esac
+    if depmode=$depmode \
+       source=sub/conftest.c object=$am__obj \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options, it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_$1_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_$1_dependencies_compiler_type=none
+fi
+])
+AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type])
+AM_CONDITIONAL([am__fastdep$1], [
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_$1_dependencies_compiler_type" = gcc3])
+])
+
+
+# AM_SET_DEPDIR
+# -------------
+# Choose a directory name for dependency files.
+# This macro is AC_REQUIREd in _AM_DEPENDENCIES.
+AC_DEFUN([AM_SET_DEPDIR],
+[AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl
+])
+
+
+# AM_DEP_TRACK
+# ------------
+AC_DEFUN([AM_DEP_TRACK],
+[AC_ARG_ENABLE([dependency-tracking], [dnl
+AS_HELP_STRING(
+  [--enable-dependency-tracking],
+  [do not reject slow dependency extractors])
+AS_HELP_STRING(
+  [--disable-dependency-tracking],
+  [speeds up one-time build])])
+if test "x$enable_dependency_tracking" != xno; then
+  am_depcomp="$ac_aux_dir/depcomp"
+  AMDEPBACKSLASH='\'
+  am__nodep='_no'
+fi
+AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno])
+AC_SUBST([AMDEPBACKSLASH])dnl
+_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl
+AC_SUBST([am__nodep])dnl
+_AM_SUBST_NOTMAKE([am__nodep])dnl
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/depout.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/depout.m4
new file mode 100644
index 0000000..c79d04b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/depout.m4
@@ -0,0 +1,75 @@
+# Generate code to set up dependency tracking.              -*- Autoconf -*-
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+
+# _AM_OUTPUT_DEPENDENCY_COMMANDS
+# ------------------------------
+AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
+[{
+  # Older Autoconf quotes --file arguments for eval, but not when files
+  # are listed without --file.  Let's play safe and only enable the eval
+  # if we detect the quoting.
+  case $CONFIG_FILES in
+  *\'*) eval set x "$CONFIG_FILES" ;;
+  *)   set x $CONFIG_FILES ;;
+  esac
+  shift
+  for mf
+  do
+    # Strip MF so we end up with the name of the file.
+    mf=`echo "$mf" | sed -e 's/:.*$//'`
+    # Check whether this is an Automake generated Makefile or not.
+    # We used to match only the files named 'Makefile.in', but
+    # some people rename them; so instead we look at the file content.
+    # Grep'ing the first line is not enough: some people post-process
+    # each Makefile.in and add a new line on top of each file to say so.
+    # Grep'ing the whole file is not good either: AIX grep has a line
+    # limit of 2048, but all sed's we know understand at least 4000.
+    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
+      dirpart=`AS_DIRNAME("$mf")`
+    else
+      continue
+    fi
+    # Extract the definition of DEPDIR, am__include, and am__quote
+    # from the Makefile without running 'make'.
+    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+    test -z "$DEPDIR" && continue
+    am__include=`sed -n 's/^am__include = //p' < "$mf"`
+    test -z "$am__include" && continue
+    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+    # Find all dependency output files, they are included files with
+    # $(DEPDIR) in their names.  We invoke sed twice because it is the
+    # simplest approach to changing $(DEPDIR) to its actual value in the
+    # expansion.
+    for file in `sed -n "
+      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
+      # Make sure the directory exists.
+      test -f "$dirpart/$file" && continue
+      fdir=`AS_DIRNAME(["$file"])`
+      AS_MKDIR_P([$dirpart/$fdir])
+      # echo "creating $dirpart/$file"
+      echo '# dummy' > "$dirpart/$file"
+    done
+  done
+}
+])# _AM_OUTPUT_DEPENDENCY_COMMANDS
+
+
+# AM_OUTPUT_DEPENDENCY_COMMANDS
+# -----------------------------
+# This macro should only be invoked once -- use via AC_REQUIRE.
+#
+# This code is only required when automatic dependency tracking
+# is enabled.  FIXME.  This creates each '.P' file that we will
+# need in order to bootstrap the dependency handling code.
+AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
+[AC_CONFIG_COMMANDS([depfiles],
+     [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
+     [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/dmalloc.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/dmalloc.m4
new file mode 100644
index 0000000..d8819c9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/dmalloc.m4
@@ -0,0 +1,26 @@
+## ----------------------------------- ##                   -*- Autoconf -*-
+## Check if --with-dmalloc was given.  ##
+## From Franc,ois Pinard               ##
+## ----------------------------------- ##
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+AC_DEFUN([AM_WITH_DMALLOC],
+[AC_MSG_CHECKING([if malloc debugging is wanted])
+AC_ARG_WITH([dmalloc],
+[AS_HELP_STRING([--with-dmalloc],
+                [use dmalloc, as in http://www.dmalloc.com])],
+[if test "$withval" = yes; then
+  AC_MSG_RESULT([yes])
+  AC_DEFINE([WITH_DMALLOC], [1],
+	    [Define if using the dmalloc debugging malloc package])
+  LIBS="$LIBS -ldmalloc"
+  LDFLAGS="$LDFLAGS -g"
+else
+  AC_MSG_RESULT([no])
+fi], [AC_MSG_RESULT([no])])
+])
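As a hedged usage sketch of AM_WITH_DMALLOC: a single call in a hypothetical configure.ac lets users opt in at configure time, with the effects shown in the macro body above (WITH_DMALLOC defined, -ldmalloc and -g appended).

    # configure.ac fragment (sketch; surrounding boilerplate omitted)
    AM_WITH_DMALLOC
    # ./configure --with-dmalloc  -> defines WITH_DMALLOC, adds -ldmalloc and -g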
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/extra-recurs.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/extra-recurs.m4
new file mode 100644
index 0000000..02cc53f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/extra-recurs.m4
@@ -0,0 +1,16 @@
+# AM_EXTRA_RECURSIVE_TARGETS                                -*- Autoconf -*-
+
+# Copyright (C) 2012-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_EXTRA_RECURSIVE_TARGETS
+# --------------------------
+# Define the list of user recursive targets.  This macro exists only to
+# be traced by Automake, which will ensure that a proper definition of
+# user-defined recursive targets (and associated rules) is propagated
+# into all the generated Makefiles.
+# TODO: We should really reject non-literal arguments here...
+AC_DEFUN([AM_EXTRA_RECURSIVE_TARGETS], [])
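Because AM_EXTRA_RECURSIVE_TARGETS expands to nothing and is only traced by Automake, its effect shows up in the generated Makefiles. A sketch with an invented target name:

    # configure.ac fragment (sketch; 'check-style' is a hypothetical target name)
    AM_EXTRA_RECURSIVE_TARGETS([check-style])

Each Makefile.am may then provide a check-style-local rule, and 'make check-style' at the top level recurses through SUBDIRS running those hooks.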
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/gcj.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/gcj.m4
new file mode 100644
index 0000000..aebabc1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/gcj.m4
@@ -0,0 +1,18 @@
+# Check for Java compiler.                                  -*- Autoconf -*-
+# For now we only handle the GNU compiler.
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+AC_DEFUN([AM_PROG_GCJ],
+[AC_CHECK_TOOLS([GCJ], [gcj], [gcj])
+test -z "$GCJ" && AC_MSG_ERROR([no acceptable gcj found in \$PATH])
+if test "x${GCJFLAGS-unset}" = xunset; then
+   GCJFLAGS="-g -O2"
+fi
+AC_SUBST([GCJFLAGS])
+_AM_IF_OPTION([no-dependencies],, [_AM_DEPENDENCIES(GCJ)])
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/init.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/init.m4
new file mode 100644
index 0000000..432ff20
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/init.m4
@@ -0,0 +1,194 @@
+# Do all the work for Automake.                             -*- Autoconf -*-
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This macro actually does too much.  Some checks are only needed if
+# your package does certain things.  But this isn't really a big deal.
+
+dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O.
+m4_define([AC_PROG_CC],
+m4_defn([AC_PROG_CC])
+[_AM_PROG_CC_C_O
+])
+
+# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
+# AM_INIT_AUTOMAKE([OPTIONS])
+# -----------------------------------------------
+# The call with PACKAGE and VERSION arguments is the old style
+# call (pre autoconf-2.50), which is being phased out.  PACKAGE
+# and VERSION should now be passed to AC_INIT and removed from
+# the call to AM_INIT_AUTOMAKE.
+# We support both call styles for the transition.  After
+# the next Automake release, Autoconf can make the AC_INIT
+# arguments mandatory, and then we can depend on a new Autoconf
+# release and drop the old call support.
+AC_DEFUN([AM_INIT_AUTOMAKE],
+[AC_PREREQ([2.65])dnl
+dnl Autoconf wants to disallow AM_ names.  We explicitly allow
+dnl the ones we care about.
+m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
+AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
+AC_REQUIRE([AC_PROG_INSTALL])dnl
+if test "`cd $srcdir && pwd`" != "`pwd`"; then
+  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
+  # is not polluted with repeated "-I."
+  AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
+  # test to see if srcdir already configured
+  if test -f $srcdir/config.status; then
+    AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
+  fi
+fi
+
+# test whether we have cygpath
+if test -z "$CYGPATH_W"; then
+  if (cygpath --version) >/dev/null 2>/dev/null; then
+    CYGPATH_W='cygpath -w'
+  else
+    CYGPATH_W=echo
+  fi
+fi
+AC_SUBST([CYGPATH_W])
+
+# Define the identity of the package.
+dnl Distinguish between old-style and new-style calls.
+m4_ifval([$2],
+[AC_DIAGNOSE([obsolete],
+             [$0: two- and three-arguments forms are deprecated.])
+m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
+ AC_SUBST([PACKAGE], [$1])dnl
+ AC_SUBST([VERSION], [$2])],
+[_AM_SET_OPTIONS([$1])dnl
+dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
+m4_if(
+  m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]),
+  [ok:ok],,
+  [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
+ AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
+ AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl
+
+_AM_IF_OPTION([no-define],,
+[AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package])
+ AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl
+
+# Some tools Automake needs.
+AC_REQUIRE([AM_SANITY_CHECK])dnl
+AC_REQUIRE([AC_ARG_PROGRAM])dnl
+AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}])
+AM_MISSING_PROG([AUTOCONF], [autoconf])
+AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}])
+AM_MISSING_PROG([AUTOHEADER], [autoheader])
+AM_MISSING_PROG([MAKEINFO], [makeinfo])
+AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
+AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
+AC_REQUIRE([AC_PROG_MKDIR_P])dnl
+# For better backward compatibility.  To be removed once Automake 1.9.x
+# dies out for good.  For more background, see:
+# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
+# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
+AC_SUBST([mkdir_p], ['$(MKDIR_P)'])
+# We need awk for the "check" target.  The system "awk" is bad on
+# some platforms.
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([AC_PROG_MAKE_SET])dnl
+AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
+	      [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
+			     [_AM_PROG_TAR([v7])])])
+_AM_IF_OPTION([no-dependencies],,
+[AC_PROVIDE_IFELSE([AC_PROG_CC],
+		  [_AM_DEPENDENCIES([CC])],
+		  [m4_define([AC_PROG_CC],
+			     m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_CXX],
+		  [_AM_DEPENDENCIES([CXX])],
+		  [m4_define([AC_PROG_CXX],
+			     m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_OBJC],
+		  [_AM_DEPENDENCIES([OBJC])],
+		  [m4_define([AC_PROG_OBJC],
+			     m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_OBJCXX],
+		  [_AM_DEPENDENCIES([OBJCXX])],
+		  [m4_define([AC_PROG_OBJCXX],
+			     m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl
+])
+AC_REQUIRE([AM_SILENT_RULES])dnl
+dnl The testsuite driver may need to know about EXEEXT, so add the
+dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen.  This
+dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below.
+AC_CONFIG_COMMANDS_PRE(dnl
+[m4_provide_if([_AM_COMPILER_EXEEXT],
+  [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl
+
+# POSIX will say in a future version that running "rm -f" with no argument
+# is OK; and we want to be able to make that assumption in our Makefile
+# recipes.  So use an aggressive probe to check that the usage we want is
+# actually supported "in the wild" to an acceptable degree.
+# See automake bug#10828.
+# To make any issue more visible, cause the running configure to be aborted
+# by default if the 'rm' program in use doesn't match our expectations; the
+# user can still override this though.
+if rm -f && rm -fr && rm -rf; then : OK; else
+  cat >&2 <<'END'
+Oops!
+
+Your 'rm' program seems unable to run without file operands specified
+on the command line, even when the '-f' option is present.  This is contrary
+to the behaviour of most rm programs out there, and not conforming with
+the upcoming POSIX standard: <http://austingroupbugs.net/view.php?id=542>
+
+Please tell bug-automake@gnu.org about your system, including the value
+of your $PATH and any error possibly output before this message.  This
+can help us improve future automake versions.
+
+END
+  if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then
+    echo 'Configuration will proceed anyway, since you have set the' >&2
+    echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2
+    echo >&2
+  else
+    cat >&2 <<'END'
+Aborting the configuration process, to ensure you take notice of the issue.
+
+You can download and install GNU coreutils to get an 'rm' implementation
+that behaves properly: <http://www.gnu.org/software/coreutils/>.
+
+If you want to complete the configuration process using your problematic
+'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
+to "yes", and re-run configure.
+
+END
+    AC_MSG_ERROR([Your 'rm' program is bad, sorry.])
+  fi
+fi])
+
+dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion.  Do not
+dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further
+dnl mangled by Autoconf and run in a shell conditional statement.
+m4_define([_AC_COMPILER_EXEEXT],
+m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])])
+
+# When config.status generates a header, we must update the stamp-h file.
+# This file resides in the same directory as the config header
+# that is generated.  The stamp files are numbered to have different names.
+
+# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
+# loop where config.status creates the headers, so we can generate
+# our stamp files there.
+AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
+[# Compute $1's index in $config_headers.
+_am_arg=$1
+_am_stamp_count=1
+for _am_header in $config_headers :; do
+  case $_am_header in
+    $_am_arg | $_am_arg:* )
+      break ;;
+    * )
+      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
+  esac
+done
+echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/install-sh.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/install-sh.m4
new file mode 100644
index 0000000..dca2b69
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/install-sh.m4
@@ -0,0 +1,21 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_SH
+# ------------------
+# Define $install_sh.
+AC_DEFUN([AM_PROG_INSTALL_SH],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+if test x"${install_sh}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
+  *)
+    install_sh="\${SHELL} $am_aux_dir/install-sh"
+  esac
+fi
+AC_SUBST([install_sh])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/internal/ac-config-macro-dirs.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/internal/ac-config-macro-dirs.m4
new file mode 100644
index 0000000..2684883
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/internal/ac-config-macro-dirs.m4
@@ -0,0 +1,15 @@
+# Support AC_CONFIG_MACRO_DIRS with older autoconf.     -*- Autoconf -*-
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with the contents of the variable
+#        '$ac_config_macro_dirs_fallback' in aclocal.in.
+
+# Copyright (C) 2012-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+m4_ifndef([AC_CONFIG_MACRO_DIRS],
+[m4_defun([_AM_CONFIG_MACRO_DIRS],[])]dnl
+[m4_defun([AC_CONFIG_MACRO_DIRS],[_AM_CONFIG_MACRO_DIRS($@)])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lead-dot.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lead-dot.m4
new file mode 100644
index 0000000..4dd798b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lead-dot.m4
@@ -0,0 +1,19 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# Check whether the underlying file-system supports filenames
+# with a leading dot.  For instance MS-DOS doesn't.
+AC_DEFUN([AM_SET_LEADING_DOT],
+[rm -rf .tst 2>/dev/null
+mkdir .tst 2>/dev/null
+if test -d .tst; then
+  am__leading_dot=.
+else
+  am__leading_dot=_
+fi
+rmdir .tst 2>/dev/null
+AC_SUBST([am__leading_dot])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lex.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lex.m4
new file mode 100644
index 0000000..d97daa3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lex.m4
@@ -0,0 +1,19 @@
+## Replacement for AC_PROG_LEX.                            -*-  Autoconf -*-
+## by Alexandre Oliva <oliva@dcc.unicamp.br>
+# Copyright (C) 1998-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_LEX
+# -----------
+# Autoconf leaves LEX=: if lex or flex can't be found.  Change that to a
+# "missing" invocation, for better error output.
+AC_DEFUN([AM_PROG_LEX],
+[AC_PREREQ([2.50])dnl
+AC_REQUIRE([AM_MISSING_HAS_RUN])dnl
+AC_REQUIRE([AC_PROG_LEX])dnl
+if test "$LEX" = :; then
+  LEX=${am_missing_run}flex
+fi])
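AM_PROG_LEX is a drop-in replacement for AC_PROG_LEX in configure.ac; when no lex/flex is installed it substitutes the "missing" flex wrapper so make produces a clearer error. A sketch with invented file names:

    # configure.ac fragment (sketch)
    AM_PROG_LEX

    # Makefile.am fragment (sketch; scanner.l and main.c are placeholders)
    bin_PROGRAMS    = scanner
    scanner_SOURCES = scanner.l main.c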
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lispdir.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lispdir.m4
new file mode 100644
index 0000000..4e0c914
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/lispdir.m4
@@ -0,0 +1,51 @@
+## ------------------------                                 -*- Autoconf -*-
+## Emacs LISP file handling
+## From Ulrich Drepper
+## Almost entirely rewritten by Alexandre Oliva
+## ------------------------
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PATH_LISPDIR
+# ---------------
+AC_DEFUN([AM_PATH_LISPDIR],
+[AC_PREREQ([2.60])dnl
+ # If set to t, that means we are running in a shell under Emacs.
+ # If you have an Emacs named "t", then use the full path.
+ test x"$EMACS" = xt && EMACS=
+ AC_CHECK_PROGS([EMACS], [emacs xemacs], [no])
+ AC_ARG_VAR([EMACS], [the Emacs editor command])
+ AC_ARG_VAR([EMACSLOADPATH], [the Emacs library search path])
+ AC_ARG_WITH([lispdir],
+ [AS_HELP_STRING([--with-lispdir],
+                 [override the default lisp directory])],
+ [ lispdir="$withval"
+   AC_MSG_CHECKING([where .elc files should go])
+   AC_MSG_RESULT([$lispdir])],
+ [
+ AC_CACHE_CHECK([where .elc files should go], [am_cv_lispdir], [
+   if test $EMACS != "no"; then
+     if test x${lispdir+set} != xset; then
+  # If $EMACS isn't GNU Emacs or XEmacs, this can blow up pretty badly
+  # Some emacsen will start up in interactive mode, requiring C-x C-c to exit,
+  #  which is non-obvious for non-emacs users.
+  # Redirecting /dev/null should help a bit; pity we can't detect "broken"
+  #  emacsen earlier and avoid running this altogether.
+  AC_RUN_LOG([$EMACS -batch -q -eval '(while load-path (princ (concat (car load-path) "\n")) (setq load-path (cdr load-path)))' </dev/null >conftest.out])
+	am_cv_lispdir=`sed -n \
+       -e 's,/$,,' \
+       -e '/.*\/lib\/x*emacs\/site-lisp$/{s,.*/lib/\(x*emacs/site-lisp\)$,${libdir}/\1,;p;q;}' \
+       -e '/.*\/share\/x*emacs\/site-lisp$/{s,.*/share/\(x*emacs/site-lisp\),${datarootdir}/\1,;p;q;}' \
+       conftest.out`
+       rm conftest.out
+     fi
+   fi
+   test -z "$am_cv_lispdir" && am_cv_lispdir='${datadir}/emacs/site-lisp'
+  ])
+  lispdir="$am_cv_lispdir"
+])
+AC_SUBST([lispdir])
+])# AM_PATH_LISPDIR
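AM_PATH_LISPDIR publishes $(lispdir) so Emacs Lisp files can be installed with the lisp_LISP primary; users can override the location with --with-lispdir. A sketch (the .el name is a placeholder):

    # configure.ac fragment (sketch)
    AM_PATH_LISPDIR

    # Makefile.am fragment (sketch)
    lisp_LISP = example-mode.el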
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/maintainer.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/maintainer.m4
new file mode 100644
index 0000000..fc98244
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/maintainer.m4
@@ -0,0 +1,35 @@
+# Add --enable-maintainer-mode option to configure.         -*- Autoconf -*-
+# From Jim Meyering
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_MAINTAINER_MODE([DEFAULT-MODE])
+# ----------------------------------
+# Control maintainer-specific portions of Makefiles.
+# Default is to disable them, unless 'enable' is passed literally.
+# For symmetry, 'disable' may be passed as well.  Anyway, the user
+# can override the default with the --enable/--disable switch.
+AC_DEFUN([AM_MAINTAINER_MODE],
+[m4_case(m4_default([$1], [disable]),
+       [enable], [m4_define([am_maintainer_other], [disable])],
+       [disable], [m4_define([am_maintainer_other], [enable])],
+       [m4_define([am_maintainer_other], [enable])
+        m4_warn([syntax], [unexpected argument to AM@&t@_MAINTAINER_MODE: $1])])
+AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles])
+  dnl maintainer-mode's default is 'disable' unless 'enable' is passed
+  AC_ARG_ENABLE([maintainer-mode],
+    [AS_HELP_STRING([--]am_maintainer_other[-maintainer-mode],
+      am_maintainer_other[ make rules and dependencies not useful
+      (and sometimes confusing) to the casual installer])],
+    [USE_MAINTAINER_MODE=$enableval],
+    [USE_MAINTAINER_MODE=]m4_if(am_maintainer_other, [enable], [no], [yes]))
+  AC_MSG_RESULT([$USE_MAINTAINER_MODE])
+  AM_CONDITIONAL([MAINTAINER_MODE], [test $USE_MAINTAINER_MODE = yes])
+  MAINT=$MAINTAINER_MODE_TRUE
+  AC_SUBST([MAINT])dnl
+]
+)
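Per the comment above, AM_MAINTAINER_MODE defaults to disabling the maintainer-only rebuild rules unless 'enable' is passed literally. A sketch of the enabling call:

    # configure.ac fragment (sketch)
    AM_MAINTAINER_MODE([enable])
    # users can still pass --disable-maintainer-mode at configure time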
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/make.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/make.m4
new file mode 100644
index 0000000..3412d17
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/make.m4
@@ -0,0 +1,49 @@
+# Check to see how 'make' treats includes.	            -*- Autoconf -*-
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_MAKE_INCLUDE()
+# -----------------
+# Check to see how make treats includes.
+AC_DEFUN([AM_MAKE_INCLUDE],
+[am_make=${MAKE-make}
+cat > confinc << 'END'
+am__doit:
+	@echo this is the am__doit target
+.PHONY: am__doit
+END
+# If we don't find an include directive, just comment out the code.
+AC_MSG_CHECKING([for style of include used by $am_make])
+am__include="#"
+am__quote=
+_am_result=none
+# First try GNU make style include.
+echo "include confinc" > confmf
+# Ignore all kinds of additional output from 'make'.
+case `$am_make -s -f confmf 2> /dev/null` in #(
+*the\ am__doit\ target*)
+  am__include=include
+  am__quote=
+  _am_result=GNU
+  ;;
+esac
+# Now try BSD make style include.
+if test "$am__include" = "#"; then
+   echo '.include "confinc"' > confmf
+   case `$am_make -s -f confmf 2> /dev/null` in #(
+   *the\ am__doit\ target*)
+     am__include=.include
+     am__quote="\""
+     _am_result=BSD
+     ;;
+   esac
+fi
+AC_SUBST([am__include])
+AC_SUBST([am__quote])
+AC_MSG_RESULT([$_am_result])
+rm -f confinc confmf
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/missing.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/missing.m4
new file mode 100644
index 0000000..6dea461
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/missing.m4
@@ -0,0 +1,38 @@
+# Fake the existence of programs that GNU maintainers use.  -*- Autoconf -*-
+
+# Copyright (C) 1997-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_MISSING_PROG(NAME, PROGRAM)
+# ------------------------------
+AC_DEFUN([AM_MISSING_PROG],
+[AC_REQUIRE([AM_MISSING_HAS_RUN])
+$1=${$1-"${am_missing_run}$2"}
+AC_SUBST($1)])
+
+# AM_MISSING_HAS_RUN
+# ------------------
+# Define MISSING if not defined so far and test if it is modern enough.
+# If it is, set am_missing_run to use it, otherwise, to nothing.
+AC_DEFUN([AM_MISSING_HAS_RUN],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+AC_REQUIRE_AUX_FILE([missing])dnl
+if test x"${MISSING+set}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+  *)
+    MISSING="\${SHELL} $am_aux_dir/missing" ;;
+  esac
+fi
+# Use eval to expand $SHELL
+if eval "$MISSING --is-lightweight"; then
+  am_missing_run="$MISSING "
+else
+  am_missing_run=
+  AC_MSG_WARN(['missing' script is too old or missing])
+fi
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/mkdirp.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/mkdirp.m4
new file mode 100644
index 0000000..0b90d2c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/mkdirp.m4
@@ -0,0 +1,32 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_MKDIR_P
+# ---------------
+# Check for 'mkdir -p'.
+AC_DEFUN([AM_PROG_MKDIR_P],
+[AC_PREREQ([2.60])dnl
+AC_REQUIRE([AC_PROG_MKDIR_P])dnl
+dnl FIXME we are no longer going to remove this! adjust warning
+dnl FIXME message accordingly.
+AC_DIAGNOSE([obsolete],
+[$0: this macro is deprecated, and will soon be removed.
+You should use the Autoconf-provided 'AC][_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.])
+dnl Automake 1.8 to 1.9.6 used to define mkdir_p.  We now use MKDIR_P,
+dnl while keeping a definition of mkdir_p for backward compatibility.
+dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile.
+dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of
+dnl Makefile.ins that do not define MKDIR_P, so we do our own
+dnl adjustment using top_builddir (which is defined more often than
+dnl MKDIR_P).
+AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl
+case $mkdir_p in
+  [[\\/$]]* | ?:[[\\/]]*) ;;
+  */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
+esac
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/obsolete.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/obsolete.m4
new file mode 100644
index 0000000..f7ec1db
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/obsolete.m4
@@ -0,0 +1,28 @@
+#  -*- Autoconf -*-
+# Obsolete and "removed" macros, that must however still report explicit
+# error messages when used, to smooth transition.
+#
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+AC_DEFUN([AM_CONFIG_HEADER],
+[AC_DIAGNOSE([obsolete],
+['$0': this macro is obsolete.
+You should use the 'AC][_CONFIG_HEADERS' macro instead.])dnl
+AC_CONFIG_HEADERS($@)])
+
+AC_DEFUN([AM_PROG_CC_STDC],
+[AC_PROG_CC
+am_cv_prog_cc_stdc=$ac_cv_prog_cc_stdc
+AC_DIAGNOSE([obsolete],
+['$0': this macro is obsolete.
+You should simply use the 'AC][_PROG_CC' macro instead.
+Also, your code should no longer depend upon 'am_cv_prog_cc_stdc',
+but upon 'ac_cv_prog_cc_stdc'.])])
+
+AC_DEFUN([AM_C_PROTOTYPES],
+         [AC_FATAL([automatic de-ANSI-fication support has been removed])])
+AU_DEFUN([fp_C_PROTOTYPES], [AM_C_PROTOTYPES])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/options.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/options.m4
new file mode 100644
index 0000000..01ce20c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/options.m4
@@ -0,0 +1,30 @@
+# Helper functions for option handling.                     -*- Autoconf -*-
+
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_MANGLE_OPTION(NAME)
+# -----------------------
+AC_DEFUN([_AM_MANGLE_OPTION],
+[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])])
+
+# _AM_SET_OPTION(NAME)
+# --------------------
+# Set option NAME.  Presently that only means defining a flag for this option.
+AC_DEFUN([_AM_SET_OPTION],
+[m4_define(_AM_MANGLE_OPTION([$1]), [1])])
+
+# _AM_SET_OPTIONS(OPTIONS)
+# ------------------------
+# OPTIONS is a space-separated list of Automake options.
+AC_DEFUN([_AM_SET_OPTIONS],
+[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])])
+
+# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET])
+# -------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+AC_DEFUN([_AM_IF_OPTION],
+[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/prog-cc-c-o.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/prog-cc-c-o.m4
new file mode 100644
index 0000000..3b2a849
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/prog-cc-c-o.m4
@@ -0,0 +1,47 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_PROG_CC_C_O
+# ---------------
+# Like AC_PROG_CC_C_O, but changed for automake.  We rewrite AC_PROG_CC
+# to automatically call this.
+AC_DEFUN([_AM_PROG_CC_C_O],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+AC_REQUIRE_AUX_FILE([compile])dnl
+AC_LANG_PUSH([C])dnl
+AC_CACHE_CHECK(
+  [whether $CC understands -c and -o together],
+  [am_cv_prog_cc_c_o],
+  [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])])
+  # Make sure it works both with $CC and with simple cc.
+  # Following AC_PROG_CC_C_O, we do the test twice because some
+  # compilers refuse to overwrite an existing .o file with -o,
+  # though they will create one.
+  am_cv_prog_cc_c_o=yes
+  for am_i in 1 2; do
+    if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \
+         && test -f conftest2.$ac_objext; then
+      : OK
+    else
+      am_cv_prog_cc_c_o=no
+      break
+    fi
+  done
+  rm -f core conftest*
+  unset am_i])
+if test "$am_cv_prog_cc_c_o" != yes; then
+   # Losing compiler, so override with the script.
+   # FIXME: It is wrong to rewrite CC.
+   # But if we don't then we get into trouble of one sort or another.
+   # A longer-term fix would be to have automake use am__CC in this case,
+   # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
+   CC="$am_aux_dir/compile $CC"
+fi
+AC_LANG_POP([C])])
+
+# For backward compatibility.
+AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/python.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/python.m4
new file mode 100644
index 0000000..4f16355
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/python.m4
@@ -0,0 +1,239 @@
+## ------------------------                                 -*- Autoconf -*-
+## Python file handling
+## From Andrew Dalke
+## Updated by James Henstridge
+## ------------------------
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+
+# AM_PATH_PYTHON([MINIMUM-VERSION], [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# ---------------------------------------------------------------------------
+# Adds support for distributing Python modules and packages.  To
+# install modules, copy them to $(pythondir), using the python_PYTHON
+# automake variable.  To install a package with the same name as the
+# automake package, install to $(pkgpythondir), or use the
+# pkgpython_PYTHON automake variable.
+#
+# The variables $(pyexecdir) and $(pkgpyexecdir) are provided as
+# locations to install python extension modules (shared libraries).
+# Another macro is required to find the appropriate flags to compile
+# extension modules.
+#
+# If your package is configured with a different prefix to python,
+# users will have to add the install directory to the PYTHONPATH
+# environment variable, or create a .pth file (see the python
+# documentation for details).
+#
+# If the MINIMUM-VERSION argument is passed, AM_PATH_PYTHON will
+# cause an error if the version of python installed on the system
+# doesn't meet the requirement.  MINIMUM-VERSION should consist of
+# numbers and dots only.
+AC_DEFUN([AM_PATH_PYTHON],
+ [
+  dnl Find a Python interpreter.  Python versions prior to 2.0 are not
+  dnl supported. (2.0 was released on October 16, 2000).
+  m4_define_default([_AM_PYTHON_INTERPRETER_LIST],
+[python python2 python3 python3.3 python3.2 python3.1 python3.0 python2.7 dnl
+ python2.6 python2.5 python2.4 python2.3 python2.2 python2.1 python2.0])
+
+  AC_ARG_VAR([PYTHON], [the Python interpreter])
+
+  m4_if([$1],[],[
+    dnl No version check is needed.
+    # Find any Python interpreter.
+    if test -z "$PYTHON"; then
+      AC_PATH_PROGS([PYTHON], _AM_PYTHON_INTERPRETER_LIST, :)
+    fi
+    am_display_PYTHON=python
+  ], [
+    dnl A version check is needed.
+    if test -n "$PYTHON"; then
+      # If the user set $PYTHON, use it and don't search something else.
+      AC_MSG_CHECKING([whether $PYTHON version is >= $1])
+      AM_PYTHON_CHECK_VERSION([$PYTHON], [$1],
+			      [AC_MSG_RESULT([yes])],
+			      [AC_MSG_RESULT([no])
+			       AC_MSG_ERROR([Python interpreter is too old])])
+      am_display_PYTHON=$PYTHON
+    else
+      # Otherwise, try each interpreter until we find one that satisfies
+      # VERSION.
+      AC_CACHE_CHECK([for a Python interpreter with version >= $1],
+	[am_cv_pathless_PYTHON],[
+	for am_cv_pathless_PYTHON in _AM_PYTHON_INTERPRETER_LIST none; do
+	  test "$am_cv_pathless_PYTHON" = none && break
+	  AM_PYTHON_CHECK_VERSION([$am_cv_pathless_PYTHON], [$1], [break])
+	done])
+      # Set $PYTHON to the absolute path of $am_cv_pathless_PYTHON.
+      if test "$am_cv_pathless_PYTHON" = none; then
+	PYTHON=:
+      else
+        AC_PATH_PROG([PYTHON], [$am_cv_pathless_PYTHON])
+      fi
+      am_display_PYTHON=$am_cv_pathless_PYTHON
+    fi
+  ])
+
+  if test "$PYTHON" = :; then
+  dnl Run any user-specified action, or abort.
+    m4_default([$3], [AC_MSG_ERROR([no suitable Python interpreter found])])
+  else
+
+  dnl Query Python for its version number.  Getting [:3] seems to be
+  dnl the best way to do this; it's what "site.py" does in the standard
+  dnl library.
+
+  AC_CACHE_CHECK([for $am_display_PYTHON version], [am_cv_python_version],
+    [am_cv_python_version=`$PYTHON -c "import sys; sys.stdout.write(sys.version[[:3]])"`])
+  AC_SUBST([PYTHON_VERSION], [$am_cv_python_version])
+
+  dnl Use the values of $prefix and $exec_prefix for the corresponding
+  dnl values of PYTHON_PREFIX and PYTHON_EXEC_PREFIX.  These are made
+  dnl distinct variables so they can be overridden if need be.  However,
+  dnl general consensus is that you shouldn't need this ability.
+
+  AC_SUBST([PYTHON_PREFIX], ['${prefix}'])
+  AC_SUBST([PYTHON_EXEC_PREFIX], ['${exec_prefix}'])
+
+  dnl At times (like when building shared libraries) you may want
+  dnl to know which OS platform Python thinks this is.
+
+  AC_CACHE_CHECK([for $am_display_PYTHON platform], [am_cv_python_platform],
+    [am_cv_python_platform=`$PYTHON -c "import sys; sys.stdout.write(sys.platform)"`])
+  AC_SUBST([PYTHON_PLATFORM], [$am_cv_python_platform])
+
+  # Just factor out some code duplication.
+  am_python_setup_sysconfig="\
+import sys
+# Prefer sysconfig over distutils.sysconfig, for better compatibility
+# with python 3.x.  See automake bug#10227.
+try:
+    import sysconfig
+except ImportError:
+    can_use_sysconfig = 0
+else:
+    can_use_sysconfig = 1
+# Can't use sysconfig in CPython 2.7, since it's broken in virtualenvs:
+# <https://github.com/pypa/virtualenv/issues/118>
+try:
+    from platform import python_implementation
+    if python_implementation() == 'CPython' and sys.version[[:3]] == '2.7':
+        can_use_sysconfig = 0
+except ImportError:
+    pass"
+
+  dnl Set up 4 directories:
+
+  dnl pythondir -- where to install python scripts.  This is the
+  dnl   site-packages directory, not the python standard library
+  dnl   directory like in previous automake betas.  This behavior
+  dnl   is more consistent with lispdir.m4 for example.
+  dnl Query distutils for this directory.
+  AC_CACHE_CHECK([for $am_display_PYTHON script directory],
+    [am_cv_python_pythondir],
+    [if test "x$prefix" = xNONE
+     then
+       am_py_prefix=$ac_default_prefix
+     else
+       am_py_prefix=$prefix
+     fi
+     am_cv_python_pythondir=`$PYTHON -c "
+$am_python_setup_sysconfig
+if can_use_sysconfig:
+    sitedir = sysconfig.get_path('purelib', vars={'base':'$am_py_prefix'})
+else:
+    from distutils import sysconfig
+    sitedir = sysconfig.get_python_lib(0, 0, prefix='$am_py_prefix')
+sys.stdout.write(sitedir)"`
+     case $am_cv_python_pythondir in
+     $am_py_prefix*)
+       am__strip_prefix=`echo "$am_py_prefix" | sed 's|.|.|g'`
+       am_cv_python_pythondir=`echo "$am_cv_python_pythondir" | sed "s,^$am__strip_prefix,$PYTHON_PREFIX,"`
+       ;;
+     *)
+       case $am_py_prefix in
+         /usr|/System*) ;;
+         *)
+	  am_cv_python_pythondir=$PYTHON_PREFIX/lib/python$PYTHON_VERSION/site-packages
+	  ;;
+       esac
+       ;;
+     esac
+    ])
+  AC_SUBST([pythondir], [$am_cv_python_pythondir])
+
+  dnl pkgpythondir -- $PACKAGE directory under pythondir.  Was
+  dnl   PYTHON_SITE_PACKAGE in previous betas, but this naming is
+  dnl   more consistent with the rest of automake.
+
+  AC_SUBST([pkgpythondir], [\${pythondir}/$PACKAGE])
+
+  dnl pyexecdir -- directory for installing python extension modules
+  dnl   (shared libraries)
+  dnl Query distutils for this directory.
+  AC_CACHE_CHECK([for $am_display_PYTHON extension module directory],
+    [am_cv_python_pyexecdir],
+    [if test "x$exec_prefix" = xNONE
+     then
+       am_py_exec_prefix=$am_py_prefix
+     else
+       am_py_exec_prefix=$exec_prefix
+     fi
+     am_cv_python_pyexecdir=`$PYTHON -c "
+$am_python_setup_sysconfig
+if can_use_sysconfig:
+    sitedir = sysconfig.get_path('platlib', vars={'platbase':'$am_py_prefix'})
+else:
+    from distutils import sysconfig
+    sitedir = sysconfig.get_python_lib(1, 0, prefix='$am_py_prefix')
+sys.stdout.write(sitedir)"`
+     case $am_cv_python_pyexecdir in
+     $am_py_exec_prefix*)
+       am__strip_prefix=`echo "$am_py_exec_prefix" | sed 's|.|.|g'`
+       am_cv_python_pyexecdir=`echo "$am_cv_python_pyexecdir" | sed "s,^$am__strip_prefix,$PYTHON_EXEC_PREFIX,"`
+       ;;
+     *)
+       case $am_py_exec_prefix in
+         /usr|/System*) ;;
+         *)
+	   am_cv_python_pyexecdir=$PYTHON_EXEC_PREFIX/lib/python$PYTHON_VERSION/site-packages
+	   ;;
+       esac
+       ;;
+     esac
+    ])
+  AC_SUBST([pyexecdir], [$am_cv_python_pyexecdir])
+
+  dnl pkgpyexecdir -- $(pyexecdir)/$(PACKAGE)
+
+  AC_SUBST([pkgpyexecdir], [\${pyexecdir}/$PACKAGE])
+
+  dnl Run any user-specified action.
+  $2
+  fi
+
+])
+
+
+# AM_PYTHON_CHECK_VERSION(PROG, VERSION, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ---------------------------------------------------------------------------
+# Run ACTION-IF-TRUE if the Python interpreter PROG has version >= VERSION.
+# Run ACTION-IF-FALSE otherwise.
+# This test uses sys.hexversion instead of the string equivalent (first
+# word of sys.version), in order to cope with versions such as 2.2c1.
+# This supports Python 2.0 or higher. (2.0 was released on October 16, 2000).
+AC_DEFUN([AM_PYTHON_CHECK_VERSION],
+ [prog="import sys
+# split strings by '.' and convert to numeric.  Append some zeros
+# because we need at least 4 digits for the hex conversion.
+# map returns an iterator in Python 3.0 and a list in 2.x
+minver = list(map(int, '$2'.split('.'))) + [[0, 0, 0]]
+minverhex = 0
+# xrange is not present in Python 3.0 and range returns an iterator
+for i in list(range(0, 4)): minverhex = (minverhex << 8) + minver[[i]]
+sys.exit(sys.hexversion < minverhex)"
+  AS_IF([AM_RUN_LOG([$1 -c "$prog"])], [$3], [$4])])
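Following the usage notes at the top of AM_PATH_PYTHON, a sketch of a configure.ac/Makefile.am pair that installs modules into $(pythondir) and $(pkgpythondir); the version requirement and file names are illustrative only:

    # configure.ac fragment (sketch): require Python >= 2.6, abort otherwise
    AM_PATH_PYTHON([2.6])

    # Makefile.am fragment (sketch)
    python_PYTHON    = examplemodule.py
    pkgpython_PYTHON = examplepkg/__init__.py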
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/runlog.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/runlog.m4
new file mode 100644
index 0000000..b3a6682
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/runlog.m4
@@ -0,0 +1,17 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_RUN_LOG(COMMAND)
+# -------------------
+# Run COMMAND, save the exit status in ac_status, and log it.
+# (This has been adapted from Autoconf's _AC_RUN_LOG macro.)
+AC_DEFUN([AM_RUN_LOG],
+[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD
+   ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD
+   ac_status=$?
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   (exit $ac_status); }])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/sanity.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/sanity.m4
new file mode 100644
index 0000000..8ae5560
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/sanity.m4
@@ -0,0 +1,82 @@
+# Check to make sure that the build environment is sane.    -*- Autoconf -*-
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_SANITY_CHECK
+# ---------------
+AC_DEFUN([AM_SANITY_CHECK],
+[AC_MSG_CHECKING([whether build environment is sane])
+# Reject unsafe characters in $srcdir or the absolute working directory
+# name.  Accept space and tab only in the latter.
+am_lf='
+'
+case `pwd` in
+  *[[\\\"\#\$\&\'\`$am_lf]]*)
+    AC_MSG_ERROR([unsafe absolute working directory name]);;
+esac
+case $srcdir in
+  *[[\\\"\#\$\&\'\`$am_lf\ \	]]*)
+    AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);;
+esac
+
+# Do 'set' in a subshell so we don't clobber the current shell's
+# arguments.  Must try -L first in case configure is actually a
+# symlink; some systems play weird games with the mod time of symlinks
+# (eg FreeBSD returns the mod time of the symlink's containing
+# directory).
+if (
+   am_has_slept=no
+   for am_try in 1 2; do
+     echo "timestamp, slept: $am_has_slept" > conftest.file
+     set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
+     if test "$[*]" = "X"; then
+	# -L didn't work.
+	set X `ls -t "$srcdir/configure" conftest.file`
+     fi
+     if test "$[*]" != "X $srcdir/configure conftest.file" \
+	&& test "$[*]" != "X conftest.file $srcdir/configure"; then
+
+	# If neither matched, then we have a broken ls.  This can happen
+	# if, for instance, CONFIG_SHELL is bash and it inherits a
+	# broken ls alias from the environment.  This has actually
+	# happened.  Such a system could not be considered "sane".
+	AC_MSG_ERROR([ls -t appears to fail.  Make sure there is not a broken
+  alias in your environment])
+     fi
+     if test "$[2]" = conftest.file || test $am_try -eq 2; then
+       break
+     fi
+     # Just in case.
+     sleep 1
+     am_has_slept=yes
+   done
+   test "$[2]" = conftest.file
+   )
+then
+   # Ok.
+   :
+else
+   AC_MSG_ERROR([newly created file is older than distributed files!
+Check your system clock])
+fi
+AC_MSG_RESULT([yes])
+# If we didn't sleep, we still need to ensure time stamps of config.status and
+# generated files are strictly newer.
+am_sleep_pid=
+if grep 'slept: no' conftest.file >/dev/null 2>&1; then
+  ( sleep 1 ) &
+  am_sleep_pid=$!
+fi
+AC_CONFIG_COMMANDS_PRE(
+  [AC_MSG_CHECKING([that generated files are newer than configure])
+   if test -n "$am_sleep_pid"; then
+     # Hide warnings about reused PIDs.
+     wait $am_sleep_pid 2>/dev/null
+   fi
+   AC_MSG_RESULT([done])])
+rm -f conftest.file
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/silent.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/silent.m4
new file mode 100644
index 0000000..6fb3c22
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/silent.m4
@@ -0,0 +1,60 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2009-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_SILENT_RULES([DEFAULT])
+# --------------------------
+# Enable less verbose build rules; with the default set to DEFAULT
+# ("yes" being less verbose, "no" or empty being verbose).
+AC_DEFUN([AM_SILENT_RULES],
+[AC_ARG_ENABLE([silent-rules], [dnl
+AS_HELP_STRING(
+  [--enable-silent-rules],
+  [less verbose build output (undo: "make V=1")])
+AS_HELP_STRING(
+  [--disable-silent-rules],
+  [verbose build output (undo: "make V=0")])dnl
+])
+case $enable_silent_rules in @%:@ (((
+  yes) AM_DEFAULT_VERBOSITY=0;;
+   no) AM_DEFAULT_VERBOSITY=1;;
+    *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);;
+esac
+dnl
+dnl A few 'make' implementations (e.g., NonStop OS and NextStep)
+dnl do not support nested variable expansions.
+dnl See automake bug#9928 and bug#10237.
+am_make=${MAKE-make}
+AC_CACHE_CHECK([whether $am_make supports nested variables],
+   [am_cv_make_support_nested_variables],
+   [if AS_ECHO([['TRUE=$(BAR$(V))
+BAR0=false
+BAR1=true
+V=1
+am__doit:
+	@$(TRUE)
+.PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then
+  am_cv_make_support_nested_variables=yes
+else
+  am_cv_make_support_nested_variables=no
+fi])
+if test $am_cv_make_support_nested_variables = yes; then
+  dnl Using '$V' instead of '$(V)' breaks IRIX make.
+  AM_V='$(V)'
+  AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
+else
+  AM_V=$AM_DEFAULT_VERBOSITY
+  AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY
+fi
+AC_SUBST([AM_V])dnl
+AM_SUBST_NOTMAKE([AM_V])dnl
+AC_SUBST([AM_DEFAULT_V])dnl
+AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl
+AC_SUBST([AM_DEFAULT_VERBOSITY])dnl
+AM_BACKSLASH='\'
+AC_SUBST([AM_BACKSLASH])dnl
+_AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl
+])
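AM_SILENT_RULES is required automatically by AM_INIT_AUTOMAKE (see init.m4 above); calling it explicitly with [yes] makes quiet output the default. A sketch:

    # configure.ac fragment (sketch)
    AM_SILENT_RULES([yes])
    # 'make V=1' restores verbose command echoing, per the help strings above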
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/strip.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/strip.m4
new file mode 100644
index 0000000..d0b7c65
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/strip.m4
@@ -0,0 +1,28 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_STRIP
+# ---------------------
+# One issue with vendor 'install' (even GNU) is that you can't
+# specify the program used to strip binaries.  This is especially
+# annoying in cross-compiling environments, where the build's strip
+# is unlikely to handle the host's binaries.
+# Fortunately install-sh will honor a STRIPPROG variable, so we
+# always use install-sh in "make install-strip", and initialize
+# STRIPPROG with the value of the STRIP variable (set by the user).
+AC_DEFUN([AM_PROG_INSTALL_STRIP],
+[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
+# Installed binaries are usually stripped using 'strip' when the user
+# run "make install-strip".  However 'strip' might not be the right
+# tool to use in cross-compilation environments, therefore Automake
+# will honor the 'STRIP' environment variable to overrule this program.
+dnl Don't test for $cross_compiling = yes, because it might be 'maybe'.
+if test "$cross_compiling" != no; then
+  AC_CHECK_TOOL([STRIP], [strip], :)
+fi
+INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+AC_SUBST([INSTALL_STRIP_PROGRAM])])
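AM_PROG_INSTALL_STRIP is pulled in by AM_INIT_AUTOMAKE, so its user-visible side is the install-strip target honoring $(STRIP). A hedged cross-compiling sketch with a placeholder toolchain prefix:

    # shell usage (sketch; 'arm-none-eabi-' is a placeholder cross prefix)
    ./configure --host=arm-none-eabi
    make install-strip STRIP=arm-none-eabi-strip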
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/substnot.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/substnot.m4
new file mode 100644
index 0000000..52dc5a1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/substnot.m4
@@ -0,0 +1,17 @@
+##                                                          -*- Autoconf -*-
+# Copyright (C) 2006-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_SUBST_NOTMAKE(VARIABLE)
+# ---------------------------
+# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in.
+# This macro is traced by Automake.
+AC_DEFUN([_AM_SUBST_NOTMAKE])
+
+# AM_SUBST_NOTMAKE(VARIABLE)
+# --------------------------
+# Public sister of _AM_SUBST_NOTMAKE.
+AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/tar.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/tar.m4
new file mode 100644
index 0000000..539aa3a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/tar.m4
@@ -0,0 +1,132 @@
+# Check how to create a tarball.                            -*- Autoconf -*-
+
+# Copyright (C) 2004-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_PROG_TAR(FORMAT)
+# --------------------
+# Check how to create a tarball in format FORMAT.
+# FORMAT should be one of 'v7', 'ustar', or 'pax'.
+#
+# Substitute a variable $(am__tar) that is a command
+# writing to stdout a FORMAT-tarball containing the directory
+# $tardir.
+#     tardir=directory && $(am__tar) > result.tar
+#
+# Substitute a variable $(am__untar) that extracts such
+# a tarball read from stdin.
+#     $(am__untar) < result.tar
+#
+AC_DEFUN([_AM_PROG_TAR],
+[# Always define AMTAR for backward compatibility.  Yes, it's still used
+# in the wild :-(  We should find a proper way to deprecate it ...
+AC_SUBST([AMTAR], ['$${TAR-tar}'])
+
+# We'll loop over all known methods to create a tar archive until one works.
+_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none'
+
+m4_if([$1], [v7],
+  [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'],
+
+  [m4_case([$1],
+    [ustar],
+     [# The POSIX 1988 'ustar' format is defined with fixed-size fields.
+      # There is notably a 21-bit limit for the UID and the GID.  In fact,
+      # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343
+      # and bug#13588).
+      am_max_uid=2097151 # 2^21 - 1
+      am_max_gid=$am_max_uid
+      # The $UID and $GID variables are not portable, so we need to resort
+      # to the POSIX-mandated id(1) utility.  Errors in the 'id' calls
+      # below are definitely unexpected, so allow the users to see them
+      # (that is, avoid stderr redirection).
+      am_uid=`id -u || echo unknown`
+      am_gid=`id -g || echo unknown`
+      AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format])
+      if test $am_uid -le $am_max_uid; then
+         AC_MSG_RESULT([yes])
+      else
+         AC_MSG_RESULT([no])
+         _am_tools=none
+      fi
+      AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format])
+      if test $am_gid -le $am_max_gid; then
+         AC_MSG_RESULT([yes])
+      else
+        AC_MSG_RESULT([no])
+        _am_tools=none
+      fi],
+
+  [pax],
+    [],
+
+  [m4_fatal([Unknown tar format])])
+
+  AC_MSG_CHECKING([how to create a $1 tar archive])
+
+  # Go ahead even if we have the value already cached.  We do so because we
+  # need to set the values for the 'am__tar' and 'am__untar' variables.
+  _am_tools=${am_cv_prog_tar_$1-$_am_tools}
+
+  for _am_tool in $_am_tools; do
+    case $_am_tool in
+    gnutar)
+      for _am_tar in tar gnutar gtar; do
+        AM_RUN_LOG([$_am_tar --version]) && break
+      done
+      am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"'
+      am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"'
+      am__untar="$_am_tar -xf -"
+      ;;
+    plaintar)
+      # Must skip GNU tar: if it does not support --format= it doesn't create
+      # ustar tarball either.
+      (tar --version) >/dev/null 2>&1 && continue
+      am__tar='tar chf - "$$tardir"'
+      am__tar_='tar chf - "$tardir"'
+      am__untar='tar xf -'
+      ;;
+    pax)
+      am__tar='pax -L -x $1 -w "$$tardir"'
+      am__tar_='pax -L -x $1 -w "$tardir"'
+      am__untar='pax -r'
+      ;;
+    cpio)
+      am__tar='find "$$tardir" -print | cpio -o -H $1 -L'
+      am__tar_='find "$tardir" -print | cpio -o -H $1 -L'
+      am__untar='cpio -i -H $1 -d'
+      ;;
+    none)
+      am__tar=false
+      am__tar_=false
+      am__untar=false
+      ;;
+    esac
+
+    # If the value was cached, stop now.  We just wanted to have am__tar
+    # and am__untar set.
+    test -n "${am_cv_prog_tar_$1}" && break
+
+    # tar/untar a dummy directory, and stop if the command works.
+    rm -rf conftest.dir
+    mkdir conftest.dir
+    echo GrepMe > conftest.dir/file
+    AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar])
+    rm -rf conftest.dir
+    if test -s conftest.tar; then
+      AM_RUN_LOG([$am__untar <conftest.tar])
+      AM_RUN_LOG([cat conftest.dir/file])
+      grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
+    fi
+  done
+  rm -rf conftest.dir
+
+  AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool])
+  AC_MSG_RESULT([$am_cv_prog_tar_$1])])
+
+AC_SUBST([am__tar])
+AC_SUBST([am__untar])
+]) # _AM_PROG_TAR
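+
+# Illustrative usage sketch (a hypothetical configure.ac fragment; the
+# package name and options below are examples only).  _AM_PROG_TAR is not
+# called directly: AM_INIT_AUTOMAKE invokes it when given a tar format
+# option, and the resulting $(am__tar)/$(am__untar) substitutions are then
+# used by the generated 'dist' rules.
+#
+#   AC_INIT([example-package], [1.0])
+#   AM_INIT_AUTOMAKE([foreign tar-ustar])
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT
+#
+# A generated distribution rule then uses the substitutions roughly as:
+#   tardir=example-package-1.0 && $(am__tar) | gzip -c > example-package-1.0.tar.gz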
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/upc.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/upc.m4
new file mode 100644
index 0000000..b85fc02
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/upc.m4
@@ -0,0 +1,20 @@
+# Find a compiler for Unified Parallel C.	            -*- Autoconf -*-
+
+# Copyright (C) 2006-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+AC_DEFUN([AM_PROG_UPC],
+[dnl We need OBJEXT and EXEEXT, but Autoconf doesn't offer any public
+dnl macro to compute them.  Use AC_PROG_CC instead.
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_ARG_VAR([UPC], [Unified Parallel C compiler command])dnl
+AC_ARG_VAR([UPCFLAGS], [Unified Parallel C compiler flags])dnl
+AC_CHECK_TOOLS([UPC], [m4_default([$1], [upcc upc])], [:])
+if test "$UPC" = :; then
+  AC_MSG_ERROR([no Unified Parallel C compiler was found], [77])
+fi
+_AM_IF_OPTION([no-dependencies],, [_AM_DEPENDENCIES([UPC])])dnl
+])
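+
+# Illustrative usage sketch (a hypothetical configure.ac fragment; the
+# package name and compiler list are examples only, not required by this
+# macro).  configure exits with status 77 if no UPC compiler is found.
+#
+#   AC_INIT([upc-example], [1.0])
+#   AM_INIT_AUTOMAKE([foreign])
+#   AM_PROG_UPC([upcc upc])
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT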
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/vala.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/vala.m4
new file mode 100644
index 0000000..c0104f1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal-1.14/vala.m4
@@ -0,0 +1,37 @@
+# Autoconf support for the Vala compiler
+
+# Copyright (C) 2008-2013 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# Check whether the Vala compiler exists in $PATH.  If it is found, the
+# variable VALAC is set pointing to its absolute path.  Otherwise, it is
+# simply set to 'valac'.
+# Optionally a minimum release number of the compiler can be requested.
+# If the ACTION-IF-FOUND parameter is given, it will be run if a proper
+# Vala compiler is found.
+# Similarly, if the ACTION-IF-NOT-FOUND is given, it will be run if no proper
+# Vala compiler is found.  It defaults to simply printing a warning about the
+# situation, but otherwise proceeding with the configuration.
+#
+# AM_PROG_VALAC([MINIMUM-VERSION], [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# --------------------------------------------------------------------------
+AC_DEFUN([AM_PROG_VALAC],
+  [AC_PATH_PROG([VALAC], [valac], [valac])
+   AS_IF([test "$VALAC" != valac && test -n "$1"],
+      [AC_MSG_CHECKING([whether $VALAC is at least version $1])
+       am__vala_version=`$VALAC --version | sed 's/Vala  *//'`
+       AS_VERSION_COMPARE([$1], ["$am__vala_version"],
+         [AC_MSG_RESULT([yes])],
+         [AC_MSG_RESULT([yes])],
+         [AC_MSG_RESULT([no])
+          VALAC=valac])])
+    if test "$VALAC" = valac; then
+      m4_default([$3],
+        [AC_MSG_WARN([no proper vala compiler found])
+         AC_MSG_WARN([you will not be able to compile vala source files])])
+    else
+      m4_default([$2], [:])
+    fi])
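+
+# Illustrative usage sketch (a hypothetical configure.ac fragment; the
+# minimum version and error message are examples only):
+#
+#   AC_INIT([vala-example], [1.0])
+#   AM_INIT_AUTOMAKE([foreign])
+#   AM_PROG_VALAC([0.20], [], [AC_MSG_ERROR([valac >= 0.20 is required])])
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT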
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/README b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/README
new file mode 100644
index 0000000..7c6f487
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/README
@@ -0,0 +1,9 @@
+This directory is where .m4 files providing third-party autoconf
+macros can be placed to be automatically found by the aclocal(1)
+program.
+
+The .m4 files placed here could be shared among different versions
+of aclocal, so be careful.
+
+Even if no actual .m4 files are present, this directory is required
+in order for aclocal to work properly.  Please do not remove it.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/argz.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/argz.m4
new file mode 100644
index 0000000..d1f4ec5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/argz.m4
@@ -0,0 +1,79 @@
+# Portability macros for glibc argz.                    -*- Autoconf -*-
+#
+#   Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+#   Written by Gary V. Vaughan <gary@gnu.org>
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 argz.m4
+
+AC_DEFUN([gl_FUNC_ARGZ],
+[gl_PREREQ_ARGZ
+
+AC_CHECK_HEADERS([argz.h], [], [], [AC_INCLUDES_DEFAULT])
+
+AC_CHECK_TYPES([error_t],
+  [],
+  [AC_DEFINE([error_t], [int],
+   [Define to a type to use for `error_t' if it is not otherwise available.])
+   AC_DEFINE([__error_t_defined], [1], [Define so that glibc/gnulib argp.h
+    does not typedef error_t.])],
+  [#if defined(HAVE_ARGZ_H)
+#  include <argz.h>
+#endif])
+
+ARGZ_H=
+AC_CHECK_FUNCS([argz_add argz_append argz_count argz_create_sep argz_insert \
+	argz_next argz_stringify], [], [ARGZ_H=argz.h; AC_LIBOBJ([argz])])
+
+dnl if we have system argz functions, allow forced use of the
+dnl libltdl-supplied implementation (and default to do so
+dnl on "known bad" systems). Could use a runtime check, but
+dnl (a) detecting malloc issues is notoriously unreliable
+dnl (b) only known system that declares argz functions,
+dnl     provides them, yet they are broken, is cygwin
+dnl     releases prior to 16-Mar-2007 (1.5.24 and earlier)
+dnl So, it's more straightforward simply to special case
+dnl this for known bad systems.
+AS_IF([test -z "$ARGZ_H"],
+    [AC_CACHE_CHECK(
+        [if argz actually works],
+        [lt_cv_sys_argz_works],
+        [[case $host_os in #(
+	 *cygwin*)
+	   lt_cv_sys_argz_works=no
+	   if test "$cross_compiling" != no; then
+	     lt_cv_sys_argz_works="guessing no"
+	   else
+	     lt_sed_extract_leading_digits='s/^\([0-9\.]*\).*/\1/'
+	     save_IFS=$IFS
+	     IFS=-.
+	     set x `uname -r | sed -e "$lt_sed_extract_leading_digits"`
+	     IFS=$save_IFS
+	     lt_os_major=${2-0}
+	     lt_os_minor=${3-0}
+	     lt_os_micro=${4-0}
+	     if test "$lt_os_major" -gt 1 \
+		|| { test "$lt_os_major" -eq 1 \
+		  && { test "$lt_os_minor" -gt 5 \
+		    || { test "$lt_os_minor" -eq 5 \
+		      && test "$lt_os_micro" -gt 24; }; }; }; then
+	       lt_cv_sys_argz_works=yes
+	     fi
+	   fi
+	   ;; #(
+	 *) lt_cv_sys_argz_works=yes ;;
+	 esac]])
+     AS_IF([test "$lt_cv_sys_argz_works" = yes],
+        [AC_DEFINE([HAVE_WORKING_ARGZ], 1,
+                   [This value is set to 1 to indicate that the system argz facility works])],
+        [ARGZ_H=argz.h
+        AC_LIBOBJ([argz])])])
+
+AC_SUBST([ARGZ_H])
+])
+
+# Prerequisites of lib/argz.c.
+AC_DEFUN([gl_PREREQ_ARGZ], [:])
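+
+# Illustrative usage sketch (a hypothetical configure.ac fragment; in
+# practice gl_FUNC_ARGZ is usually pulled in indirectly via libltdl's
+# LTDL_INIT rather than called by hand):
+#
+#   AC_INIT([argz-example], [1.0])
+#   AC_CANONICAL_HOST
+#   AC_PROG_CC
+#   gl_FUNC_ARGZ
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT
+#
+# HAVE_WORKING_ARGZ then tells C code whether the system argz functions can
+# be trusted; otherwise ARGZ_H and AC_LIBOBJ([argz]) arrange for a
+# replacement implementation to be built.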
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/libtool.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/libtool.m4
new file mode 100644
index 0000000..44e0ecf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/libtool.m4
@@ -0,0 +1,7982 @@
+# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+m4_define([_LT_COPYING], [dnl
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+])
+
+# serial 57 LT_INIT
+
+
+# LT_PREREQ(VERSION)
+# ------------------
+# Complain and exit if this libtool version is less than VERSION.
+m4_defun([LT_PREREQ],
+[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
+       [m4_default([$3],
+		   [m4_fatal([Libtool version $1 or higher is required],
+		             63)])],
+       [$2])])
+
+
+# _LT_CHECK_BUILDDIR
+# ------------------
+# Complain if the absolute build directory name contains unusual characters
+m4_defun([_LT_CHECK_BUILDDIR],
+[case `pwd` in
+  *\ * | *\	*)
+    AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
+esac
+])
+
+
+# LT_INIT([OPTIONS])
+# ------------------
+AC_DEFUN([LT_INIT],
+[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT
+AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_BEFORE([$0], [LT_LANG])dnl
+AC_BEFORE([$0], [LT_OUTPUT])dnl
+AC_BEFORE([$0], [LTDL_INIT])dnl
+m4_require([_LT_CHECK_BUILDDIR])dnl
+
+dnl Autoconf doesn't catch unexpanded LT_ macros by default:
+m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
+m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
+dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
+dnl unless we require an AC_DEFUNed macro:
+AC_REQUIRE([LTOPTIONS_VERSION])dnl
+AC_REQUIRE([LTSUGAR_VERSION])dnl
+AC_REQUIRE([LTVERSION_VERSION])dnl
+AC_REQUIRE([LTOBSOLETE_VERSION])dnl
+m4_require([_LT_PROG_LTMAIN])dnl
+
+_LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}])
+
+dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+AC_SUBST(LIBTOOL)dnl
+
+_LT_SETUP
+
+# Only expand once:
+m4_define([LT_INIT])
+])# LT_INIT
+
+# Old names:
+AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
+AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
+dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
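+
+# Illustrative usage sketch (a hypothetical configure.ac fragment; the
+# package name and LT_INIT options are examples only):
+#
+#   AC_INIT([lt-example], [1.0])
+#   AC_CONFIG_AUX_DIR([build-aux])
+#   AM_INIT_AUTOMAKE([foreign])
+#   AC_PROG_CC
+#   LT_INIT([disable-static])
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT
+#
+# A Makefile.am can then build shared libraries through the generated
+# libtool script, e.g. with lib_LTLIBRARIES = libexample.la.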
+
+
+# _LT_CC_BASENAME(CC)
+# -------------------
+# Calculate cc_basename.  Skip known compiler wrappers and cross-prefix.
+m4_defun([_LT_CC_BASENAME],
+[for cc_temp in $1""; do
+  case $cc_temp in
+    compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
+    distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
+    \-*) ;;
+    *) break;;
+  esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+])
+
+
+# _LT_FILEUTILS_DEFAULTS
+# ----------------------
+# It is okay to use these file commands and assume they have been set
+# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'.
+m4_defun([_LT_FILEUTILS_DEFAULTS],
+[: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+])# _LT_FILEUTILS_DEFAULTS
+
+
+# _LT_SETUP
+# ---------
+m4_defun([_LT_SETUP],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+
+_LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl
+dnl
+_LT_DECL([], [host_alias], [0], [The host system])dnl
+_LT_DECL([], [host], [0])dnl
+_LT_DECL([], [host_os], [0])dnl
+dnl
+_LT_DECL([], [build_alias], [0], [The build system])dnl
+_LT_DECL([], [build], [0])dnl
+_LT_DECL([], [build_os], [0])dnl
+dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+dnl
+AC_REQUIRE([AC_PROG_LN_S])dnl
+test -z "$LN_S" && LN_S="ln -s"
+_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
+dnl
+AC_REQUIRE([LT_CMD_MAX_LEN])dnl
+_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
+_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
+m4_require([_LT_CMD_RELOAD])dnl
+m4_require([_LT_CHECK_MAGIC_METHOD])dnl
+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
+m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_WITH_SYSROOT])dnl
+
+_LT_CONFIG_LIBTOOL_INIT([
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes INIT.
+if test -n "\${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+])
+if test -n "${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+_LT_CHECK_OBJDIR
+
+m4_require([_LT_TAG_COMPILER])dnl
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test "X${COLLECT_NAMES+set}" != Xset; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+_LT_CC_BASENAME([$compiler])
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    _LT_PATH_MAGIC
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+LT_SUPPORTED_TAG([CC])
+_LT_LANG_C_CONFIG
+_LT_LANG_DEFAULT_CONFIG
+_LT_CONFIG_COMMANDS
+])# _LT_SETUP
+
+
+# _LT_PREPARE_SED_QUOTE_VARS
+# --------------------------
+# Define a few sed substitutions that help us do robust quoting.
+m4_defun([_LT_PREPARE_SED_QUOTE_VARS],
+[# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\([["`\\]]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+])
+
+# _LT_PROG_LTMAIN
+# ---------------
+# Note that this code is called both from `configure', and `config.status'
+# now that we use AC_CONFIG_COMMANDS to generate libtool.  Notably,
+# `config.status' has no value for ac_aux_dir unless we are using Automake,
+# so we pass a copy along to make sure it has a sensible value anyway.
+m4_defun([_LT_PROG_LTMAIN],
+[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
+_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
+ltmain="$ac_aux_dir/ltmain.sh"
+])# _LT_PROG_LTMAIN
+
+
+## ------------------------------------- ##
+## Accumulate code for creating libtool. ##
+## ------------------------------------- ##
+
+# So that we can recreate a full libtool script including additional
+# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
+# in macros and then make a single call at the end using the `libtool'
+# label.
+
+
+# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
+# ----------------------------------------
+# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL_INIT],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_INIT])
+
+
+# _LT_CONFIG_LIBTOOL([COMMANDS])
+# ------------------------------
+# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
+
+
+# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
+# -----------------------------------------------------
+m4_defun([_LT_CONFIG_SAVE_COMMANDS],
+[_LT_CONFIG_LIBTOOL([$1])
+_LT_CONFIG_LIBTOOL_INIT([$2])
+])
+
+
+# _LT_FORMAT_COMMENT([COMMENT])
+# -----------------------------
+# Add leading comment marks to the start of each line, and a trailing
+# full-stop to the whole comment if one is not present already.
+m4_define([_LT_FORMAT_COMMENT],
+[m4_ifval([$1], [
+m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
+              [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
+)])
+
+
+
+## ------------------------ ##
+## FIXME: Eliminate VARNAME ##
+## ------------------------ ##
+
+
+# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
+# -------------------------------------------------------------------
+# CONFIGNAME is the name given to the value in the libtool script.
+# VARNAME is the (base) name used in the configure script.
+# VALUE may be 0, 1 or 2 for a computed quote escaped value based on
+# VARNAME.  Any other value will be used directly.
+m4_define([_LT_DECL],
+[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
+    [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
+	[m4_ifval([$1], [$1], [$2])])
+    lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
+    m4_ifval([$4],
+	[lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
+    lt_dict_add_subkey([lt_decl_dict], [$2],
+	[tagged?], [m4_ifval([$5], [yes], [no])])])
+])
+
+
+# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
+# --------------------------------------------------------
+m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
+
+
+# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_tag_varnames],
+[_lt_decl_filter([tagged?], [yes], $@)])
+
+
+# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
+# ---------------------------------------------------------
+m4_define([_lt_decl_filter],
+[m4_case([$#],
+  [0], [m4_fatal([$0: too few arguments: $#])],
+  [1], [m4_fatal([$0: too few arguments: $#: $1])],
+  [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
+  [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
+  [lt_dict_filter([lt_decl_dict], $@)])[]dnl
+])
+
+
+# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
+# --------------------------------------------------
+m4_define([lt_decl_quote_varnames],
+[_lt_decl_filter([value], [1], $@)])
+
+
+# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_dquote_varnames],
+[_lt_decl_filter([value], [2], $@)])
+
+
+# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_varnames_tagged],
+[m4_assert([$# <= 2])dnl
+_$0(m4_quote(m4_default([$1], [[, ]])),
+    m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
+    m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
+m4_define([_lt_decl_varnames_tagged],
+[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
+
+
+# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_all_varnames],
+[_$0(m4_quote(m4_default([$1], [[, ]])),
+     m4_if([$2], [],
+	   m4_quote(lt_decl_varnames),
+	m4_quote(m4_shift($@))))[]dnl
+])
+m4_define([_lt_decl_all_varnames],
+[lt_join($@, lt_decl_varnames_tagged([$1],
+			lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
+])
+
+
+# _LT_CONFIG_STATUS_DECLARE([VARNAME])
+# ------------------------------------
+# Quote a variable value, and forward it to `config.status' so that its
+# declaration there will have the same value as in `configure'.  VARNAME
+# must have a single quote delimited value for this to work.
+m4_define([_LT_CONFIG_STATUS_DECLARE],
+[$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`'])
+
+
+# _LT_CONFIG_STATUS_DECLARATIONS
+# ------------------------------
+# We delimit libtool config variables with single quotes, so when
+# we write them to config.status, we have to be sure to quote all
+# embedded single quotes properly.  In configure, this macro expands
+# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
+#
+#    <var>='`$ECHO "$<var>" | $SED "$delay_single_quote_subst"`'
+m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
+    [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAGS
+# ----------------
+# Output comment and list of tags supported by the script
+m4_defun([_LT_LIBTOOL_TAGS],
+[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
+available_tags="_LT_TAGS"dnl
+])
+
+
+# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
+# -----------------------------------
+# Extract the dictionary values for VARNAME (optionally with TAG) and
+# expand to a commented shell variable setting:
+#
+#    # Some comment about what VAR is for.
+#    visible_name=$lt_internal_name
+m4_define([_LT_LIBTOOL_DECLARE],
+[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
+					   [description])))[]dnl
+m4_pushdef([_libtool_name],
+    m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
+m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
+    [0], [_libtool_name=[$]$1],
+    [1], [_libtool_name=$lt_[]$1],
+    [2], [_libtool_name=$lt_[]$1],
+    [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
+m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
+])
+
+
+# _LT_LIBTOOL_CONFIG_VARS
+# -----------------------
+# Produce commented declarations of non-tagged libtool config variables
+# suitable for insertion in the LIBTOOL CONFIG section of the `libtool'
+# script.  Tagged libtool config variables (even for the LIBTOOL CONFIG
+# section) are produced by _LT_LIBTOOL_TAG_VARS.
+m4_defun([_LT_LIBTOOL_CONFIG_VARS],
+[m4_foreach([_lt_var],
+    m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAG_VARS(TAG)
+# -------------------------
+m4_define([_LT_LIBTOOL_TAG_VARS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
+
+
+# _LT_TAGVAR(VARNAME, [TAGNAME])
+# ------------------------------
+m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
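+
+# Illustrative expansion sketch (examples only): _LT_TAGVAR([archive_cmds],
+# [CXX]) expands to archive_cmds_CXX, while _LT_TAGVAR([archive_cmds]) with
+# no tag expands to plain archive_cmds.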
+
+
+# _LT_CONFIG_COMMANDS
+# -------------------
+# Send accumulated output to $CONFIG_STATUS.  Thanks to the lists of
+# variables for single and double quote escaping we saved from calls
+# to _LT_DECL, we can put quote-escaped variable declarations
+# into `config.status', and then the shell code to quote-escape them in
+# for loops in `config.status'.  Finally, any additional code accumulated
+# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
+m4_defun([_LT_CONFIG_COMMANDS],
+[AC_PROVIDE_IFELSE([LT_OUTPUT],
+	dnl If the libtool generation code has been placed in $CONFIG_LT,
+	dnl instead of duplicating it all over again into config.status,
+	dnl then we will have config.status run $CONFIG_LT later, so it
+	dnl needs to know what name is stored there:
+        [AC_CONFIG_COMMANDS([libtool],
+            [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
+    dnl If the libtool generation code is destined for config.status,
+    dnl expand the accumulated commands and init code now:
+    [AC_CONFIG_COMMANDS([libtool],
+        [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
+])#_LT_CONFIG_COMMANDS
+
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
+[
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+_LT_CONFIG_STATUS_DECLARATIONS
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$[]1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_quote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_dquote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+_LT_OUTPUT_LIBTOOL_INIT
+])
+
+# _LT_GENERATED_FILE_INIT(FILE, [COMMENT])
+# ----------------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable.  If COMMENT is supplied, it is inserted after the
+# `#!' sequence but before initialization text begins.  After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script.  The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_ifdef([AS_INIT_GENERATED],
+[m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])],
+[m4_defun([_LT_GENERATED_FILE_INIT],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[lt_write_fail=0
+cat >$1 <<_ASEOF || lt_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || lt_write_fail=1
+AS_SHELL_SANITIZE
+_AS_PREPARE
+exec AS_MESSAGE_FD>&1
+_ASEOF
+test $lt_write_fail = 0 && chmod +x $1[]dnl
+m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT
+
+# LT_OUTPUT
+# ---------
+# This macro allows early generation of the libtool script (before
+# AC_OUTPUT is called), in case it is used in configure for compilation
+# tests.
+AC_DEFUN([LT_OUTPUT],
+[: ${CONFIG_LT=./config.lt}
+AC_MSG_NOTICE([creating $CONFIG_LT])
+_LT_GENERATED_FILE_INIT(["$CONFIG_LT"],
+[# Run this file to recreate a libtool stub with the current configuration.])
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+lt_cl_silent=false
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+  echo
+  AS_BOX([Running $as_me.])
+} >&AS_MESSAGE_LOG_FD
+
+lt_cl_help="\
+\`$as_me' creates a local libtool stub from the current configuration,
+for use in further configure time tests before the real libtool is
+generated.
+
+Usage: $[0] [[OPTIONS]]
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+  -q, --quiet     do not print progress messages
+  -d, --debug     don't remove temporary files
+
+Report bugs to <bug-libtool@gnu.org>."
+
+lt_cl_version="\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING.
+
+Copyright (C) 2011 Free Software Foundation, Inc.
+This config.lt script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+while test $[#] != 0
+do
+  case $[1] in
+    --version | --v* | -V )
+      echo "$lt_cl_version"; exit 0 ;;
+    --help | --h* | -h )
+      echo "$lt_cl_help"; exit 0 ;;
+    --debug | --d* | -d )
+      debug=: ;;
+    --quiet | --q* | --silent | --s* | -q )
+      lt_cl_silent=: ;;
+
+    -*) AC_MSG_ERROR([unrecognized option: $[1]
+Try \`$[0] --help' for more information.]) ;;
+
+    *) AC_MSG_ERROR([unrecognized argument: $[1]
+Try \`$[0] --help' for more information.]) ;;
+  esac
+  shift
+done
+
+if $lt_cl_silent; then
+  exec AS_MESSAGE_FD>/dev/null
+fi
+_LTEOF
+
+cat >>"$CONFIG_LT" <<_LTEOF
+_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
+_LTEOF
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+AC_MSG_NOTICE([creating $ofile])
+_LT_OUTPUT_LIBTOOL_COMMANDS
+AS_EXIT(0)
+_LTEOF
+chmod +x "$CONFIG_LT"
+
+# configure is writing to config.log, but config.lt does its own redirection,
+# appending to config.log, which fails on DOS, as config.log is still kept
+# open by configure.  Here we exec the FD to /dev/null, effectively closing
+# config.log, so it can be properly (re)opened and appended to by config.lt.
+lt_cl_success=:
+test "$silent" = yes &&
+  lt_config_lt_args="$lt_config_lt_args --quiet"
+exec AS_MESSAGE_LOG_FD>/dev/null
+$SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
+exec AS_MESSAGE_LOG_FD>>config.log
+$lt_cl_success || AS_EXIT(1)
+])# LT_OUTPUT
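+
+# Illustrative usage sketch (a hypothetical configure.ac fragment): call
+# LT_OUTPUT right after LT_INIT when later configure-time tests need the
+# libtool script before AC_OUTPUT runs.
+#
+#   LT_INIT
+#   LT_OUTPUT
+#   # ./libtool now exists; subsequent checks may link through it, e.g.
+#   #   ./libtool --mode=link $CC -o conftest conftest.$ac_objext $LIBS
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT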
+
+
+# _LT_CONFIG(TAG)
+# ---------------
+# If TAG is the built-in tag, create an initial libtool script with a
+# default configuration from the untagged config vars.  Otherwise add code
+# to config.status for appending the configuration named by TAG from the
+# matching tagged config vars.
+m4_defun([_LT_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_CONFIG_SAVE_COMMANDS([
+  m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
+  m4_if(_LT_TAG, [C], [
+    # See if we are running on zsh, and set the options which allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}" ; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile="${ofile}T"
+    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+    $RM "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+_LT_COPYING
+_LT_LIBTOOL_TAGS
+
+# ### BEGIN LIBTOOL CONFIG
+_LT_LIBTOOL_CONFIG_VARS
+_LT_LIBTOOL_TAG_VARS
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+  _LT_PROG_LTMAIN
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '$q' "$ltmain" >> "$cfgfile" \
+     || (rm -f "$cfgfile"; exit 1)
+
+  _LT_PROG_REPLACE_SHELLFNS
+
+   mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+],
+[cat <<_LT_EOF >> "$ofile"
+
+dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
+dnl in a comment (ie after a #).
+# ### BEGIN LIBTOOL TAG CONFIG: $1
+_LT_LIBTOOL_TAG_VARS(_LT_TAG)
+# ### END LIBTOOL TAG CONFIG: $1
+_LT_EOF
+])dnl /m4_if
+],
+[m4_if([$1], [], [
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    TIMESTAMP='$TIMESTAMP'
+    RM='$RM'
+    ofile='$ofile'], [])
+])dnl /_LT_CONFIG_SAVE_COMMANDS
+])# _LT_CONFIG
+
+
+# LT_SUPPORTED_TAG(TAG)
+# ---------------------
+# Trace this macro to discover what tags are supported by the libtool
+# --tag option, using:
+#    autoconf --trace 'LT_SUPPORTED_TAG:$1'
+AC_DEFUN([LT_SUPPORTED_TAG], [])
+
+
+# C support is built-in for now
+m4_define([_LT_LANG_C_enabled], [])
+m4_define([_LT_TAGS], [])
+
+
+# LT_LANG(LANG)
+# -------------
+# Enable libtool support for the given language if not already enabled.
+AC_DEFUN([LT_LANG],
+[AC_BEFORE([$0], [LT_OUTPUT])dnl
+m4_case([$1],
+  [C],			[_LT_LANG(C)],
+  [C++],		[_LT_LANG(CXX)],
+  [Go],			[_LT_LANG(GO)],
+  [Java],		[_LT_LANG(GCJ)],
+  [Fortran 77],		[_LT_LANG(F77)],
+  [Fortran],		[_LT_LANG(FC)],
+  [Windows Resource],	[_LT_LANG(RC)],
+  [m4_ifdef([_LT_LANG_]$1[_CONFIG],
+    [_LT_LANG($1)],
+    [m4_fatal([$0: unsupported language: "$1"])])])dnl
+])# LT_LANG
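+
+# Illustrative usage sketch (hypothetical fragment): languages are normally
+# enabled implicitly by AC_PROG_CXX and friends (see _LT_LANG_DEFAULT_CONFIG
+# below), but they can also be requested explicitly after LT_INIT:
+#
+#   LT_INIT
+#   LT_LANG([C++])
+#   LT_LANG([Fortran 77])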
+
+
+# _LT_LANG(LANGNAME)
+# ------------------
+m4_defun([_LT_LANG],
+[m4_ifdef([_LT_LANG_]$1[_enabled], [],
+  [LT_SUPPORTED_TAG([$1])dnl
+  m4_append([_LT_TAGS], [$1 ])dnl
+  m4_define([_LT_LANG_]$1[_enabled], [])dnl
+  _LT_LANG_$1_CONFIG($1)])dnl
+])# _LT_LANG
+
+
+m4_ifndef([AC_PROG_GO], [
+############################################################
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_GO.  When it is available in    #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+############################################################
+m4_defun([AC_PROG_GO],
+[AC_LANG_PUSH(Go)dnl
+AC_ARG_VAR([GOC],     [Go compiler command])dnl
+AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+AC_CHECK_TOOL(GOC, gccgo)
+if test -z "$GOC"; then
+  if test -n "$ac_tool_prefix"; then
+    AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo])
+  fi
+fi
+if test -z "$GOC"; then
+  AC_CHECK_PROG(GOC, gccgo, gccgo, false)
+fi
+])#m4_defun
+])#m4_ifndef
+
+
+# _LT_LANG_DEFAULT_CONFIG
+# -----------------------
+m4_defun([_LT_LANG_DEFAULT_CONFIG],
+[AC_PROVIDE_IFELSE([AC_PROG_CXX],
+  [LT_LANG(CXX)],
+  [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_F77],
+  [LT_LANG(F77)],
+  [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_FC],
+  [LT_LANG(FC)],
+  [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
+
+dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
+dnl pulling things in needlessly.
+AC_PROVIDE_IFELSE([AC_PROG_GCJ],
+  [LT_LANG(GCJ)],
+  [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
+    [LT_LANG(GCJ)],
+    [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
+      [LT_LANG(GCJ)],
+      [m4_ifdef([AC_PROG_GCJ],
+	[m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([A][M_PROG_GCJ],
+	[m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([LT_PROG_GCJ],
+	[m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
+
+AC_PROVIDE_IFELSE([AC_PROG_GO],
+  [LT_LANG(GO)],
+  [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])])
+
+AC_PROVIDE_IFELSE([LT_PROG_RC],
+  [LT_LANG(RC)],
+  [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
+])# _LT_LANG_DEFAULT_CONFIG
+
+# Obsolete macros:
+AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
+AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
+AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
+AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
+AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
+dnl AC_DEFUN([AC_LIBTOOL_F77], [])
+dnl AC_DEFUN([AC_LIBTOOL_FC], [])
+dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
+dnl AC_DEFUN([AC_LIBTOOL_RC], [])
+
+
+# _LT_TAG_COMPILER
+# ----------------
+m4_defun([_LT_TAG_COMPILER],
+[AC_REQUIRE([AC_PROG_CC])dnl
+
+_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
+_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
+_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
+_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+])# _LT_TAG_COMPILER
+
+
+# _LT_COMPILER_BOILERPLATE
+# ------------------------
+# Check for compiler boilerplate output or warnings with
+# the simple compiler test code.
+m4_defun([_LT_COMPILER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+])# _LT_COMPILER_BOILERPLATE
+
+
+# _LT_LINKER_BOILERPLATE
+# ----------------------
+# Check for linker boilerplate output or warnings with
+# the simple link test code.
+m4_defun([_LT_LINKER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_LINKER_BOILERPLATE
+
+# _LT_REQUIRED_DARWIN_CHECKS
+# -------------------------
+m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
+  case $host_os in
+    rhapsody* | darwin*)
+    AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
+    AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
+    AC_CHECK_TOOL([LIPO], [lipo], [:])
+    AC_CHECK_TOOL([OTOOL], [otool], [:])
+    AC_CHECK_TOOL([OTOOL64], [otool64], [:])
+    _LT_DECL([], [DSYMUTIL], [1],
+      [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
+    _LT_DECL([], [NMEDIT], [1],
+      [Tool to change global to local symbols on Mac OS X])
+    _LT_DECL([], [LIPO], [1],
+      [Tool to manipulate fat objects and archives on Mac OS X])
+    _LT_DECL([], [OTOOL], [1],
+      [ldd/readelf like tool for Mach-O binaries on Mac OS X])
+    _LT_DECL([], [OTOOL64], [1],
+      [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
+
+    AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
+      [lt_cv_apple_cc_single_mod=no
+      if test -z "${LT_MULTI_MODULE}"; then
+	# By default we will add the -single_module flag. You can override
+	# by either setting the environment variable LT_MULTI_MODULE
+	# non-empty at configure time, or by adding -multi_module to the
+	# link flags.
+	rm -rf libconftest.dylib*
+	echo "int foo(void){return 1;}" > conftest.c
+	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
+	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+	# If there is a non-empty error log, and "single_module"
+	# appears in it, assume the flag caused a linker warning
+        if test -s conftest.err && $GREP single_module conftest.err; then
+	  cat conftest.err >&AS_MESSAGE_LOG_FD
+	# Otherwise, if the output was created with a 0 exit code from
+	# the compiler, it worked.
+	elif test -f libconftest.dylib && test $_lt_result -eq 0; then
+	  lt_cv_apple_cc_single_mod=yes
+	else
+	  cat conftest.err >&AS_MESSAGE_LOG_FD
+	fi
+	rm -rf libconftest.dylib*
+	rm -f conftest.*
+      fi])
+
+    AC_CACHE_CHECK([for -exported_symbols_list linker flag],
+      [lt_cv_ld_exported_symbols_list],
+      [lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+	[lt_cv_ld_exported_symbols_list=yes],
+	[lt_cv_ld_exported_symbols_list=no])
+	LDFLAGS="$save_LDFLAGS"
+    ])
+
+    AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load],
+      [lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+      echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+      $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
+      echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
+      $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -s conftest.err && $GREP force_load conftest.err; then
+	cat conftest.err >&AS_MESSAGE_LOG_FD
+      elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then
+	lt_cv_ld_force_load=yes
+      else
+	cat conftest.err >&AS_MESSAGE_LOG_FD
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+    ])
+    case $host_os in
+    rhapsody* | darwin1.[[012]])
+      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x on
+      # If running on 10.5 or later, the deployment target defaults
+      # to the OS version if on x86; on 10.4, the deployment
+      # target defaults to 10.4.  Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+	10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+	10.[[012]]*)
+	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+	10.*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+    fi
+    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
+])
+
+
+# _LT_DARWIN_LINKER_FEATURES([TAG])
+# ---------------------------------
+# Checks for linker and compiler features on darwin
+m4_defun([_LT_DARWIN_LINKER_FEATURES],
+[
+  m4_require([_LT_REQUIRED_DARWIN_CHECKS])
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_automatic, $1)=yes
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  if test "$lt_cv_ld_force_load" = "yes"; then
+    _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+    m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes],
+                  [FC],  [_LT_TAGVAR(compiler_needs_object, $1)=yes])
+  else
+    _LT_TAGVAR(whole_archive_flag_spec, $1)=''
+  fi
+  _LT_TAGVAR(link_all_deplibs, $1)=yes
+  _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined"
+  case $cc_basename in
+     ifort*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
+  if test "$_lt_dar_can_shared" = "yes"; then
+    output_verbose_link_cmd=func_echo_all
+    _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+    _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+    _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+    _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+    m4_if([$1], [CXX],
+[   if test "$lt_cv_apple_cc_single_mod" != "yes"; then
+      _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}"
+      _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}"
+    fi
+],[])
+  else
+  _LT_TAGVAR(ld_shlibs, $1)=no
+  fi
+])
+
+# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
+# ----------------------------------
+# Links a minimal program and checks the executable
+# for the system default hardcoded library path. In most cases,
+# this is /usr/lib:/lib, but when the MPI compilers are used
+# the location of the communication and MPI libs is included too.
+# If we don't find anything, use the default library path according
+# to the aix ld manual.
+# Store the results from the different compilers for each TAGNAME.
+# Allow overriding them for all tags through lt_cv_aix_libpath.
+m4_defun([_LT_SYS_MODULE_PATH_AIX],
+[m4_require([_LT_DECL_SED])dnl
+if test "${lt_cv_aix_libpath+set}" = set; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
+  [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
+  lt_aix_libpath_sed='[
+      /Import File Strings/,/^$/ {
+	  /^0/ {
+	      s/^0  *\([^ ]*\) *$/\1/
+	      p
+	  }
+      }]'
+  _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi],[])
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib"
+  fi
+  ])
+  aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
+fi
+])# _LT_SYS_MODULE_PATH_AIX
+
+
+# _LT_SHELL_INIT(ARG)
+# -------------------
+m4_define([_LT_SHELL_INIT],
+[m4_divert_text([M4SH-INIT], [$1
+])])# _LT_SHELL_INIT
+
+
+
+# _LT_PROG_ECHO_BACKSLASH
+# -----------------------
+# Find how we can fake an echo command that does not interpret backslashes.
+# In particular, with Autoconf 2.60 or later we add some code to the start
+# of the generated configure script which will find a shell with a builtin
+# printf (which we can use as an echo command).
+m4_defun([_LT_PROG_ECHO_BACKSLASH],
+[ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+AC_MSG_CHECKING([how to print strings])
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$[]1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*" 
+}
+
+case "$ECHO" in
+  printf*) AC_MSG_RESULT([printf]) ;;
+  print*) AC_MSG_RESULT([print -r]) ;;
+  *) AC_MSG_RESULT([cat]) ;;
+esac
+
+m4_ifdef([_AS_DETECT_SUGGESTED],
+[_AS_DETECT_SUGGESTED([
+  test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test "X`printf %s $ECHO`" = "X$ECHO" \
+      || test "X`print -r -- $ECHO`" = "X$ECHO" )])])
+
+_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+_LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+])# _LT_PROG_ECHO_BACKSLASH
+
+
+# _LT_WITH_SYSROOT
+# ----------------
+AC_DEFUN([_LT_WITH_SYSROOT],
+[AC_MSG_CHECKING([for sysroot])
+AC_ARG_WITH([sysroot],
+[  --with-sysroot[=DIR] Search for dependent libraries within DIR
+                        (or the compiler's sysroot if not specified).],
+[], [with_sysroot=no])
+
+dnl lt_sysroot will always be passed unquoted.  We quote it here
+dnl in case the user passed a directory name.
+lt_sysroot=
+case ${with_sysroot} in #(
+ yes)
+   if test "$GCC" = yes; then
+     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+   fi
+   ;; #(
+ /*)
+   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+   ;; #(
+ no|'')
+   ;; #(
+ *)
+   AC_MSG_RESULT([${with_sysroot}])
+   AC_MSG_ERROR([The sysroot must be an absolute path.])
+   ;;
+esac
+
+ AC_MSG_RESULT([${lt_sysroot:-no}])
+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
+[dependent libraries, and in which our libraries should be installed.])])
+
+# _LT_ENABLE_LOCK
+# ---------------
+m4_defun([_LT_ENABLE_LOCK],
+[AC_ARG_ENABLE([libtool-lock],
+  [AS_HELP_STRING([--disable-libtool-lock],
+    [avoid locking (might break parallel builds)])])
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+	HPUX_IA64_MODE="32"
+	;;
+      *ELF-64*)
+	HPUX_IA64_MODE="64"
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out which ABI we are using.
+  echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    if test "$lt_cv_prog_gnu_ld" = yes; then
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -melf32bsmip"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -melf32bmipn32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -melf64bmip"
+	;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -32"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -n32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -64"
+	  ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_i386_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_i386"
+	    ;;
+	  ppc64-*linux*|powerpc64-*linux*)
+	    LD="${LD-ld} -m elf32ppclinux"
+	    ;;
+	  s390x-*linux*)
+	    LD="${LD-ld} -m elf_s390"
+	    ;;
+	  sparc64-*linux*)
+	    LD="${LD-ld} -m elf32_sparc"
+	    ;;
+	esac
+	;;
+      *64-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_x86_64_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_x86_64"
+	    ;;
+	  ppc*-*linux*|powerpc*-*linux*)
+	    LD="${LD-ld} -m elf64ppc"
+	    ;;
+	  s390*-*linux*|s390*-*tpf*)
+	    LD="${LD-ld} -m elf64_s390"
+	    ;;
+	  sparc*-*linux*)
+	    LD="${LD-ld} -m elf64_sparc"
+	    ;;
+	esac
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS="$CFLAGS"
+  CFLAGS="$CFLAGS -belf"
+  AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
+    [AC_LANG_PUSH(C)
+     AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
+     AC_LANG_POP])
+  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+    CFLAGS="$SAVE_CFLAGS"
+  fi
+  ;;
+*-*solaris*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*)
+        case $host in
+        i?86-*-solaris*)
+          LD="${LD-ld} -m elf_x86_64"
+          ;;
+        sparc*-*-solaris*)
+          LD="${LD-ld} -m elf64_sparc"
+          ;;
+        esac
+        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
+        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
+          LD="${LD-ld}_sol2"
+        fi
+        ;;
+      *)
+	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+	  LD="${LD-ld} -64"
+	fi
+	;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
+
+need_locks="$enable_libtool_lock"
+])# _LT_ENABLE_LOCK
+
+
+# _LT_PROG_AR
+# -----------
+m4_defun([_LT_PROG_AR],
+[AC_CHECK_TOOLS(AR, [ar], false)
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+_LT_DECL([], [AR], [1], [The archiver])
+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
+
+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
+  [lt_cv_ar_at_file=no
+   AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
+     [echo conftest.$ac_objext > conftest.lst
+      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
+      AC_TRY_EVAL([lt_ar_try])
+      if test "$ac_status" -eq 0; then
+	# Ensure the archiver fails upon bogus file names.
+	rm -f conftest.$ac_objext libconftest.a
+	AC_TRY_EVAL([lt_ar_try])
+	if test "$ac_status" -ne 0; then
+          lt_cv_ar_at_file=@
+        fi
+      fi
+      rm -f conftest.* libconftest.a
+     ])
+  ])
+
+if test "x$lt_cv_ar_at_file" = xno; then
+  archiver_list_spec=
+else
+  archiver_list_spec=$lt_cv_ar_at_file
+fi
+_LT_DECL([], [archiver_list_spec], [1],
+  [How to feed a file listing to the archiver])
+])# _LT_PROG_AR
+
+
+# _LT_CMD_OLD_ARCHIVE
+# -------------------
+m4_defun([_LT_CMD_OLD_ARCHIVE],
+[_LT_PROG_AR
+
+AC_CHECK_TOOL(STRIP, strip, :)
+test -z "$STRIP" && STRIP=:
+_LT_DECL([], [STRIP], [1], [A symbol stripping program])
+
+AC_CHECK_TOOL(RANLIB, ranlib, :)
+test -z "$RANLIB" && RANLIB=:
+_LT_DECL([], [RANLIB], [1],
+    [Commands used to install an old-style archive])
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
+fi
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+_LT_DECL([], [old_postinstall_cmds], [2])
+_LT_DECL([], [old_postuninstall_cmds], [2])
+_LT_TAGDECL([], [old_archive_cmds], [2],
+    [Commands used to build an old-style archive])
+_LT_DECL([], [lock_old_archive_extraction], [0],
+    [Whether to use a lock for old archive extraction])
+])# _LT_CMD_OLD_ARCHIVE
+
+
+# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#		[OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------------------
+# Check whether the given compiler option works
+AC_DEFUN([_LT_COMPILER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$3"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       $2=yes
+     fi
+   fi
+   $RM conftest*
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$5], , :, [$5])
+else
+    m4_if([$6], , :, [$6])
+fi
+])# _LT_COMPILER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
+
+
+# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#                  [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------
+# Check whether the given linker option works
+AC_DEFUN([_LT_LINKER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS $3"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&AS_MESSAGE_LOG_FD
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         $2=yes
+       fi
+     else
+       $2=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$4], , :, [$4])
+else
+    m4_if([$5], , :, [$5])
+fi
+])# _LT_LINKER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
+
+
+# LT_CMD_MAX_LEN
+#---------------
+AC_DEFUN([LT_CMD_MAX_LEN],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+# find the maximum length of command line arguments
+AC_MSG_CHECKING([the maximum length of command line arguments])
+AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
+  i=0
+  teststring="ABCD"
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum line length reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+    # This has been around since 386BSD, at least.  Likely further.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  os2*)
+    # The test takes a long time on OS/2.
+    lt_cv_sys_max_cmd_len=8192
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[	 ]]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # a 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8 ; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \
+	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+	      test $i != 17 # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+])
+if test -n "$lt_cv_sys_max_cmd_len"; then
+  AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
+else
+  AC_MSG_RESULT(none)
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+_LT_DECL([], [max_cmd_len], [0],
+    [What is the maximum length of a command?])
+])# LT_CMD_MAX_LEN
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
+
+
+# _LT_HEADER_DLFCN
+# ----------------
+m4_defun([_LT_HEADER_DLFCN],
+[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
+])# _LT_HEADER_DLFCN
+
+
+# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
+#                      ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
+# ----------------------------------------------------------------
+m4_defun([_LT_TRY_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "$cross_compiling" = yes; then :
+  [$4]
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+[#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
+   find out it does not work on some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}]
+_LT_EOF
+  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) $1 ;;
+      x$lt_dlneed_uscore) $2 ;;
+      x$lt_dlunknown|x*) $3 ;;
+    esac
+  else :
+    # compilation failed
+    $3
+  fi
+fi
+rm -fr conftest*
+])# _LT_TRY_DLOPEN_SELF
+
+
+# LT_SYS_DLOPEN_SELF
+# ------------------
+AC_DEFUN([LT_SYS_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "x$enable_dlopen" != xyes; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
+  case $host_os in
+  beos*)
+    lt_cv_dlopen="load_add_on"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen="LoadLibrary"
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen="dlopen"
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+  # if libdl is installed we need to link against it
+    AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[
+    lt_cv_dlopen="dyld"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ])
+    ;;
+
+  *)
+    AC_CHECK_FUNC([shl_load],
+	  [lt_cv_dlopen="shl_load"],
+      [AC_CHECK_LIB([dld], [shl_load],
+	    [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"],
+	[AC_CHECK_FUNC([dlopen],
+	      [lt_cv_dlopen="dlopen"],
+	  [AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],
+	    [AC_CHECK_LIB([svld], [dlopen],
+		  [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"],
+	      [AC_CHECK_LIB([dld], [dld_link],
+		    [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"])
+	      ])
+	    ])
+	  ])
+	])
+      ])
+    ;;
+  esac
+
+  if test "x$lt_cv_dlopen" != xno; then
+    enable_dlopen=yes
+  else
+    enable_dlopen=no
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS="$CPPFLAGS"
+    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS="$LDFLAGS"
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS="$LIBS"
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    AC_CACHE_CHECK([whether a program can dlopen itself],
+	  lt_cv_dlopen_self, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
+	    lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
+    ])
+
+    if test "x$lt_cv_dlopen_self" = xyes; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
+	  lt_cv_dlopen_self_static, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
+	    lt_cv_dlopen_self_static=no,  lt_cv_dlopen_self_static=cross)
+      ])
+    fi
+
+    CPPFLAGS="$save_CPPFLAGS"
+    LDFLAGS="$save_LDFLAGS"
+    LIBS="$save_LIBS"
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+_LT_DECL([dlopen_support], [enable_dlopen], [0],
+	 [Whether dlopen is supported])
+_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
+	 [Whether dlopen of programs is supported])
+_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
+	 [Whether dlopen of statically linked programs is supported])
+])# LT_SYS_DLOPEN_SELF
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
+
+
+# _LT_COMPILER_C_O([TAGNAME])
+# ---------------------------
+# Check to see if options -c and -o are simultaneously supported by compiler.
+# This macro does not hard code the compiler like AC_PROG_CC_C_O.
+m4_defun([_LT_COMPILER_C_O],
+[m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+     fi
+   fi
+   chmod u+w . 2>&AS_MESSAGE_LOG_FD
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+])
+_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
+	[Does compiler simultaneously support -c and -o options?])
+])# _LT_COMPILER_C_O
+
+
+# _LT_COMPILER_FILE_LOCKS([TAGNAME])
+# ----------------------------------
+# Check to see if we can do hard links to lock some files if needed
+m4_defun([_LT_COMPILER_FILE_LOCKS],
+[m4_require([_LT_ENABLE_LOCK])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_COMPILER_C_O([$1])
+
+hard_links="nottested"
+if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then
+  # do not overwrite the value of need_locks provided by the user
+  AC_MSG_CHECKING([if we can lock with hard links])
+  hard_links=yes
+  $RM conftest*
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  AC_MSG_RESULT([$hard_links])
+  if test "$hard_links" = no; then
+    AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe])
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
+])# _LT_COMPILER_FILE_LOCKS
+
+
+# _LT_CHECK_OBJDIR
+# ----------------
+m4_defun([_LT_CHECK_OBJDIR],
+[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
+[rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null])
+objdir=$lt_cv_objdir
+_LT_DECL([], [objdir], [0],
+         [The name of the directory that contains temporary libtool files])dnl
+m4_pattern_allow([LT_OBJDIR])dnl
+AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/",
+  [Define to the sub-directory in which libtool stores uninstalled libraries.])
+])# _LT_CHECK_OBJDIR
+
+
+# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
+# --------------------------------------
+# Check hardcoding attributes.
+m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
+[AC_MSG_CHECKING([how to hardcode library paths into programs])
+_LT_TAGVAR(hardcode_action, $1)=
+if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
+   test -n "$_LT_TAGVAR(runpath_var, $1)" ||
+   test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then
+
+  # We can hardcode non-existent directories.
+  if test "$_LT_TAGVAR(hardcode_direct, $1)" != no &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink, otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one
+     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no &&
+     test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then
+    # Linking always hardcodes the temporary library directory.
+    _LT_TAGVAR(hardcode_action, $1)=relink
+  else
+    # We can link without hardcoding, and we can hardcode nonexisting dirs.
+    _LT_TAGVAR(hardcode_action, $1)=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  _LT_TAGVAR(hardcode_action, $1)=unsupported
+fi
+AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
+
+if test "$_LT_TAGVAR(hardcode_action, $1)" = relink ||
+   test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+     test "$enable_shared" = no; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
+_LT_TAGDECL([], [hardcode_action], [0],
+    [How to hardcode a shared library path into an executable])
+])# _LT_LINKER_HARDCODE_LIBPATH
+
+
+# _LT_CMD_STRIPLIB
+# ----------------
+m4_defun([_LT_CMD_STRIPLIB],
+[m4_require([_LT_DECL_EGREP])
+striplib=
+old_striplib=
+AC_MSG_CHECKING([whether stripping libraries is possible])
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  AC_MSG_RESULT([yes])
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP" ; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      AC_MSG_RESULT([yes])
+    else
+      AC_MSG_RESULT([no])
+    fi
+    ;;
+  *)
+    AC_MSG_RESULT([no])
+    ;;
+  esac
+fi
+_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
+_LT_DECL([], [striplib], [1])
+])# _LT_CMD_STRIPLIB
+
+
+# _LT_SYS_DYNAMIC_LINKER([TAG])
+# -----------------------------
+# PORTME Fill in your ld.so characteristics
+m4_defun([_LT_SYS_DYNAMIC_LINKER],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_OBJDUMP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+AC_MSG_CHECKING([dynamic linker characteristics])
+m4_if([$1],
+	[], [
+if test "$GCC" = yes; then
+  case $host_os in
+    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+    *) lt_awk_arg="/^libraries:/" ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;;
+    *) lt_sed_strip_eq="s,=/,/,g" ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # if the path contains ";" then we assume it to be the separator
+    # otherwise default to the standard path separator (i.e. ":") - it is
+    # assumed that no part of a normal pathname contains ";" but that should
+    # be okay in the real world where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # OK, now that we have the path separated by spaces, we can step through
+  # it and add the multilib dir if necessary.
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+    else
+      test -d "$lt_sys_path" && \
+	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+  lt_foo="";
+  lt_count=0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo="/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[[lt_foo]]++; }
+  if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
+}'`
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's,/\([[A-Za-z]]:\),\1,g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi])
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
+case $host_os in
+aix3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='${libname}${release}${shared_ext}$major'
+  ;;
+
+aix[[4-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test "$host_cpu" = ia64; then
+    # AIX 5 supports IA64
+    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependence libraries.  The import file would start with
+    # the line `#! .'.  This would cause the generated library to
+    # depend on `.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[[01]] | aix4.[[01]].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+	   echo ' yes '
+	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+	:
+      else
+	can_build_shared=no
+      fi
+      ;;
+    esac
+    # AIX (on Power*) has no versioning support, so currently we cannot hardcode the
+    # correct soname into the executable. We could probably add versioning support
+    # to collect2, so additional links could become useful in the future.
+    if test "$aix_use_runtimelinking" = yes; then
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    else
+      # We preserve .a as extension for shared libraries through AIX4.2
+      # and later when we are not doing run time linking.
+      library_names_spec='${libname}${release}.a $libname.a'
+      soname_spec='${libname}${release}${shared_ext}$major'
+    fi
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='${libname}${shared_ext}'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[[45]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # the default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=".dll"
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$cc_basename in
+  yes,*)
+    # gcc
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+m4_if([$1], [],[
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"])
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    esac
+    dynamic_linker='Win32 ld.exe'
+    ;;
+
+  *,cl*)
+    # Native MSVC
+    libname_spec='$name'
+    soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+    library_names_spec='${libname}.dll.lib'
+
+    case $build_os in
+    mingw*)
+      sys_lib_search_path_spec=
+      lt_save_ifs=$IFS
+      IFS=';'
+      for lt_path in $LIB
+      do
+        IFS=$lt_save_ifs
+        # Let DOS variable expansion print the short 8.3 style file name.
+        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+      done
+      IFS=$lt_save_ifs
+      # Convert to MSYS style.
+      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
+      ;;
+    cygwin*)
+      # Convert to unix form, then to dos form, then back to unix form
+      # but this time dos style (no spaces!) so that the unix form looks
+      # like /cygdrive/c/PROGRA~1:/cygdr...
+      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      ;;
+    *)
+      sys_lib_search_path_spec="$LIB"
+      if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
+        # It is most probably a Windows format PATH.
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+      else
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      fi
+      # FIXME: find the short name or the path components, as spaces are
+      # common. (e.g. "Program Files" -> "PROGRA~1")
+      ;;
+    esac
+
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+    dynamic_linker='Win32 link.exe'
+    ;;
+
+  *)
+    # Assume MSVC wrapper
+    library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
+    dynamic_linker='Win32 ld.exe'
+    ;;
+  esac
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+  soname_spec='${libname}${release}${major}$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+m4_if([$1], [],[
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[[23]].*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2.*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[[01]]* | freebsdelf3.[[01]]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
+  freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+gnu*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+haiku*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    if test "X$HPUX_IA64_MODE" = X32; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+    fi
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+  postinstall_cmds='chmod 555 $lib'
+  # or fails outright, so override atomically:
+  install_override_mode=555
+  ;;
+
+interix[[3-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+	if test "$lt_cv_prog_gnu_ld" = yes; then
+		version_type=linux # correct to gnu/linux during the next big refactor
+	else
+		version_type=irix
+	fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld are patched to set DT_RUNPATH
+  AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath],
+    [lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
+	 LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
+    AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+      [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
+	 [lt_cv_shlibpath_overrides_runpath=yes])])
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+    ])
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Append ld.so.conf contents to the search path
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  Since that test was broken with cross compilers,
+  # most powerpc-linux boxes support dynamic linking these days, and
+  # people can always --disable-shared, the test was removed; we now
+  # assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec="/usr/lib"
+  need_lib_prefix=no
+  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+  case $host_os in
+    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
+    *)				need_version=no  ;;
+  esac
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    case $host_os in
+      openbsd2.[[89]] | openbsd2.[[89]].*)
+	shlibpath_overrides_runpath=no
+	;;
+      *)
+	shlibpath_overrides_runpath=yes
+	;;
+      esac
+  else
+    shlibpath_overrides_runpath=yes
+  fi
+  ;;
+
+os2*)
+  libname_spec='$name'
+  shrext_cmds=".dll"
+  need_lib_prefix=no
+  library_names_spec='$libname${shared_ext} $libname.a'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=LIBPATH
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test "$with_gnu_ld" = yes; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec; then
+    version_type=linux # correct to gnu/linux during the next big refactor
+    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+    soname_spec='$libname${shared_ext}.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=freebsd-elf
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test "$with_gnu_ld" = yes; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+	;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+AC_MSG_RESULT([$dynamic_linker])
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+_LT_DECL([], [variables_saved_for_relink], [1],
+    [Variables whose values should be saved in libtool wrapper scripts and
+    restored at link time])
+_LT_DECL([], [need_lib_prefix], [0],
+    [Do we need the "lib" prefix for modules?])
+_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
+_LT_DECL([], [version_type], [0], [Library versioning type])
+_LT_DECL([], [runpath_var], [0],  [Shared library runtime path variable])
+_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
+_LT_DECL([], [shlibpath_overrides_runpath], [0],
+    [Is shlibpath searched before the hard-coded library search path?])
+_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
+_LT_DECL([], [library_names_spec], [1],
+    [[List of archive names.  First name is the real one, the rest are links.
+    The last name is the one that the linker finds with -lNAME]])
+_LT_DECL([], [soname_spec], [1],
+    [[The coded name of the library, if different from the real name]])
+_LT_DECL([], [install_override_mode], [1],
+    [Permission mode override for installation of shared libraries])
+_LT_DECL([], [postinstall_cmds], [2],
+    [Command to use after installation of a shared archive])
+_LT_DECL([], [postuninstall_cmds], [2],
+    [Command to use after uninstallation of a shared archive])
+_LT_DECL([], [finish_cmds], [2],
+    [Commands used to finish a libtool library installation in a directory])
+_LT_DECL([], [finish_eval], [1],
+    [[As "finish_cmds", except a single script fragment to be evaled but
+    not shown]])
+_LT_DECL([], [hardcode_into_libs], [0],
+    [Whether we should hardcode library paths into libraries])
+_LT_DECL([], [sys_lib_search_path_spec], [2],
+    [Compile-time system search path for libraries])
+_LT_DECL([], [sys_lib_dlsearch_path_spec], [2],
+    [Run-time system search path for libraries])
+])# _LT_SYS_DYNAMIC_LINKER
+
+
+# _LT_PATH_TOOL_PREFIX(TOOL)
+# --------------------------
+# find a file program which can recognize shared library
+AC_DEFUN([_LT_PATH_TOOL_PREFIX],
+[m4_require([_LT_DECL_EGREP])dnl
+AC_MSG_CHECKING([for $1])
+AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
+[case $MAGIC_CMD in
+[[\\/*] |  ?:[\\/]*])
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+dnl $ac_dummy forces splitting on constant user-supplied paths.
+dnl POSIX.2 word splitting is done only on the output of word expansions,
+dnl not every word.  This closes a longstanding sh security hole.
+  ac_dummy="m4_if([$2], , $PATH, [$2])"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/$1; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/$1"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac])
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  AC_MSG_RESULT($MAGIC_CMD)
+else
+  AC_MSG_RESULT(no)
+fi
+_LT_DECL([], [MAGIC_CMD], [0],
+	 [Used to examine libraries when file_magic_cmd begins with "file"])dnl
+])# _LT_PATH_TOOL_PREFIX
+
+# Old name:
+AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
+
+
+# _LT_PATH_MAGIC
+# --------------
+# find a file program which can recognize a shared library
+m4_defun([_LT_PATH_MAGIC],
+[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
+  else
+    MAGIC_CMD=:
+  fi
+fi
+])# _LT_PATH_MAGIC
+
+
+# LT_PATH_LD
+# ----------
+# find the pathname to the GNU or non-GNU linker
+AC_DEFUN([LT_PATH_LD],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PROG_ECHO_BACKSLASH])dnl
+
+AC_ARG_WITH([gnu-ld],
+    [AS_HELP_STRING([--with-gnu-ld],
+	[assume the C compiler uses GNU ld @<:@default=no@:>@])],
+    [test "$withval" = no || with_gnu_ld=yes],
+    [with_gnu_ld=no])dnl
+
+ac_prog=ld
+if test "$GCC" = yes; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  AC_MSG_CHECKING([for ld used by $CC])
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [[\\/]]* | ?:[[\\/]]*)
+      re_direlt='/[[^/]][[^/]]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
+      test -z "$LD" && LD="$ac_prog"
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test "$with_gnu_ld" = yes; then
+  AC_MSG_CHECKING([for GNU ld])
+else
+  AC_MSG_CHECKING([for non-GNU ld])
+fi
+AC_CACHE_VAL(lt_cv_path_LD,
+[if test -z "$LD"; then
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD="$ac_dir/$ac_prog"
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+	test "$with_gnu_ld" != no && break
+	;;
+      *)
+	test "$with_gnu_ld" != yes && break
+	;;
+      esac
+    fi
+  done
+  IFS="$lt_save_ifs"
+else
+  lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi])
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+  AC_MSG_RESULT($LD)
+else
+  AC_MSG_RESULT(no)
+fi
+test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH])
+_LT_PATH_LD_GNU
+AC_SUBST([LD])
+
+_LT_TAGDECL([], [LD], [1], [The linker used to build libraries])
+])# LT_PATH_LD
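+
+# Usage sketch: LT_PATH_LD is normally pulled in indirectly via LT_INIT, but a
+# configure.ac may also call it directly and act on the cached result, e.g.
+# (hypothetical fragment):
+#
+#   LT_PATH_LD
+#   AS_IF([test "$lt_cv_prog_gnu_ld" = yes],
+#         [AC_MSG_NOTICE([using GNU ld: $LD])],
+#         [AC_MSG_NOTICE([using non-GNU ld: $LD])])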
+
+# Old names:
+AU_ALIAS([AM_PROG_LD], [LT_PATH_LD])
+AU_ALIAS([AC_PROG_LD], [LT_PATH_LD])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_LD], [])
+dnl AC_DEFUN([AC_PROG_LD], [])
+
+
+# _LT_PATH_LD_GNU
+# ---------------
+m4_defun([_LT_PATH_LD_GNU],
+[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld,
+[# I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac])
+with_gnu_ld=$lt_cv_prog_gnu_ld
+])# _LT_PATH_LD_GNU
+
+
+# _LT_CMD_RELOAD
+# --------------
+# find reload flag for linker
+#   -- PORTME Some linkers may need a different reload flag.
+m4_defun([_LT_CMD_RELOAD],
+[AC_CACHE_CHECK([for $LD option to reload object files],
+  lt_cv_ld_reload_flag,
+  [lt_cv_ld_reload_flag='-r'])
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    if test "$GCC" != yes; then
+      reload_cmds=false
+    fi
+    ;;
+  darwin*)
+    if test "$GCC" = yes; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
+_LT_TAGDECL([], [reload_flag], [1], [How to create reloadable object files])dnl
+_LT_TAGDECL([], [reload_cmds], [2])dnl
+])# _LT_CMD_RELOAD
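+
+# Illustrative expansion: with the default reload_flag of '-r', reload_cmds
+# amounts to an incremental link that merges several objects into one
+# relocatable object, roughly (hypothetical file names):
+#
+#   ld -r -o libfoo-merged.o a.o b.o c.o
+#
+# On Darwin with GCC the compiler driver is used instead, passing -r via $wl.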
+
+
+# _LT_CHECK_MAGIC_METHOD
+# ----------------------
+# how to check for library dependencies
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_MAGIC_METHOD],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+AC_CACHE_CHECK([how to recognize dependent libraries],
+lt_cv_deplibs_check_method,
+[lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# 'unknown' -- same as 'none', but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by building a test program.
+# 'file_magic [[regex]]' -- check by looking for files in the library path
+# that respond to the $file_magic_cmd with a given extended regex.
+# If you have 'file' or equivalent on your system and you're not sure
+# whether 'pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[[4-9]]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[[45]]*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]']
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[[3-9]]*)
+  # PIC code is broken on Interix 3.x; that's why we use |\.a rather than |_pic\.a here
+  lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+])
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+  case $host_os in
+  mingw* | pw32*)
+    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+      want_nocaseglob=yes
+    else
+      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
+    fi
+    ;;
+  esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+_LT_DECL([], [deplibs_check_method], [1],
+    [Method to check whether dependent libraries are shared objects])
+_LT_DECL([], [file_magic_cmd], [1],
+    [Command to use when deplibs_check_method = "file_magic"])
+_LT_DECL([], [file_magic_glob], [1],
+    [How to find potential files when deplibs_check_method = "file_magic"])
+_LT_DECL([], [want_nocaseglob], [1],
+    [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
+])# _LT_CHECK_MAGIC_METHOD
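+
+# Illustrative results of the check above (a sketch; the value is taken from
+# the case statement and so depends entirely on $host_os):
+#
+#   linux-gnu:  deplibs_check_method=pass_all
+#   cygwin:     deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+#               file_magic_cmd='func_win32_libid'
+#   interix:    deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$'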
+
+
+# LT_PATH_NM
+# ----------
+# find the pathname to a BSD- or MS-compatible name lister
+AC_DEFUN([LT_PATH_NM],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
+[if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM="$NM"
+else
+  lt_nm_to_check="${ac_tool_prefix}nm"
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS="$lt_save_ifs"
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm="$ac_dir/$lt_tmp_nm"
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+	# Check to see if the nm accepts a BSD-compat flag.
+	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
+	#   nm: unknown option "B" ignored
+	# Tru64's nm complains that /dev/null is an invalid object file
+	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+	*/dev/null* | *'Invalid file or object type'*)
+	  lt_cv_path_NM="$tmp_nm -B"
+	  break
+	  ;;
+	*)
+	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+	  */dev/null*)
+	    lt_cv_path_NM="$tmp_nm -p"
+	    break
+	    ;;
+	  *)
+	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+	    continue # so that we can try to find one that supports BSD flags
+	    ;;
+	  esac
+	  ;;
+	esac
+      fi
+    done
+    IFS="$lt_save_ifs"
+  done
+  : ${lt_cv_path_NM=no}
+fi])
+if test "$lt_cv_path_NM" != "no"; then
+  NM="$lt_cv_path_NM"
+else
+  # Didn't find a BSD-compatible name lister; look for dumpbin instead.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :)
+    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+  AC_SUBST([DUMPBIN])
+  if test "$DUMPBIN" != ":"; then
+    NM="$DUMPBIN"
+  fi
+fi
+test -z "$NM" && NM=nm
+AC_SUBST([NM])
+_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl
+
+AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
+  [lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD)
+  cat conftest.out >&AS_MESSAGE_LOG_FD
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -f conftest*])
+])# LT_PATH_NM
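+
+# Illustrative outcomes of the probe above (hypothetical values):
+#
+#   NM='/usr/bin/nm -B'       lt_cv_nm_interface='BSD nm'
+#   NM='dumpbin -symbols'     lt_cv_nm_interface='MS dumpbin'
+#
+# Later checks pipe "$NM object" through $lt_cv_sys_global_symbol_pipe to
+# harvest the global symbols of library objects.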
+
+# Old names:
+AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
+AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_NM], [])
+dnl AC_DEFUN([AC_PROG_NM], [])
+
+# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+# --------------------------------
+# how to determine the name of the shared library
+# associated with a specific link library.
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+m4_require([_LT_DECL_DLLTOOL])
+AC_CACHE_CHECK([how to associate runtime and link libraries],
+lt_cv_sharedlib_from_linklib_cmd,
+[lt_cv_sharedlib_from_linklib_cmd='unknown'
+
+case $host_os in
+cygwin* | mingw* | pw32* | cegcc*)
+  # two different shell functions defined in ltmain.sh
+  # decide which to use based on capabilities of $DLLTOOL
+  case `$DLLTOOL --help 2>&1` in
+  *--identify-strict*)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
+    ;;
+  *)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
+    ;;
+  esac
+  ;;
+*)
+  # fallback: assume linklib IS sharedlib
+  lt_cv_sharedlib_from_linklib_cmd="$ECHO"
+  ;;
+esac
+])
+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
+
+_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
+    [Command to associate shared and link libraries])
+])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
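+
+# Illustrative behaviour: on Windows-flavoured hosts the selected ltmain.sh
+# helper maps an import library to the DLL it refers to, e.g. (hypothetical
+# names):
+#
+#   func_cygming_dll_for_implib libfoo.dll.a   ->   cygfoo-1.dll
+#
+# On every other host the command is simply $ECHO, i.e. the link library and
+# the shared library are the same file.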
+
+
+# _LT_PATH_MANIFEST_TOOL
+# ----------------------
+# locate the manifest tool
+m4_defun([_LT_PATH_MANIFEST_TOOL],
+[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
+AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
+  [lt_cv_path_mainfest_tool=no
+  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
+  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
+    lt_cv_path_mainfest_tool=yes
+  fi
+  rm -f conftest*])
+if test "x$lt_cv_path_mainfest_tool" != xyes; then
+  MANIFEST_TOOL=:
+fi
+_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
+])# _LT_PATH_MANIFEST_TOOL
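+
+# Illustrative outcome: MANIFEST_TOOL keeps its probed value (usually 'mt')
+# only when running "$MANIFEST_TOOL '-?'" prints a banner containing
+# 'Manifest Tool'; otherwise it is reset to ':' so later uses are no-ops.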
+
+
+# LT_LIB_M
+# --------
+# check for math library
+AC_DEFUN([LT_LIB_M],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+LIBM=
+case $host in
+*-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*)
+  # These systems don't have libm, or don't need it
+  ;;
+*-ncr-sysv4.3*)
+  AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw")
+  AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
+  ;;
+*)
+  AC_CHECK_LIB(m, cos, LIBM="-lm")
+  ;;
+esac
+AC_SUBST([LIBM])
+])# LT_LIB_M
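+
+# Usage sketch: a package that calls math functions can rely on the LIBM
+# substitution in its build rules (hypothetical target name 'foo'):
+#
+#   configure.ac:  LT_LIB_M
+#   Makefile.am:   foo_LDADD = $(LIBM)
+#
+# On BeOS, Haiku, Darwin, Cygwin and similar hosts LIBM stays empty; on most
+# other hosts it ends up as '-lm'.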
+
+# Old name:
+AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_CHECK_LIBM], [])
+
+
+# _LT_COMPILER_NO_RTTI([TAGNAME])
+# -------------------------------
+m4_defun([_LT_COMPILER_NO_RTTI],
+[m4_require([_LT_TAG_COMPILER])dnl
+
+_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+
+if test "$GCC" = yes; then
+  case $cc_basename in
+  nvcc*)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;;
+  esac
+
+  _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
+    lt_cv_prog_compiler_rtti_exceptions,
+    [-fno-rtti -fno-exceptions], [],
+    [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
+fi
+_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
+	[Compiler flag to turn off builtin functions])
+])# _LT_COMPILER_NO_RTTI
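+
+# Illustrative result: with GCC, and when the compiler accepts
+# -fno-rtti -fno-exceptions, the per-tag flag typically ends up as
+#
+#   lt_prog_compiler_no_builtin_flag=' -fno-builtin -fno-rtti -fno-exceptions'
+#
+# (nvcc gets ' -Xcompiler -fno-builtin' instead); non-GCC compilers leave it
+# empty.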
+
+
+# _LT_CMD_GLOBAL_SYMBOLS
+# ----------------------
+m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+AC_MSG_CHECKING([command to parse $NM output from $compiler object])
+AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
+[
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[[BCDEGRST]]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[[BCDT]]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[[ABCDGISTW]]'
+  ;;
+hpux*)
+  if test "$host_cpu" = ia64; then
+    symcode='[[ABCDEGRST]]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[[BCDEGRST]]'
+  ;;
+osf*)
+  symcode='[[BCDEGQRST]]'
+  ;;
+solaris*)
+  symcode='[[BDRT]]'
+  ;;
+sco3.2v5*)
+  symcode='[[DT]]'
+  ;;
+sysv4.2uw2*)
+  symcode='[[DT]]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[[ABDT]]'
+  ;;
+sysv4)
+  symcode='[[DFNSTU]]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[[ABCDGIRSTW]]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function
+    # and D for any global variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK ['"\
+"     {last_section=section; section=\$ 3};"\
+"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx]"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[	 ]]\($symcode$symcode*\)[[	 ]][[	 ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
+  if AC_TRY_EVAL(ac_compile); then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+	mv -f "$nlist"T "$nlist"
+      else
+	rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+	  cat <<_LT_EOF > conftest.$ac_ext
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT@&t@_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT@&t@_DLSYM_CONST
+#else
+# define LT@&t@_DLSYM_CONST const
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+	  # Now generate the symbol file.
+	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+	  cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+LT@&t@_DLSYM_CONST struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[[]] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+	  cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+	  # Now try linking the two files.
+	  mv conftest.$ac_objext conftstm.$ac_objext
+	  lt_globsym_save_LIBS=$LIBS
+	  lt_globsym_save_CFLAGS=$CFLAGS
+	  LIBS="conftstm.$ac_objext"
+	  CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
+	  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
+	    pipe_works=yes
+	  fi
+	  LIBS=$lt_globsym_save_LIBS
+	  CFLAGS=$lt_globsym_save_CFLAGS
+	else
+	  echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
+	fi
+      else
+	echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
+    fi
+  else
+    echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test "$pipe_works" = yes; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+])
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  AC_MSG_RESULT(failed)
+else
+  AC_MSG_RESULT(ok)
+fi
+
+# Response file support.
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+  nm_file_list_spec='@'
+elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
+  nm_file_list_spec='@'
+fi
+
+_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
+    [Take the output of nm and produce a listing of raw symbols and C names])
+_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
+    [Transform the output of nm in a proper C declaration])
+_LT_DECL([global_symbol_to_c_name_address],
+    [lt_cv_sys_global_symbol_to_c_name_address], [1],
+    [Transform the output of nm in a C name address pair])
+_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
+    [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
+    [Transform the output of nm in a C name address pair when lib prefix is needed])
+_LT_DECL([], [nm_file_list_spec], [1],
+    [Specify filename containing input files for $NM])
+])# _LT_CMD_GLOBAL_SYMBOLS
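+
+# Illustrative run of the symbol pipe built above, against the conftest
+# program used in the check (a sketch; the exact type letters and any leading
+# underscore depend on the platform):
+#
+#   eval "$NM conftest.o | $lt_cv_sys_global_symbol_pipe"
+#   T nm_test_func nm_test_func
+#   C nm_test_var nm_test_var
+#
+# Each output line carries the symbol type, the raw (possibly underscore-
+# prefixed) name, and the plain C name, which is what the *_to_cdecl and
+# *_to_c_name_address transforms declared above consume.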
+
+
+# _LT_COMPILER_PIC([TAGNAME])
+# ---------------------------
+m4_defun([_LT_COMPILER_PIC],
+[m4_require([_LT_TAG_COMPILER])dnl
+_LT_TAGVAR(lt_prog_compiler_wl, $1)=
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+_LT_TAGVAR(lt_prog_compiler_static, $1)=
+
+m4_if([$1], [CXX], [
+  # C++ specific cases for pic, static, wl, etc.
+  if test "$GXX" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+    aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+    mingw* | cygwin* | os2* | pw32* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+    *djgpp*)
+      # DJGPP does not support shared libraries at all
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+      ;;
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+    *qnx* | *nto*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will coredump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+  else
+    case $host_os in
+      aix[[4-9]]*)
+	# All AIX code is PIC.
+	if test "$host_cpu" = ia64; then
+	  # AIX 5 now supports IA64 processor
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	else
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+	fi
+	;;
+      chorus*)
+	case $cc_basename in
+	cxch68*)
+	  # Green Hills C++ Compiler
+	  # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
+	  ;;
+	esac
+	;;
+      mingw* | cygwin* | os2* | pw32* | cegcc*)
+	# This hack is so that the source file can tell whether it is being
+	# built for inclusion in a dll (and should export symbols for example).
+	m4_if([$1], [GCJ], [],
+	  [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+	;;
+      dgux*)
+	case $cc_basename in
+	  ec++*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  ghcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      freebsd* | dragonfly*)
+	# FreeBSD uses GNU C++
+	;;
+      hpux9* | hpux10* | hpux11*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    if test "$host_cpu" != ia64; then
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	    fi
+	    ;;
+	  aCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    case $host_cpu in
+	    hppa*64*|ia64*)
+	      # +Z the default
+	      ;;
+	    *)
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	      ;;
+	    esac
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      interix*)
+	# This is c89, which is MS Visual C++ (no shared libs)
+	# Anyone wants to do a port?
+	;;
+      irix5* | irix6* | nonstopux*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    # CC pic flag -KPIC is the default.
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+	case $cc_basename in
+	  KCC*)
+	    # KAI C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    ;;
+	  ecpc* )
+	    # old Intel C++ for x86_64 which still supported -KPIC.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  icpc* )
+	    # Intel C++, used to be incompatible with GCC.
+	    # ICC 10 doesn't accept -KPIC any more.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  pgCC* | pgcpp*)
+	    # Portland Group C++ compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  cxx*)
+	    # Compaq C++
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*)
+	    # IBM XL 8.0, 9.0 on PPC and BlueGene
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+      lynxos*)
+	;;
+      m88k*)
+	;;
+      mvs*)
+	case $cc_basename in
+	  cxx*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      netbsd*)
+	;;
+      *qnx* | *nto*)
+        # QNX uses GNU C++, but we need to define the -shared option too,
+        # otherwise it will coredump.
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+        ;;
+      osf3* | osf4* | osf5*)
+	case $cc_basename in
+	  KCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    ;;
+	  RCC*)
+	    # Rational C++ 2.4.1
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  cxx*)
+	    # Digital/Compaq C++
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      psos*)
+	;;
+      solaris*)
+	case $cc_basename in
+	  CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	    ;;
+	  gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sunos4*)
+	case $cc_basename in
+	  CC*)
+	    # Sun C++ 4.x
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  lcc*)
+	    # Lucid
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	esac
+	;;
+      tandem*)
+	case $cc_basename in
+	  NCC*)
+	    # NonStop-UX NCC 3.20
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      vxworks*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+	;;
+    esac
+  fi
+],
+[
+  if test "$GCC" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will coredump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker '
+      if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)"
+      fi
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      else
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC (with -KPIC) is the default.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu)
+      case $cc_basename in
+      # old Intel for x86_64 which still supported -KPIC.
+      ecc*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+	;;
+      nagfor*)
+	# NAG Fortran compiler
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+	# which looks to be a dead project)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+        ;;
+      ccc*)
+        _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+        # All Alpha code is PIC.
+        _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	;;
+      *)
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*)
+	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
+	  ;;
+	*Sun\ F* | *Sun*Fortran*)
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	  ;;
+	*Sun\ C*)
+	  # Sun C 5.9
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  ;;
+        *Intel*\ [[CF]]*Compiler*)
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	  ;;
+	*Portland\ Group*)
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  ;;
+	esac
+	;;
+      esac
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will coredump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # All OSF/1 code is PIC.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    rdos*)
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec ;then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    unicos*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+    esac
+  fi
+])
+case $host_os in
+  # For platforms which do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+    ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+    ;;
+esac
+
+AC_CACHE_CHECK([for $compiler option to produce PIC],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+  _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
+    [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
+    [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
+    [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
+     "" | " "*) ;;
+     *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
+     esac],
+    [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+     _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
+fi
+_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+	[Additional compiler flags for building library objects])
+
+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+	[How to pass a linker flag through the compiler])
+#
+# Check to make sure the static flag actually works.
+#
+wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
+_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
+  _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
+  $lt_tmp_static_flag,
+  [],
+  [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
+_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+	[Compiler flag to prevent dynamic linking])
+])# _LT_COMPILER_PIC
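+
+# Illustrative values for a common configuration (GCC on linux-gnu, default
+# C tag); other compilers and hosts get the values chosen in the case
+# statements above:
+#
+#   lt_prog_compiler_wl='-Wl,'            # pass linker flags as -Wl,<flag>
+#   lt_prog_compiler_pic=' -fPIC -DPIC'   # flags added when building objects
+#   lt_prog_compiler_static='-static'     # flag that forces static linking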
+
+
+# _LT_LINKER_SHLIBS([TAGNAME])
+# ----------------------------
+# See if the linker supports building shared libraries.
+m4_defun([_LT_LINKER_SHLIBS],
+[AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+m4_if([$1], [CXX], [
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  case $host_os in
+  aix[[4-9]]*)
+    # If we're using GNU nm, then we don't want the "-C" option.
+    # To AIX nm, -C means demangle; to GNU nm, it means don't demangle.
+    # Also, AIX nm treats weak defined symbols like other global defined
+    # symbols, whereas GNU nm marks them as "W".
+    if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    else
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    fi
+    ;;
+  pw32*)
+    _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
+    ;;
+  cygwin* | mingw* | cegcc*)
+    case $cc_basename in
+    cl*)
+      _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+      ;;
+    *)
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+      ;;
+    esac
+    ;;
+  *)
+    _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+    ;;
+  esac
+], [
+  runpath_var=
+  _LT_TAGVAR(allow_undefined_flag, $1)=
+  _LT_TAGVAR(always_export_symbols, $1)=no
+  _LT_TAGVAR(archive_cmds, $1)=
+  _LT_TAGVAR(archive_expsym_cmds, $1)=
+  _LT_TAGVAR(compiler_needs_object, $1)=no
+  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+  _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(hardcode_automatic, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+  _LT_TAGVAR(hardcode_minus_L, $1)=no
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  _LT_TAGVAR(inherit_rpath, $1)=no
+  _LT_TAGVAR(link_all_deplibs, $1)=unknown
+  _LT_TAGVAR(module_cmds, $1)=
+  _LT_TAGVAR(module_expsym_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_new_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
+  _LT_TAGVAR(thread_safe_flag_spec, $1)=
+  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+  # include_expsyms should be a list of space-separated symbols that are
+  # *always* included in the symbol list.
+  _LT_TAGVAR(include_expsyms, $1)=
+  # exclude_expsyms can be an extended regexp of symbols to exclude;
+  # it will be wrapped by ` (' and `)$', so the pattern must not match the
+  # beginning or end of a line.  Example: `a|bc|.*d.*' will exclude the
+  # symbols `a' and `bc', as well as any symbol that contains `d'.
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid C symbol name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+dnl Note also adjust exclude_expsyms for C++ above.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test "$GCC" != yes; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd*)
+    with_gnu_ld=no
+    ;;
+  esac
+
+  _LT_TAGVAR(ld_shlibs, $1)=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test "$with_gnu_ld" = yes; then
+    case $host_os in
+      aix*)
+	# The AIX port of GNU ld has always aspired to compatibility
+	# with the native linker.  However, as the warning in the GNU ld
+	# block says, versions before 2.19.5* couldn't really create working
+	# shared libraries, regardless of the interface used.
+	case `$LD -v 2>&1` in
+	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+	  *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;;
+	  *\ \(GNU\ Binutils\)\ [[3-9]]*) ;;
+	  *)
+	    lt_use_gnu_ld_interface=yes
+	    ;;
+	esac
+	;;
+      *)
+	lt_use_gnu_ld_interface=yes
+	;;
+    esac
+  fi
+
+  if test "$lt_use_gnu_ld_interface" = yes; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='${wl}'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+    # ancient GNU ld didn't support --whole-archive et al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+    else
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[[3-9]]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test "$host_cpu" != ia64; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	# support --undefined.  This deserves some investigation.  FIXME
+	_LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+      # as there is no search path for DLLs.
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=no
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	# If the export-symbols file already is a .def file (1st line
+	# is EXPORTS), use it as is; otherwise, prepend...
+	_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	  cp $export_symbols $output_objdir/$soname.def;
+	else
+	  echo EXPORTS > $output_objdir/$soname.def;
+	  cat $export_symbols >> $output_objdir/$soname.def;
+	fi~
+	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    haiku*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    interix[[3-9]]*)
+      _LT_TAGVAR(hardcode_direct, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very memory-
+      # consuming and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test "$host_os" = linux-dietlibc; then
+	case $cc_basename in
+	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
+	esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+	 && test "$tmp_diet" = no
+      then
+	tmp_addflag=' $pic_flag'
+	tmp_sharedflag='-shared'
+	case $cc_basename,$host_cpu in
+        pgcc*)				# Portland Group C compiler
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag'
+	  ;;
+	pgf77* | pgf90* | pgf95* | pgfortran*)
+					# Portland Group f77 and f90 compilers
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag -Mnomain' ;;
+	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
+	  tmp_addflag=' -i_dynamic' ;;
+	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
+	  tmp_addflag=' -i_dynamic -nofor_main' ;;
+	ifc* | ifort*)			# Intel Fortran compiler
+	  tmp_addflag=' -nofor_main' ;;
+	lf95*)				# Lahey Fortran 8.1
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+	  tmp_sharedflag='--shared' ;;
+	xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+	  tmp_sharedflag='-qmkshrobj'
+	  tmp_addflag= ;;
+	nvcc*)	# Cuda Compiler Driver 2.2
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  ;;
+	esac
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ C*)			# Sun C 5.9
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  tmp_sharedflag='-G' ;;
+	*Sun\ F*)			# Sun Fortran 8.3
+	  tmp_sharedflag='-G' ;;
+	esac
+	_LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+        if test "x$supports_anon_versioning" = xyes; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	    echo "local: *; };" >> $output_objdir/$libname.ver~
+	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+        fi
+
+	case $cc_basename in
+	xlf* | bgf* | bgxlf* | mpixlf*)
+	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+	  if test "x$supports_anon_versioning" = xyes; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	      echo "local: *; };" >> $output_objdir/$libname.ver~
+	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+	  fi
+	  ;;
+	esac
+      else
+        _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+	wlarc=
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+	;;
+	*)
+	  # For security reasons, it is highly recommended that you always
+	  # use absolute paths for naming shared libraries, and exclude the
+	  # DT_RUNPATH tag from executables and libraries.  But doing so
+	  # requires that you compile everything twice, which is a pain.
+	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+	  else
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	  fi
+	;;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+    esac
+
+    if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then
+      runpath_var=
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+	# Neither direct hardcoding nor static linking is supported with a
+	# broken collect2.
+	_LT_TAGVAR(hardcode_direct, $1)=unsupported
+      fi
+      ;;
+
+    aix[[4-9]]*)
+      if test "$host_cpu" = ia64; then
+	# On IA64, the linker does run time linking by default, so we don't
+	# have to do anything special.
+	aix_use_runtimelinking=no
+	exp_sym_flag='-Bexport'
+	no_entry_flag=""
+      else
+	# If we're using GNU nm, then we don't want the "-C" option.
+	# To AIX nm, -C means demangle; to GNU nm, it means don't demangle.
+	# Also, AIX nm treats weak defined symbols like other global
+	# defined symbols, whereas GNU nm marks them as "W".
+	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	else
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	fi
+	aix_use_runtimelinking=no
+
+	# Test if we are trying to use run time linking or normal
+	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
+	# need to do runtime linking.
+	case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	  for ld_flag in $LDFLAGS; do
+	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+	    aix_use_runtimelinking=yes
+	    break
+	  fi
+	  done
+	  ;;
+	esac
+
+	exp_sym_flag='-bexport'
+	no_entry_flag='-bnoentry'
+      fi
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow" add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      _LT_TAGVAR(archive_cmds, $1)=''
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+      if test "$GCC" = yes; then
+	case $host_os in aix4.[[012]]|aix4.[[012]].*)
+	# We only want to do this on AIX 4.2 and lower; the check
+	# below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	  # We have reworked collect2
+	  :
+	  else
+	  # We have old collect2
+	  _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	  # It fails to find uninstalled libraries when the uninstalled
+	  # path is not listed in the libpath.  Setting hardcode_minus_L
+	  # to unsupported forces relinking
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+	  ;;
+	esac
+	shared_flag='-shared'
+	if test "$aix_use_runtimelinking" = yes; then
+	  shared_flag="$shared_flag "'${wl}-G'
+	fi
+      else
+	# not using gcc
+	if test "$host_cpu" = ia64; then
+	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	# chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+	else
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag='${wl}-G'
+	  else
+	    shared_flag='${wl}-bM:SRE'
+	  fi
+	fi
+      fi
+
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      if test "$aix_use_runtimelinking" = yes; then
+	# Warning - without using the other runtime loading flags (-brtl),
+	# -berok will link without error, but may produce a broken library.
+	_LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        _LT_SYS_MODULE_PATH_AIX([$1])
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+      else
+	if test "$host_cpu" = ia64; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	  _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+	else
+	 # Determine the default libpath from the value encoded in an
+	 # empty executable.
+	 _LT_SYS_MODULE_PATH_AIX([$1])
+	 _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	  # Warning - without using the other run time loading flags,
+	  # -berok will link without error, but may produce a broken library.
+	  _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	  _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	  if test "$with_gnu_ld" = yes; then
+	    # We only use this code for GNU lds that support --whole-archive.
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	  else
+	    # Exported symbols can be pulled into shared objects from archives
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	  fi
+	  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  # This is similar to how AIX traditionally builds its shared libraries.
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+	fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[[45]]*)
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      case $cc_basename in
+      cl*)
+	# Native MSVC
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	_LT_TAGVAR(always_export_symbols, $1)=yes
+	_LT_TAGVAR(file_list_spec, $1)='@'
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	  else
+	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	  fi~
+	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	  linknames='
+	# The linker will not automatically build a static lib if we build a DLL.
+	# _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	_LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+	_LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
+	# Don't use ranlib
+	_LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+	_LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+	  lt_tool_outputfile="@TOOL_OUTPUT@"~
+	  case $lt_outputfile in
+	    *.exe|*.EXE) ;;
+	    *)
+	      lt_outputfile="$lt_outputfile.exe"
+	      lt_tool_outputfile="$lt_tool_outputfile.exe"
+	      ;;
+	  esac~
+	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	    $RM "$lt_outputfile.manifest";
+	  fi'
+	;;
+      *)
+	# Assume MSVC wrapper
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+	# The linker will automatically build a .lib file if we build a DLL.
+	_LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	# FIXME: Should let the user specify the lib program.
+	_LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+	_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	;;
+      esac
+      ;;
+
+    darwin* | rhapsody*)
+      _LT_DARWIN_LINKER_FEATURES($1)
+      ;;
+
+    dgux*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2.*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    hpux9*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+
+      # hardcode_minus_L: the -L directory is not really in the search PATH,
+      # but serves as the default location of the library.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      ;;
+
+    hpux10*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# hardcode_minus_L: the -L directory is not really in the search PATH,
+	# but serves as the default location of the library.
+	_LT_TAGVAR(hardcode_minus_L, $1)=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	esac
+      else
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	m4_if($1, [], [
+	  # Older versions of the 11.00 compiler do not understand -b yet
+	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+	  _LT_LINKER_OPTION([if $CC understands -b],
+	    _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b],
+	    [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'],
+	    [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])],
+	  [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'])
+	  ;;
+	esac
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	case $host_cpu in
+	hppa*64*|ia64*)
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  ;;
+	*)
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+
+	  # hardcode_minus_L: the -L directory is not really in the search PATH,
+	  # but serves as the default location of the library.
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  ;;
+	esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	# Try to use the -exported_symbol ld option; if it does not
+	# work, assume that -exports_file does not work either and
+	# implicitly export all symbols.
+	# This should be the same for all languages, so no per-tag cache variable.
+	AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
+	  [lt_cv_irix_exported_symbol],
+	  [save_LDFLAGS="$LDFLAGS"
+	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+	   AC_LINK_IFELSE(
+	     [AC_LANG_SOURCE(
+	        [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
+			      [C++], [[int foo (void) { return 0; }]],
+			      [Fortran 77], [[
+      subroutine foo
+      end]],
+			      [Fortran], [[
+      subroutine foo
+      end]])])],
+	      [lt_cv_irix_exported_symbol=yes],
+	      [lt_cv_irix_exported_symbol=no])
+           LDFLAGS="$save_LDFLAGS"])
+	if test "$lt_cv_irix_exported_symbol" = yes; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+	fi
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(inherit_rpath, $1)=yes
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd*)
+      if test -f /usr/libexec/ld.so; then
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	else
+	  case $host_os in
+	   openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*)
+	     _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	     ;;
+	   *)
+	     _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	     ;;
+	  esac
+	fi
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    os2*)
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+      _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+      ;;
+
+    osf3*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    osf4* | osf5*)	# as osf3* with the addition of -msym flag
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+	# Both the C and C++ compilers support -rpath directly
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+      if test "$GCC" = yes; then
+	wlarc='${wl}'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+	case `$CC -V 2>&1` in
+	*"Compilers 5.0"*)
+	  wlarc=''
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+	  ;;
+	*)
+	  wlarc='${wl}'
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+	  ;;
+	esac
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      case $host_os in
+      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+      *)
+	# The compiler driver will combine and reorder linker options,
+	# but understands `-z linker_flag'.  GCC discards it without `$wl',
+	# but is careful enough not to reorder.
+	# Supported since Solaris 2.6 (maybe 2.5.1?)
+	if test "$GCC" = yes; then
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+	else
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	fi
+	;;
+      esac
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    sunos4*)
+      if test "x$host_vendor" = xsequent; then
+	# Use $CC to link under sequent, because it throws in some extra .o
+	# files that make .init and .fini sections work.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+	sni)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
+	;;
+	siemens)
+	  ## LD is ld; it makes a PLAMLIB
+	  ## CC just makes a GrossModule.
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+        ;;
+	motorola)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=no # Motorola manual says yes, but my tests say they lie
+	;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4.3*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	runpath_var=LD_RUN_PATH
+	hardcode_runpath_var=yes
+	_LT_TAGVAR(ld_shlibs, $1)=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We can NOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      _LT_TAGVAR(ld_shlibs, $1)=no
+      ;;
+    esac
+
+    if test x$host_vendor = xsni; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym'
+	;;
+      esac
+    fi
+  fi
+])
+AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
+
+_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
+_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
+_LT_DECL([], [extract_expsyms_cmds], [2],
+    [The commands to extract the exported symbol list from a shared archive])
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
+x|xyes)
+  # Assume -lc should be added
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+
+  if test "$enable_shared" = yes && test "$GCC" = yes; then
+    case $_LT_TAGVAR(archive_cmds, $1) in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc, since on some
+      # systems -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      AC_CACHE_CHECK([whether -lc should be explicitly linked in],
+	[lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1),
+	[$RM conftest*
+	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+	if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
+	  soname=conftest
+	  lib=conftest
+	  libobjs=conftest.$ac_objext
+	  deplibs=
+	  wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
+	  pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
+	  compiler_flags=-v
+	  linker_flags=-v
+	  verstring=
+	  output_objdir=.
+	  libname=conftest
+	  lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
+	  _LT_TAGVAR(allow_undefined_flag, $1)=
+	  if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
+	  then
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	  else
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  fi
+	  _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
+	else
+	  cat conftest.err 1>&5
+	fi
+	$RM conftest*
+	])
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)
+      ;;
+    esac
+  fi
+  ;;
+esac
+
+_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
+    [Whether or not to add -lc for building shared libraries])
+_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
+    [enable_shared_with_static_runtimes], [0],
+    [Whether or not to disallow shared libs when runtime libs are static])
+_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
+    [Compiler flag to allow reflexive dlopens])
+_LT_TAGDECL([], [whole_archive_flag_spec], [1],
+    [Compiler flag to generate shared objects directly from archives])
+_LT_TAGDECL([], [compiler_needs_object], [1],
+    [Whether the compiler copes with passing no objects directly])
+_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
+    [Create an old-style archive from a shared archive])
+_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
+    [Create a temporary old-style archive to link instead of a shared archive])
+_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
+_LT_TAGDECL([], [archive_expsym_cmds], [2])
+_LT_TAGDECL([], [module_cmds], [2],
+    [Commands used to build a loadable module if different from building
+    a shared archive.])
+_LT_TAGDECL([], [module_expsym_cmds], [2])
+_LT_TAGDECL([], [with_gnu_ld], [1],
+    [Whether we are building with GNU ld or not])
+_LT_TAGDECL([], [allow_undefined_flag], [1],
+    [Flag that allows shared libraries with undefined symbols to be built])
+_LT_TAGDECL([], [no_undefined_flag], [1],
+    [Flag that enforces no undefined symbols])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
+    [Flag to hardcode $libdir into a binary during linking.
+    This must work even if $libdir does not exist])
+_LT_TAGDECL([], [hardcode_libdir_separator], [1],
+    [Whether we need a single "-rpath" flag with a separated argument])
+_LT_TAGDECL([], [hardcode_direct], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary])
+_LT_TAGDECL([], [hardcode_direct_absolute], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary and the resulting library dependency is
+    "absolute", i.e impossible to change by setting ${shlibpath_var} if the
+    library is relocated])
+_LT_TAGDECL([], [hardcode_minus_L], [0],
+    [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
+    [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_automatic], [0],
+    [Set to "yes" if building a shared library automatically hardcodes DIR
+    into the library and all subsequent libraries and executables linked
+    against it])
+_LT_TAGDECL([], [inherit_rpath], [0],
+    [Set to yes if linker adds runtime paths of dependent libraries
+    to runtime path list])
+_LT_TAGDECL([], [link_all_deplibs], [0],
+    [Whether libtool must link a program against all its dependency libraries])
+_LT_TAGDECL([], [always_export_symbols], [0],
+    [Set to "yes" if exported symbols are required])
+_LT_TAGDECL([], [export_symbols_cmds], [2],
+    [The commands to list exported symbols])
+_LT_TAGDECL([], [exclude_expsyms], [1],
+    [Symbols that should not be listed in the preloaded symbols])
+_LT_TAGDECL([], [include_expsyms], [1],
+    [Symbols that must always be exported])
+_LT_TAGDECL([], [prelink_cmds], [2],
+    [Commands necessary for linking programs (against libraries) with templates])
+_LT_TAGDECL([], [postlink_cmds], [2],
+    [Commands necessary for finishing linking programs])
+_LT_TAGDECL([], [file_list_spec], [1],
+    [Specify filename containing input files])
+dnl FIXME: Not yet implemented
+dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
+dnl    [Compiler flag to generate thread safe objects])
+])# _LT_LINKER_SHLIBS
+
+
+# _LT_LANG_C_CONFIG([TAG])
+# ------------------------
+# Ensure that the configuration variables for a C compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_C_CONFIG],
+[m4_require([_LT_DECL_EGREP])dnl
+lt_save_CC="$CC"
+AC_LANG_PUSH(C)
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+_LT_TAG_COMPILER
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros; do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_SYS_DYNAMIC_LINKER($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+  LT_SYS_DLOPEN_SELF
+  _LT_CMD_STRIPLIB
+
+  # Report which library types will actually be built
+  AC_MSG_CHECKING([if libtool supports shared libraries])
+  AC_MSG_RESULT([$can_build_shared])
+
+  AC_MSG_CHECKING([whether to build shared libraries])
+  test "$can_build_shared" = "no" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test "$enable_shared" = yes && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[[4-9]]*)
+    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+      test "$enable_shared" = yes && enable_static=no
+    fi
+    ;;
+  esac
+  AC_MSG_RESULT([$enable_shared])
+
+  AC_MSG_CHECKING([whether to build static libraries])
+  # Make sure either enable_shared or enable_static is yes.
+  test "$enable_shared" = yes || enable_static=yes
+  AC_MSG_RESULT([$enable_static])
+
+  _LT_CONFIG($1)
+fi
+AC_LANG_POP
+CC="$lt_save_CC"
+])# _LT_LANG_C_CONFIG
+
+
+# _LT_LANG_CXX_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a C++ compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_CXX_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
+    ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
+    (test "X$CXX" != "Xg++"))) ; then
+  AC_PROG_CXXCPP
+else
+  _lt_caught_CXX_error=yes
+fi
+
+AC_LANG_PUSH(C++)
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(compiler_needs_object, $1)=no
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for C++ test sources.
+ac_ext=cpp
+
+# Object file extension for compiled C++ test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the CXX compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_caught_CXX_error" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="int some_variable = 0;"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC=$CC
+  lt_save_CFLAGS=$CFLAGS
+  lt_save_LD=$LD
+  lt_save_GCC=$GCC
+  GCC=$GXX
+  lt_save_with_gnu_ld=$with_gnu_ld
+  lt_save_path_LD=$lt_cv_path_LD
+  if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
+    lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
+  else
+    $as_unset lt_cv_prog_gnu_ld
+  fi
+  if test -n "${lt_cv_path_LDCXX+set}"; then
+    lt_cv_path_LD=$lt_cv_path_LDCXX
+  else
+    $as_unset lt_cv_path_LD
+  fi
+  test -z "${LDCXX+set}" || LD=$LDCXX
+  CC=${CXX-"c++"}
+  CFLAGS=$CXXFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    # We don't want -fno-exception when compiling C++ code, so set the
+    # no_builtin_flag separately
+    if test "$GXX" = yes; then
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
+    else
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+    fi
+
+    if test "$GXX" = yes; then
+      # Set up default GNU C++ configuration
+
+      LT_PATH_LD
+
+      # Check if GNU C++ uses GNU ld as the underlying linker, since the
+      # archiving commands below assume that GNU ld is being used.
+      if test "$with_gnu_ld" = yes; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+        # If archive_cmds runs LD, not CC, wlarc should be empty
+        # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
+        #     investigate it a little bit more. (MM)
+        wlarc='${wl}'
+
+        # ancient GNU ld didn't support --whole-archive et al.
+        if eval "`$CC -print-prog-name=ld` --help 2>&1" |
+	  $GREP 'no-whole-archive' > /dev/null; then
+          _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+        else
+          _LT_TAGVAR(whole_archive_flag_spec, $1)=
+        fi
+      else
+        with_gnu_ld=no
+        wlarc=
+
+        # A generic and very simple default shared library creation
+        # command for GNU C++ for the case where it uses the native
+        # linker, instead of GNU ld.  If possible, this setting should be
+        # overridden to take advantage of the native linker features on
+        # the platform it is being used on.
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+      fi
+
+      # Commands to make compiler produce verbose output that lists
+      # what "hidden" libraries, object files and flags are used when
+      # linking a shared library.
+      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+    else
+      GXX=no
+      with_gnu_ld=no
+      wlarc=
+    fi
+
+    # PORTME: fill in a description of your system's C++ link characteristics
+    AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+    _LT_TAGVAR(ld_shlibs, $1)=yes
+    case $host_os in
+      aix3*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+      aix[[4-9]]*)
+        if test "$host_cpu" = ia64; then
+          # On IA64, the linker does run time linking by default, so we don't
+          # have to do anything special.
+          aix_use_runtimelinking=no
+          exp_sym_flag='-Bexport'
+          no_entry_flag=""
+        else
+          aix_use_runtimelinking=no
+
+          # Test if we are trying to use run time linking or normal
+          # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+          # need to do runtime linking.
+          case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	    for ld_flag in $LDFLAGS; do
+	      case $ld_flag in
+	      *-brtl*)
+	        aix_use_runtimelinking=yes
+	        break
+	        ;;
+	      esac
+	    done
+	    ;;
+          esac
+
+          exp_sym_flag='-bexport'
+          no_entry_flag='-bnoentry'
+        fi
+
+        # When large executables or shared objects are built, AIX ld can
+        # have problems creating the table of contents.  If linking a library
+        # or program results in "error TOC overflow" add -mminimal-toc to
+        # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+        # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+        _LT_TAGVAR(archive_cmds, $1)=''
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+        if test "$GXX" = yes; then
+          case $host_os in aix4.[[012]]|aix4.[[012]].*)
+          # We only want to do this on AIX 4.2 and lower; the check
+          # below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	     strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	    # We have reworked collect2
+	    :
+	  else
+	    # We have old collect2
+	    _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	    # It fails to find uninstalled libraries when the uninstalled
+	    # path is not listed in the libpath.  Setting hardcode_minus_L
+	    # to unsupported forces relinking
+	    _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+          esac
+          shared_flag='-shared'
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag="$shared_flag "'${wl}-G'
+	  fi
+        else
+          # not using gcc
+          if test "$host_cpu" = ia64; then
+	  # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	  # chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+          else
+	    if test "$aix_use_runtimelinking" = yes; then
+	      shared_flag='${wl}-G'
+	    else
+	      shared_flag='${wl}-bM:SRE'
+	    fi
+          fi
+        fi
+
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+        # It seems that -bexpall does not export symbols beginning with
+        # underscore (_), so it is better to generate a list of symbols to
+	# export.
+        _LT_TAGVAR(always_export_symbols, $1)=yes
+        if test "$aix_use_runtimelinking" = yes; then
+          # Warning - without using the other runtime loading flags (-brtl),
+          # -berok will link without error, but may produce a broken library.
+          _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+          # Determine the default libpath from the value encoded in an empty
+          # executable.
+          _LT_SYS_MODULE_PATH_AIX([$1])
+          _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+        else
+          if test "$host_cpu" = ia64; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	    _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+          else
+	    # Determine the default libpath from the value encoded in an
+	    # empty executable.
+	    _LT_SYS_MODULE_PATH_AIX([$1])
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	    # Warning - without using the other run time loading flags,
+	    # -berok will link without error, but may produce a broken library.
+	    _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	    _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	    if test "$with_gnu_ld" = yes; then
+	      # We only use this code for GNU lds that support --whole-archive.
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    else
+	      # Exported symbols can be pulled into shared objects from archives
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	    fi
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	    # This is similar to how AIX traditionally builds its shared
+	    # libraries.
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+          fi
+        fi
+        ;;
+
+      beos*)
+	if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	  # support --undefined.  This deserves some investigation.  FIXME
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      chorus*)
+        case $cc_basename in
+          *)
+	  # FIXME: insert proper C++ library support
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	  ;;
+        esac
+        ;;
+
+      cygwin* | mingw* | pw32* | cegcc*)
+	case $GXX,$cc_basename in
+	,cl* | no,cl*)
+	  # Native MSVC
+	  # hardcode_libdir_flag_spec is actually meaningless, as there is
+	  # no search path for DLLs.
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  _LT_TAGVAR(always_export_symbols, $1)=yes
+	  _LT_TAGVAR(file_list_spec, $1)='@'
+	  # Tell ltmain to make .lib files, not .a files.
+	  libext=lib
+	  # Tell ltmain to make .dll files, not .so files.
+	  shrext_cmds=".dll"
+	  # FIXME: Setting linknames here is a bad hack.
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	      $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	    else
+	      $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	    fi~
+	    $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	    linknames='
+	  # The linker will not automatically build a static lib if we build a DLL.
+	  # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+	  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+	  # Don't use ranlib
+	  _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+	  _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+	    lt_tool_outputfile="@TOOL_OUTPUT@"~
+	    case $lt_outputfile in
+	      *.exe|*.EXE) ;;
+	      *)
+		lt_outputfile="$lt_outputfile.exe"
+		lt_tool_outputfile="$lt_tool_outputfile.exe"
+		;;
+	    esac~
+	    func_to_tool_file "$lt_outputfile"~
+	    if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	      $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	      $RM "$lt_outputfile.manifest";
+	    fi'
+	  ;;
+	*)
+	  # g++
+	  # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+	  # as there is no search path for DLLs.
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  _LT_TAGVAR(always_export_symbols, $1)=no
+	  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+
+	  if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	    # If the export-symbols file already is a .def file (1st line
+	    # is EXPORTS), use it as is; otherwise, prepend...
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	      cp $export_symbols $output_objdir/$soname.def;
+	    else
+	      echo EXPORTS > $output_objdir/$soname.def;
+	      cat $export_symbols >> $output_objdir/$soname.def;
+	    fi~
+	    $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	  else
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	  fi
+	  ;;
+	esac
+	;;
+      darwin* | rhapsody*)
+        _LT_DARWIN_LINKER_FEATURES($1)
+	;;
+
+      dgux*)
+        case $cc_basename in
+          ec++*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          ghcx*)
+	    # Green Hills C++ Compiler
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      freebsd2.*)
+        # C++ shared libraries were reported to be fairly broken before
+        # the switch to ELF.
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      freebsd-elf*)
+        _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+        ;;
+
+      freebsd* | dragonfly*)
+        # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
+        # conventions
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+        ;;
+
+      gnu*)
+        ;;
+
+      haiku*)
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        ;;
+
+      hpux9*)
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+				             # but as the default
+				             # location of the library.
+
+        case $cc_basename in
+          CC*)
+            # FIXME: insert proper C++ library support
+            _LT_TAGVAR(ld_shlibs, $1)=no
+            ;;
+          aCC*)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            # Commands to make compiler produce verbose output that lists
+            # what "hidden" libraries, object files and flags are used when
+            # linking a shared library.
+            #
+            # There doesn't appear to be a way to prevent this compiler from
+            # explicitly linking system object files so we need to strip them
+            # from the output so that they don't get included in the library
+            # dependencies.
+            output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+            ;;
+          *)
+            if test "$GXX" = yes; then
+              _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            else
+              # FIXME: insert proper C++ library support
+              _LT_TAGVAR(ld_shlibs, $1)=no
+            fi
+            ;;
+        esac
+        ;;
+
+      hpux10*|hpux11*)
+        if test $with_gnu_ld = no; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+          case $host_cpu in
+            hppa*64*|ia64*)
+              ;;
+            *)
+	      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+              ;;
+          esac
+        fi
+        case $host_cpu in
+          hppa*64*|ia64*)
+            _LT_TAGVAR(hardcode_direct, $1)=no
+            _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+            ;;
+          *)
+            _LT_TAGVAR(hardcode_direct, $1)=yes
+            _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+					         # but as the default
+					         # location of the library.
+            ;;
+        esac
+
+        case $cc_basename in
+          CC*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          aCC*)
+	    case $host_cpu in
+	      hppa*64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      ia64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      *)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	    esac
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test $with_gnu_ld = no; then
+	        case $host_cpu in
+	          hppa*64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          ia64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          *)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	        esac
+	      fi
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      interix[[3-9]]*)
+	_LT_TAGVAR(hardcode_direct, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+	# Instead, shared libraries are loaded at an image base (0x10000000 by
+	# default) and relocated if they conflict, which is a slow, very
+	# memory-consuming and fragmenting process.  To avoid this, we pick a random,
+	# 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+	# time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+	;;
+      irix5* | irix6*)
+        case $cc_basename in
+          CC*)
+	    # SGI C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -ar", where "CC" is the IRIX C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test "$with_gnu_ld" = no; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	      else
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+	      fi
+	    fi
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+	    ;;
+        esac
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(inherit_rpath, $1)=yes
+        ;;
+
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib'
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -Bstatic", where "CC" is the KAI C++ compiler.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
+	    ;;
+	  icpc* | ecpc* )
+	    # Intel C++
+	    with_gnu_ld=yes
+	    # version 8.0 and above of icpc choke on multiply defined symbols
+	    # if we add $predep_objects and $postdep_objects, however 7.1 and
+	    # earlier do not add the objects themselves.
+	    case `$CC -V 2>&1` in
+	      *"Version 7."*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	      *)  # Version 8.0 or newer
+	        tmp_idyn=
+	        case $host_cpu in
+		  ia64*) tmp_idyn=' -i_dynamic';;
+		esac
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	    esac
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    ;;
+          pgCC* | pgcpp*)
+            # Portland Group C++ compiler
+	    case `$CC -V` in
+	    *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*)
+	      _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
+		compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
+	      _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
+		$AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
+		$RANLIB $oldlib'
+	      _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    *) # Version 6 and above use weak symbols
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+            ;;
+	  cxx*)
+	    # Compaq C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname  -o $lib ${wl}-retain-symbols-file $wl$export_symbols'
+
+	    runpath_var=LD_RUN_PATH
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed'
+	    ;;
+	  xl* | mpixl* | bgxl*)
+	    # IBM XL 8.0 on PPC, with GNU ld
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    if test "x$supports_anon_versioning" = xyes; then
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+		cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+		echo "local: *; };" >> $output_objdir/$libname.ver~
+		$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+	    fi
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols'
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	      _LT_TAGVAR(compiler_needs_object, $1)=yes
+
+	      # Not sure whether something based on
+	      # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
+	      # would be better.
+	      output_verbose_link_cmd='func_echo_all'
+
+	      # Archives containing C++ object files must be created using
+	      # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	      # necessary to make sure instantiated templates are included
+	      # in the archive.
+	      _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+
+      lynxos*)
+        # FIXME: insert proper C++ library support
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      m88k*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      mvs*)
+        case $cc_basename in
+          cxx*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	  *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	esac
+	;;
+
+      netbsd*)
+        if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable  -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
+	  wlarc=
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	fi
+	# Work around some broken pre-1.5 toolchains
+	output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
+	;;
+
+      *nto* | *qnx*)
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+	;;
+
+      openbsd2*)
+        # C++ shared libraries are fairly broken
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      openbsd*)
+	if test -f /usr/libexec/ld.so; then
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+	  fi
+	  output_verbose_link_cmd=func_echo_all
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      osf3* | osf4* | osf5*)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Archives containing C++ object files must be created using
+	    # the KAI C++ compiler.
+	    case $host in
+	      osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;;
+	      *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
+	    esac
+	    ;;
+          RCC*)
+	    # Rational C++ 2.4.1
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          cxx*)
+	    case $host in
+	      osf3*)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+		;;
+	      *)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
+	          echo "-hidden">> $lib.exp~
+	          $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp  `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~
+	          $RM $lib.exp'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+		;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+	  *)
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	      case $host in
+	        osf3*)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	        *)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	      esac
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	      # Commands to make compiler produce verbose output that lists
+	      # what "hidden" libraries, object files and flags are used when
+	      # linking a shared library.
+	      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      psos*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      sunos4*)
+        case $cc_basename in
+          CC*)
+	    # Sun C++ 4.x
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          lcc*)
+	    # Lucid
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      solaris*)
+        case $cc_basename in
+          CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+            _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
+	    _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag}  -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	      $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	    _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	    case $host_os in
+	      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+	      *)
+		# The compiler driver will combine and reorder linker options,
+		# but understands `-z linker_flag'.
+	        # Supported since Solaris 2.6 (maybe 2.5.1?)
+		_LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	        ;;
+	    esac
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+
+	    output_verbose_link_cmd='func_echo_all'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	    ;;
+          gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+
+	    # The C++ compiler must be used to create the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    # GNU C++ compiler with Solaris linker
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
+	      if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      else
+	        # g++ 2.7 appears to require `-G' NOT `-shared' on this
+	        # platform.
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      fi
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir'
+	      case $host_os in
+		solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+		*)
+		  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+		  ;;
+	      esac
+	    fi
+	    ;;
+        esac
+        ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      case $cc_basename in
+        CC*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+      esac
+      ;;
+
+      sysv5* | sco3.2v5* | sco5v6*)
+	# Note: We can NOT use -z defs as we might desire, because we do not
+	# link with -lc, and that would cause any symbols used from libc to
+	# always be unresolved, which means just about no library would
+	# ever link correctly.  If we're not using GNU ld we use -z text
+	# though, which does catch some bad symbols but isn't as heavy-handed
+	# as -z defs.
+	_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+	_LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+	_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+	_LT_TAGVAR(link_all_deplibs, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+	runpath_var='LD_RUN_PATH'
+
+	case $cc_basename in
+          CC*)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~
+	      '"$_LT_TAGVAR(old_archive_cmds, $1)"
+	    _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~
+	      '"$_LT_TAGVAR(reload_cmds, $1)"
+	    ;;
+	  *)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    ;;
+	esac
+      ;;
+
+      tandem*)
+        case $cc_basename in
+          NCC*)
+	    # NonStop-UX NCC 3.20
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      vxworks*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      *)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+    esac
+
+    AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+    test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+    _LT_TAGVAR(GCC, $1)="$GXX"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+  LDCXX=$LD
+  LD=$lt_save_LD
+  GCC=$lt_save_GCC
+  with_gnu_ld=$lt_save_with_gnu_ld
+  lt_cv_path_LDCXX=$lt_cv_path_LD
+  lt_cv_path_LD=$lt_save_path_LD
+  lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
+  lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
+fi # test "$_lt_caught_CXX_error" != yes
+
+AC_LANG_POP
+])# _LT_LANG_CXX_CONFIG
+
+
+# _LT_FUNC_STRIPNAME_CNF
+# ----------------------
+# func_stripname_cnf prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+#
+# This function is identical to the (non-XSI) version of func_stripname,
+# except this one can be used by m4 code that may be executed by configure,
+# rather than the libtool script.
+m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
+AC_REQUIRE([_LT_DECL_SED])
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
+func_stripname_cnf ()
+{
+  case ${2} in
+  .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+  *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+  esac
+} # func_stripname_cnf
+])# _LT_FUNC_STRIPNAME_CNF
+
+# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
+# ---------------------------------
+# Figure out "hidden" library dependencies from verbose
+# compiler output when linking a shared library.
+# Parse the compiler output and extract the necessary
+# objects, libraries and library flags.
+m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
+# Dependencies to place before and after the object being linked:
+_LT_TAGVAR(predep_objects, $1)=
+_LT_TAGVAR(postdep_objects, $1)=
+_LT_TAGVAR(predeps, $1)=
+_LT_TAGVAR(postdeps, $1)=
+_LT_TAGVAR(compiler_lib_search_path, $1)=
+
+dnl we can't use the lt_simple_compile_test_code here,
+dnl because it contains code intended for an executable,
+dnl not a library.  It's possible we should let each
+dnl tag define a new lt_????_link_test_code variable,
+dnl but it's only used here...
+m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
+int a;
+void foo (void) { a = 0; }
+_LT_EOF
+], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
+class Foo
+{
+public:
+  Foo (void) { a = 0; }
+private:
+  int a;
+};
+_LT_EOF
+], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer*4 a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
+public class foo {
+  private int a;
+  public void bar (void) {
+    a = 0;
+  }
+};
+_LT_EOF
+], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF
+package foo
+func foo() {
+}
+_LT_EOF
+])
+
+_lt_libdeps_save_CFLAGS=$CFLAGS
+case "$CC $CFLAGS " in #(
+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
+*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
+esac
+
+dnl Parse the compiler output and extract the necessary
+dnl objects, libraries and library flags.
+if AC_TRY_EVAL(ac_compile); then
+  # Parse the compiler output and extract the necessary
+  # objects, libraries and library flags.
+
+  # Sentinel used to keep track of whether or not we are before
+  # the conftest object file.
+  pre_test_object_deps_done=no
+
+  for p in `eval "$output_verbose_link_cmd"`; do
+    case ${prev}${p} in
+
+    -L* | -R* | -l*)
+       # Some compilers place space between "-{L,R}" and the path.
+       # Remove the space.
+       if test $p = "-L" ||
+          test $p = "-R"; then
+	 prev=$p
+	 continue
+       fi
+
+       # Expand the sysroot to ease extracting the directories later.
+       if test -z "$prev"; then
+         case $p in
+         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
+         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
+         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
+         esac
+       fi
+       case $p in
+       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
+       esac
+       if test "$pre_test_object_deps_done" = no; then
+	 case ${prev} in
+	 -L | -R)
+	   # Internal compiler library paths should come after those
+	   # provided by the user.  The postdeps already come after the
+	   # user supplied libs so there is no need to process them.
+	   if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}"
+	   else
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}"
+	   fi
+	   ;;
+	 # The "-l" case would never come before the object being
+	 # linked, so don't bother handling this case.
+	 esac
+       else
+	 if test -z "$_LT_TAGVAR(postdeps, $1)"; then
+	   _LT_TAGVAR(postdeps, $1)="${prev}${p}"
+	 else
+	   _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
+	 fi
+       fi
+       prev=
+       ;;
+
+    *.lto.$objext) ;; # Ignore GCC LTO objects
+    *.$objext)
+       # This assumes that the test object file only shows up
+       # once in the compiler output.
+       if test "$p" = "conftest.$objext"; then
+	 pre_test_object_deps_done=yes
+	 continue
+       fi
+
+       if test "$pre_test_object_deps_done" = no; then
+	 if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
+	   _LT_TAGVAR(predep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
+	 fi
+       else
+	 if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
+	   _LT_TAGVAR(postdep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
+	 fi
+       fi
+       ;;
+
+    *) ;; # Ignore the rest.
+
+    esac
+  done
+
+  # Clean up.
+  rm -f a.out a.exe
+else
+  echo "libtool.m4: error: problem compiling $1 test program"
+fi
+
+$RM -f conftest.$objext
+CFLAGS=$_lt_libdeps_save_CFLAGS
+
+# PORTME: override above test on systems where it is broken
+m4_if([$1], [CXX],
+[case $host_os in
+interix[[3-9]]*)
+  # Interix 3.5 installs completely hosed .la files for C++, so rather than
+  # hack all around it, let's just trust "g++" to DTRT.
+  _LT_TAGVAR(predep_objects,$1)=
+  _LT_TAGVAR(postdep_objects,$1)=
+  _LT_TAGVAR(postdeps,$1)=
+  ;;
+
+linux*)
+  case `$CC -V 2>&1 | sed 5q` in
+  *Sun\ C*)
+    # Sun C++ 5.9
+
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+
+solaris*)
+  case $cc_basename in
+  CC* | sunCC*)
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    # Adding this requires a known-good setup of shared libraries for
+    # Sun compiler versions before 5.6, else PIC objects from an old
+    # archive will be linked into the output, leading to subtle bugs.
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+esac
+])
+
+case " $_LT_TAGVAR(postdeps, $1) " in
+*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
+esac
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=
+if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
+fi
+_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
+    [The directories searched by this compiler when creating a shared library])
+_LT_TAGDECL([], [predep_objects], [1],
+    [Dependencies to place before and after the objects being linked to
+    create a shared library])
+_LT_TAGDECL([], [postdep_objects], [1])
+_LT_TAGDECL([], [predeps], [1])
+_LT_TAGDECL([], [postdeps], [1])
+_LT_TAGDECL([], [compiler_lib_search_path], [1],
+    [The library search path used internally by the compiler when linking
+    a shared library])
+])# _LT_SYS_HIDDEN_LIBDEPS
+
+
+# _LT_LANG_F77_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a Fortran 77 compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_F77_CONFIG],
+[AC_LANG_PUSH(Fortran 77)
+if test -z "$F77" || test "X$F77" = "Xno"; then
+  _lt_disable_F77=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for f77 test sources.
+ac_ext=f
+
+# Object file extension for compiled f77 test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the F77 compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_F77" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${F77-"f77"}
+  CFLAGS=$FFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+  GCC=$G77
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$G77"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC="$lt_save_CC"
+  CFLAGS="$lt_save_CFLAGS"
+fi # test "$_lt_disable_F77" != yes
+
+AC_LANG_POP
+])# _LT_LANG_F77_CONFIG
+
+
+# _LT_LANG_FC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for a Fortran compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_FC_CONFIG],
+[AC_LANG_PUSH(Fortran)
+
+if test -z "$FC" || test "X$FC" = "Xno"; then
+  _lt_disable_FC=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for fc test sources.
+ac_ext=${ac_fc_srcext-f}
+
+# Object file extension for compiled fc test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the FC compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_FC" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${FC-"f95"}
+  CFLAGS=$FCFLAGS
+  compiler=$CC
+  GCC=$ac_cv_fc_compiler_gnu
+
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+fi # test "$_lt_disable_FC" != yes
+
+AC_LANG_POP
+])# _LT_LANG_FC_CONFIG
+
+
+# _LT_LANG_GCJ_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Java Compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GCJ_CONFIG],
+[AC_REQUIRE([LT_PROG_GCJ])dnl
+AC_LANG_SAVE
+
+# Source file extension for Java test sources.
+ac_ext=java
+
+# Object file extension for compiled Java test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="class foo {}"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GCJ-"gcj"}
+CFLAGS=$GCJFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# GCJ did not yet exist back when GCC did not implicitly link in libc.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GCJ_CONFIG
+
+
+# _LT_LANG_GO_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Go compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GO_CONFIG],
+[AC_REQUIRE([LT_PROG_GO])dnl
+AC_LANG_SAVE
+
+# Source file extension for Go test sources.
+ac_ext=go
+
+# Object file extension for compiled Go test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="package main; func main() { }"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='package main; func main() { }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GOC-"gccgo"}
+CFLAGS=$GOFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# Go did not yet exist back when GCC did not implicitly link in libc.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GO_CONFIG
+
+
+# _LT_LANG_RC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the Windows resource compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_RC_CONFIG],
+[AC_REQUIRE([LT_PROG_RC])dnl
+AC_LANG_SAVE
+
+# Source file extension for RC test sources.
+ac_ext=rc
+
+# Object file extension for compiled RC test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
+
+# Code to be used in simple link tests
+lt_simple_link_test_code="$lt_simple_compile_test_code"
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC="$CC"
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=
+CC=${RC-"windres"}
+CFLAGS=
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_CC_BASENAME([$compiler])
+_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+
+if test -n "$compiler"; then
+  :
+  _LT_CONFIG($1)
+fi
+
+GCC=$lt_save_GCC
+AC_LANG_RESTORE
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_RC_CONFIG
+
+
+# LT_PROG_GCJ
+# -----------
+AC_DEFUN([LT_PROG_GCJ],
+[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
+  [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
+    [AC_CHECK_TOOL(GCJ, gcj,)
+      test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2"
+      AC_SUBST(GCJFLAGS)])])[]dnl
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
+
+
+# LT_PROG_GO
+# ----------
+AC_DEFUN([LT_PROG_GO],
+[AC_CHECK_TOOL(GOC, gccgo,)
+])
+
+
+# LT_PROG_RC
+# ----------
+AC_DEFUN([LT_PROG_RC],
+[AC_CHECK_TOOL(RC, windres,)
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_RC], [])
+
+
+# _LT_DECL_EGREP
+# --------------
+# If we don't have a new enough Autoconf to choose the best grep
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_EGREP],
+[AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_REQUIRE([AC_PROG_FGREP])dnl
+test -z "$GREP" && GREP=grep
+_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
+_LT_DECL([], [EGREP], [1], [An ERE matcher])
+_LT_DECL([], [FGREP], [1], [A literal string matcher])
+dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
+AC_SUBST([GREP])
+])
+
+
+# _LT_DECL_OBJDUMP
+# ----------------
+# If we don't have a new enough Autoconf to choose the best objdump
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_OBJDUMP],
+[AC_CHECK_TOOL(OBJDUMP, objdump, false)
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+AC_SUBST([OBJDUMP])
+])
+
+# _LT_DECL_DLLTOOL
+# ----------------
+# Ensure DLLTOOL variable is set.
+m4_defun([_LT_DECL_DLLTOOL],
+[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
+AC_SUBST([DLLTOOL])
+])
+
+# _LT_DECL_SED
+# ------------
+# Check for a fully-functional sed program that truncates
+# as few characters as possible.  Prefer GNU sed if found.
+m4_defun([_LT_DECL_SED],
+[AC_PROG_SED
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
+_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
+    [Sed that helps us avoid accidentally triggering echo(1) options like -n])
+])# _LT_DECL_SED
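+
+# A brief illustration of the Xsed idiom declared above (the variable name
+# `my_value' is hypothetical): strings are prefixed with `X' before being
+# echoed, so a value that begins with `-n' or `-e' cannot be mistaken for
+# an echo(1) option, and Xsed then strips the leading `X' again:
+#
+#   my_value="-n not an option"
+#   echo "X$my_value" | $Xsed     # prints: -n not an option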
+
+m4_ifndef([AC_PROG_SED], [
+############################################################
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_SED.  When it is available in   #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+############################################################
+
+m4_defun([AC_PROG_SED],
+[AC_MSG_CHECKING([for a sed that does not truncate output])
+AC_CACHE_VAL(lt_cv_path_SED,
+[# Loop through the user's path and test for sed and gsed.
+# Then use that list of sed's as ones to test for truncation.
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for lt_ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
+        lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
+      fi
+    done
+  done
+done
+IFS=$as_save_IFS
+lt_ac_max=0
+lt_ac_count=0
+# Add /usr/xpg4/bin/sed as it is typically found on Solaris
+# along with /bin/sed that truncates output.
+for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
+  test ! -f $lt_ac_sed && continue
+  cat /dev/null > conftest.in
+  lt_ac_count=0
+  echo $ECHO_N "0123456789$ECHO_C" >conftest.in
+  # Check for GNU sed and select it if it is found.
+  if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
+    lt_cv_path_SED=$lt_ac_sed
+    break
+  fi
+  while true; do
+    cat conftest.in conftest.in >conftest.tmp
+    mv conftest.tmp conftest.in
+    cp conftest.in conftest.nl
+    echo >>conftest.nl
+    $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
+    cmp -s conftest.out conftest.nl || break
+    # 10000 chars as input seems more than enough
+    test $lt_ac_count -gt 10 && break
+    lt_ac_count=`expr $lt_ac_count + 1`
+    if test $lt_ac_count -gt $lt_ac_max; then
+      lt_ac_max=$lt_ac_count
+      lt_cv_path_SED=$lt_ac_sed
+    fi
+  done
+done
+])
+SED=$lt_cv_path_SED
+AC_SUBST([SED])
+AC_MSG_RESULT([$SED])
+])#AC_PROG_SED
+])#m4_ifndef
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_SED], [])
+
+
+# _LT_CHECK_SHELL_FEATURES
+# ------------------------
+# Find out whether the shell is Bourne or XSI compatible,
+# or has some other useful features.
+m4_defun([_LT_CHECK_SHELL_FEATURES],
+[AC_MSG_CHECKING([whether the shell understands some XSI constructs])
+# Try some XSI features
+xsi_shell=no
+( _lt_dummy="a/b/c"
+  test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
+      = c,a/b,b/c, \
+    && eval 'test $(( 1 + 1 )) -eq 2 \
+    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+  && xsi_shell=yes
+AC_MSG_RESULT([$xsi_shell])
+_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell'])
+
+AC_MSG_CHECKING([whether the shell understands "+="])
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \
+    >/dev/null 2>&1 \
+  && lt_shell_append=yes
+AC_MSG_RESULT([$lt_shell_append])
+_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append'])
+
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
+
+# test EBCDIC or ASCII
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
+_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
+_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+])# _LT_CHECK_SHELL_FEATURES
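+
+# For reference, here is roughly what the XSI probe above checks, shown for
+# the sample value _lt_dummy=a/b/c:
+#
+#   ${_lt_dummy##*/}   -> c     (basename-style suffix stripping)
+#   ${_lt_dummy%/*}    -> a/b   (dirname-style prefix stripping)
+#   ${_lt_dummy#??}    -> b/c   (drop the first two characters)
+#   $(( 1 + 1 ))       -> 2     (shell arithmetic)
+#   ${#_lt_dummy}      -> 5     (string length)
+#
+# and the `+=' probe verifies that appending works, i.e. after
+# `foo=bar; foo+=baz' the shell reports foo=barbaz.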
+
+
+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY)
+# ------------------------------------------------------
+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and
+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY.
+m4_defun([_LT_PROG_FUNCTION_REPLACE],
+[dnl {
+sed -e '/^$1 ()$/,/^} # $1 /c\
+$1 ()\
+{\
+m4_bpatsubsts([$2], [$], [\\], [^\([	 ]\)], [\\\1])
+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+])
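+
+# A rough sketch of the effect (the portable body shown here is indicative
+# only, not the exact text ltmain.sh ships): a function in $cfgfile such as
+#
+#   func_basename ()
+#   {
+#     func_basename_result=`$ECHO "$1" | $SED "$basename"`
+#   } # func_basename may be replaced by extended shell implementation
+#
+# has its body swapped out, leaving the configured script with
+#
+#   func_basename ()
+#   {
+#     func_basename_result="${1##*/}"
+#   } # Extended-shell func_basename implementation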
+
+
+# _LT_PROG_REPLACE_SHELLFNS
+# -------------------------
+# Replace existing portable implementations of several shell functions with
+# equivalent extended shell implementations where those features are available.
+m4_defun([_LT_PROG_REPLACE_SHELLFNS],
+[if test x"$xsi_shell" = xyes; then
+  _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac])
+
+  _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl
+    func_basename_result="${1##*/}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+    func_basename_result="${1##*/}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl
+    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+    # positional parameters, so assign one to ordinary parameter first.
+    func_stripname_result=${3}
+    func_stripname_result=${func_stripname_result#"${1}"}
+    func_stripname_result=${func_stripname_result%"${2}"}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl
+    func_split_long_opt_name=${1%%=*}
+    func_split_long_opt_arg=${1#*=}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl
+    func_split_short_opt_arg=${1#??}
+    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}])
+
+  _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl
+    case ${1} in
+      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+      *)    func_lo2o_result=${1} ;;
+    esac])
+
+  _LT_PROG_FUNCTION_REPLACE([func_xform], [    func_xform_result=${1%.*}.lo])
+
+  _LT_PROG_FUNCTION_REPLACE([func_arith], [    func_arith_result=$(( $[*] ))])
+
+  _LT_PROG_FUNCTION_REPLACE([func_len], [    func_len_result=${#1}])
+fi
+
+if test x"$lt_shell_append" = xyes; then
+  _LT_PROG_FUNCTION_REPLACE([func_append], [    eval "${1}+=\\${2}"])
+
+  _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl
+    func_quote_for_eval "${2}"
+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \
+    eval "${1}+=\\\\ \\$func_quote_for_eval_result"])
+
+  # Save a `func_append' function call where possible by direct use of '+='
+  sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+else
+  # Save a `func_append' function call even when '+=' is not available
+  sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+fi
+
+if test x"$_lt_function_replace_fail" = x":"; then
+  AC_MSG_WARN([Unable to substitute extended shell functions in $ofile])
+fi
+])
+
+# _LT_PATH_CONVERSION_FUNCTIONS
+# -----------------------------
+# Determine which file name conversion functions should be used by
+# func_to_host_file (and, implicitly, by func_to_host_path).  These are needed
+# for certain cross-compile configurations and native mingw.
+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_MSG_CHECKING([how to convert $build file names to $host format])
+AC_CACHE_VAL(lt_cv_to_host_file_cmd,
+[case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+        ;;
+    esac
+    ;;
+  *-*-cygwin* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_noop
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+        ;;
+    esac
+    ;;
+  * ) # unhandled hosts (and "normal" native builds)
+    lt_cv_to_host_file_cmd=func_convert_file_noop
+    ;;
+esac
+])
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
+         [0], [convert $build file names to $host format])dnl
+
+AC_MSG_CHECKING([how to convert $build file names to toolchain format])
+AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
+[# Assume ordinary cross tools, or a native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+        ;;
+    esac
+    ;;
+esac
+])
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
+         [0], [convert $build files to toolchain format])dnl
+])# _LT_PATH_CONVERSION_FUNCTIONS
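+
+# For example (the host triplets and path are purely illustrative): when
+# configuring on Cygwin (say $build = i686-pc-cygwin) for a MinGW host
+# (say $host = i686-w64-mingw32), the checks above select
+#
+#   to_host_file_cmd=func_convert_file_cygwin_to_w32
+#   to_tool_file_cmd=func_convert_file_noop
+#
+# so a name such as /usr/lib/libfoo.la is translated to a Windows-style
+# path where the MinGW host expects one, while the Cygwin-hosted toolchain
+# keeps seeing POSIX paths (the exact result depends on the Cygwin mounts).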
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltdl.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltdl.m4
new file mode 100644
index 0000000..ea76f4d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltdl.m4
@@ -0,0 +1,817 @@
+# ltdl.m4 - Configure ltdl for the target system. -*-Autoconf-*-
+#
+#   Copyright (C) 1999-2006, 2007, 2008, 2011 Free Software Foundation, Inc.
+#   Written by Thomas Tanner, 1999
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 18 LTDL_INIT
+
+# LT_CONFIG_LTDL_DIR(DIRECTORY, [LTDL-MODE])
+# ------------------------------------------
+# DIRECTORY contains the libltdl sources.  It is okay to call this
+# function multiple times, as long as the same DIRECTORY is always given.
+AC_DEFUN([LT_CONFIG_LTDL_DIR],
+[AC_BEFORE([$0], [LTDL_INIT])
+_$0($*)
+])# LT_CONFIG_LTDL_DIR
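+
+# Typical use from configure.ac, before LTDL_INIT (the directory name
+# `libltdl' is just the conventional default; any directory holding the
+# shipped ltdl sources may be given):
+#
+#   LT_CONFIG_LTDL_DIR([libltdl])
+#   LT_INIT([dlopen])
+#   LTDL_INIT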
+
+# We break this out into a separate macro, so that we can call it safely
+# internally without being caught accidentally by the sed scan in libtoolize.
+m4_defun([_LT_CONFIG_LTDL_DIR],
+[dnl remove trailing slashes
+m4_pushdef([_ARG_DIR], m4_bpatsubst([$1], [/*$]))
+m4_case(_LTDL_DIR,
+	[], [dnl only set lt_ltdl_dir if _ARG_DIR is not simply `.'
+	     m4_if(_ARG_DIR, [.],
+	             [],
+		 [m4_define([_LTDL_DIR], _ARG_DIR)
+	          _LT_SHELL_INIT([lt_ltdl_dir=']_ARG_DIR['])])],
+    [m4_if(_ARG_DIR, _LTDL_DIR,
+	    [],
+	[m4_fatal([multiple libltdl directories: `]_LTDL_DIR[', `]_ARG_DIR['])])])
+m4_popdef([_ARG_DIR])
+])# _LT_CONFIG_LTDL_DIR
+
+# Initialise:
+m4_define([_LTDL_DIR], [])
+
+
+# _LT_BUILD_PREFIX
+# ----------------
+# If Autoconf is new enough, expand to `${top_build_prefix}', otherwise
+# to `${top_builddir}/'.
+m4_define([_LT_BUILD_PREFIX],
+[m4_ifdef([AC_AUTOCONF_VERSION],
+   [m4_if(m4_version_compare(m4_defn([AC_AUTOCONF_VERSION]), [2.62]),
+	  [-1], [m4_ifdef([_AC_HAVE_TOP_BUILD_PREFIX],
+			  [${top_build_prefix}],
+			  [${top_builddir}/])],
+	  [${top_build_prefix}])],
+   [${top_builddir}/])[]dnl
+])
+
+
+# LTDL_CONVENIENCE
+# ----------------
+# sets LIBLTDL to the link flags for the libltdl convenience library and
+# LTDLINCL to the include flags for the libltdl header and adds
+# --enable-ltdl-convenience to the configure arguments.  Note that
+# AC_CONFIG_SUBDIRS is not called here.  LIBLTDL will be prefixed with
+# '${top_build_prefix}' if available, otherwise with '${top_builddir}/',
+# and LTDLINCL will be prefixed with '${top_srcdir}/' (note the single
+# quotes!).  If your package is not flat and you're not using automake,
+# define top_build_prefix, top_builddir, and top_srcdir appropriately
+# in your Makefiles.
+AC_DEFUN([LTDL_CONVENIENCE],
+[AC_BEFORE([$0], [LTDL_INIT])dnl
+dnl Although the argument is deprecated and no longer documented,
+dnl LTDL_CONVENIENCE used to take a DIRECTORY argument; if we have one
+dnl here, make sure it is the same as any other declaration of libltdl's
+dnl location!  This also ensures lt_ltdl_dir is set when configure.ac is
+dnl not yet using an explicit LT_CONFIG_LTDL_DIR.
+m4_ifval([$1], [_LT_CONFIG_LTDL_DIR([$1])])dnl
+_$0()
+])# LTDL_CONVENIENCE
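+
+# After LTDL_CONVENIENCE (or LTDL_INIT([convenience])), a client package
+# would typically consume the substituted variables from its Makefile.am;
+# the program name `myprog' below is hypothetical:
+#
+#   AM_CPPFLAGS         = $(LTDLINCL)
+#   myprog_LDADD        = $(LIBLTDL)
+#   myprog_DEPENDENCIES = $(LTDLDEPS)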
+
+# AC_LIBLTDL_CONVENIENCE accepted a directory argument in older libtools,
+# now we have LT_CONFIG_LTDL_DIR:
+AU_DEFUN([AC_LIBLTDL_CONVENIENCE],
+[_LT_CONFIG_LTDL_DIR([m4_default([$1], [libltdl])])
+_LTDL_CONVENIENCE])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBLTDL_CONVENIENCE], [])
+
+
+# _LTDL_CONVENIENCE
+# -----------------
+# Code shared by LTDL_CONVENIENCE and LTDL_INIT([convenience]).
+m4_defun([_LTDL_CONVENIENCE],
+[case $enable_ltdl_convenience in
+  no) AC_MSG_ERROR([this package needs a convenience libltdl]) ;;
+  "") enable_ltdl_convenience=yes
+      ac_configure_args="$ac_configure_args --enable-ltdl-convenience" ;;
+esac
+LIBLTDL='_LT_BUILD_PREFIX'"${lt_ltdl_dir+$lt_ltdl_dir/}libltdlc.la"
+LTDLDEPS=$LIBLTDL
+LTDLINCL='-I${top_srcdir}'"${lt_ltdl_dir+/$lt_ltdl_dir}"
+
+AC_SUBST([LIBLTDL])
+AC_SUBST([LTDLDEPS])
+AC_SUBST([LTDLINCL])
+
+# For backwards non-gettext consistent compatibility...
+INCLTDL="$LTDLINCL"
+AC_SUBST([INCLTDL])
+])# _LTDL_CONVENIENCE
+
+
+# LTDL_INSTALLABLE
+# ----------------
+# sets LIBLTDL to the link flags for the libltdl installable library
+# and LTDLINCL to the include flags for the libltdl header and adds
+# --enable-ltdl-install to the configure arguments.  Note that
+# AC_CONFIG_SUBDIRS is not called from here.  If an installed libltdl
+# is not found, LIBLTDL will be prefixed with '${top_build_prefix}' if
+# available, otherwise with '${top_builddir}/', and LTDLINCL will be
+# prefixed with '${top_srcdir}/' (note the single quotes!).  If your
+# package is not flat and you're not using automake, define top_build_prefix,
+# top_builddir, and top_srcdir appropriately in your Makefiles.
+# In the future, this macro may have to be called after LT_INIT.
+AC_DEFUN([LTDL_INSTALLABLE],
+[AC_BEFORE([$0], [LTDL_INIT])dnl
+dnl Although the argument is deprecated and no longer documented,
+dnl LTDL_INSTALLABLE used to take a DIRECTORY argument; if we have one
+dnl here, make sure it is the same as any other declaration of libltdl's
+dnl location!  This also ensures lt_ltdl_dir is set when configure.ac is
+dnl not yet using an explicit LT_CONFIG_LTDL_DIR.
+m4_ifval([$1], [_LT_CONFIG_LTDL_DIR([$1])])dnl
+_$0()
+])# LTDL_INSTALLABLE
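+
+# Typical use from configure.ac, similar in effect to LTDL_INIT([installable]):
+#
+#   LT_CONFIG_LTDL_DIR([libltdl])
+#   LTDL_INSTALLABLE
+#
+# The installer then chooses between the shipped copy and an already
+# installed libltdl with `--enable-ltdl-install' / `--disable-ltdl-install'.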
+
+# AC_LIBLTDL_INSTALLABLE accepted a directory argument in older libtools,
+# now we have LT_CONFIG_LTDL_DIR:
+AU_DEFUN([AC_LIBLTDL_INSTALLABLE],
+[_LT_CONFIG_LTDL_DIR([m4_default([$1], [libltdl])])
+_LTDL_INSTALLABLE])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBLTDL_INSTALLABLE], [])
+
+
+# _LTDL_INSTALLABLE
+# -----------------
+# Code shared by LTDL_INSTALLABLE and LTDL_INIT([installable]).
+m4_defun([_LTDL_INSTALLABLE],
+[if test -f $prefix/lib/libltdl.la; then
+  lt_save_LDFLAGS="$LDFLAGS"
+  LDFLAGS="-L$prefix/lib $LDFLAGS"
+  AC_CHECK_LIB([ltdl], [lt_dlinit], [lt_lib_ltdl=yes])
+  LDFLAGS="$lt_save_LDFLAGS"
+  if test x"${lt_lib_ltdl-no}" = xyes; then
+    if test x"$enable_ltdl_install" != xyes; then
+      # Don't overwrite $prefix/lib/libltdl.la without --enable-ltdl-install
+      AC_MSG_WARN([not overwriting libltdl at $prefix, force with `--enable-ltdl-install'])
+      enable_ltdl_install=no
+    fi
+  elif test x"$enable_ltdl_install" = xno; then
+    AC_MSG_WARN([libltdl not installed, but installation disabled])
+  fi
+fi
+
+# If configure.ac declared an installable ltdl, and the user didn't override
+# with --disable-ltdl-install, we will install the shipped libltdl.
+case $enable_ltdl_install in
+  no) ac_configure_args="$ac_configure_args --enable-ltdl-install=no"
+      LIBLTDL="-lltdl"
+      LTDLDEPS=
+      LTDLINCL=
+      ;;
+  *)  enable_ltdl_install=yes
+      ac_configure_args="$ac_configure_args --enable-ltdl-install"
+      LIBLTDL='_LT_BUILD_PREFIX'"${lt_ltdl_dir+$lt_ltdl_dir/}libltdl.la"
+      LTDLDEPS=$LIBLTDL
+      LTDLINCL='-I${top_srcdir}'"${lt_ltdl_dir+/$lt_ltdl_dir}"
+      ;;
+esac
+
+AC_SUBST([LIBLTDL])
+AC_SUBST([LTDLDEPS])
+AC_SUBST([LTDLINCL])
+
+# For backwards non-gettext consistent compatibility...
+INCLTDL="$LTDLINCL"
+AC_SUBST([INCLTDL])
+])# _LTDL_INSTALLABLE
+
+
+# _LTDL_MODE_DISPATCH
+# -------------------
+m4_define([_LTDL_MODE_DISPATCH],
+[dnl If _LTDL_DIR is `.', then we are configuring libltdl itself:
+m4_if(_LTDL_DIR, [],
+	[],
+    dnl if _LTDL_MODE was not set already, the default value is `subproject':
+    [m4_case(m4_default(_LTDL_MODE, [subproject]),
+	  [subproject], [AC_CONFIG_SUBDIRS(_LTDL_DIR)
+			  _LT_SHELL_INIT([lt_dlopen_dir="$lt_ltdl_dir"])],
+	  [nonrecursive], [_LT_SHELL_INIT([lt_dlopen_dir="$lt_ltdl_dir"; lt_libobj_prefix="$lt_ltdl_dir/"])],
+	  [recursive], [],
+	[m4_fatal([unknown libltdl mode: ]_LTDL_MODE)])])dnl
+dnl Be careful not to expand twice:
+m4_define([$0], [])
+])# _LTDL_MODE_DISPATCH
+
+
+# _LT_LIBOBJ(MODULE_NAME)
+# -----------------------
+# Like AC_LIBOBJ, except that MODULE_NAME goes into _LT_LIBOBJS instead
+# of into LIBOBJS.
+AC_DEFUN([_LT_LIBOBJ], [
+  m4_pattern_allow([^_LT_LIBOBJS$])
+  _LT_LIBOBJS="$_LT_LIBOBJS $1.$ac_objext"
+])# _LT_LIBOBJ
+
+
+# LTDL_INIT([OPTIONS])
+# --------------------
+# Clients of libltdl can use this macro to allow the installer to
+# choose between a shipped copy of the ltdl sources and a preinstalled
+# version of the library.  If the shipped ltdl sources are not in a
+# subdirectory named libltdl, the directory name must be given by
+# LT_CONFIG_LTDL_DIR.
+AC_DEFUN([LTDL_INIT],
+[dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+dnl We need to keep our own list of libobjs separate from our parent project,
+dnl and the easiest way to do that is redefine the AC_LIBOBJs macro while
+dnl we look for our own LIBOBJs.
+m4_pushdef([AC_LIBOBJ], m4_defn([_LT_LIBOBJ]))
+m4_pushdef([AC_LIBSOURCES])
+
+dnl If not otherwise defined, default to the 1.5.x compatible subproject mode:
+m4_if(_LTDL_MODE, [],
+        [m4_define([_LTDL_MODE], m4_default([$2], [subproject]))
+        m4_if([-1], [m4_bregexp(_LTDL_MODE, [\(subproject\|\(non\)?recursive\)])],
+                [m4_fatal([unknown libltdl mode: ]_LTDL_MODE)])])
+
+AC_ARG_WITH([included_ltdl],
+    [AS_HELP_STRING([--with-included-ltdl],
+                    [use the GNU ltdl sources included here])])
+
+if test "x$with_included_ltdl" != xyes; then
+  # We are not being forced to use the included libltdl sources, so
+  # decide whether there is a useful installed version we can use.
+  AC_CHECK_HEADER([ltdl.h],
+      [AC_CHECK_DECL([lt_dlinterface_register],
+	   [AC_CHECK_LIB([ltdl], [lt_dladvise_preload],
+	       [with_included_ltdl=no],
+	       [with_included_ltdl=yes])],
+	   [with_included_ltdl=yes],
+	   [AC_INCLUDES_DEFAULT
+	    #include <ltdl.h>])],
+      [with_included_ltdl=yes],
+      [AC_INCLUDES_DEFAULT]
+  )
+fi
+
+dnl If neither LT_CONFIG_LTDL_DIR, LTDL_CONVENIENCE nor LTDL_INSTALLABLE
+dnl was called yet, then for old times' sake, we assume libltdl is in an
+dnl eponymous directory:
+AC_PROVIDE_IFELSE([LT_CONFIG_LTDL_DIR], [], [_LT_CONFIG_LTDL_DIR([libltdl])])
+
+AC_ARG_WITH([ltdl_include],
+    [AS_HELP_STRING([--with-ltdl-include=DIR],
+                    [use the ltdl headers installed in DIR])])
+
+if test -n "$with_ltdl_include"; then
+  if test -f "$with_ltdl_include/ltdl.h"; then :
+  else
+    AC_MSG_ERROR([invalid ltdl include directory: `$with_ltdl_include'])
+  fi
+else
+  with_ltdl_include=no
+fi
+
+AC_ARG_WITH([ltdl_lib],
+    [AS_HELP_STRING([--with-ltdl-lib=DIR],
+                    [use the libltdl.la installed in DIR])])
+
+if test -n "$with_ltdl_lib"; then
+  if test -f "$with_ltdl_lib/libltdl.la"; then :
+  else
+    AC_MSG_ERROR([invalid ltdl library directory: `$with_ltdl_lib'])
+  fi
+else
+  with_ltdl_lib=no
+fi
+
+case ,$with_included_ltdl,$with_ltdl_include,$with_ltdl_lib, in
+  ,yes,no,no,)
+	m4_case(m4_default(_LTDL_TYPE, [convenience]),
+	    [convenience], [_LTDL_CONVENIENCE],
+	    [installable], [_LTDL_INSTALLABLE],
+	  [m4_fatal([unknown libltdl build type: ]_LTDL_TYPE)])
+	;;
+  ,no,no,no,)
+	# If the included ltdl is not to be used, then use the
+	# preinstalled libltdl we found.
+	AC_DEFINE([HAVE_LTDL], [1],
+	  [Define this if a modern libltdl is already installed])
+	LIBLTDL=-lltdl
+	LTDLDEPS=
+	LTDLINCL=
+	;;
+  ,no*,no,*)
+	AC_MSG_ERROR([`--with-ltdl-include' and `--with-ltdl-lib' options must be used together])
+	;;
+  *)	with_included_ltdl=no
+	LIBLTDL="-L$with_ltdl_lib -lltdl"
+	LTDLDEPS=
+	LTDLINCL="-I$with_ltdl_include"
+	;;
+esac
+INCLTDL="$LTDLINCL"
+
+# Report our decision...
+AC_MSG_CHECKING([where to find libltdl headers])
+AC_MSG_RESULT([$LTDLINCL])
+AC_MSG_CHECKING([where to find libltdl library])
+AC_MSG_RESULT([$LIBLTDL])
+
+_LTDL_SETUP
+
+dnl restore autoconf definition.
+m4_popdef([AC_LIBOBJ])
+m4_popdef([AC_LIBSOURCES])
+
+AC_CONFIG_COMMANDS_PRE([
+    _ltdl_libobjs=
+    _ltdl_ltlibobjs=
+    if test -n "$_LT_LIBOBJS"; then
+      # Remove the extension.
+      _lt_sed_drop_objext='s/\.o$//;s/\.obj$//'
+      for i in `for i in $_LT_LIBOBJS; do echo "$i"; done | sed "$_lt_sed_drop_objext" | sort -u`; do
+        _ltdl_libobjs="$_ltdl_libobjs $lt_libobj_prefix$i.$ac_objext"
+        _ltdl_ltlibobjs="$_ltdl_ltlibobjs $lt_libobj_prefix$i.lo"
+      done
+    fi
+    AC_SUBST([ltdl_LIBOBJS], [$_ltdl_libobjs])
+    AC_SUBST([ltdl_LTLIBOBJS], [$_ltdl_ltlibobjs])
+])
+
+# Only expand once:
+m4_define([LTDL_INIT])
+])# LTDL_INIT
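+
+# A minimal configure.ac sketch using LTDL_INIT; the project name and the
+# option list are hypothetical, and a real project will add its own
+# compiler and header checks.  With no type option, the shipped sources are
+# built as a convenience library in the default subproject mode:
+#
+#   AC_INIT([myproject], [1.0])
+#   AM_INIT_AUTOMAKE([foreign])
+#   LT_CONFIG_LTDL_DIR([libltdl])
+#   LT_INIT([dlopen])
+#   LTDL_INIT([convenience])
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT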
+
+# Old names:
+AU_DEFUN([AC_LIB_LTDL], [LTDL_INIT($@)])
+AU_DEFUN([AC_WITH_LTDL], [LTDL_INIT($@)])
+AU_DEFUN([LT_WITH_LTDL], [LTDL_INIT($@)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIB_LTDL], [])
+dnl AC_DEFUN([AC_WITH_LTDL], [])
+dnl AC_DEFUN([LT_WITH_LTDL], [])
+
+
+# _LTDL_SETUP
+# -----------
+# Perform all the checks necessary for compilation of the ltdl objects
+#  -- including compiler checks and header checks.  This is a public
+# interface, mainly for the benefit of libltdl's own configure.ac; most
+# other users should call LTDL_INIT instead.
+AC_DEFUN([_LTDL_SETUP],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_SYS_MODULE_EXT])dnl
+AC_REQUIRE([LT_SYS_MODULE_PATH])dnl
+AC_REQUIRE([LT_SYS_DLSEARCH_PATH])dnl
+AC_REQUIRE([LT_LIB_DLLOAD])dnl
+AC_REQUIRE([LT_SYS_SYMBOL_USCORE])dnl
+AC_REQUIRE([LT_FUNC_DLSYM_USCORE])dnl
+AC_REQUIRE([LT_SYS_DLOPEN_DEPLIBS])dnl
+AC_REQUIRE([gl_FUNC_ARGZ])dnl
+
+m4_require([_LT_CHECK_OBJDIR])dnl
+m4_require([_LT_HEADER_DLFCN])dnl
+m4_require([_LT_CHECK_DLPREOPEN])dnl
+m4_require([_LT_DECL_SED])dnl
+
+dnl Don't require this, or it will be expanded earlier than the code
+dnl that sets the variables it relies on:
+_LT_ENABLE_INSTALL
+
+dnl _LTDL_MODE specific code must be called at least once:
+_LTDL_MODE_DISPATCH
+
+# In order that ltdl.c can compile, find out the first AC_CONFIG_HEADERS
+# the user used.  This is so that ltdl.h can pick up the parent project's
+# config.h file.  The first file in AC_CONFIG_HEADERS must contain the
+# definitions required by ltdl.c.
+# FIXME: Remove use of undocumented AC_LIST_HEADERS (2.59 compatibility).
+AC_CONFIG_COMMANDS_PRE([dnl
+m4_pattern_allow([^LT_CONFIG_H$])dnl
+m4_ifset([AH_HEADER],
+    [LT_CONFIG_H=AH_HEADER],
+    [m4_ifset([AC_LIST_HEADERS],
+	    [LT_CONFIG_H=`echo "AC_LIST_HEADERS" | $SED 's,^[[      ]]*,,;s,[[ :]].*$,,'`],
+	[])])])
+AC_SUBST([LT_CONFIG_H])
+
+AC_CHECK_HEADERS([unistd.h dl.h sys/dl.h dld.h mach-o/dyld.h dirent.h],
+	[], [], [AC_INCLUDES_DEFAULT])
+
+AC_CHECK_FUNCS([closedir opendir readdir], [], [AC_LIBOBJ([lt__dirent])])
+AC_CHECK_FUNCS([strlcat strlcpy], [], [AC_LIBOBJ([lt__strl])])
+
+m4_pattern_allow([LT_LIBEXT])dnl
+AC_DEFINE_UNQUOTED([LT_LIBEXT],["$libext"],[The archive extension])
+
+name=
+eval "lt_libprefix=\"$libname_spec\""
+m4_pattern_allow([LT_LIBPREFIX])dnl
+AC_DEFINE_UNQUOTED([LT_LIBPREFIX],["$lt_libprefix"],[The archive prefix])
+
+name=ltdl
+eval "LTDLOPEN=\"$libname_spec\""
+AC_SUBST([LTDLOPEN])
+])# _LTDL_SETUP
+
+
+# _LT_ENABLE_INSTALL
+# ------------------
+m4_define([_LT_ENABLE_INSTALL],
+[AC_ARG_ENABLE([ltdl-install],
+    [AS_HELP_STRING([--enable-ltdl-install], [install libltdl])])
+
+case ,${enable_ltdl_install},${enable_ltdl_convenience} in
+  *yes*) ;;
+  *) enable_ltdl_convenience=yes ;;
+esac
+
+m4_ifdef([AM_CONDITIONAL],
+[AM_CONDITIONAL(INSTALL_LTDL, test x"${enable_ltdl_install-no}" != xno)
+ AM_CONDITIONAL(CONVENIENCE_LTDL, test x"${enable_ltdl_convenience-no}" != xno)])
+])# _LT_ENABLE_INSTALL
+
+
+# LT_SYS_DLOPEN_DEPLIBS
+# ---------------------
+AC_DEFUN([LT_SYS_DLOPEN_DEPLIBS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_CACHE_CHECK([whether deplibs are loaded by dlopen],
+  [lt_cv_sys_dlopen_deplibs],
+  [# PORTME does your system automatically load deplibs for dlopen?
+  # or its logical equivalent (e.g. shl_load for HP-UX < 11)
+  # For now, we just catch OSes we know something about -- in the
+  # future, we'll try to test this programmatically.
+  lt_cv_sys_dlopen_deplibs=unknown
+  case $host_os in
+  aix3*|aix4.1.*|aix4.2.*)
+    # Unknown whether this is true for these versions of AIX, but
+    # we want this `case' here to explicitly catch those versions.
+    lt_cv_sys_dlopen_deplibs=unknown
+    ;;
+  aix[[4-9]]*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  amigaos*)
+    case $host_cpu in
+    powerpc)
+      lt_cv_sys_dlopen_deplibs=no
+      ;;
+    esac
+    ;;
+  darwin*)
+    # Assuming the user has installed a libdl from somewhere, this is true
+    # If you are looking for one http://www.opendarwin.org/projects/dlcompat
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  freebsd* | dragonfly*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  gnu* | linux* | k*bsd*-gnu | kopensolaris*-gnu)
+    # GNU and its variants, using gnu ld.so (Glibc)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  hpux10*|hpux11*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  interix*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  irix[[12345]]*|irix6.[[01]]*)
+    # Catch all versions of IRIX before 6.2, and indicate that we don't
+    # know how it worked for any of those versions.
+    lt_cv_sys_dlopen_deplibs=unknown
+    ;;
+  irix*)
+    # The case above catches anything before 6.2, and it's known that
+    # at 6.2 and later dlopen does load deplibs.
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  netbsd*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  openbsd*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  osf[[1234]]*)
+    # dlopen did load deplibs (at least at 4.x), but until the 5.x series,
+    # it did *not* use an RPATH in a shared library to find objects the
+    # library depends on, so we explicitly say `no'.
+    lt_cv_sys_dlopen_deplibs=no
+    ;;
+  osf5.0|osf5.0a|osf5.1)
+    # dlopen *does* load deplibs and with the right loader patch applied
+    # it even uses RPATH in a shared library to search for shared objects
+    # that the library depends on, but there's no easy way to know if that
+    # patch is installed.  Since this is the case, all we can really
+    # say is unknown -- it depends on the patch being installed.  If
+    # it is, this changes to `yes'.  Without it, it would be `no'.
+    lt_cv_sys_dlopen_deplibs=unknown
+    ;;
+  osf*)
+    # the two cases above should catch all versions of osf <= 5.1.  Read
+    # the comments above for what we know about them.
+    # At > 5.1, deplibs are loaded *and* any RPATH in a shared library
+    # is used to find them so we can finally say `yes'.
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  qnx*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  solaris*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  esac
+  ])
+if test "$lt_cv_sys_dlopen_deplibs" != yes; then
+ AC_DEFINE([LTDL_DLOPEN_DEPLIBS], [1],
+    [Define if the OS needs help to load dependent libraries for dlopen().])
+fi
+])# LT_SYS_DLOPEN_DEPLIBS
+
+# Old name:
+AU_ALIAS([AC_LTDL_SYS_DLOPEN_DEPLIBS], [LT_SYS_DLOPEN_DEPLIBS])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LTDL_SYS_DLOPEN_DEPLIBS], [])
+
+
+# LT_SYS_MODULE_EXT
+# -----------------
+AC_DEFUN([LT_SYS_MODULE_EXT],
+[m4_require([_LT_SYS_DYNAMIC_LINKER])dnl
+AC_CACHE_CHECK([which extension is used for runtime loadable modules],
+  [libltdl_cv_shlibext],
+[
+module=yes
+eval libltdl_cv_shlibext=$shrext_cmds
+module=no
+eval libltdl_cv_shrext=$shrext_cmds
+  ])
+if test -n "$libltdl_cv_shlibext"; then
+  m4_pattern_allow([LT_MODULE_EXT])dnl
+  AC_DEFINE_UNQUOTED([LT_MODULE_EXT], ["$libltdl_cv_shlibext"],
+    [Define to the extension used for runtime loadable modules, say, ".so".])
+fi
+if test "$libltdl_cv_shrext" != "$libltdl_cv_shlibext"; then
+  m4_pattern_allow([LT_SHARED_EXT])dnl
+  AC_DEFINE_UNQUOTED([LT_SHARED_EXT], ["$libltdl_cv_shrext"],
+    [Define to the shared library suffix, say, ".dylib".])
+fi
+])# LT_SYS_MODULE_EXT
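+
+# Illustration of the two evaluations above (the results come from the
+# $shrext_cmds value computed elsewhere by libtool.m4): on most ELF
+# platforms both expand to the same value, e.g.
+#
+#   libltdl_cv_shlibext=.so
+#   libltdl_cv_shrext=.so      # LT_SHARED_EXT is then left undefined
+#
+# whereas on Mac OS X loadable modules and shared libraries differ,
+# typically .so for LT_MODULE_EXT and .dylib for LT_SHARED_EXT.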
+
+# Old name:
+AU_ALIAS([AC_LTDL_SHLIBEXT], [LT_SYS_MODULE_EXT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LTDL_SHLIBEXT], [])
+
+
+# LT_SYS_MODULE_PATH
+# ------------------
+AC_DEFUN([LT_SYS_MODULE_PATH],
+[m4_require([_LT_SYS_DYNAMIC_LINKER])dnl
+AC_CACHE_CHECK([which variable specifies run-time module search path],
+  [lt_cv_module_path_var], [lt_cv_module_path_var="$shlibpath_var"])
+if test -n "$lt_cv_module_path_var"; then
+  m4_pattern_allow([LT_MODULE_PATH_VAR])dnl
+  AC_DEFINE_UNQUOTED([LT_MODULE_PATH_VAR], ["$lt_cv_module_path_var"],
+    [Define to the name of the environment variable that determines the run-time module search path.])
+fi
+])# LT_SYS_MODULE_PATH
+
+# Old name:
+AU_ALIAS([AC_LTDL_SHLIBPATH], [LT_SYS_MODULE_PATH])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LTDL_SHLIBPATH], [])
+
+
+# LT_SYS_DLSEARCH_PATH
+# --------------------
+AC_DEFUN([LT_SYS_DLSEARCH_PATH],
+[m4_require([_LT_SYS_DYNAMIC_LINKER])dnl
+AC_CACHE_CHECK([for the default library search path],
+  [lt_cv_sys_dlsearch_path],
+  [lt_cv_sys_dlsearch_path="$sys_lib_dlsearch_path_spec"])
+if test -n "$lt_cv_sys_dlsearch_path"; then
+  sys_dlsearch_path=
+  for dir in $lt_cv_sys_dlsearch_path; do
+    if test -z "$sys_dlsearch_path"; then
+      sys_dlsearch_path="$dir"
+    else
+      sys_dlsearch_path="$sys_dlsearch_path$PATH_SEPARATOR$dir"
+    fi
+  done
+  m4_pattern_allow([LT_DLSEARCH_PATH])dnl
+  AC_DEFINE_UNQUOTED([LT_DLSEARCH_PATH], ["$sys_dlsearch_path"],
+    [Define to the system default library search path.])
+fi
+])# LT_SYS_DLSEARCH_PATH
+
+# Old name:
+AU_ALIAS([AC_LTDL_SYSSEARCHPATH], [LT_SYS_DLSEARCH_PATH])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LTDL_SYSSEARCHPATH], [])
+
+
+# _LT_CHECK_DLPREOPEN
+# -------------------
+m4_defun([_LT_CHECK_DLPREOPEN],
+[m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+AC_CACHE_CHECK([whether libtool supports -dlopen/-dlpreopen],
+  [libltdl_cv_preloaded_symbols],
+  [if test -n "$lt_cv_sys_global_symbol_pipe"; then
+    libltdl_cv_preloaded_symbols=yes
+  else
+    libltdl_cv_preloaded_symbols=no
+  fi
+  ])
+if test x"$libltdl_cv_preloaded_symbols" = xyes; then
+  AC_DEFINE([HAVE_PRELOADED_SYMBOLS], [1],
+    [Define if libtool can extract symbol lists from object files.])
+fi
+])# _LT_CHECK_DLPREOPEN
+
+
+# LT_LIB_DLLOAD
+# -------------
+AC_DEFUN([LT_LIB_DLLOAD],
+[m4_pattern_allow([^LT_DLLOADERS$])
+LT_DLLOADERS=
+AC_SUBST([LT_DLLOADERS])
+
+AC_LANG_PUSH([C])
+
+LIBADD_DLOPEN=
+AC_SEARCH_LIBS([dlopen], [dl],
+	[AC_DEFINE([HAVE_LIBDL], [1],
+		   [Define if you have the libdl library or equivalent.])
+	if test "$ac_cv_search_dlopen" != "none required" ; then
+	  LIBADD_DLOPEN="-ldl"
+	fi
+	libltdl_cv_lib_dl_dlopen="yes"
+	LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dlopen.la"],
+    [AC_LINK_IFELSE([AC_LANG_PROGRAM([[#if HAVE_DLFCN_H
+#  include <dlfcn.h>
+#endif
+    ]], [[dlopen(0, 0);]])],
+	    [AC_DEFINE([HAVE_LIBDL], [1],
+		       [Define if you have the libdl library or equivalent.])
+	    libltdl_cv_func_dlopen="yes"
+	    LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dlopen.la"],
+	[AC_CHECK_LIB([svld], [dlopen],
+		[AC_DEFINE([HAVE_LIBDL], [1],
+			 [Define if you have the libdl library or equivalent.])
+	        LIBADD_DLOPEN="-lsvld" libltdl_cv_func_dlopen="yes"
+		LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dlopen.la"])])])
+if test x"$libltdl_cv_func_dlopen" = xyes || test x"$libltdl_cv_lib_dl_dlopen" = xyes
+then
+  lt_save_LIBS="$LIBS"
+  LIBS="$LIBS $LIBADD_DLOPEN"
+  AC_CHECK_FUNCS([dlerror])
+  LIBS="$lt_save_LIBS"
+fi
+AC_SUBST([LIBADD_DLOPEN])
+
+LIBADD_SHL_LOAD=
+AC_CHECK_FUNC([shl_load],
+	[AC_DEFINE([HAVE_SHL_LOAD], [1],
+		   [Define if you have the shl_load function.])
+	LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}shl_load.la"],
+    [AC_CHECK_LIB([dld], [shl_load],
+	    [AC_DEFINE([HAVE_SHL_LOAD], [1],
+		       [Define if you have the shl_load function.])
+	    LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}shl_load.la"
+	    LIBADD_SHL_LOAD="-ldld"])])
+AC_SUBST([LIBADD_SHL_LOAD])
+
+case $host_os in
+darwin[[1567]].*)
+# We only want this for pre-Mac OS X 10.4.
+  AC_CHECK_FUNC([_dyld_func_lookup],
+	[AC_DEFINE([HAVE_DYLD], [1],
+		   [Define if you have the _dyld_func_lookup function.])
+	LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dyld.la"])
+  ;;
+beos*)
+  LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}load_add_on.la"
+  ;;
+cygwin* | mingw* | os2* | pw32*)
+  AC_CHECK_DECLS([cygwin_conv_path], [], [], [[#include <sys/cygwin.h>]])
+  LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}loadlibrary.la"
+  ;;
+esac
+
+AC_CHECK_LIB([dld], [dld_link],
+	[AC_DEFINE([HAVE_DLD], [1],
+		   [Define if you have the GNU dld library.])
+		LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dld_link.la"])
+AC_SUBST([LIBADD_DLD_LINK])
+
+m4_pattern_allow([^LT_DLPREOPEN$])
+LT_DLPREOPEN=
+if test -n "$LT_DLLOADERS"
+then
+  for lt_loader in $LT_DLLOADERS; do
+    LT_DLPREOPEN="$LT_DLPREOPEN-dlpreopen $lt_loader "
+  done
+  AC_DEFINE([HAVE_LIBDLLOADER], [1],
+            [Define if libdlloader will be built on this platform])
+fi
+AC_SUBST([LT_DLPREOPEN])
+
+dnl This isn't used anymore, but set it for backwards compatibility
+LIBADD_DL="$LIBADD_DLOPEN $LIBADD_SHL_LOAD"
+AC_SUBST([LIBADD_DL])
+
+AC_LANG_POP
+])# LT_LIB_DLLOAD
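+
+# On a typical GNU/Linux system the dlopen-based loader is found (in libc
+# itself or via -ldl), so with no LT_CONFIG_LTDL_DIR prefix the macro ends
+# up with roughly
+#
+#   LIBADD_DLOPEN=             # or -ldl when dlopen is not in libc
+#   LT_DLLOADERS=" dlopen.la"
+#   LT_DLPREOPEN="-dlpreopen dlopen.la "
+#
+# while the shl_load, dyld and dld_link loaders are only added on platforms
+# that provide them.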
+
+# Old name:
+AU_ALIAS([AC_LTDL_DLLIB], [LT_LIB_DLLOAD])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LTDL_DLLIB], [])
+
+
+# LT_SYS_SYMBOL_USCORE
+# --------------------
+# does the compiler prefix global symbols with an underscore?
+AC_DEFUN([LT_SYS_SYMBOL_USCORE],
+[m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+AC_CACHE_CHECK([for _ prefix in compiled symbols],
+  [lt_cv_sys_symbol_underscore],
+  [lt_cv_sys_symbol_underscore=no
+  cat > conftest.$ac_ext <<_LT_EOF
+void nm_test_func(){}
+int main(){nm_test_func;return 0;}
+_LT_EOF
+  if AC_TRY_EVAL(ac_compile); then
+    # Now try to grab the symbols.
+    ac_nlist=conftest.nm
+    if AC_TRY_EVAL(NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $ac_nlist) && test -s "$ac_nlist"; then
+      # See whether the symbols have a leading underscore.
+      if grep '^. _nm_test_func' "$ac_nlist" >/dev/null; then
+        lt_cv_sys_symbol_underscore=yes
+      else
+        if grep '^. nm_test_func ' "$ac_nlist" >/dev/null; then
+	  :
+        else
+	  echo "configure: cannot find nm_test_func in $ac_nlist" >&AS_MESSAGE_LOG_FD
+        fi
+      fi
+    else
+      echo "configure: cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
+    fi
+  else
+    echo "configure: failed program was:" >&AS_MESSAGE_LOG_FD
+    cat conftest.c >&AS_MESSAGE_LOG_FD
+  fi
+  rm -rf conftest*
+  ])
+  sys_symbol_underscore=$lt_cv_sys_symbol_underscore
+  AC_SUBST([sys_symbol_underscore])
+])# LT_SYS_SYMBOL_USCORE
+
+# Old name:
+AU_ALIAS([AC_LTDL_SYMBOL_USCORE], [LT_SYS_SYMBOL_USCORE])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LTDL_SYMBOL_USCORE], [])
+
+
+# LT_FUNC_DLSYM_USCORE
+# --------------------
+AC_DEFUN([LT_FUNC_DLSYM_USCORE],
+[AC_REQUIRE([LT_SYS_SYMBOL_USCORE])dnl
+if test x"$lt_cv_sys_symbol_underscore" = xyes; then
+  if test x"$libltdl_cv_func_dlopen" = xyes ||
+     test x"$libltdl_cv_lib_dl_dlopen" = xyes ; then
+	AC_CACHE_CHECK([whether we have to add an underscore for dlsym],
+	  [libltdl_cv_need_uscore],
+	  [libltdl_cv_need_uscore=unknown
+          save_LIBS="$LIBS"
+          LIBS="$LIBS $LIBADD_DLOPEN"
+	  _LT_TRY_DLOPEN_SELF(
+	    [libltdl_cv_need_uscore=no], [libltdl_cv_need_uscore=yes],
+	    [],				 [libltdl_cv_need_uscore=cross])
+	  LIBS="$save_LIBS"
+	])
+  fi
+fi
+
+if test x"$libltdl_cv_need_uscore" = xyes; then
+  AC_DEFINE([NEED_USCORE], [1],
+    [Define if dlsym() requires a leading underscore in symbol names.])
+fi
+])# LT_FUNC_DLSYM_USCORE
+
+# Old name:
+AU_ALIAS([AC_LTDL_DLSYM_USCORE], [LT_FUNC_DLSYM_USCORE])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LTDL_DLSYM_USCORE], [])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltoptions.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltoptions.m4
new file mode 100644
index 0000000..5d9acd8
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltoptions.m4
@@ -0,0 +1,384 @@
+# Helper functions for option handling.                    -*- Autoconf -*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation,
+#   Inc.
+#   Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 7 ltoptions.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+
+
+# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
+# ------------------------------------------
+m4_define([_LT_MANGLE_OPTION],
+[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
+
+
+# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
+# ---------------------------------------
+# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
+# matching handler defined, dispatch to it.  Other OPTION-NAMEs are
+# saved as a flag.
+m4_define([_LT_SET_OPTION],
+[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
+m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
+        _LT_MANGLE_DEFUN([$1], [$2]),
+    [m4_warning([Unknown $1 option `$2'])])[]dnl
+])
+
+
+# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
+# ------------------------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+m4_define([_LT_IF_OPTION],
+[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
+
+
+# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
+# -------------------------------------------------------
+# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
+# are set.
+m4_define([_LT_UNLESS_OPTIONS],
+[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+	    [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
+		      [m4_define([$0_found])])])[]dnl
+m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
+])[]dnl
+])
+
+
+# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
+# ----------------------------------------
+# OPTION-LIST is a space-separated list of Libtool options associated
+# with MACRO-NAME.  If any OPTION has a matching handler declared with
+# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
+# the unknown option and exit.
+m4_defun([_LT_SET_OPTIONS],
+[# Set options
+m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+    [_LT_SET_OPTION([$1], _LT_Option)])
+
+m4_if([$1],[LT_INIT],[
+  dnl
+  dnl Simply set some default values (i.e off) if boolean options were not
+  dnl specified:
+  _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
+  ])
+  _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
+  ])
+  dnl
+  dnl If no reference was made to various pairs of opposing options, then
+  dnl we run the default mode handler for the pair.  For example, if neither
+  dnl `shared' nor `disable-shared' was passed, we enable building of shared
+  dnl archives by default:
+  _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
+  _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
+  		   [_LT_ENABLE_FAST_INSTALL])
+  ])
+])# _LT_SET_OPTIONS
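+
+# Illustration of the dispatch above: a configure.ac line such as
+#
+#   LT_INIT([dlopen win32-dll disable-static])
+#
+# runs the `dlopen', `win32-dll' and `disable-static' handlers defined in
+# this file, while option pairs that were not mentioned (shared, pic-only,
+# fast-install and their opposites) fall back to their defaults here.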
+
+
+## --------------------------------- ##
+## Macros to handle LT_INIT options. ##
+## --------------------------------- ##
+
+# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
+# -----------------------------------------
+m4_define([_LT_MANGLE_DEFUN],
+[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
+
+
+# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
+# -----------------------------------------------
+m4_define([LT_OPTION_DEFINE],
+[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
+])# LT_OPTION_DEFINE
+
+
+# dlopen
+# ------
+LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
+])
+
+AU_DEFUN([AC_LIBTOOL_DLOPEN],
+[_LT_SET_OPTION([LT_INIT], [dlopen])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `dlopen' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
+
+
+# win32-dll
+# ---------
+# Declare package support for building win32 dll's.
+LT_OPTION_DEFINE([LT_INIT], [win32-dll],
+[enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+  AC_CHECK_TOOL(AS, as, false)
+  AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+  AC_CHECK_TOOL(OBJDUMP, objdump, false)
+  ;;
+esac
+
+test -z "$AS" && AS=as
+_LT_DECL([], [AS],      [1], [Assembler program])dnl
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
+])# win32-dll
+
+AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+_LT_SET_OPTION([LT_INIT], [win32-dll])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `win32-dll' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
+
+
+# _LT_ENABLE_SHARED([DEFAULT])
+# ----------------------------
+# implement the --enable-shared flag, and support the `shared' and
+# `disable-shared' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_SHARED],
+[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([shared],
+    [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
+	[build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_shared=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
+
+    _LT_DECL([build_libtool_libs], [enable_shared], [0],
+	[Whether or not to build shared libraries])
+])# _LT_ENABLE_SHARED
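+
+# Behaviour of the PKGS form, sketched for a package whose PACKAGE name is
+# the hypothetical `libfoo':
+#
+#   ./configure --enable-shared              # enable_shared=yes
+#   ./configure --disable-shared             # enable_shared=no
+#   ./configure --enable-shared=libfoo,bar   # enable_shared=yes (libfoo listed)
+#   ./configure --enable-shared=bar,baz      # enable_shared=no  (libfoo not listed)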
+
+LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
+])
+
+AC_DEFUN([AC_DISABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], [disable-shared])
+])
+
+AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
+AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_SHARED], [])
+dnl AC_DEFUN([AM_DISABLE_SHARED], [])
+
+
+
+# _LT_ENABLE_STATIC([DEFAULT])
+# ----------------------------
+# implement the --enable-static flag, and support the `static' and
+# `disable-static' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_STATIC],
+[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([static],
+    [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
+	[build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+      enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_static=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
+
+    _LT_DECL([build_old_libs], [enable_static], [0],
+	[Whether or not to build static libraries])
+])# _LT_ENABLE_STATIC
+
+LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
+])
+
+AC_DEFUN([AC_DISABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], [disable-static])
+])
+
+AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
+AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_STATIC], [])
+dnl AC_DEFUN([AM_DISABLE_STATIC], [])
+
+
+
+# _LT_ENABLE_FAST_INSTALL([DEFAULT])
+# ----------------------------------
+# implement the --enable-fast-install flag, and support the `fast-install'
+# and `disable-fast-install' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_FAST_INSTALL],
+[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([fast-install],
+    [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
+    [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_fast_install=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
+
+_LT_DECL([fast_install], [enable_fast_install], [0],
+	 [Whether or not to optimize for fast installation])dnl
+])# _LT_ENABLE_FAST_INSTALL
+
+LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
+
+# Old names:
+AU_DEFUN([AC_ENABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `fast-install' option into LT_INIT's first parameter.])
+])
+
+AU_DEFUN([AC_DISABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `disable-fast-install' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
+dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
+
+
+# _LT_WITH_PIC([MODE])
+# --------------------
+# implement the --with-pic flag, and support the `pic-only' and `no-pic'
+# LT_INIT options.
+# MODE is either `yes' or `no'.  If omitted, it defaults to `both'.
+m4_define([_LT_WITH_PIC],
+[AC_ARG_WITH([pic],
+    [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@],
+	[try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
+    [lt_p=${PACKAGE-default}
+    case $withval in
+    yes|no) pic_mode=$withval ;;
+    *)
+      pic_mode=default
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for lt_pkg in $withval; do
+	IFS="$lt_save_ifs"
+	if test "X$lt_pkg" = "X$lt_p"; then
+	  pic_mode=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [pic_mode=default])
+
+test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
+
+_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
+])# _LT_WITH_PIC
+
+LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
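+
+# Hedged example (not part of libtool itself): at configure time a user can
+# request PIC-only objects with `./configure --with-pic', or restrict that
+# to one package with `--with-pic=PKGS', where PKGS is a list (comma or
+# whitespace separated, per the IFS setting above) matched against $PACKAGE.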
+
+# Old name:
+AU_DEFUN([AC_LIBTOOL_PICMODE],
+[_LT_SET_OPTION([LT_INIT], [pic-only])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `pic-only' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
+
+## ----------------- ##
+## LTDL_INIT Options ##
+## ----------------- ##
+
+m4_define([_LTDL_MODE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
+		 [m4_define([_LTDL_MODE], [nonrecursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [recursive],
+		 [m4_define([_LTDL_MODE], [recursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [subproject],
+		 [m4_define([_LTDL_MODE], [subproject])])
+
+m4_define([_LTDL_TYPE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [installable],
+		 [m4_define([_LTDL_TYPE], [installable])])
+LT_OPTION_DEFINE([LTDL_INIT], [convenience],
+		 [m4_define([_LTDL_TYPE], [convenience])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltsugar.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltsugar.m4
new file mode 100644
index 0000000..9000a05
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltsugar.m4
@@ -0,0 +1,123 @@
+# ltsugar.m4 -- libtool m4 base layer.                         -*-Autoconf-*-
+#
+# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 6 ltsugar.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
+
+
+# lt_join(SEP, ARG1, [ARG2...])
+# -----------------------------
+# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
+# associated separator.
+# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
+# versions in m4sugar had bugs.
+m4_define([lt_join],
+[m4_if([$#], [1], [],
+       [$#], [2], [[$2]],
+       [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
+m4_define([_lt_join],
+[m4_if([$#$2], [2], [],
+       [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
+
+
+# lt_car(LIST)
+# lt_cdr(LIST)
+# ------------
+# Manipulate m4 lists.
+# These macros are necessary as long as we still need to support
+# Autoconf-2.59, which quotes differently.
+m4_define([lt_car], [[$1]])
+m4_define([lt_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+       [$#], 1, [],
+       [m4_dquote(m4_shift($@))])])
+m4_define([lt_unquote], $1)
+
+
+# lt_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'.
+# Note that neither SEPARATOR nor STRING are expanded; they are appended
+# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
+# No SEPARATOR is output if MACRO-NAME was previously undefined (as
+# opposed to defined but empty).
+#
+# This macro is needed until we can rely on Autoconf 2.62, since earlier
+# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
+m4_define([lt_append],
+[m4_define([$1],
+	   m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
+
+
+
+# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
+# ----------------------------------------------------------
+# Produce a SEP delimited list of all paired combinations of elements of
+# PREFIX-LIST with SUFFIX1 through SUFFIXn.  Each element of the list
+# has the form PREFIXmINFIXSUFFIXn.
+# Needed until we can rely on m4_combine added in Autoconf 2.62.
+m4_define([lt_combine],
+[m4_if(m4_eval([$# > 3]), [1],
+       [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
+[[m4_foreach([_Lt_prefix], [$2],
+	     [m4_foreach([_Lt_suffix],
+		]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
+	[_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
+
+
+# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
+# -----------------------------------------------------------------------
+# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
+# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
+m4_define([lt_if_append_uniq],
+[m4_ifdef([$1],
+	  [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
+		 [lt_append([$1], [$2], [$3])$4],
+		 [$5])],
+	  [lt_append([$1], [$2], [$3])$4])])
+
+
+# lt_dict_add(DICT, KEY, VALUE)
+# -----------------------------
+m4_define([lt_dict_add],
+[m4_define([$1($2)], [$3])])
+
+
+# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
+# --------------------------------------------
+m4_define([lt_dict_add_subkey],
+[m4_define([$1($2:$3)], [$4])])
+
+
+# lt_dict_fetch(DICT, KEY, [SUBKEY])
+# ----------------------------------
+m4_define([lt_dict_fetch],
+[m4_ifval([$3],
+	m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
+    m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
+
+
+# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
+# -----------------------------------------------------------------
+m4_define([lt_if_dict_fetch],
+[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
+	[$5],
+    [$6])])
+
+
+# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
+# --------------------------------------------------------------
+m4_define([lt_dict_filter],
+[m4_if([$5], [], [],
+  [lt_join(m4_quote(m4_default([$4], [[, ]])),
+           lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
+		      [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
+])
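+
+# Hedged illustration (the dict name below is made up): a dict is just a
+# family of named m4 macros, so after
+#
+#   lt_dict_add_subkey([my_dict], [CC], [value], [gcc])
+#
+# the call lt_dict_fetch([my_dict], [CC], [value]) expands to `gcc'.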
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltversion.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltversion.m4
new file mode 100644
index 0000000..07a8602
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/ltversion.m4
@@ -0,0 +1,23 @@
+# ltversion.m4 -- version numbers			-*- Autoconf -*-
+#
+#   Copyright (C) 2004 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# @configure_input@
+
+# serial 3337 ltversion.m4
+# This file is part of GNU Libtool
+
+m4_define([LT_PACKAGE_VERSION], [2.4.2])
+m4_define([LT_PACKAGE_REVISION], [1.3337])
+
+AC_DEFUN([LTVERSION_VERSION],
+[macro_version='2.4.2'
+macro_revision='1.3337'
+_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+_LT_DECL(, macro_revision, 0)
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/lt~obsolete.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/lt~obsolete.m4
new file mode 100644
index 0000000..c573da9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/aclocal/lt~obsolete.m4
@@ -0,0 +1,98 @@
+# lt~obsolete.m4 -- aclocal satisfying obsolete definitions.    -*-Autoconf-*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004.
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 lt~obsolete.m4
+
+# These exist entirely to fool aclocal when bootstrapping libtool.
+#
+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
+# which have later been changed to m4_define as they aren't part of the
+# exported API, or moved to Autoconf or Automake where they belong.
+#
+# The trouble is, aclocal is a bit thick.  It'll see the old AC_DEFUN
+# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
+# using a macro with the same name in our local m4/libtool.m4 it'll
+# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
+# and doesn't know about Autoconf macros at all.)
+#
+# So we provide this file, which has a silly filename so it's always
+# included after everything else.  This provides aclocal with the
+# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
+# because those macros already exist, or will be overwritten later.
+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. 
+#
+# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
+# Yes, that means every name once taken will need to remain here until
+# we give up compatibility with versions before 1.7, at which point
+# we need to keep only those names which we still refer to.
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
+
+m4_ifndef([AC_LIBTOOL_LINKER_OPTION],	[AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
+m4_ifndef([AC_PROG_EGREP],		[AC_DEFUN([AC_PROG_EGREP])])
+m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_AC_SHELL_INIT],		[AC_DEFUN([_LT_AC_SHELL_INIT])])
+m4_ifndef([_LT_AC_SYS_LIBPATH_AIX],	[AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
+m4_ifndef([_LT_PROG_LTMAIN],		[AC_DEFUN([_LT_PROG_LTMAIN])])
+m4_ifndef([_LT_AC_TAGVAR],		[AC_DEFUN([_LT_AC_TAGVAR])])
+m4_ifndef([AC_LTDL_ENABLE_INSTALL],	[AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
+m4_ifndef([AC_LTDL_PREOPEN],		[AC_DEFUN([AC_LTDL_PREOPEN])])
+m4_ifndef([_LT_AC_SYS_COMPILER],	[AC_DEFUN([_LT_AC_SYS_COMPILER])])
+m4_ifndef([_LT_AC_LOCK],		[AC_DEFUN([_LT_AC_LOCK])])
+m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE],	[AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
+m4_ifndef([_LT_AC_TRY_DLOPEN_SELF],	[AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
+m4_ifndef([AC_LIBTOOL_PROG_CC_C_O],	[AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
+m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
+m4_ifndef([AC_LIBTOOL_OBJDIR],		[AC_DEFUN([AC_LIBTOOL_OBJDIR])])
+m4_ifndef([AC_LTDL_OBJDIR],		[AC_DEFUN([AC_LTDL_OBJDIR])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
+m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP],	[AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
+m4_ifndef([AC_PATH_MAGIC],		[AC_DEFUN([AC_PATH_MAGIC])])
+m4_ifndef([AC_PROG_LD_GNU],		[AC_DEFUN([AC_PROG_LD_GNU])])
+m4_ifndef([AC_PROG_LD_RELOAD_FLAG],	[AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
+m4_ifndef([AC_DEPLIBS_CHECK_METHOD],	[AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
+m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS],	[AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
+m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP],	[AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
+m4_ifndef([LT_AC_PROG_EGREP],		[AC_DEFUN([LT_AC_PROG_EGREP])])
+m4_ifndef([LT_AC_PROG_SED],		[AC_DEFUN([LT_AC_PROG_SED])])
+m4_ifndef([_LT_CC_BASENAME],		[AC_DEFUN([_LT_CC_BASENAME])])
+m4_ifndef([_LT_COMPILER_BOILERPLATE],	[AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
+m4_ifndef([_LT_LINKER_BOILERPLATE],	[AC_DEFUN([_LT_LINKER_BOILERPLATE])])
+m4_ifndef([_AC_PROG_LIBTOOL],		[AC_DEFUN([_AC_PROG_LIBTOOL])])
+m4_ifndef([AC_LIBTOOL_SETUP],		[AC_DEFUN([AC_LIBTOOL_SETUP])])
+m4_ifndef([_LT_AC_CHECK_DLFCN],		[AC_DEFUN([_LT_AC_CHECK_DLFCN])])
+m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER],	[AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
+m4_ifndef([_LT_AC_TAGCONFIG],		[AC_DEFUN([_LT_AC_TAGCONFIG])])
+m4_ifndef([AC_DISABLE_FAST_INSTALL],	[AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
+m4_ifndef([_LT_AC_LANG_CXX],		[AC_DEFUN([_LT_AC_LANG_CXX])])
+m4_ifndef([_LT_AC_LANG_F77],		[AC_DEFUN([_LT_AC_LANG_F77])])
+m4_ifndef([_LT_AC_LANG_GCJ],		[AC_DEFUN([_LT_AC_LANG_GCJ])])
+m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
+m4_ifndef([_LT_AC_LANG_C_CONFIG],	[AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
+m4_ifndef([_LT_AC_LANG_CXX_CONFIG],	[AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
+m4_ifndef([_LT_AC_LANG_F77_CONFIG],	[AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
+m4_ifndef([_LT_AC_LANG_GCJ_CONFIG],	[AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
+m4_ifndef([_LT_AC_LANG_RC_CONFIG],	[AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
+m4_ifndef([AC_LIBTOOL_CONFIG],		[AC_DEFUN([AC_LIBTOOL_CONFIG])])
+m4_ifndef([_LT_AC_FILE_LTDLL_C],	[AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
+m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS],	[AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
+m4_ifndef([_LT_AC_PROG_CXXCPP],		[AC_DEFUN([_LT_AC_PROG_CXXCPP])])
+m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS],	[AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
+m4_ifndef([_LT_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_PROG_F77],		[AC_DEFUN([_LT_PROG_F77])])
+m4_ifndef([_LT_PROG_FC],		[AC_DEFUN([_LT_PROG_FC])])
+m4_ifndef([_LT_PROG_CXX],		[AC_DEFUN([_LT_PROG_CXX])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/C4che.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/C4che.pm
new file mode 100644
index 0000000..e6d13c7
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/C4che.pm
@@ -0,0 +1,242 @@
+# autoconf -- create `configure' using m4 macros
+# Copyright (C) 2003, 2006, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Autom4te::C4che;
+
+=head1 NAME
+
+Autom4te::C4che - a single m4 run request
+
+=head1 SYNOPSIS
+
+  use Autom4te::C4che;
+
+=head1 DESCRIPTION
+
+This Perl module handles the cache of M4 runs used by autom4te.
+
+=cut
+
+use Data::Dumper;
+use Autom4te::Request;
+use Carp;
+use strict;
+
+=over 4
+
+=item @request
+
+List of requests.
+
+We cannot declare it "my" as the loading, performed via "do", would
+refer to another scope, and @request would not be updated.  It used to
+work with "my" vars, and I do not know whether the current behavior
+(5.6) is wanted or not.
+
+=cut
+
+use vars qw(@request);
+
+=item C<$req = Autom4te::C4che-E<gt>retrieve (%attr)>
+
+Find a request with the same path and input.
+
+=cut
+
+sub retrieve($%)
+{
+  my ($self, %attr) = @_;
+
+  foreach (@request)
+    {
+      # Same path.
+      next
+	if join ("\n", @{$_->path}) ne join ("\n", @{$attr{path}});
+
+      # Same inputs.
+      next
+	if join ("\n", @{$_->input}) ne join ("\n", @{$attr{input}});
+
+      # Found it.
+      return $_;
+    }
+
+  return undef;
+}
+
+=item C<$req = Autom4te::C4che-E<gt>register (%attr)>
+
+Create and register a request for the given path and input.
+
+=cut
+
+# $REQUEST-OBJ
+# register ($SELF, %ATTR)
+# -----------------------
+# NEW should not be called directly.
+# Private.
+sub register ($%)
+{
+  my ($self, %attr) = @_;
+
+  # path and input are the only ID for a request object.
+  my $obj = new Autom4te::Request ('path'  => $attr{path},
+				   'input' => $attr{input});
+  push @request, $obj;
+
+  # Assign an id for cache file.
+  $obj->id ("$#request");
+
+  return $obj;
+}
+
+
+=item C<$req = Autom4te::C4che-E<gt>request (%request)>
+
+Get (retrieve or create) a request for the path C<$request{path}> and
+the input C<$request{input}>.
+
+=cut
+
+# $REQUEST-OBJ
+# request($SELF, %REQUEST)
+# ------------------------
+sub request ($%)
+{
+  my ($self, %request) = @_;
+
+  my $req =
+    Autom4te::C4che->retrieve (%request)
+    || Autom4te::C4che->register (%request);
+
+  # If there are new traces to produce, then we are not valid.
+  foreach (@{$request{'macro'}})
+    {
+      if (! exists ${$req->macro}{$_})
+	{
+	  ${$req->macro}{$_} = 1;
+	  $req->valid (0);
+	}
+    }
+
+  # It would be great to have $REQ check that it is up to date wrt
+  # its dependencies, but that requires getting traces (to fetch the
+  # included files), which is out of the scope of Request (currently?).
+
+  return $req;
+}
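+
+# Hedged usage sketch (an assumption, not taken from this file): a driver
+# such as autom4te might combine request() and save() roughly as follows,
+# where $icache_file is assumed to be an open Autom4te::XFile handle:
+#
+#   my $req = Autom4te::C4che->request ('path'  => \@path,
+#                                       'input' => \@input,
+#                                       'macro' => [keys %traced]);
+#   Autom4te::C4che->save ($icache_file);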
+
+
+=item C<$string = Autom4te::C4che-E<gt>marshall ()>
+
+Serialize all the current requests.
+
+=cut
+
+
+# marshall($SELF)
+# ---------------
+sub marshall ($)
+{
+  my ($caller) = @_;
+  my $res = '';
+
+  my $marshall = Data::Dumper->new ([\@request], [qw (*request)]);
+  $marshall->Indent(2)->Terse(0);
+  $res = $marshall->Dump . "\n";
+
+  return $res;
+}
+
+
+=item C<Autom4te::C4che-E<gt>save ($file)>
+
+Save the cache in the C<$file> file object.
+
+=cut
+
+# SAVE ($FILE)
+# ------------
+sub save ($$)
+{
+  my ($self, $file) = @_;
+
+  confess "cannot save a single request\n"
+    if ref ($self);
+
+  $file->seek (0, 0);
+  $file->truncate (0);
+  print $file
+    "# This file was generated.\n",
+    "# It contains the lists of macros which have been traced.\n",
+    "# It can be safely removed.\n",
+    "\n",
+    $self->marshall;
+}
+
+
+=item C<Autom4te::C4che-E<gt>load ($file)>
+
+Load the cache from the C<$file> file object.
+
+=cut
+
+# LOAD ($FILE)
+# ------------
+sub load ($$)
+{
+  my ($self, $file) = @_;
+  my $fname = $file->name;
+
+  confess "cannot load a single request\n"
+    if ref ($self);
+
+  my $contents = join "", $file->getlines;
+
+  eval $contents;
+
+  confess "cannot eval $fname: $@\n" if $@;
+}
+
+
+=head1 SEE ALSO
+
+L<Autom4te::Request>
+
+=head1 HISTORY
+
+Written by Akim Demaille E<lt>F<akim@freefriends.org>E<gt>.
+
+=cut
+
+1; # for require
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/ChannelDefs.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/ChannelDefs.pm
new file mode 100644
index 0000000..c78d91a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/ChannelDefs.pm
@@ -0,0 +1,390 @@
+# Copyright (C) 2002, 2003, 2006, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Autom4te::ChannelDefs;
+
+use Autom4te::Channels;
+
+=head1 NAME
+
+Autom4te::ChannelDefs - channel definitions for Automake and helper functions
+
+=head1 SYNOPSIS
+
+  use Autom4te::ChannelDefs;
+
+  print Autom4te::ChannelDefs::usage (), "\n";
+  prog_error ($MESSAGE, [%OPTIONS]);
+  error ($WHERE, $MESSAGE, [%OPTIONS]);
+  error ($MESSAGE);
+  fatal ($WHERE, $MESSAGE, [%OPTIONS]);
+  fatal ($MESSAGE);
+  verb ($MESSAGE, [%OPTIONS]);
+  switch_warning ($CATEGORY);
+  parse_WARNINGS ();
+  parse_warnings ($OPTION, @ARGUMENT);
+  Autom4te::ChannelDefs::set_strictness ($STRICTNESS_NAME);
+
+=head1 DESCRIPTION
+
+This package defines channels that can be used in Automake to
+output diagnostics and other messages (via C<msg()>).  It also defines
+some helper functions to enable or disable these channels, and some
+shorthand functions to output on specific channels.
+
+=cut
+
+use 5.005;
+use strict;
+use Exporter;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (&prog_error &error &fatal &verb
+	      &switch_warning &parse_WARNINGS &parse_warnings);
+
+=head2 CHANNELS
+
+The following channels can be used as the first argument of
+C<Autom4te::Channel::msg>.  For some of them we list a shorthand
+function that makes the code more readable.
+
+=over 4
+
+=item C<fatal>
+
+Fatal errors.  Use C<&fatal> to send messages over this channel.
+
+=item C<error>
+
+Common errors.  Use C<&error> to send messages over this channel.
+
+=item C<error-gnu>
+
+Errors related to GNU Standards.
+
+=item C<error-gnu/warn>
+
+Errors related to GNU Standards that should be warnings in "foreign" mode.
+
+=item C<error-gnits>
+
+Errors related to GNITS Standards (silent by default).
+
+=item C<automake>
+
+Internal errors.  Use C<&prog_error> to send messages over this channel.
+
+=item C<cross>
+
+Constructs compromising the cross-compilation of the package.
+
+=item C<gnu>
+
+Warnings related to GNU Coding Standards.
+
+=item C<obsolete>
+
+Warnings about obsolete features (silent by default).
+
+=item C<override>
+
+Warnings about user redefinitions of Automake rules or
+variables (silent by default).
+
+=item C<portability>
+
+Warnings about non-portable constructs.
+
+=item C<syntax>
+
+Warnings about weird syntax, unused variables, typos ...
+
+=item C<unsupported>
+
+Warnings about unsupported (or mis-supported) features.
+
+=item C<verb>
+
+Messages output in C<--verbose> mode.  Use C<&verb> to send such messages.
+
+=item C<note>
+
+Informative messages.
+
+=back
+
+=cut
+
+# Initialize our list of error/warning channels.
+# Do not forget to update &usage and the manual
+# if you add or change a warning channel.
+
+register_channel 'fatal', type => 'fatal', ordered => 0;
+register_channel 'error', type => 'error';
+register_channel 'error-gnu', type => 'error';
+register_channel 'error-gnu/warn', type => 'error';
+register_channel 'error-gnits', type => 'error', silent => 1;
+register_channel 'automake', type => 'fatal', backtrace => 1,
+  header => ("####################\n" .
+	     "## Internal Error ##\n" .
+	     "####################\n"),
+  footer => "\nPlease contact <bug-automake\@gnu.org>.",
+  ordered => 0;
+
+register_channel 'cross', type => 'warning', silent => 1;
+register_channel 'gnu', type => 'warning';
+register_channel 'obsolete', type => 'warning', silent => 1;
+register_channel 'override', type => 'warning', silent => 1;
+register_channel 'portability', type => 'warning', silent => 1;
+register_channel 'syntax', type => 'warning';
+register_channel 'unsupported', type => 'warning';
+
+register_channel 'verb', type => 'debug', silent => 1, ordered => 0;
+register_channel 'note', type => 'debug', silent => 0;
+
+=head2 FUNCTIONS
+
+=over 4
+
+=item C<usage ()>
+
+Return the warning category descriptions.
+
+=cut
+
+sub usage ()
+{
+  return "Warning categories include:
+  `cross'         cross compilation issues
+  `gnu'           GNU coding standards (default in gnu and gnits modes)
+  `obsolete'      obsolete features or constructions
+  `override'      user redefinitions of Automake rules or variables
+  `portability'   portability issues (default in gnu and gnits modes)
+  `syntax'        dubious syntactic constructs (default)
+  `unsupported'   unsupported or incomplete features (default)
+  `all'           all the warnings
+  `no-CATEGORY'   turn off warnings in CATEGORY
+  `none'          turn off all the warnings
+  `error'         treat warnings as errors";
+}
+
+=item C<prog_error ($MESSAGE, [%OPTIONS])>
+
+Signal a programming error (on channel C<automake>),
+display C<$MESSAGE>, and exit 1.
+
+=cut
+
+sub prog_error ($;%)
+{
+  my ($msg, %opts) = @_;
+  msg 'automake', '', $msg, %opts;
+}
+
+=item C<error ($WHERE, $MESSAGE, [%OPTIONS])>
+
+=item C<error ($MESSAGE)>
+
+Uncategorized errors.
+
+=cut
+
+sub error ($;$%)
+{
+  my ($where, $msg, %opts) = @_;
+  msg ('error', $where, $msg, %opts);
+}
+
+=item C<fatal ($WHERE, $MESSAGE, [%OPTIONS])>
+
+=item C<fatal ($MESSAGE)>
+
+Fatal errors.
+
+=cut
+
+sub fatal ($;$%)
+{
+  my ($where, $msg, %opts) = @_;
+  msg ('fatal', $where, $msg, %opts);
+}
+
+=item C<verb ($MESSAGE, [%OPTIONS])>
+
+C<--verbose> messages.
+
+=cut
+
+sub verb ($;%)
+{
+  my ($msg, %opts) = @_;
+  msg 'verb', '', $msg, %opts;
+}
+
+=item C<switch_warning ($CATEGORY)>
+
+If C<$CATEGORY> is C<mumble>, turn on channel C<mumble>.
+If it is C<no-mumble>, turn C<mumble> off.
+Else handle C<all> and C<none> for completeness.
+
+=cut
+
+sub switch_warning ($)
+{
+  my ($cat) = @_;
+  my $has_no = 0;
+
+  if ($cat =~ /^no-(.*)$/)
+    {
+      $cat = $1;
+      $has_no = 1;
+    }
+
+  if ($cat eq 'all')
+    {
+      setup_channel_type 'warning', silent => $has_no;
+    }
+  elsif ($cat eq 'none')
+    {
+      setup_channel_type 'warning', silent => ! $has_no;
+    }
+  elsif ($cat eq 'error')
+    {
+      $warnings_are_errors = ! $has_no;
+      # Set exit code if Perl warns about something
+      # (like uninitialized variables).
+      $SIG{"__WARN__"} =
+	$has_no ? 'DEFAULT' : sub { print STDERR @_; $exit_code = 1; };
+    }
+  elsif (channel_type ($cat) eq 'warning')
+    {
+      setup_channel $cat, silent => $has_no;
+    }
+  else
+    {
+      return 1;
+    }
+  return 0;
+}
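+
+# Hedged illustration (not in the original file): typical calls would be
+#
+#   switch_warning ('no-portability');   # silence one warning category
+#   switch_warning ('all');              # enable every warning channel
+#
+# and a non-zero return value flags a category that is not known here.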
+
+=item C<parse_WARNINGS ()>
+
+Parse the WARNINGS environment variable.
+
+=cut
+
+sub parse_WARNINGS ()
+{
+  if (exists $ENV{'WARNINGS'})
+    {
+      # Ignore unknown categories.  This is required because WARNINGS
+      # should be honored by many tools.
+      switch_warning $_ foreach (split (',', $ENV{'WARNINGS'}));
+    }
+}
+
+=item C<parse_warnings ($OPTION, @ARGUMENT)>
+
+Parse the argument of C<--warning=CATEGORY> or C<-WCATEGORY>.
+
+C<$OPTION> is C<"--warning"> or C<"-W">, C<@ARGUMENT> is a list of
+C<CATEGORY>.
+
+This can be used as an argument to C<Getopt>.
+
+=cut
+
+sub parse_warnings ($@)
+{
+  my ($opt, @categories) = @_;
+
+  foreach my $cat (map { split ',' } @categories)
+    {
+      msg 'unsupported', "unknown warning category `$cat'"
+	if switch_warning $cat;
+    }
+}
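+
+# Hedged sketch (an assumption about the caller, not part of this file):
+# the signature above matches a Getopt::Long callback, so option parsing
+# could be wired up roughly as
+#
+#   use Getopt::Long;
+#   GetOptions ('W|warnings=s' => \&parse_warnings);
+#
+# letting `-Wall,no-obsolete' style arguments flow straight into the channels.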
+
+=item C<set_strictness ($STRICTNESS_NAME)>
+
+Configure channels for strictness C<$STRICTNESS_NAME>.
+
+=cut
+
+sub set_strictness ($)
+{
+  my ($name) = @_;
+
+  if ($name eq 'gnu')
+    {
+      setup_channel 'error-gnu', silent => 0;
+      setup_channel 'error-gnu/warn', silent => 0, type => 'error';
+      setup_channel 'error-gnits', silent => 1;
+      setup_channel 'portability', silent => 0;
+      setup_channel 'gnu', silent => 0;
+    }
+  elsif ($name eq 'gnits')
+    {
+      setup_channel 'error-gnu', silent => 0;
+      setup_channel 'error-gnu/warn', silent => 0, type => 'error';
+      setup_channel 'error-gnits', silent => 0;
+      setup_channel 'portability', silent => 0;
+      setup_channel 'gnu', silent => 0;
+    }
+  elsif ($name eq 'foreign')
+    {
+      setup_channel 'error-gnu', silent => 1;
+      setup_channel 'error-gnu/warn', silent => 0, type => 'warning';
+      setup_channel 'error-gnits', silent => 1;
+      setup_channel 'portability', silent => 1;
+      setup_channel 'gnu', silent => 1;
+    }
+  else
+    {
+      prog_error "level `$name' not recognized\n";
+    }
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Autom4te::Channels>
+
+=head1 HISTORY
+
+Written by Alexandre Duret-Lutz E<lt>F<adl@gnu.org>E<gt>.
+
+=cut
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Channels.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Channels.pm
new file mode 100644
index 0000000..0af4d00
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Channels.pm
@@ -0,0 +1,837 @@
+# Copyright (C) 2002, 2004, 2006, 2008, 2010 Free Software Foundation,
+# Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+###############################################################
+# The main copy of this file is in Automake's CVS repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Autom4te::Channels;
+
+=head1 NAME
+
+Autom4te::Channels - support functions for error and warning management
+
+=head1 SYNOPSIS
+
+  use Autom4te::Channels;
+
+  # Register a channel to output warnings about unused variables.
+  register_channel 'unused', type => 'warning';
+
+  # Register a channel for system errors.
+  register_channel 'system', type => 'error', exit_code => 4;
+
+  # Output a message on channel 'unused'.
+  msg 'unused', "$file:$line", "unused variable `$var'";
+
+  # Make the 'unused' channel silent.
+  setup_channel 'unused', silent => 1;
+
+  # Turn on all channels of type 'warning'.
+  setup_channel_type 'warning', silent => 0;
+
+  # Redirect all channels to push messages on a Thread::Queue using
+  # the specified serialization key.
+  setup_channel_queue $queue, $key;
+
+  # Output a message pending in a Thread::Queue.
+  pop_channel_queue $queue;
+
+  # Treat all warnings as errors.
+  $warnings_are_errors = 1;
+
+  # Exit with the greatest exit code encountered so far.
+  exit $exit_code;
+
+=head1 DESCRIPTION
+
+This perl module provides support functions for handling diagnostic
+channels in programs.  Channels can be registered to convey fatal,
+error, warning, or debug messages.  Each channel has various options
+(e.g. is the channel silent, should duplicate messages be removed,
+etc.) that can also be overridden on a per-message basis.
+
+=cut
+
+use 5.005;
+use strict;
+use Exporter;
+use Carp;
+use File::Basename;
+
+use vars qw (@ISA @EXPORT %channels $me);
+
+@ISA = qw (Exporter);
+@EXPORT = qw ($exit_code $warnings_are_errors
+	      &reset_local_duplicates &reset_global_duplicates
+	      &register_channel &msg &exists_channel &channel_type
+	      &setup_channel &setup_channel_type
+	      &dup_channel_setup &drop_channel_setup
+	      &buffer_messages &flush_messages
+	      &setup_channel_queue &pop_channel_queue
+	      US_GLOBAL US_LOCAL
+	      UP_NONE UP_TEXT UP_LOC_TEXT);
+
+$me = basename $0;
+
+=head2 Global Variables
+
+=over 4
+
+=item C<$exit_code>
+
+The greatest exit code seen so far. C<$exit_code> is updated from
+the C<exit_code> options of C<fatal> and C<error> channels.
+
+=cut
+
+use vars qw ($exit_code);
+$exit_code = 0;
+
+=item C<$warnings_are_errors>
+
+Set this variable to 1 if warning messages should be treated as
+errors (i.e. if they should update C<$exit_code>).
+
+=cut
+
+use vars qw ($warnings_are_errors);
+$warnings_are_errors = 0;
+
+=back
+
+=head2 Constants
+
+=over 4
+
+=item C<UP_NONE>, C<UP_TEXT>, C<UP_LOC_TEXT>
+
+Possible values for the C<uniq_part> options.  This selects the part
+of the message that should be considered when filtering out duplicates.
+If C<UP_LOC_TEXT> is used, the location and the explanation message
+are used for filtering.  If C<UP_TEXT> is used, only the explanation
+message is used (so the same message will be filtered out if it appears
+at different locations).  C<UP_NONE> means that duplicate messages
+should be output.
+
+=cut
+
+use constant UP_NONE => 0;
+use constant UP_TEXT => 1;
+use constant UP_LOC_TEXT => 2;
+
+=item C<US_LOCAL>, C<US_GLOBAL>
+
+Possible values for the C<uniq_scope> options.
+Use C<US_GLOBAL> for error messages that should be printed only
+once during the execution of the program, C<US_LOCAL> for messages that
+should be printed only once per file.  (Actually, C<Channels> does not
+do this now when files are changed, it relies on you calling
+C<reset_local_duplicates> when this happens.)
+
+=cut
+
+# possible values for uniq_scope
+use constant US_LOCAL => 0;
+use constant US_GLOBAL => 1;
+
+=back
+
+=head2 Options
+
+Channels accept the options described below.  These options can be
+passed as a hash to the C<register_channel>, C<setup_channel>, and C<msg>
+functions.  The possible keys, with their default value are:
+
+=over
+
+=item C<type =E<gt> 'warning'>
+
+The type of the channel.  One of C<'debug'>, C<'warning'>, C<'error'>, or
+C<'fatal'>.  Fatal messages abort the program when they are output.
+Error messages update the exit status.  Debug and warning messages are
+harmless, except that warnings are treated as errors if
+C<$warnings_are_errors> is set.
+
+=item C<exit_code =E<gt> 1>
+
+The value to update C<$exit_code> with when a fatal or error message
+is emitted.  C<$exit_code> is also updated for warnings output
+when C<$warnings_are_errors> is set.
+
+=item C<file =E<gt> \*STDERR>
+
+The file where the error should be output.
+
+=item C<silent =E<gt> 0>
+
+Whether the channel should be silent.  Use this to disable a
+category of warnings, for instance.
+
+=item C<ordered =E<gt> 1>
+
+Whether, with multi-threaded execution, the message should be queued
+for ordered output.
+
+=item C<uniq_part =E<gt> UP_LOC_TEXT>
+
+The part of the message subject to duplicate filtering.  See the
+documentation for the C<UP_NONE>, C<UP_TEXT>, and C<UP_LOC_TEXT>
+constants above.
+
+C<uniq_part> can also be set to an arbitrary string that will be used
+instead of the message when considering duplicates.
+
+=item C<uniq_scope =E<gt> US_LOCAL>
+
+The scope of duplicate filtering.  See the documentation for the
+C<US_LOCAL>, and C<US_GLOBAL> constants above.
+
+=item C<header =E<gt> ''>
+
+A string to prepend to each message emitted through this channel.
+With partial messages, only the first part will have C<header>
+prepended.
+
+=item C<footer =E<gt> ''>
+
+A string to append to each message emitted through this channel.
+With partial messages, only the final part will have C<footer>
+appended.
+
+=item C<backtrace =E<gt> 0>
+
+Die with a stack backtrace after displaying the message.
+
+=item C<partial =E<gt> 0>
+
+When set, indicates a partial message that should
+be output along with the next message with C<partial> unset.
+Several partial messages can be stacked this way.
+
+Duplicate filtering will apply to the I<global> message resulting from
+all I<partial> messages, using the options from the last (non-partial)
+message.  Linking associated messages is the main reason to use this
+option.
+
+For instance the following messages
+
+  msg 'channel', 'foo:2', 'redefinition of A ...';
+  msg 'channel', 'foo:1', '... A previously defined here';
+  msg 'channel', 'foo:3', 'redefinition of A ...';
+  msg 'channel', 'foo:1', '... A previously defined here';
+
+will result in
+
+ foo:2: redefinition of A ...
+ foo:1: ... A previously defined here
+ foo:3: redefinition of A ...
+
+where the duplicate "I<... A previously defined here>" has been
+filtered out.
+
+Linking these messages using C<partial> as follows will prevent the
+fourth message from disappearing.
+
+  msg 'channel', 'foo:2', 'redefinition of A ...', partial => 1;
+  msg 'channel', 'foo:1', '... A previously defined here';
+  msg 'channel', 'foo:3', 'redefinition of A ...', partial => 1;
+  msg 'channel', 'foo:1', '... A previously defined here';
+
+Note that because the stack of C<partial> messages is printed with the
+first non-C<partial> message, most options of C<partial> messages will
+be ignored.
+
+=back
+
+=cut
+
+use vars qw (%_default_options %_global_duplicate_messages
+	     %_local_duplicate_messages);
+
+# Default options for a channel.
+%_default_options =
+  (
+   type => 'warning',
+   exit_code => 1,
+   file => \*STDERR,
+   silent => 0,
+   ordered => 1,
+   queue => 0,
+   queue_key => undef,
+   uniq_scope => US_LOCAL,
+   uniq_part => UP_LOC_TEXT,
+   header => '',
+   footer => '',
+   backtrace => 0,
+   partial => 0,
+   );
+
+# Filled with output messages as keys, to detect duplicates.
+# The value associated with each key is the number of occurrences
+# filtered out.
+%_local_duplicate_messages = ();
+%_global_duplicate_messages = ();
+
+sub _reset_duplicates (\%)
+{
+  my ($ref) = @_;
+  my $dup = 0;
+  foreach my $k (keys %$ref)
+    {
+      $dup += $ref->{$k};
+    }
+  %$ref = ();
+  return $dup;
+}
+
+
+=head2 Functions
+
+=over 4
+
+=item C<reset_local_duplicates ()>
+
+Reset local duplicate messages (see C<US_LOCAL>), and
+return the number of messages that have been filtered out.
+
+=cut
+
+sub reset_local_duplicates ()
+{
+  return _reset_duplicates %_local_duplicate_messages;
+}
+
+=item C<reset_global_duplicates ()>
+
+Reset local duplicate messages (see C<US_GLOBAL>), and
+return the number of messages that have been filtered out.
+
+=cut
+
+sub reset_global_duplicates ()
+{
+  return _reset_duplicates %_global_duplicate_messages;
+}
+
+sub _merge_options (\%%)
+{
+  my ($hash, %options) = @_;
+  local $_;
+
+  foreach (keys %options)
+    {
+      if (exists $hash->{$_})
+	{
+	  $hash->{$_} = $options{$_}
+	}
+      else
+	{
+	  confess "unknown option `$_'";
+	}
+    }
+  if ($hash->{'ordered'})
+    {
+      confess "fatal messages cannot be ordered"
+	if $hash->{'type'} eq 'fatal';
+      confess "backtrace cannot be output on ordered messages"
+	if $hash->{'backtrace'};
+    }
+}
+
+=item C<register_channel ($name, [%options])>
+
+Declare channel C<$name>, and override the default options
+with those listed in C<%options>.
+
+=cut
+
+sub register_channel ($;%)
+{
+  my ($name, %options) = @_;
+  my %channel_opts = %_default_options;
+  _merge_options %channel_opts, %options;
+  $channels{$name} = \%channel_opts;
+}
+
+=item C<exists_channel ($name)>
+
+Returns true iff channel C<$name> has been registered.
+
+=cut
+
+sub exists_channel ($)
+{
+  my ($name) = @_;
+  return exists $channels{$name};
+}
+
+=item C<channel_type ($name)>
+
+Returns the type of channel C<$name> if it has been registered.
+Returns the empty string otherwise.
+
+=cut
+
+sub channel_type ($)
+{
+  my ($name) = @_;
+  return $channels{$name}{'type'} if exists_channel $name;
+  return '';
+}
+
+# _format_sub_message ($LEADER, $MESSAGE)
+# ---------------------------------------
+# Split $MESSAGE at new lines and add $LEADER to each line.
+sub _format_sub_message ($$)
+{
+  my ($leader, $message) = @_;
+  return $leader . join ("\n" . $leader, split ("\n", $message)) . "\n";
+}
+
+# Store partial messages here. (See the 'partial' option.)
+use vars qw ($partial);
+$partial = '';
+
+# _format_message ($LOCATION, $MESSAGE, %OPTIONS)
+# -----------------------------------------------
+# Format the message.  Return a string ready to print.
+sub _format_message ($$%)
+{
+  my ($location, $message, %opts) = @_;
+  my $msg = ($partial eq '' ? $opts{'header'} : '') . $message
+	    . ($opts{'partial'} ? '' : $opts{'footer'});
+  if (ref $location)
+    {
+      # If $LOCATION is a reference, assume it's an instance of the
+      # Autom4te::Location class and display contexts.
+      my $loc = $location->get || $me;
+      $msg = _format_sub_message ("$loc: ", $msg);
+      for my $pair ($location->get_contexts)
+	{
+	  $msg .= _format_sub_message ($pair->[0] . ":   ", $pair->[1]);
+	}
+    }
+  else
+    {
+      $location ||= $me;
+      $msg = _format_sub_message ("$location: ", $msg);
+    }
+  return $msg;
+}
+
+# _enqueue ($QUEUE, $KEY, $UNIQ_SCOPE, $TO_FILTER, $MSG, $FILE)
+# -------------------------------------------------------------
+# Push message on a queue, to be processed by another thread.
+sub _enqueue ($$$$$$)
+{
+  my ($queue, $key, $uniq_scope, $to_filter, $msg, $file) = @_;
+  $queue->enqueue ($key, $msg, $to_filter, $uniq_scope);
+  confess "message queuing works only for STDERR"
+    if $file ne \*STDERR;
+}
+
+# _dequeue ($QUEUE)
+# -----------------
+# Pop a message from a queue, and print, similarly to how
+# _print_message would do it.  Return 0 if the queue is
+# empty.  Note that the key has already been dequeued.
+sub _dequeue ($)
+{
+  my ($queue) = @_;
+  my $msg = $queue->dequeue || return 0;
+  my $to_filter = $queue->dequeue;
+  my $uniq_scope = $queue->dequeue;
+  my $file = \*STDERR;
+
+  if ($to_filter ne '')
+    {
+      # Do we want local or global uniqueness?
+      my $dups;
+      if ($uniq_scope == US_LOCAL)
+	{
+	  $dups = \%_local_duplicate_messages;
+	}
+      elsif ($uniq_scope == US_GLOBAL)
+	{
+	  $dups = \%_global_duplicate_messages;
+	}
+      else
+	{
+	  confess "unknown value for uniq_scope: " . $uniq_scope;
+	}
+
+      # Update the hash of messages.
+      if (exists $dups->{$to_filter})
+	{
+	  ++$dups->{$to_filter};
+	  return 1;
+	}
+      else
+	{
+	  $dups->{$to_filter} = 0;
+	}
+    }
+  print $file $msg;
+  return 1;
+}
+
+
+# _print_message ($LOCATION, $MESSAGE, %OPTIONS)
+# ----------------------------------------------
+# Format the message, check duplicates, and print it.
+sub _print_message ($$%)
+{
+  my ($location, $message, %opts) = @_;
+
+  return 0 if ($opts{'silent'});
+
+  my $msg = _format_message ($location, $message, %opts);
+  if ($opts{'partial'})
+    {
+      # Incomplete message.  Store, don't print.
+      $partial .= $msg;
+      return;
+    }
+  else
+    {
+      # Prefix with any partial message sent so far.
+      $msg = $partial . $msg;
+      $partial = '';
+    }
+
+  msg ('note', '', 'warnings are treated as errors', uniq_scope => US_GLOBAL)
+    if ($opts{'type'} eq 'warning' && $warnings_are_errors);
+
+  # Check for duplicate message if requested.
+  my $to_filter;
+  if ($opts{'uniq_part'} ne UP_NONE)
+    {
+      # Which part of the error should we match?
+      if ($opts{'uniq_part'} eq UP_TEXT)
+	{
+	  $to_filter = $message;
+	}
+      elsif ($opts{'uniq_part'} eq UP_LOC_TEXT)
+	{
+	  $to_filter = $msg;
+	}
+      else
+	{
+	  $to_filter = $opts{'uniq_part'};
+	}
+
+      # Do we want local or global uniqueness?
+      my $dups;
+      if ($opts{'uniq_scope'} == US_LOCAL)
+	{
+	  $dups = \%_local_duplicate_messages;
+	}
+      elsif ($opts{'uniq_scope'} == US_GLOBAL)
+	{
+	  $dups = \%_global_duplicate_messages;
+	}
+      else
+	{
+	  confess "unknown value for uniq_scope: " . $opts{'uniq_scope'};
+	}
+
+      # Update the hash of messages.
+      if (exists $dups->{$to_filter})
+	{
+	  ++$dups->{$to_filter};
+	  return 0;
+	}
+      else
+	{
+	  $dups->{$to_filter} = 0;
+	}
+    }
+  my $file = $opts{'file'};
+  if ($opts{'ordered'} && $opts{'queue'})
+    {
+      _enqueue ($opts{'queue'}, $opts{'queue_key'}, $opts{'uniq_scope'},
+		$to_filter, $msg, $file);
+    }
+  else
+    {
+      print $file $msg;
+    }
+  return 1;
+}
+
+=item C<msg ($channel, $location, $message, [%options])>
+
+Emit a message on C<$channel>, overriding some options of the channel with
+those specified in C<%options>.  Obviously C<$channel> must have been
+registered with C<register_channel>.
+
+C<$message> is the text of the message, and C<$location> is a location
+associated to the message.
+
+For instance to complain about some unused variable C<mumble>
+declared at line 10 in F<foo.c>, one could do:
+
+  msg 'unused', 'foo.c:10', "unused variable `mumble'";
+
+If channel C<unused> is not silent (and if this message is not a duplicate),
+the following would be output:
+
+  foo.c:10: unused variable `mumble'
+
+C<$location> can also be an instance of C<Autom4te::Location>.  In this
+case, the stack of contexts will be displayed in addition.
+
+If C<$message> contains newline characters, C<$location> is prepended
+to each line.  For instance,
+
+  msg 'error', 'somewhere', "1st line\n2nd line";
+
+becomes
+
+  somewhere: 1st line
+  somewhere: 2nd line
+
+If C<$location> is an empty string, it is replaced by the name of the
+program.  Actually, if you don't use C<%options>, you can even
+elide the empty C<$location>.  Thus
+
+  msg 'fatal', '', 'fatal error';
+  msg 'fatal', 'fatal error';
+
+both print
+
+  progname: fatal error
+
+=cut
+
+
+use vars qw (@backlog %buffering);
+
+# See buffer_messages() and flush_messages() below.
+%buffering = ();	# The map of channel types to buffer.
+@backlog = ();		# The buffer of messages.
+
+sub msg ($$;$%)
+{
+  my ($channel, $location, $message, %options) = @_;
+
+  if (! defined $message)
+    {
+      $message = $location;
+      $location = '';
+    }
+
+  confess "unknown channel $channel" unless exists $channels{$channel};
+
+  my %opts = %{$channels{$channel}};
+  _merge_options (%opts, %options);
+
+  if (exists $buffering{$opts{'type'}})
+    {
+      push @backlog, [$channel, $location->clone, $message, %options];
+      return;
+    }
+
+  # Print the message if needed.
+  if (_print_message ($location, $message, %opts))
+    {
+      # Adjust exit status.
+      if ($opts{'type'} eq 'error'
+	  || $opts{'type'} eq 'fatal'
+	  || ($opts{'type'} eq 'warning' && $warnings_are_errors))
+	{
+	  my $es = $opts{'exit_code'};
+	  $exit_code = $es if $es > $exit_code;
+	}
+
+      # Die on fatal messages.
+      confess if $opts{'backtrace'};
+      if ($opts{'type'} eq 'fatal')
+        {
+	  # flush messages explicitly here, needed in worker threads.
+	  STDERR->flush;
+	  exit $exit_code;
+	}
+    }
+}
+
+
+=item C<setup_channel ($channel, %options)>
+
+Override the options of C<$channel> with those specified by C<%options>.
+
+=cut
+
+sub setup_channel ($%)
+{
+  my ($name, %opts) = @_;
+  confess "unknown channel $name" unless exists $channels{$name};
+  _merge_options %{$channels{$name}}, %opts;
+}
+
+=item C<setup_channel_type ($type, %options)>
+
+Override the options of any channel of type C<$type>
+with those specified by C<%options>.
+
+=cut
+
+sub setup_channel_type ($%)
+{
+  my ($type, %opts) = @_;
+  foreach my $channel (keys %channels)
+    {
+      setup_channel $channel, %opts
+	if $channels{$channel}{'type'} eq $type;
+    }
+}
+
+=item C<dup_channel_setup ()>, C<drop_channel_setup ()>
+
+Sometimes it is necessary to make temporary modifications to channels.
+For instance one may want to disable a warning while processing a
+particular file, and then restore the initial setup.  These two
+functions make it easy: C<dup_channel_setup ()> saves a copy of the
+current configuration for later restoration by
+C<drop_channel_setup ()>.
+
+You can think of this as a stack of configurations whose first entry
+is the active one.  C<dup_channel_setup ()> duplicates the first
+entry, while C<drop_channel_setup ()> just deletes it.
+
+=cut
+
+use vars qw (@_saved_channels @_saved_werrors);
+@_saved_channels = ();
+@_saved_werrors = ();
+
+sub dup_channel_setup ()
+{
+  my %channels_copy;
+  foreach my $k1 (keys %channels)
+    {
+      $channels_copy{$k1} = {%{$channels{$k1}}};
+    }
+  push @_saved_channels, \%channels_copy;
+  push @_saved_werrors, $warnings_are_errors;
+}
+
+sub drop_channel_setup ()
+{
+  my $saved = pop @_saved_channels;
+  %channels = %$saved;
+  $warnings_are_errors = pop @_saved_werrors;
+}
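+
+# Hedged sketch (not in the original file): temporarily silencing one
+# category around a single file might look like
+#
+#   dup_channel_setup ();
+#   setup_channel 'obsolete', silent => 1;
+#   # ... process the file ...
+#   drop_channel_setup ();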
+
+=item C<buffer_messages (@types)>, C<flush_messages ()>
+
+By default, when C<msg> is called, messages are processed immediately.
+
+Sometimes it is necessary to delay the output of messages.
+For instance, you might want to emit diagnostics before
+channels have been completely configured.
+
+After C<buffer_messages(@types)> has been called, messages sent with
+C<msg> to a channel whose type is listed in C<@types> will be stored in a
+list for later processing.
+
+This backlog of messages is processed when C<flush_messages> is
+called, with the current channel options (not the options in effect,
+at the time of C<msg>).  So for instance, if some channel was silenced
+in the meantime, messages to this channel will not be printed.
+
+C<flush_messages> cancels the effect of C<buffer_messages>.  Following
+calls to C<msg> are processed immediately as usual.
+
+=cut
+
+sub buffer_messages (@)
+{
+  foreach my $type (@_)
+    {
+      $buffering{$type} = 1;
+    }
+}
+
+sub flush_messages ()
+{
+  %buffering = ();
+  foreach my $args (@backlog)
+    {
+      &msg (@$args);
+    }
+  @backlog = ();
+}
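+
+# Hedged sketch (not in the original file): buffering diagnostics until the
+# command line has configured the channels might look like
+#
+#   buffer_messages ('warning');
+#   # ... parse options, possibly silencing some warning channels ...
+#   flush_messages ();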
+
+=item C<setup_channel_queue ($queue, $key)>
+
+Set the queue to fill for each channel that is ordered,
+and the key to use for serialization.
+
+=cut
+sub setup_channel_queue ($$)
+{
+  my ($queue, $key) = @_;
+  foreach my $channel (keys %channels)
+    {
+      setup_channel $channel, queue => $queue, queue_key => $key
+        if $channels{$channel}{'ordered'};
+    }
+}
+
+=item C<pop_channel_queue ($queue)>
+
+Pop a message off the C<$queue>; the key has already been popped.
+
+=cut
+sub pop_channel_queue ($)
+{
+  my ($queue) = @_;
+  return _dequeue ($queue);
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Autom4te::Location>
+
+=head1 HISTORY
+
+Written by Alexandre Duret-Lutz E<lt>F<adl@gnu.org>E<gt>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Configure_ac.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Configure_ac.pm
new file mode 100644
index 0000000..c8c63fd
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Configure_ac.pm
@@ -0,0 +1,127 @@
+# Copyright (C) 2003, 2005, 2006, 2009, 2010 Free Software Foundation,
+# Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+###############################################################
+# The main copy of this file is in Automake's CVS repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Autom4te::Configure_ac;
+
+use strict;
+use Exporter;
+use Autom4te::Channels;
+use Autom4te::ChannelDefs;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (&find_configure_ac &require_configure_ac);
+
+=head1 NAME
+
+Autom4te::Configure_ac - Locate configure.ac or configure.in.
+
+=head1 SYNOPSIS
+
+  use Autom4te::Configure_ac;
+
+  # Try to locate configure.in or configure.ac in the current
+  # directory.  It may be absent.  Complain if both files exist.
+  my $file_name = find_configure_ac;
+
+  # Likewise, but bomb out if the file does not exist.
+  my $file_name = require_configure_ac;
+
+  # Likewise, but in $dir.
+  my $file_name = find_configure_ac ($dir);
+  my $file_name = require_configure_ac ($dir);
+
+=over 4
+
+=back
+
+=head2 Functions
+
+=over 4
+
+=item C<$configure_ac = find_configure_ac ([$directory])>
+
+Find a F<configure.ac> or F<configure.in> file in C<$directory>,
+defaulting to the current directory.  Complain if both files are present.
+Return the name of the file found, or the former if neither is present.
+
+=cut
+
+sub find_configure_ac (;@)
+{
+  my ($directory) = @_;
+  $directory ||= '.';
+  my $configure_ac =
+    File::Spec->canonpath (File::Spec->catfile ($directory, 'configure.ac'));
+  my $configure_in =
+    File::Spec->canonpath (File::Spec->catfile ($directory, 'configure.in'));
+
+  if (-f $configure_ac)
+    {
+      if (-f $configure_in)
+	{
+	  msg ('unsupported',
+	       "`$configure_ac' and `$configure_in' both present.\n"
+	       . "proceeding with `$configure_ac'");
+	}
+      return $configure_ac
+    }
+  elsif (-f $configure_in)
+    {
+      return $configure_in;
+    }
+  return $configure_ac;
+}
+
+
+=item C<$configure_ac = require_configure_ac ([$directory])>
+
+Like C<find_configure_ac>, but fail if neither is present.
+
+=cut
+
+sub require_configure_ac (;$)
+{
+  my $res = find_configure_ac (@_);
+  fatal "`configure.ac' or `configure.in' is required"
+    unless -f $res;
+  return $res
+}
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/FileUtils.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/FileUtils.pm
new file mode 100644
index 0000000..8d2b3e5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/FileUtils.pm
@@ -0,0 +1,452 @@
+# Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010 Free Software
+# Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+###############################################################
+# The main copy of this file is in Automake's CVS repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Autom4te::FileUtils;
+
+=head1 NAME
+
+Autom4te::FileUtils - handling files
+
+=head1 SYNOPSIS
+
+  use Autom4te::FileUtils
+
+=head1 DESCRIPTION
+
+This perl module provides various general purpose file handling functions.
+
+=cut
+
+use strict;
+use Exporter;
+use File::stat;
+use IO::File;
+use Autom4te::Channels;
+use Autom4te::ChannelDefs;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (&open_quote &contents
+	      &find_file &mtime
+	      &update_file &up_to_date_p
+	      &xsystem &xsystem_hint &xqx
+	      &dir_has_case_matching_file &reset_dir_cache
+	      &set_dir_cache_file);
+
+
+=item C<open_quote ($file_name)>
+
+Quote C<$file_name> for open.
+
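+A small sketch of the quoting rules described below (the file names are
+hypothetical):
+
+  open_quote (' spaced');     # returns './ spaced'
+  open_quote ("trailing ");   # returns "trailing \0"
+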
+=cut
+
+# $FILE_NAME
+# open_quote ($FILE_NAME)
+# -----------------------
+# If the string $S is a well-behaved file name, simply return it.
+# If it starts with white space, prepend `./', if it ends with
+# white space, add `\0'.  Return the new string.
+sub open_quote($)
+{
+  my ($s) = @_;
+  if ($s =~ m/^\s/)
+    {
+      $s = "./$s";
+    }
+  if ($s =~ m/\s$/)
+    {
+      $s = "$s\0";
+    }
+  return $s;
+}
+
+=item C<find_file ($file_name, @include)>
+
+Return the first path found for C<$file_name> in the C<@include> directories.
+
+We match exactly the behavior of GNU M4: first look in the current
+directory (which includes the case of absolute file names), and then,
+if the file name is not absolute, look in C<@include>.
+
+If the file is flagged as optional (ends with C<?>), then return undef
+if absent, otherwise exit with error.
+
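+A usage sketch (the file and directory names are hypothetical):
+
+  # Search the current directory first, then the `m4' include directory.
+  # The trailing `?' marks the file as optional, so undef is returned
+  # instead of dying when it is absent.
+  my $where = find_file ('foo.m4?', 'm4');
+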
+=cut
+
+# $FILE_NAME
+# find_file ($FILE_NAME, @INCLUDE)
+# --------------------------------
+sub find_file ($@)
+{
+  use File::Spec;
+
+  my ($file_name, @include) = @_;
+  my $optional = 0;
+
+  $optional = 1
+    if $file_name =~ s/\?$//;
+
+  return File::Spec->canonpath ($file_name)
+    if -e $file_name;
+
+  if (!File::Spec->file_name_is_absolute ($file_name))
+    {
+      foreach my $path (@include)
+	{
+	  return File::Spec->canonpath (File::Spec->catfile ($path, $file_name))
+	    if -e File::Spec->catfile ($path, $file_name)
+	}
+    }
+
+  fatal "$file_name: no such file or directory"
+    unless $optional;
+  return undef;
+}
+
+=item C<mtime ($file)>
+
+Return the mtime of C<$file>.  Missing files, or C<-> standing for
+C<STDIN> or C<STDOUT> are ``obsolete'', i.e., as old as possible.
+
+=cut
+
+# $MTIME
+# MTIME ($FILE)
+# -------------
+sub mtime ($)
+{
+  my ($file) = @_;
+
+  return 0
+    if $file eq '-' || ! -f $file;
+
+  my $stat = stat ($file)
+    or fatal "cannot stat $file: $!";
+
+  return $stat->mtime;
+}
+
+
+=item C<update_file ($from, $to, [$force])>
+
+Rename C<$from> as C<$to>, preserving C<$to> timestamp if it has not
+changed, unless C<$force> is true (defaults to false).  Recognize
+C<$to> = C<-> standing for C<STDOUT>.  C<$from> is always
+removed/renamed.
+
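+A minimal usage sketch (hypothetical file names):
+
+  # Install the freshly generated `output.tmp' as `output', keeping the
+  # old timestamp (and just removing `output.tmp') if nothing changed.
+  update_file ('output.tmp', 'output');
+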
+=cut
+
+# &update_file ($FROM, $TO; $FORCE)
+# ---------------------------------
+sub update_file ($$;$)
+{
+  my ($from, $to, $force) = @_;
+  $force = 0
+    unless defined $force;
+  my $SIMPLE_BACKUP_SUFFIX = $ENV{'SIMPLE_BACKUP_SUFFIX'} || '~';
+  use File::Compare;
+  use File::Copy;
+
+  if ($to eq '-')
+    {
+      my $in = new IO::File ("< " . open_quote ($from));
+      my $out = new IO::File (">-");
+      while ($_ = $in->getline)
+	{
+	  print $out $_;
+	}
+      $in->close;
+      unlink ($from) || fatal "cannot remove $from: $!";
+      return;
+    }
+
+  if (!$force && -f "$to" && compare ("$from", "$to") == 0)
+    {
+      # File didn't change, so don't update its mod time.
+      msg 'note', "`$to' is unchanged";
+      unlink ($from)
+        or fatal "cannot remove $from: $!";
+      return
+    }
+
+  if (-f "$to")
+    {
+      # Back up and install the new one.
+      move ("$to",  "$to$SIMPLE_BACKUP_SUFFIX")
+	or fatal "cannot backup $to: $!";
+      move ("$from", "$to")
+	or fatal "cannot rename $from as $to: $!";
+      msg 'note', "`$to' is updated";
+    }
+  else
+    {
+      move ("$from", "$to")
+	or fatal "cannot rename $from as $to: $!";
+      msg 'note', "`$to' is created";
+    }
+}
+
+
+=item C<up_to_date_p ($file, @dep)>
+
+Is C<$file> more recent than C<@dep>?
+
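+For example (hypothetical file names):
+
+  # True if `configure' is at least as recent as all of its inputs.
+  my $fresh = up_to_date_p ('configure', 'configure.ac', 'aclocal.m4');
+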
+=cut
+
+# $BOOLEAN
+# &up_to_date_p ($FILE, @DEP)
+# ---------------------------
+sub up_to_date_p ($@)
+{
+  my ($file, @dep) = @_;
+  my $mtime = mtime ($file);
+
+  foreach my $dep (@dep)
+    {
+      if ($mtime < mtime ($dep))
+	{
+	  verb "up_to_date ($file): outdated: $dep";
+	  return 0;
+	}
+    }
+
+  verb "up_to_date ($file): up to date";
+  return 1;
+}
+
+
+=item C<handle_exec_errors ($command, [$expected_exit_code = 0], [$hint])>
+
+Display an error message for C<$command>, based on the content of
+C<$?> and C<$!>.  Be quiet if the command exited normally
+with C<$expected_exit_code>.  If C<$hint> is given, display that as well
+if the command failed to run at all.
+
+=cut
+
+sub handle_exec_errors ($;$$)
+{
+  my ($command, $expected, $hint) = @_;
+  $expected = 0 unless defined $expected;
+  if (defined $hint)
+    {
+      $hint = "\n" . $hint;
+    }
+  else
+    {
+      $hint = '';
+    }
+
+  $command = (split (' ', $command))[0];
+  if ($!)
+    {
+      fatal "failed to run $command: $!" . $hint;
+    }
+  else
+    {
+      use POSIX qw (WIFEXITED WEXITSTATUS WIFSIGNALED WTERMSIG);
+
+      if (WIFEXITED ($?))
+	{
+	  my $status = WEXITSTATUS ($?);
+	  # Propagate exit codes.
+	  fatal ('',
+		 "$command failed with exit status: $status",
+		 exit_code => $status)
+	    unless $status == $expected;
+	}
+      elsif (WIFSIGNALED ($?))
+	{
+	  my $signal = WTERMSIG ($?);
+	  fatal "$command terminated by signal: $signal";
+	}
+      else
+	{
+	  fatal "$command exited abnormally";
+	}
+    }
+}
+
+=item C<xqx ($command)>
+
+Same as C<qx> (in scalar context), but fails on errors.
+
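+A usage sketch (the command is illustrative only):
+
+  # Capture the command's entire output; a failure aborts via fatal ().
+  my $version = xqx ('m4 --version');
+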
+=cut
+
+# xqx ($COMMAND)
+# --------------
+sub xqx ($)
+{
+  my ($command) = @_;
+
+  verb "running: $command";
+
+  $! = 0;
+  my $res = `$command`;
+  handle_exec_errors $command
+    if $?;
+
+  return $res;
+}
+
+
+=item C<xsystem (@argv)>
+
+Same as C<system>, but fails on errors, and reports the C<@argv>
+in verbose mode.
+
+=cut
+
+sub xsystem (@)
+{
+  my (@command) = @_;
+
+  verb "running: @command";
+
+  $! = 0;
+  handle_exec_errors "@command"
+    if system @command;
+}
+
+
+=item C<xsystem_hint ($msg, @argv)>
+
+Same as C<xsystem>, but accepts a hint that will be displayed
+in case the command fails to run at all.
+
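+A usage sketch (both the hint text and the command are illustrative):
+
+  xsystem_hint ("you may need to install GNU m4 first", 'm4', 'input.m4');
+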
+=cut
+
+sub xsystem_hint (@)
+{
+  my ($hint, @command) = @_;
+
+  verb "running: @command";
+
+  $! = 0;
+  handle_exec_errors "@command", 0, $hint
+    if system @command;
+}
+
+
+=item C<contents ($file_name)>
+
+Return the contents of C<$file_name>.
+
+=cut
+
+# contents ($FILE_NAME)
+# ---------------------
+sub contents ($)
+{
+  my ($file) = @_;
+  verb "reading $file";
+  local $/;			# Turn on slurp-mode.
+  my $f = new Autom4te::XFile "< " . open_quote ($file);
+  my $contents = $f->getline;
+  $f->close;
+  return $contents;
+}
+
+
+=item C<dir_has_case_matching_file ($DIRNAME, $FILE_NAME)>
+
+Return true iff $DIRNAME contains a file name that matches $FILE_NAME
+case-sensitively.
+
+We need to be cautious on case-insensitive case-preserving file
+systems (e.g. Mac OS X's HFS+).  On such systems C<-f 'Foo'> and C<-f
+'foO'> answer the same thing.  Hence if a package distributes its own
+F<CHANGELOG> file, but has no F<ChangeLog> file, automake would still
+try to distribute F<ChangeLog> (because it thinks it exists) in
+addition to F<CHANGELOG>, although it is impossible for these two
+files to be in the same directory (the two file names designate the
+same file).
+
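+A usage sketch (hypothetical directory and file names):
+
+  # True only if the directory listing really contains `ChangeLog' with
+  # this exact spelling, even on a case-insensitive file system.
+  my $really_there = dir_has_case_matching_file ('.', 'ChangeLog');
+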
+=cut
+
+use vars '%_directory_cache';
+sub dir_has_case_matching_file ($$)
+{
+  # Note that print File::Spec->case_tolerant returns 0 even on MacOS
+  # X (with Perl v5.8.1-RC3 at least), so do not try to shortcut this
+  # function using that.
+
+  my ($dirname, $file_name) = @_;
+  return 0 unless -f "$dirname/$file_name";
+
+  # The file appears to exist, however it might be a mirage if the
+  # system is case insensitive.  Let's browse the directory and check
+  # whether the file is really in.  We maintain a cache of directories
+  # so Automake doesn't spend all its time reading the same directory
+  # again and again.
+  if (!exists $_directory_cache{$dirname})
+    {
+      error "failed to open directory `$dirname'"
+	unless opendir (DIR, $dirname);
+      $_directory_cache{$dirname} = { map { $_ => 1 } readdir (DIR) };
+      closedir (DIR);
+    }
+  return exists $_directory_cache{$dirname}{$file_name};
+}
+
+=item C<reset_dir_cache ($dirname)>
+
+Clear C<dir_has_case_matching_file>'s cache for C<$dirname>.
+
+=cut
+
+sub reset_dir_cache ($)
+{
+  delete $_directory_cache{$_[0]};
+}
+
+=item C<set_dir_cache_file ($dirname, $file_name)>
+
+State that C<$dirname> contains C<$file_name> now.
+
+=cut
+
+sub set_dir_cache_file ($$)
+{
+  my ($dirname, $file_name) = @_;
+  $_directory_cache{$dirname}{$file_name} = 1
+    if exists $_directory_cache{$dirname};
+}
+
+1; # for require
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/General.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/General.pm
new file mode 100644
index 0000000..a6ff680
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/General.pm
@@ -0,0 +1,446 @@
+# autoconf -- create `configure' using m4 macros
+# Copyright (C) 2001, 2002, 2003, 2004, 2006, 2007, 2009, 2010 Free
+# Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Autom4te::General;
+
+=head1 NAME
+
+Autom4te::General - general support functions for Autoconf and Automake
+
+=head1 SYNOPSIS
+
+  use Autom4te::General
+
+=head1 DESCRIPTION
+
+This perl module provides various general purpose support functions
+used in several executables of the Autoconf and Automake packages.
+
+=cut
+
+use 5.005_03;
+use Exporter;
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use File::Basename;
+use File::Path ();
+use File::stat;
+use IO::File;
+use Carp;
+use strict;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+
+# Variables we define and export.
+my @export_vars =
+  qw ($debug $force $help $me $tmp $verbose $version);
+
+# Functions we define and export.
+my @export_subs =
+  qw (&debug
+      &getopt &shell_quote &mktmpdir
+      &uniq);
+
+# Functions we forward (coming from modules we use).
+my @export_forward_subs =
+  qw (&basename &dirname &fileparse);
+
+@EXPORT = (@export_vars, @export_subs, @export_forward_subs);
+
+
+# Variable we share with the main package.  Be sure to have a single
+# copy of them: using `my' together with multiple inclusion of this
+# package would introduce several copies.
+
+=head2 Global Variables
+
+=over 4
+
+=item C<$debug>
+
+Set this variable to 1 if debug messages should be enabled.  Debug
+messages are meant for developers only, or for tracking down an
+incorrect execution.
+
+=cut
+
+use vars qw ($debug);
+$debug = 0;
+
+=item C<$force>
+
+Set this variable to 1 to recreate all the files, or to consider all
+the output files obsolete.
+
+=cut
+
+use vars qw ($force);
+$force = undef;
+
+=item C<$help>
+
+Set to the help message associated with the option C<--help>.
+
+=cut
+
+use vars qw ($help);
+$help = undef;
+
+=item C<$me>
+
+The name of this application, for diagnostic messages.
+
+=cut
+
+use vars qw ($me);
+$me = basename ($0);
+
+=item C<$tmp>
+
+The name of the temporary directory created by C<mktmpdir>.  Left
+C<undef> otherwise.
+
+=cut
+
+# Our tmp dir.
+use vars qw ($tmp);
+$tmp = undef;
+
+=item C<$verbose>
+
+Enable verbosity messages.  These messages are meant for ordinary
+users, and typically make explicit the steps being performed.
+
+=cut
+
+use vars qw ($verbose);
+$verbose = 0;
+
+=item C<$version>
+
+Set to the version message associated with the option C<--version>.
+
+=cut
+
+use vars qw ($version);
+$version = undef;
+
+=back
+
+=cut
+
+
+
+## ----- ##
+## END.  ##
+## ----- ##
+
+=head2 Functions
+
+=over 4
+
+=item C<END>
+
+Filter Perl's exit codes, delete any temporary directory (unless
+C<$debug>), and exit nonzero whenever closing C<STDOUT> fails.
+
+=cut
+
+# END
+# ---
+sub END
+{
+  # $? contains the exit status we will return.
+  # It was set using one of the following ways:
+  #
+  #  1) normal termination
+  #     this sets $? = 0
+  #  2) calling `exit (n)'
+  #     this sets $? = n
+  #  3) calling die or friends (croak, confess...):
+  #     a) when $! is non-0
+  #        this set $? = $!
+  #     b) when $! is 0 but $? is not
+  #        this sets $? = ($? >> 8)   (i.e., the exit code of the
+  #        last program executed)
+  #     c) when both $! and $? are 0
+  #        this sets $? = 255
+  #
+  # Cases 1), 2), and 3b) are fine, but we prefer $? = 1 for 3a) and 3c).
+  my $status = $?;
+  $status = 1 if ($! && $! == $?) || $? == 255;
+  # (Note that we cannot safely distinguish calls to `exit (n)'
+  # from calls to die when `$! = n'.  It's not a big deal because
+  # we only call `exit (0)' or `exit (1)'.)
+
+  if (!$debug && defined $tmp && -d $tmp)
+    {
+      local $SIG{__WARN__} = sub { $status = 1; warn $_[0] };
+      File::Path::rmtree $tmp;
+    }
+
+  # This is required if the code might send any output to stdout,
+  # e.g., even --version or --help.  So it's best to do it unconditionally.
+  if (! close STDOUT)
+    {
+      print STDERR "$me: closing standard output: $!\n";
+      $? = 1;
+      return;
+    }
+
+  $? = $status;
+}
+
+
+## ----------- ##
+## Functions.  ##
+## ----------- ##
+
+
+=item C<debug (@message)>
+
+If the debug mode is enabled (C<$debug> and C<$verbose>), report the
+C<@message> on C<STDERR>, signed with the name of the program.
+
+=cut
+
+# &debug(@MESSAGE)
+# ----------------
+# Messages displayed only if $DEBUG and $VERBOSE.
+sub debug (@)
+{
+  print STDERR "$me: ", @_, "\n"
+    if $verbose && $debug;
+}
+
+
+=item C<getopt (%option)>
+
+Wrapper around C<Getopt::Long>.  In addition to the user C<option>s,
+support C<-h>/C<--help>, C<-V>/C<--version>, C<-v>/C<--verbose>,
+C<-d>/C<--debug>, C<-f>/C<--force>.  Conform to the GNU Coding
+Standards for error messages.  Try to work around a weird behavior
+from C<Getopt::Long> so that C<-> is preserved in C<@ARGV> instead of
+being rejected as a broken option.
+
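+A minimal sketch of how a front-end script might use it (the option and
+the variable are hypothetical):
+
+  my $output = '-';
+  getopt ('o|output=s' => \$output);
+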
+=cut
+
+# getopt (%OPTION)
+# ----------------
+# Handle the %OPTION, plus all the common options.
+# Work around Getopt bugs wrt `-'.
+sub getopt (%)
+{
+  my (%option) = @_;
+  use Getopt::Long;
+
+  # F*k.  Getopt seems bogus and dies when given `-' with `bundling'.
+  # If fixed some day, use this: '' => sub { push @ARGV, "-" }
+  my $stdin = grep /^-$/, @ARGV;
+  @ARGV = grep !/^-$/, @ARGV;
+  %option = ("h|help"     => sub { print $help; exit 0 },
+	     "V|version"  => sub { print $version; exit 0 },
+
+	     "v|verbose"  => sub { ++$verbose },
+	     "d|debug"    => sub { ++$debug },
+	     'f|force'    => \$force,
+
+	     # User options last, so that they have precedence.
+	     %option);
+  Getopt::Long::Configure ("bundling", "pass_through");
+  GetOptions (%option)
+    or exit 1;
+
+  foreach (grep { /^-./ } @ARGV)
+    {
+      print STDERR "$0: unrecognized option `$_'\n";
+      print STDERR "Try `$0 --help' for more information.\n";
+      exit (1);
+    }
+
+  push @ARGV, '-'
+    if $stdin;
+
+  setup_channel 'note', silent => !$verbose;
+  setup_channel 'verb', silent => !$verbose;
+}
+
+
+=item C<shell_quote ($file_name)>
+
+Quote C<$file_name> for the shell.
+
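+For illustration (hypothetical strings):
+
+  shell_quote ('plain/name.txt');   # returned unchanged
+  shell_quote ('has spaces');       # returns 'has spaces' (single-quoted)
+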
+=cut
+
+# $FILE_NAME
+# shell_quote ($FILE_NAME)
+# ------------------------
+# If the string $S is a well-behaved file name, simply return it.
+# If it contains white space, quotes, etc., quote it, and return
+# the new string.
+sub shell_quote($)
+{
+  my ($s) = @_;
+  if ($s =~ m![^\w+/.,-]!)
+    {
+      # Convert each single quote to '\''
+      $s =~ s/\'/\'\\\'\'/g;
+      # Then single quote the string.
+      $s = "'$s'";
+    }
+  return $s;
+}
+
+=item C<mktmpdir ($signature)>
+
+Create a temporary directory whose name is based on C<$signature>.
+Store its name in C<$tmp>.  C<END> is in charge of removing it, unless
+C<$debug>.
+
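+A usage sketch (the signature string is arbitrary):
+
+  mktmpdir ('am4t');   # e.g. creates /tmp/am4tXXXXXX and sets $tmp to it
+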
+=cut
+
+# mktmpdir ($SIGNATURE)
+# ---------------------
+sub mktmpdir ($)
+{
+  my ($signature) = @_;
+  my $TMPDIR = $ENV{'TMPDIR'} || '/tmp';
+  my $quoted_tmpdir = shell_quote ($TMPDIR);
+
+  # If mktemp supports dirs, use it.
+  $tmp = `(umask 077 &&
+	   mktemp -d $quoted_tmpdir/"${signature}XXXXXX") 2>/dev/null`;
+  chomp $tmp;
+
+  if (!$tmp || ! -d $tmp)
+    {
+      $tmp = "$TMPDIR/$signature" . int (rand 10000) . ".$$";
+      mkdir $tmp, 0700
+	or croak "$me: cannot create $tmp: $!\n";
+    }
+
+  print STDERR "$me:$$: working in $tmp\n"
+    if $debug;
+}
+
+
+=item C<uniq (@list)>
+
+Return C<@list> with no duplicates, keeping only the first
+occurrences.
+
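+For example:
+
+  my @letters = uniq (qw (a b a c b));   # yields (a b c)
+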
+=cut
+
+# @RES
+# uniq (@LIST)
+# ------------
+sub uniq (@)
+{
+  my @res = ();
+  my %seen = ();
+  foreach my $item (@_)
+    {
+      if (! exists $seen{$item})
+	{
+	  $seen{$item} = 1;
+	  push (@res, $item);
+	}
+    }
+  return wantarray ? @res : "@res";
+}
+
+
+=item C<handle_exec_errors ($command)>
+
+Display an error message for C<$command>, based on the content of
+C<$?> and C<$!>.
+
+=cut
+
+
+# handle_exec_errors ($COMMAND)
+# -----------------------------
+sub handle_exec_errors ($)
+{
+  my ($command) = @_;
+
+  $command = (split (' ', $command))[0];
+  if ($!)
+    {
+      error "failed to run $command: $!";
+    }
+  else
+    {
+      use POSIX qw (WIFEXITED WEXITSTATUS WIFSIGNALED WTERMSIG);
+
+      if (WIFEXITED ($?))
+	{
+	  my $status = WEXITSTATUS ($?);
+	  # WIFEXITED and WEXITSTATUS can alter $!, reset it so that
+	  # error() actually propagates the command's exit status, not $!.
+	  $! = 0;
+	  error "$command failed with exit status: $status";
+	}
+      elsif (WIFSIGNALED ($?))
+	{
+	  my $signal = WTERMSIG ($?);
+	  # In this case we prefer to exit with status 1.
+	  $! = 1;
+	  error "$command terminated by signal: $signal";
+	}
+      else
+	{
+	  error "$command exited abnormally";
+	}
+    }
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Autom4te::XFile>
+
+=head1 HISTORY
+
+Written by Alexandre Duret-Lutz E<lt>F<adl@gnu.org>E<gt> and Akim
+Demaille E<lt>F<akim@freefriends.org>E<gt>.
+
+=cut
+
+
+
+1; # for require
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Request.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Request.pm
new file mode 100644
index 0000000..32f54ff
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Request.pm
@@ -0,0 +1,115 @@
+# autoconf -- create `configure' using m4 macros
+# Copyright (C) 2001, 2002, 2003, 2009, 2010 Free Software Foundation,
+# Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Autom4te::Request;
+
+=head1 NAME
+
+Autom4te::Request - a single m4 run request
+
+=head1 SYNOPSIS
+
+  use Autom4te::Request;
+
+=head1 DESCRIPTION
+
+This perl module provides the C<Autom4te::Request> class, which describes
+a single m4 run request: its cache id, include path, input files, and the
+set of macros being traced.
+
+=cut
+
+use strict;
+use Autom4te::Struct;
+use Carp;
+use Data::Dumper;
+
+struct
+  (
+   # The key of the cache files.
+   'id' => "\$",
+   # True iff %MACRO contains all the macros we want to trace.
+   'valid' => "\$",
+   # The include path.
+   'path' => '@',
+   # The set of input files.
+   'input' => '@',
+   # The set of macros currently traced.
+   'macro' => '%',
+  );
+
+
+# Serialize a request or all the current requests.
+sub marshall($)
+{
+  my ($caller) = @_;
+  my $res = '';
+
+  # CALLER is an object: instance method.
+  my $marshall = Data::Dumper->new ([$caller]);
+  $marshall->Indent(2)->Terse(0);
+  $res = $marshall->Dump . "\n";
+
+  return $res;
+}
+
+
+# includes_p ($SELF, @MACRO)
+# --------------------------
+# Does this request cover all of the @MACRO?
+sub includes_p
+{
+  my ($self, @macro) = @_;
+
+  foreach (@macro)
+    {
+      return 0
+	if ! exists ${$self->macro}{$_};
+    }
+  return 1;
+}
+
+
+=head1 SEE ALSO
+
+L<Autom4te::C4che>
+
+=head1 HISTORY
+
+Written by Akim Demaille E<lt>F<akim@freefriends.org>E<gt>.
+
+=cut
+
+
+
+1; # for require
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Struct.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Struct.pm
new file mode 100644
index 0000000..d352e52
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/Struct.pm
@@ -0,0 +1,628 @@
+# autoconf -- create `configure' using m4 macros
+# Copyright (C) 2001, 2002, 2006 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# This file is basically Perl 5.6's Class::Struct, but made compatible
+# with Perl 5.5.  If someday this has to be updated, be sure to rename
+# all the occurrences of Class::Struct into Autom4te::Struct, otherwise
+# if we `use' a Perl module (e.g., File::stat) that uses Class::Struct,
+# we would have two packages defining the same symbols.  Boom.
+
+###############################################################
+# The main copy of this file is in Automake's CVS repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Autom4te::Struct;
+
+## See POD after __END__
+
+use 5.005_03;
+
+use strict;
+use vars qw(@ISA @EXPORT $VERSION);
+
+use Carp;
+
+require Exporter;
+@ISA = qw(Exporter);
+@EXPORT = qw(struct);
+
+$VERSION = '0.58';
+
+## Tested on 5.002 and 5.003 without class membership tests:
+my $CHECK_CLASS_MEMBERSHIP = ($] >= 5.003_95);
+
+my $print = 0;
+sub printem {
+    if (@_) { $print = shift }
+    else    { $print++ }
+}
+
+{
+    package Autom4te::Struct::Tie_ISA;
+
+    sub TIEARRAY {
+        my $class = shift;
+        return bless [], $class;
+    }
+
+    sub STORE {
+        my ($self, $index, $value) = @_;
+        Autom4te::Struct::_subclass_error();
+    }
+
+    sub FETCH {
+        my ($self, $index) = @_;
+        $self->[$index];
+    }
+
+    sub FETCHSIZE {
+        my $self = shift;
+        return scalar(@$self);
+    }
+
+    sub DESTROY { }
+}
+
+sub struct {
+
+    # Determine parameter list structure, one of:
+    #   struct( class => [ element-list ])
+    #   struct( class => { element-list })
+    #   struct( element-list )
+    # Latter form assumes current package name as struct name.
+
+    my ($class, @decls);
+    my $base_type = ref $_[1];
+    if ( $base_type eq 'HASH' ) {
+        $class = shift;
+        @decls = %{shift()};
+        _usage_error() if @_;
+    }
+    elsif ( $base_type eq 'ARRAY' ) {
+        $class = shift;
+        @decls = @{shift()};
+        _usage_error() if @_;
+    }
+    else {
+        $base_type = 'ARRAY';
+        $class = (caller())[0];
+        @decls = @_;
+    }
+    _usage_error() if @decls % 2 == 1;
+
+    # Ensure we are not, and will not be, a subclass.
+
+    my $isa = do {
+        no strict 'refs';
+        \@{$class . '::ISA'};
+    };
+    _subclass_error() if @$isa;
+    tie @$isa, 'Autom4te::Struct::Tie_ISA';
+
+    # Create constructor.
+
+    croak "function 'new' already defined in package $class"
+        if do { no strict 'refs'; defined &{$class . "::new"} };
+
+    my @methods = ();
+    my %refs = ();
+    my %arrays = ();
+    my %hashes = ();
+    my %classes = ();
+    my $got_class = 0;
+    my $out = '';
+
+    $out = "{\n  package $class;\n  use Carp;\n  sub new {\n";
+    $out .= "    my (\$class, \%init) = \@_;\n";
+    $out .= "    \$class = __PACKAGE__ unless \@_;\n";
+
+    my $cnt = 0;
+    my $idx = 0;
+    my( $cmt, $name, $type, $elem );
+
+    if( $base_type eq 'HASH' ){
+        $out .= "    my(\$r) = {};\n";
+        $cmt = '';
+    }
+    elsif( $base_type eq 'ARRAY' ){
+        $out .= "    my(\$r) = [];\n";
+    }
+    while( $idx < @decls ){
+        $name = $decls[$idx];
+        $type = $decls[$idx+1];
+        push( @methods, $name );
+        if( $base_type eq 'HASH' ){
+            $elem = "{'${class}::$name'}";
+        }
+        elsif( $base_type eq 'ARRAY' ){
+            $elem = "[$cnt]";
+            ++$cnt;
+            $cmt = " # $name";
+        }
+        if( $type =~ /^\*(.)/ ){
+            $refs{$name}++;
+            $type = $1;
+        }
+        my $init = "defined(\$init{'$name'}) ? \$init{'$name'} :";
+        if( $type eq '@' ){
+            $out .= "    croak 'Initializer for $name must be array reference'\n";
+            $out .= "        if defined(\$init{'$name'}) && ref(\$init{'$name'}) ne 'ARRAY';\n";
+            $out .= "    \$r->$elem = $init [];$cmt\n";
+            $arrays{$name}++;
+        }
+        elsif( $type eq '%' ){
+            $out .= "    croak 'Initializer for $name must be hash reference'\n";
+            $out .= "        if defined(\$init{'$name'}) && ref(\$init{'$name'}) ne 'HASH';\n";
+            $out .= "    \$r->$elem = $init {};$cmt\n";
+            $hashes{$name}++;
+        }
+        elsif ( $type eq '$') {
+            $out .= "    \$r->$elem = $init undef;$cmt\n";
+        }
+        elsif( $type =~ /^\w+(?:::\w+)*$/ ){
+            $init = "defined(\$init{'$name'}) ? \%{\$init{'$name'}} : ()";
+            $out .= "    croak 'Initializer for $name must be hash reference'\n";
+            $out .= "        if defined(\$init{'$name'}) && ref(\$init{'$name'}) ne 'HASH';\n";
+            $out .= "    \$r->$elem = '${type}'->new($init);$cmt\n";
+            $classes{$name} = $type;
+            $got_class = 1;
+        }
+        else{
+            croak "'$type' is not a valid struct element type";
+        }
+        $idx += 2;
+    }
+    $out .= "    bless \$r, \$class;\n  }\n";
+
+    # Create accessor methods.
+
+    my( $pre, $pst, $sel );
+    $cnt = 0;
+    foreach $name (@methods){
+        if ( do { no strict 'refs'; defined &{$class . "::$name"} } ) {
+            carp "function '$name' already defined, overrides struct accessor method";
+        }
+        else {
+            $pre = $pst = $cmt = $sel = '';
+            if( defined $refs{$name} ){
+                $pre = "\\(";
+                $pst = ")";
+                $cmt = " # returns ref";
+            }
+            $out .= "  sub $name {$cmt\n    my \$r = shift;\n";
+            if( $base_type eq 'ARRAY' ){
+                $elem = "[$cnt]";
+                ++$cnt;
+            }
+            elsif( $base_type eq 'HASH' ){
+                $elem = "{'${class}::$name'}";
+            }
+            if( defined $arrays{$name} ){
+                $out .= "    my \$i;\n";
+                $out .= "    \@_ ? (\$i = shift) : return \$r->$elem;\n";
+                $sel = "->[\$i]";
+            }
+            elsif( defined $hashes{$name} ){
+                $out .= "    my \$i;\n";
+                $out .= "    \@_ ? (\$i = shift) : return \$r->$elem;\n";
+                $sel = "->{\$i}";
+            }
+            elsif( defined $classes{$name} ){
+                if ( $CHECK_CLASS_MEMBERSHIP ) {
+                    $out .= "    croak '$name argument is wrong class' if \@_ && ! UNIVERSAL::isa(\$_[0], '$classes{$name}');\n";
+                }
+            }
+            $out .= "    croak 'Too many args to $name' if \@_ > 1;\n";
+            $out .= "    \@_ ? ($pre\$r->$elem$sel = shift$pst) : $pre\$r->$elem$sel$pst;\n";
+            $out .= "  }\n";
+        }
+    }
+    $out .= "}\n1;\n";
+
+    print $out if $print;
+    my $result = eval $out;
+    carp $@ if $@;
+}
+
+sub _usage_error {
+    confess "struct usage error";
+}
+
+sub _subclass_error {
+    croak 'struct class cannot be a subclass (@ISA not allowed)';
+}
+
+1; # for require
+
+
+__END__
+
+=head1 NAME
+
+Autom4te::Struct - declare struct-like datatypes as Perl classes
+
+=head1 SYNOPSIS
+
+    use Autom4te::Struct;
+            # declare struct, based on array:
+    struct( CLASS_NAME => [ ELEMENT_NAME => ELEMENT_TYPE, ... ]);
+            # declare struct, based on hash:
+    struct( CLASS_NAME => { ELEMENT_NAME => ELEMENT_TYPE, ... });
+
+    package CLASS_NAME;
+    use Autom4te::Struct;
+            # declare struct, based on array, implicit class name:
+    struct( ELEMENT_NAME => ELEMENT_TYPE, ... );
+
+
+    package Myobj;
+    use Autom4te::Struct;
+            # declare struct with four types of elements:
+    struct( s => '$', a => '@', h => '%', c => 'My_Other_Class' );
+
+    $obj = new Myobj;               # constructor
+
+                                    # scalar type accessor:
+    $element_value = $obj->s;           # element value
+    $obj->s('new value');               # assign to element
+
+                                    # array type accessor:
+    $ary_ref = $obj->a;                 # reference to whole array
+    $ary_element_value = $obj->a(2);    # array element value
+    $obj->a(2, 'new value');            # assign to array element
+
+                                    # hash type accessor:
+    $hash_ref = $obj->h;                # reference to whole hash
+    $hash_element_value = $obj->h('x'); # hash element value
+    $obj->h('x', 'new value');        # assign to hash element
+
+                                    # class type accessor:
+    $element_value = $obj->c;           # object reference
+    $obj->c->method(...);               # call method of object
+    $obj->c(new My_Other_Class);        # assign a new object
+
+
+=head1 DESCRIPTION
+
+C<Autom4te::Struct> exports a single function, C<struct>.
+Given a list of element names and types, and optionally
+a class name, C<struct> creates a Perl 5 class that implements
+a "struct-like" data structure.
+
+The new class is given a constructor method, C<new>, for creating
+struct objects.
+
+Each element in the struct data has an accessor method, which is
+used to assign to the element and to fetch its value.  The
+default accessor can be overridden by declaring a C<sub> of the
+same name in the package.  (See Example 2.)
+
+Each element's type can be scalar, array, hash, or class.
+
+
+=head2 The C<struct()> function
+
+The C<struct> function has three forms of parameter-list.
+
+    struct( CLASS_NAME => [ ELEMENT_LIST ]);
+    struct( CLASS_NAME => { ELEMENT_LIST });
+    struct( ELEMENT_LIST );
+
+The first and second forms explicitly identify the name of the
+class being created.  The third form assumes the current package
+name as the class name.
+
+An object of a class created by the first and third forms is
+based on an array, whereas an object of a class created by the
+second form is based on a hash. The array-based forms will be
+somewhat faster and smaller; the hash-based forms are more
+flexible.
+
+The class created by C<struct> must not be a subclass of another
+class other than C<UNIVERSAL>.
+
+It can, however, be used as a superclass for other classes. To facilitate
+this, the generated constructor method uses a two-argument blessing.
+Furthermore, if the class is hash-based, the key of each element is
+prefixed with the class name (see I<Perl Cookbook>, Recipe 13.12).
+
+A function named C<new> must not be explicitly defined in a class
+created by C<struct>.
+
+The I<ELEMENT_LIST> has the form
+
+    NAME => TYPE, ...
+
+Each name-type pair declares one element of the struct. Each
+element name will be defined as an accessor method unless a
+method by that name is explicitly defined; in the latter case, a
+warning is issued if the warning flag (B<-w>) is set.
+
+
+=head2 Element Types and Accessor Methods
+
+The four element types -- scalar, array, hash, and class -- are
+represented by strings -- C<'$'>, C<'@'>, C<'%'>, and a class name --
+optionally preceded by a C<'*'>.
+
+The accessor method provided by C<struct> for an element depends
+on the declared type of the element.
+
+=over
+
+=item Scalar (C<'$'> or C<'*$'>)
+
+The element is a scalar, and by default is initialized to C<undef>
+(but see L<Initializing with new>).
+
+The accessor's argument, if any, is assigned to the element.
+
+If the element type is C<'$'>, the value of the element (after
+assignment) is returned. If the element type is C<'*$'>, a reference
+to the element is returned.
+
+=item Array (C<'@'> or C<'*@'>)
+
+The element is an array, initialized by default to C<()>.
+
+With no argument, the accessor returns a reference to the
+element's whole array (whether or not the element was
+specified as C<'@'> or C<'*@'>).
+
+With one or two arguments, the first argument is an index
+specifying one element of the array; the second argument, if
+present, is assigned to the array element.  If the element type
+is C<'@'>, the accessor returns the array element value.  If the
+element type is C<'*@'>, a reference to the array element is
+returned.
+
+=item Hash (C<'%'> or C<'*%'>)
+
+The element is a hash, initialized by default to C<()>.
+
+With no argument, the accessor returns a reference to the
+element's whole hash (whether or not the element was
+specified as C<'%'> or C<'*%'>).
+
+With one or two arguments, the first argument is a key specifying
+one element of the hash; the second argument, if present, is
+assigned to the hash element.  If the element type is C<'%'>, the
+accessor returns the hash element value.  If the element type is
+C<'*%'>, a reference to the hash element is returned.
+
+=item Class (C<'Class_Name'> or C<'*Class_Name'>)
+
+The element's value must be a reference blessed to the named
+class or to one of its subclasses. The element is initialized to
+the result of calling the C<new> constructor of the named class.
+
+The accessor's argument, if any, is assigned to the element. The
+accessor will C<croak> if this is not an appropriate object
+reference.
+
+If the element type does not start with a C<'*'>, the accessor
+returns the element value (after assignment). If the element type
+starts with a C<'*'>, a reference to the element itself is returned.
+
+=back
+
+=head2 Initializing with C<new>
+
+C<struct> always creates a constructor called C<new>. That constructor
+may take a list of initializers for the various elements of the new
+struct.
+
+Each initializer is a pair of values: I<element name>C< =E<gt> >I<value>.
+The initializer value for a scalar element is just a scalar value. The
+initializer for an array element is an array reference. The initializer
+for a hash is a hash reference.
+
+The initializer for a class element is also a hash reference, and the
+contents of that hash are passed to the element's own constructor.
+
+See Example 3 below for an example of initialization.
+
+
+=head1 EXAMPLES
+
+=over
+
+=item Example 1
+
+Giving a struct element a class type that is also a struct is how
+structs are nested.  Here, C<timeval> represents a time (seconds and
+microseconds), and C<rusage> has two elements, each of which is of
+type C<timeval>.
+
+    use Autom4te::Struct;
+
+    struct( rusage => {
+        ru_utime => timeval,  # seconds
+        ru_stime => timeval,  # microseconds
+    });
+
+    struct( timeval => [
+        tv_secs  => '$',
+        tv_usecs => '$',
+    ]);
+
+        # create an object:
+    my $t = new rusage;
+
+        # $t->ru_utime and $t->ru_stime are objects of type timeval.
+        # set $t->ru_utime to 100.0 sec and $t->ru_stime to 5.0 sec.
+    $t->ru_utime->tv_secs(100);
+    $t->ru_utime->tv_usecs(0);
+    $t->ru_stime->tv_secs(5);
+    $t->ru_stime->tv_usecs(0);
+
+
+=item Example 2
+
+An accessor function can be redefined in order to provide
+additional checking of values, etc.  Here, we want the C<count>
+element always to be nonnegative, so we redefine the C<count>
+accessor accordingly.
+
+    package MyObj;
+    use Autom4te::Struct;
+
+    # declare the struct
+    struct ( 'MyObj', { count => '$', stuff => '%' } );
+
+    # override the default accessor method for 'count'
+    sub count {
+        my $self = shift;
+        if ( @_ ) {
+            die 'count must be nonnegative' if $_[0] < 0;
+            $self->{'count'} = shift;
+            warn "Too many args to count" if @_;
+        }
+        return $self->{'count'};
+    }
+
+    package main;
+    $x = new MyObj;
+    print "\$x->count(5) = ", $x->count(5), "\n";
+                            # prints '$x->count(5) = 5'
+
+    print "\$x->count = ", $x->count, "\n";
+                            # prints '$x->count = 5'
+
+    print "\$x->count(-5) = ", $x->count(-5), "\n";
+                            # dies due to negative argument!
+
+=item Example 3
+
+The constructor of a generated class can be passed a list
+of I<element>=>I<value> pairs, with which to initialize the struct.
+If no initializer is specified for a particular element, its default
+initialization is performed instead. Initializers for non-existent
+elements are silently ignored.
+
+Note that the initializer for a nested struct is specified
+as an anonymous hash of initializers, which is passed on to the nested
+struct's constructor.
+
+
+    use Autom4te::Struct;
+
+    struct Breed =>
+    {
+        name  => '$',
+        cross => '$',
+    };
+
+    struct Cat =>
+    [
+        name     => '$',
+        kittens  => '@',
+        markings => '%',
+        breed    => 'Breed',
+    ];
+
+
+    my $cat = Cat->new( name     => 'Socks',
+                        kittens  => ['Monica', 'Kenneth'],
+                        markings => { socks=>1, blaze=>"white" },
+                        breed    => { name=>'short-hair', cross=>1 },
+                      );
+
+    print "Once a cat called ", $cat->name, "\n";
+    print "(which was a ", $cat->breed->name, ")\n";
+    print "had two kittens: ", join(' and ', @{$cat->kittens}), "\n";
+
+=back
+
+=head1 Author and Modification History
+
+Modified by Akim Demaille, 2001-08-03
+
+    Rename as Autom4te::Struct to avoid name clashes with
+    Class::Struct.
+
+    Make it compatible with Perl 5.5.
+
+Modified by Damian Conway, 1999-03-05, v0.58.
+
+    Added handling of hash-like arg list to class ctor.
+
+    Changed to two-argument blessing in ctor to support
+    derivation from created classes.
+
+    Added classname prefixes to keys in hash-based classes
+    (refer to "Perl Cookbook", Recipe 13.12 for rationale).
+
+    Corrected behavior of accessors for '*@' and '*%' struct
+    elements.  Package now implements documented behavior when
+    returning a reference to an entire hash or array element.
+    Previously these were returned as a reference to a reference
+    to the element.
+
+
+Renamed to C<Class::Struct> and modified by Jim Miner, 1997-04-02.
+
+    members() function removed.
+    Documentation corrected and extended.
+    Use of struct() in a subclass prohibited.
+    User definition of accessor allowed.
+    Treatment of '*' in element types corrected.
+    Treatment of classes as element types corrected.
+    Class name to struct() made optional.
+    Diagnostic checks added.
+
+
+Originally C<Class::Template> by Dean Roehrich.
+
+    # Template.pm   --- struct/member template builder
+    #   12mar95
+    #   Dean Roehrich
+    #
+    # changes/bugs fixed since 28nov94 version:
+    #  - podified
+    # changes/bugs fixed since 21nov94 version:
+    #  - Fixed examples.
+    # changes/bugs fixed since 02sep94 version:
+    #  - Moved to Class::Template.
+    # changes/bugs fixed since 20feb94 version:
+    #  - Updated to be a more proper module.
+    #  - Added "use strict".
+    #  - Bug in build_methods, was using @var when @$var needed.
+    #  - Now using my() rather than local().
+    #
+    # Uses perl5 classes to create nested data types.
+    # This is offered as one implementation of Tom Christiansen's "structs.pl"
+    # idea.
+
+=cut
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/XFile.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/XFile.pm
new file mode 100644
index 0000000..0e39763
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/Autom4te/XFile.pm
@@ -0,0 +1,320 @@
+# Copyright (C) 2001, 2003, 2004, 2006, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Akim Demaille <akim@freefriends.org>.
+
+###############################################################
+# The main copy of this file is in Automake's CVS repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Autom4te::XFile;
+
+=head1 NAME
+
+Autom4te::XFile - supply object methods for filehandles with error handling
+
+=head1 SYNOPSIS
+
+    use Autom4te::XFile;
+
+    $fh = new Autom4te::XFile;
+    $fh->open ("< file");
+    # No need to check $FH: we died if open failed.
+    print <$fh>;
+    $fh->close;
+    # No need to check the return value of close: we died if it failed.
+
+    $fh = new Autom4te::XFile "> file";
+    # No need to check $FH: we died if new failed.
+    print $fh "bar\n";
+    $fh->close;
+
+    $fh = new Autom4te::XFile "file", "r";
+    # No need to check $FH: we died if new failed.
+    print <$fh>;
+    undef $fh;   # automatically closes the file and checks for errors.
+
+    $fh = new Autom4te::XFile "file", O_WRONLY | O_APPEND;
+    # No need to check $FH: we died if new failed.
+    print $fh "corge\n";
+
+    $pos = $fh->getpos;
+    $fh->setpos ($pos);
+
+    undef $fh;   # automatically closes the file and checks for errors.
+
+    autoflush STDOUT 1;
+
+=head1 DESCRIPTION
+
+C<Autom4te::XFile> inherits from C<IO::File>.  It provides the method
+C<name> returning the file name.  It provides dying versions of the
+methods C<close>, C<lock> (corresponding to C<flock>), C<new>,
+C<open>, C<seek>, and C<truncate>.  It also overrides the C<getline>
+and C<getlines> methods to translate C<\r\n> to C<\n>.
+
+=cut
+
+require 5.000;
+use strict;
+use vars qw($VERSION @EXPORT @EXPORT_OK $AUTOLOAD @ISA);
+use Carp;
+use Errno;
+use IO::File;
+use File::Basename;
+use Autom4te::ChannelDefs;
+use Autom4te::Channels qw(msg);
+use Autom4te::FileUtils;
+
+require Exporter;
+require DynaLoader;
+
+@ISA = qw(IO::File Exporter DynaLoader);
+
+$VERSION = "1.2";
+
+@EXPORT = @IO::File::EXPORT;
+
+eval {
+  # Make all Fcntl O_XXX and LOCK_XXX constants available for importing
+  require Fcntl;
+  my @O = grep /^(LOCK|O)_/, @Fcntl::EXPORT, @Fcntl::EXPORT_OK;
+  Fcntl->import (@O);  # first we import what we want to export
+  push (@EXPORT, @O);
+};
+
+=head2 Methods
+
+=over
+
+=item C<$fh = new Autom4te::XFile ([$expr, ...])>
+
+Construct a new XFile object.  Any additional arguments
+are passed to C<open>.
+
+=cut
+
+sub new
+{
+  my $type = shift;
+  my $class = ref $type || $type || "Autom4te::XFile";
+  my $fh = $class->SUPER::new ();
+  if (@_)
+    {
+      $fh->open (@_);
+    }
+  $fh;
+}
+
+=item C<$fh-E<gt>open ([$file, ...])>
+
+Open a file, passing C<$file> and further arguments to C<IO::File::open>.
+Die if opening fails.  Store the name of the file.  Use binmode for writing.
+
+=cut
+
+sub open
+{
+  my $fh = shift;
+  my ($file) = @_;
+
+  # WARNING: Gross hack: $FH is a typeglob: use its hash slot to store
+  # the `name' of the file we are opening.  See the example with
+  # io_socket_timeout in IO::Socket for more, and read Graham's
+  # comment in IO::Handle.
+  ${*$fh}{'autom4te_xfile_file'} = "$file";
+
+  if (!$fh->SUPER::open (@_))
+    {
+      fatal "cannot open $file: $!";
+    }
+
+  # In case we're running under MSWindows, don't write with CRLF.
+  # (This circumvents a bug in at least Cygwin bash where the shell
+  # parsing fails on lines ending with the continuation character '\'
+  # and CRLF).
+  binmode $fh if $file =~ /^\s*>/;
+}
+
+=item C<$fh-E<gt>close>
+
+Close the file, handling errors.
+
+=cut
+
+sub close
+{
+  my $fh = shift;
+  if (!$fh->SUPER::close (@_))
+    {
+      my $file = $fh->name;
+      Autom4te::FileUtils::handle_exec_errors $file
+	unless $!;
+      fatal "cannot close $file: $!";
+    }
+}
+
+=item C<$line = $fh-E<gt>getline>
+
+Read and return a line from the file.  Ensure C<\r\n> is translated to
+C<\n> on input files.
+
+=cut
+
+# Some Win32/perl installations fail to translate \r\n to \n on input
+# so we do that here.
+sub getline
+{
+  local $_ = $_[0]->SUPER::getline;
+  # Perform a _global_ replacement: $_ may contain many lines
+  # in slurp mode ($/ = undef).
+  s/\015\012/\n/gs if defined $_;
+  return $_;
+}
+
+=item C<@lines = $fh-E<gt>getlines>
+
+Slurp all lines from the file.
+
+=cut
+
+sub getlines
+{
+  my @res = ();
+  my $line;
+  push @res, $line while $line = $_[0]->getline;
+  return @res;
+}
+
+=item C<$name = $fh-E<gt>name>
+
+Return the name of the file.
+
+=cut
+
+sub name
+{
+  my $fh = shift;
+  return ${*$fh}{'autom4te_xfile_file'};
+}
+
+=item C<$fh-E<gt>lock>
+
+Lock the file using C<flock>.  If locking fails for reasons other than
+C<flock> being unsupported, then error out if C<$ENV{'MAKEFLAGS'}> indicates
+that we are spawned from a parallel C<make>.
+
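+A hedged usage sketch (the file name is hypothetical; C<LOCK_EX> is
+re-exported from C<Fcntl> by this module when available):
+
+  my $fh = new Autom4te::XFile ('> output.lock');
+  $fh->lock (LOCK_EX);
+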
+=cut
+
+sub lock
+{
+  my ($fh, $mode) = @_;
+  # Cannot use @_ here.
+
+  # Unless explicitly configured otherwise, Perl implements its `flock' with the
+  # first of flock(2), fcntl(2), or lockf(3) that works.  These can fail on
+  # NFS-backed files, with ENOLCK (GNU/Linux) or EOPNOTSUPP (FreeBSD); we
+  # usually ignore these errors.  If $ENV{MAKEFLAGS} suggests that a parallel
+  # invocation of `make' has invoked the tool we serve, report all locking
+  # failures and abort.
+  #
+  # On Unicos, flock(2) and fcntl(2) over NFS hang indefinitely when `lockd' is
+  # not running.  NetBSD NFS clients silently grant all locks.  We do not
+  # attempt to defend against these dangers.
+  #
+  # -j is for parallel BSD make, -P is for parallel HP-UX make.
+  if (!flock ($fh, $mode))
+    {
+      my $make_j = (exists $ENV{'MAKEFLAGS'}
+		    && " -$ENV{'MAKEFLAGS'}" =~ / (-[BdeikrRsSw]*[jP]|--[jP]|---?jobs)/);
+      my $note = "\nforgo `make -j' or use a file system that supports locks";
+      my $file = $fh->name;
+
+      msg ($make_j ? 'fatal' : 'unsupported',
+	   "cannot lock $file with mode $mode: $!" . ($make_j ? $note : ""))
+	if $make_j || !($!{ENOLCK} || $!{EOPNOTSUPP});
+    }
+}
+
+=item C<$fh-E<gt>seek ($position, [$whence])>
+
+Seek file to C<$position>.  Die if seeking fails.
+
+=cut
+
+sub seek
+{
+  my $fh = shift;
+  # Cannot use @_ here.
+  if (!seek ($fh, $_[0], $_[1]))
+    {
+      my $file = $fh->name;
+      fatal "cannot rewind $file with @_: $!";
+    }
+}
+
+=item C<$fh-E<gt>truncate ($len)>
+
+Truncate the file to length C<$len>.  Die on failure.
+
+=cut
+
+sub truncate
+{
+  my ($fh, $len) = @_;
+  if (!truncate ($fh, $len))
+    {
+      my $file = $fh->name;
+      fatal "cannot truncate $file at $len: $!";
+    }
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<perlfunc>,
+L<perlop/"I/O Operators">,
+L<IO::File>
+L<IO::Handle>
+L<IO::Seekable>
+
+=head1 HISTORY
+
+Derived from IO::File.pm by Akim Demaille E<lt>F<akim@freefriends.org>E<gt>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/INSTALL b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/INSTALL
new file mode 100644
index 0000000..81fd332
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/INSTALL
@@ -0,0 +1,365 @@
+Installation Instructions
+*************************
+
+Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005,
+2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+   Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved.  This file is offered as-is,
+without warranty of any kind.
+
+Basic Installation
+==================
+
+   Briefly, the shell commands `./configure; make; make install' should
+configure, build, and install this package.  The following
+more-detailed instructions are generic; see the `README' file for
+instructions specific to this package.  Some packages provide this
+`INSTALL' file but do not implement all of the features documented
+below.  The lack of an optional feature in a given package is not
+necessarily a bug.  More recommendations for GNU packages can be found
+in *note Makefile Conventions: (standards)Makefile Conventions.
+
+   The `configure' shell script attempts to guess correct values for
+various system-dependent variables used during compilation.  It uses
+those values to create a `Makefile' in each directory of the package.
+It may also create one or more `.h' files containing system-dependent
+definitions.  Finally, it creates a shell script `config.status' that
+you can run in the future to recreate the current configuration, and a
+file `config.log' containing compiler output (useful mainly for
+debugging `configure').
+
+   It can also use an optional file (typically called `config.cache'
+and enabled with `--cache-file=config.cache' or simply `-C') that saves
+the results of its tests to speed up reconfiguring.  Caching is
+disabled by default to prevent problems with accidental use of stale
+cache files.
+
+   If you need to do unusual things to compile the package, please try
+to figure out how `configure' could check whether to do them, and mail
+diffs or instructions to the address given in the `README' so they can
+be considered for the next release.  If you are using the cache, and at
+some point `config.cache' contains results you don't want to keep, you
+may remove or edit it.
+
+   The file `configure.ac' (or `configure.in') is used to create
+`configure' by a program called `autoconf'.  You need `configure.ac' if
+you want to change it or regenerate `configure' using a newer version
+of `autoconf'.
+
+   The simplest way to compile this package is:
+
+  1. `cd' to the directory containing the package's source code and type
+     `./configure' to configure the package for your system.
+
+     Running `configure' might take a while.  While running, it prints
+     some messages telling which features it is checking for.
+
+  2. Type `make' to compile the package.
+
+  3. Optionally, type `make check' to run any self-tests that come with
+     the package, generally using the just-built uninstalled binaries.
+
+  4. Type `make install' to install the programs and any data files and
+     documentation.  When installing into a prefix owned by root, it is
+     recommended that the package be configured and built as a regular
+     user, and only the `make install' phase executed with root
+     privileges.
+
+  5. Optionally, type `make installcheck' to repeat any self-tests, but
+     this time using the binaries in their final installed location.
+     This target does not install anything.  Running this target as a
+     regular user, particularly if the prior `make install' required
+     root privileges, verifies that the installation completed
+     correctly.
+
+  6. You can remove the program binaries and object files from the
+     source code directory by typing `make clean'.  To also remove the
+     files that `configure' created (so you can compile the package for
+     a different kind of computer), type `make distclean'.  There is
+     also a `make maintainer-clean' target, but that is intended mainly
+     for the package's developers.  If you use it, you may have to get
+     all sorts of other programs in order to regenerate files that came
+     with the distribution.
+
+  7. Often, you can also type `make uninstall' to remove the installed
+     files again.  In practice, not all packages have tested that
+     uninstallation works correctly, even though it is required by the
+     GNU Coding Standards.
+
+  8. Some packages, particularly those that use Automake, provide `make
+     distcheck', which can be used by developers to test that all other
+     targets like `make install' and `make uninstall' work correctly.
+     This target is generally not run by end users.
+
+Compilers and Options
+=====================
+
+   Some systems require unusual options for compilation or linking that
+the `configure' script does not know about.  Run `./configure --help'
+for details on some of the pertinent environment variables.
+
+   You can give `configure' initial values for configuration parameters
+by setting variables in the command line or in the environment.  Here
+is an example:
+
+     ./configure CC=c99 CFLAGS=-g LIBS=-lposix
+
+   *Note Defining Variables::, for more details.
+
+Compiling For Multiple Architectures
+====================================
+
+   You can compile the package for more than one kind of computer at the
+same time, by placing the object files for each architecture in their
+own directory.  To do this, you can use GNU `make'.  `cd' to the
+directory where you want the object files and executables to go and run
+the `configure' script.  `configure' automatically checks for the
+source code in the directory that `configure' is in and in `..'.  This
+is known as a "VPATH" build.
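+
+   For example, a VPATH build in a separate build directory (the
+directory name here is arbitrary) might look like:
+
+     mkdir build
+     cd build
+     ../configure
+     make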
+
+   With a non-GNU `make', it is safer to compile the package for one
+architecture at a time in the source code directory.  After you have
+installed the package for one architecture, use `make distclean' before
+reconfiguring for another architecture.
+
+   On MacOS X 10.5 and later systems, you can create libraries and
+executables that work on multiple system types--known as "fat" or
+"universal" binaries--by specifying multiple `-arch' options to the
+compiler but only a single `-arch' option to the preprocessor.  Like
+this:
+
+     ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CPP="gcc -E" CXXCPP="g++ -E"
+
+   This is not guaranteed to produce working output in all cases; you
+may have to build one architecture at a time and combine the results
+using the `lipo' tool if you have problems.
+
+Installation Names
+==================
+
+   By default, `make install' installs the package's commands under
+`/usr/local/bin', include files under `/usr/local/include', etc.  You
+can specify an installation prefix other than `/usr/local' by giving
+`configure' the option `--prefix=PREFIX', where PREFIX must be an
+absolute file name.
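+
+   For example, to install everything under a private tree such as
+`$HOME/local' (the path is only illustrative):
+
+     ./configure --prefix=$HOME/local
+     make
+     make install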
+
+   You can specify separate installation prefixes for
+architecture-specific files and architecture-independent files.  If you
+pass the option `--exec-prefix=PREFIX' to `configure', the package uses
+PREFIX as the prefix for installing programs and libraries.
+Documentation and other data files still use the regular prefix.
+
+   In addition, if you use an unusual directory layout you can give
+options like `--bindir=DIR' to specify different values for particular
+kinds of files.  Run `configure --help' for a list of the directories
+you can set and what kinds of files go in them.  In general, the
+default for these options is expressed in terms of `${prefix}', so that
+specifying just `--prefix' will affect all of the other directory
+specifications that were not explicitly provided.
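+
+   For example, a layout that installs under `/usr' but keeps
+configuration files in `/etc' (paths are only illustrative) could be
+requested with:
+
+     ./configure --prefix=/usr --sysconfdir=/etc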
+
+   The most portable way to affect installation locations is to pass the
+correct locations to `configure'; however, many packages provide one or
+both of the following shortcuts of passing variable assignments to the
+`make install' command line to change installation locations without
+having to reconfigure or recompile.
+
+   The first method involves providing an override variable for each
+affected directory.  For example, `make install
+prefix=/alternate/directory' will choose an alternate location for all
+directory configuration variables that were expressed in terms of
+`${prefix}'.  Any directories that were specified during `configure',
+but not in terms of `${prefix}', must each be overridden at install
+time for the entire installation to be relocated.  The approach of
+makefile variable overrides for each directory variable is required by
+the GNU Coding Standards, and ideally causes no recompilation.
+However, some platforms have known limitations with the semantics of
+shared libraries that end up requiring recompilation when using this
+method, particularly noticeable in packages that use GNU Libtool.
+
+   The second method involves providing the `DESTDIR' variable.  For
+example, `make install DESTDIR=/alternate/directory' will prepend
+`/alternate/directory' before all installation names.  The approach of
+`DESTDIR' overrides is not required by the GNU Coding Standards, and
+does not work on platforms that have drive letters.  On the other hand,
+it does better at avoiding recompilation issues, and works well even
+when some directory options were not specified in terms of `${prefix}'
+at `configure' time.
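+
+   For example, to stage the whole installation into a scratch
+directory for packaging (the directory name is arbitrary):
+
+     make install DESTDIR=/tmp/package-root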
+
+Optional Features
+=================
+
+   If the package supports it, you can cause programs to be installed
+with an extra prefix or suffix on their names by giving `configure' the
+option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.
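+
+   For example, to give every installed program name a `g' prefix, or
+alternatively a `-test' suffix (both choices are arbitrary):
+
+     ./configure --program-prefix=g
+     ./configure --program-suffix=-test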
+
+   Some packages pay attention to `--enable-FEATURE' options to
+`configure', where FEATURE indicates an optional part of the package.
+They may also pay attention to `--with-PACKAGE' options, where PACKAGE
+is something like `gnu-as' or `x' (for the X Window System).  The
+`README' should mention any `--enable-' and `--with-' options that the
+package recognizes.
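+
+   For example, such a package might be configured with options like
+the following (the option names here are hypothetical and
+package-specific):
+
+     ./configure --enable-debug --with-gnu-ld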
+
+   For packages that use the X Window System, `configure' can usually
+find the X include and library files automatically, but if it doesn't,
+you can use the `configure' options `--x-includes=DIR' and
+`--x-libraries=DIR' to specify their locations.
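+
+   For example (the directories shown are only illustrative):
+
+     ./configure --x-includes=/usr/X11R6/include \
+                 --x-libraries=/usr/X11R6/lib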
+
+   Some packages offer the ability to configure how verbose the
+execution of `make' will be.  For these packages, running `./configure
+--enable-silent-rules' sets the default to minimal output, which can be
+overridden with `make V=1'; while running `./configure
+--disable-silent-rules' sets the default to verbose, which can be
+overridden with `make V=0'.
+
+Particular systems
+==================
+
+   On HP-UX, the default C compiler is not ANSI C compatible.  If GNU
+CC is not installed, it is recommended to use the following options in
+order to use an ANSI C compiler:
+
+     ./configure CC="cc -Ae -D_XOPEN_SOURCE=500"
+
+and if that doesn't work, install pre-built binaries of GCC for HP-UX.
+
+   On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot
+parse its `<wchar.h>' header file.  The option `-nodtk' can be used as
+a workaround.  If GNU CC is not installed, it is therefore recommended
+to try
+
+     ./configure CC="cc"
+
+and if that doesn't work, try
+
+     ./configure CC="cc -nodtk"
+
+   On Solaris, don't put `/usr/ucb' early in your `PATH'.  This
+directory contains several dysfunctional programs; working variants of
+these programs are available in `/usr/bin'.  So, if you need `/usr/ucb'
+in your `PATH', put it _after_ `/usr/bin'.
+
+   On Haiku, software installed for all users goes in `/boot/common',
+not `/usr/local'.  It is recommended to use the following options:
+
+     ./configure --prefix=/boot/common
+
+Specifying the System Type
+==========================
+
+   There may be some features `configure' cannot figure out
+automatically, but needs to determine from the type of machine the package
+will run on.  Usually, assuming the package is built to be run on the
+_same_ architectures, `configure' can figure that out, but if it prints
+a message saying it cannot guess the machine type, give it the
+`--build=TYPE' option.  TYPE can either be a short name for the system
+type, such as `sun4', or a canonical name which has the form:
+
+     CPU-COMPANY-SYSTEM
+
+where SYSTEM can have one of these forms:
+
+     OS
+     KERNEL-OS
+
+   See the file `config.sub' for the possible values of each field.  If
+`config.sub' isn't included in this package, then this package doesn't
+need to know the machine type.
+
+   If you are _building_ compiler tools for cross-compiling, you should
+use the option `--target=TYPE' to select the type of system they will
+produce code for.
+
+   If you want to _use_ a cross compiler, that generates code for a
+platform different from the build platform, you should specify the
+"host" platform (i.e., that on which the generated programs will
+eventually be run) with `--host=TYPE'.
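+
+   For example, to cross-compile on a GNU/Linux PC for an ARM target
+(the type names below are only illustrative):
+
+     ./configure --build=x86_64-pc-linux-gnu --host=arm-linux-gnueabihf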
+
+Sharing Defaults
+================
+
+   If you want to set default values for `configure' scripts to share,
+you can create a site shell script called `config.site' that gives
+default values for variables like `CC', `cache_file', and `prefix'.
+`configure' looks for `PREFIX/share/config.site' if it exists, then
+`PREFIX/etc/config.site' if it exists.  Or, you can set the
+`CONFIG_SITE' environment variable to the location of the site script.
+A warning: not all `configure' scripts look for a site script.
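+
+   For example, a minimal site script might contain something like the
+following (the values and guards are only illustrative):
+
+     # Site-wide defaults; leave values already chosen by the user alone.
+     test -z "$CC" && CC=gcc
+     test -z "$CFLAGS" && CFLAGS='-O2 -g'
+     test "$prefix" = NONE && prefix=/opt/local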
+
+Defining Variables
+==================
+
+   Variables not defined in a site shell script can be set in the
+environment passed to `configure'.  However, some packages may run
+configure again during the build, and the customized values of these
+variables may be lost.  In order to avoid this problem, you should set
+them in the `configure' command line, using `VAR=value'.  For example:
+
+     ./configure CC=/usr/local2/bin/gcc
+
+causes the specified `gcc' to be used as the C compiler (unless it is
+overridden in the site shell script).
+
+   Unfortunately, this technique does not work for `CONFIG_SHELL' due to
+an Autoconf bug.  Until the bug is fixed, you can use this workaround:
+
+     CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash
+
+`configure' Invocation
+======================
+
+   `configure' recognizes the following options to control how it
+operates.
+
+`--help'
+`-h'
+     Print a summary of all of the options to `configure', and exit.
+
+`--help=short'
+`--help=recursive'
+     Print a summary of the options unique to this package's
+     `configure', and exit.  The `short' variant lists options used
+     only in the top level, while the `recursive' variant lists options
+     also present in any nested packages.
+
+`--version'
+`-V'
+     Print the version of Autoconf used to generate the `configure'
+     script, and exit.
+
+`--cache-file=FILE'
+     Enable the cache: use and save the results of the tests in FILE,
+     traditionally `config.cache'.  FILE defaults to `/dev/null' to
+     disable caching.
+
+`--config-cache'
+`-C'
+     Alias for `--cache-file=config.cache'.
+
+`--quiet'
+`--silent'
+`-q'
+     Do not print messages saying which checks are being made.  To
+     suppress all normal output, redirect it to `/dev/null' (any error
+     messages will still be shown).
+
+`--srcdir=DIR'
+     Look for the package's source code in directory DIR.  Usually
+     `configure' can determine that directory automatically.
+
+`--prefix=DIR'
+     Use DIR as the installation prefix.  *note Installation Names::
+     for more details, including other options available for fine-tuning
+     the installation locations.
+
+`--no-create'
+`-n'
+     Run the configure checks, but stop before creating any output
+     files.
+
+`configure' also accepts some other, not widely useful, options.  Run
+`configure --help' for more details.
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoconf.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoconf.m4
new file mode 100644
index 0000000..33f6019
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoconf.m4
@@ -0,0 +1,104 @@
+# This file is part of Autoconf.                -*- Autoconf -*-
+# Driver that loads the Autoconf macro files.
+#
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2006, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie and many others.
+#
+# Do not sinclude acsite.m4 here, because it may not be installed
+# yet when Autoconf is frozen.
+# Do not sinclude ./aclocal.m4 here, to prevent it from being frozen.
+
+# general includes some AU_DEFUN.
+m4_include([autoconf/autoupdate.m4])
+m4_include([autoconf/autoscan.m4])
+m4_include([autoconf/general.m4])
+m4_include([autoconf/status.m4])
+m4_include([autoconf/autoheader.m4])
+m4_include([autoconf/autotest.m4])
+m4_include([autoconf/programs.m4])
+m4_include([autoconf/lang.m4])
+m4_include([autoconf/c.m4])
+m4_include([autoconf/erlang.m4])
+m4_include([autoconf/fortran.m4])
+m4_include([autoconf/functions.m4])
+m4_include([autoconf/headers.m4])
+m4_include([autoconf/types.m4])
+m4_include([autoconf/libs.m4])
+m4_include([autoconf/specific.m4])
+m4_include([autoconf/oldnames.m4])
+
+# We discourage the use of the non-prefixed macro names: M4sugar maps
+# all the builtins into `m4_'.  Autoconf has been converted to these
+# names too.  But users may still depend upon these, so reestablish
+# them.
+
+# In order to copy pushdef stacks, m4_copy temporarily destroys the
+# current pushdef stack.  But these builtins are so primitive that:
+#   1. they should not have more than one pushdef definition
+#   2. undefining the pushdef stack to copy breaks m4_copy
+# Hence, we temporarily restore a simpler m4_copy.
+
+m4_pushdef([m4_copy], [m4_define([$2], m4_defn([$1]))])
+
+m4_copy_unm4([m4_builtin])
+m4_copy_unm4([m4_changequote])
+m4_copy_unm4([m4_decr])
+m4_copy_unm4([m4_define])
+m4_copy_unm4([m4_defn])
+m4_copy_unm4([m4_divert])
+m4_copy_unm4([m4_divnum])
+m4_copy_unm4([m4_errprint])
+m4_copy_unm4([m4_esyscmd])
+m4_copy_unm4([m4_ifdef])
+m4_copy([m4_if], [ifelse])
+m4_copy_unm4([m4_incr])
+m4_copy_unm4([m4_index])
+m4_copy_unm4([m4_indir])
+m4_copy_unm4([m4_len])
+m4_copy([m4_bpatsubst], [patsubst])
+m4_copy_unm4([m4_popdef])
+m4_copy_unm4([m4_pushdef])
+m4_copy([m4_bregexp], [regexp])
+m4_copy_unm4([m4_sinclude])
+m4_copy_unm4([m4_syscmd])
+m4_copy_unm4([m4_sysval])
+m4_copy_unm4([m4_traceoff])
+m4_copy_unm4([m4_traceon])
+m4_copy_unm4([m4_translit])
+m4_copy_unm4([m4_undefine])
+m4_copy_unm4([m4_undivert])
+
+m4_popdef([m4_copy])
+
+# Yet some people have started to use m4_patsubst and m4_regexp.
+m4_define([m4_patsubst],
+[m4_expand_once([m4_warn([syntax],
+		 [do not use m4_patsubst: use patsubst or m4_bpatsubst])])dnl
+patsubst($@)])
+
+m4_define([m4_regexp],
+[m4_expand_once([m4_warn([syntax],
+		 [do not use m4_regexp: use regexp or m4_bregexp])])dnl
+regexp($@)])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoconf.m4f b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoconf.m4f
new file mode 100644
index 0000000..7d9f3ec
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoconf.m4f
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoheader.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoheader.m4
new file mode 100644
index 0000000..f89d042
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoheader.m4
@@ -0,0 +1,78 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Interface with autoheader.
+
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# AH_OUTPUT(KEY, TEXT)
+# --------------------
+# Pass TEXT to autoheader.
+# This macro is `read' only via `autoconf --trace', it outputs nothing.
+m4_define([AH_OUTPUT], [])
+
+
+# AH_VERBATIM(KEY, TEMPLATE)
+# --------------------------
+# If KEY is direct (i.e., no indirection such as in KEY=$my_func which
+# may occur if there is AC_CHECK_FUNCS($my_func)), issue an autoheader
+# TEMPLATE associated with the KEY.  Otherwise, do nothing.  TEMPLATE is
+# output as is, with no formatting.
+#
+# Quote for Perl '' strings, which are those used by Autoheader.
+m4_define([AH_VERBATIM],
+[AS_LITERAL_WORD_IF([$1],
+	       [AH_OUTPUT(_m4_expand([$1]), AS_ESCAPE([[$2]], [\']))])])
+
+
+# AH_TEMPLATE(KEY, DESCRIPTION)
+# -----------------------------
+# Issue an autoheader template for KEY, i.e., a comment composed of
+# DESCRIPTION (properly wrapped), and then #undef KEY.
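+# For example (the key and description below are illustrative):
+#   AH_TEMPLATE([HAVE_FOO], [Define to 1 if foo support is available.])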
+m4_define([AH_TEMPLATE],
+[AH_VERBATIM([$1],
+	     m4_text_wrap([$2 */], [   ], [/* ])[
+@%:@undef ]_m4_expand([$1]))])
+
+
+# AH_TOP(TEXT)
+# ------------
+# Output TEXT at the top of `config.h.in'.
+m4_define([AH_TOP],
+[m4_define([_AH_COUNTER], m4_incr(_AH_COUNTER))dnl
+AH_VERBATIM([0000]_AH_COUNTER, [$1])])
+
+
+# AH_BOTTOM(TEXT)
+# ---------------
+# Output TEXT at the bottom of `config.h.in'.
+m4_define([AH_BOTTOM],
+[m4_define([_AH_COUNTER], m4_incr(_AH_COUNTER))dnl
+AH_VERBATIM([zzzz]_AH_COUNTER, [$1])])
+
+# Initialize.
+m4_define([_AH_COUNTER], [0])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoscan.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoscan.m4
new file mode 100644
index 0000000..c3563e7
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoscan.m4
@@ -0,0 +1,50 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Interface with autoscan.
+
+# Copyright (C) 2002, 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Akim Demaille.
+
+# The prefix `AN' is chosen after `AutoscaN'.
+
+# AN_OUTPUT(KIND, WORD, MACROS)
+# -----------------------------
+# Declare that the WORD, used as a KIND, requires triggering the MACROS.
+m4_define([AN_OUTPUT], [])
+
+
+# AN_FUNCTION(NAME, MACROS)
+# AN_HEADER(NAME, MACROS)
+# AN_IDENTIFIER(NAME, MACROS)
+# AN_LIBRARY(NAME, MACROS)
+# AN_MAKEVAR(NAME, MACROS)
+# AN_PROGRAM(NAME, MACROS)
+# ---------------------------
+# If the FUNCTION/HEADER etc. is used in the package, then the MACROS
+# should be invoked from configure.ac.
+m4_define([AN_FUNCTION],   [AN_OUTPUT([function], $@)])
+m4_define([AN_HEADER],     [AN_OUTPUT([header], $@)])
+m4_define([AN_IDENTIFIER], [AN_OUTPUT([identifier], $@)])
+m4_define([AN_LIBRARY],    [AN_OUTPUT([library], $@)])
+m4_define([AN_MAKEVAR],    [AN_OUTPUT([makevar], $@)])
+m4_define([AN_PROGRAM],    [AN_OUTPUT([program], $@)])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autotest.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autotest.m4
new file mode 100644
index 0000000..c219a01
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autotest.m4
@@ -0,0 +1,77 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Interface with Autotest.
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# AC_CONFIG_TESTDIR(TEST-DIRECTORY, [AUTOTEST-PATH = TEST-DIRECTORY])
+# -------------------------------------------------------------------
+# Configure an Autotest test suite directory.  Invoke it once per dir,
+# even if there are several test suites in there.
+#
+# AUTOTEST-PATH must help the test suite to find the executables.
+# It is relative to the top level of the package, and is expanded
+# into all the build dirs of AUTOTEST-PATH, then all the src dirs.
+#
+# Do not use _ACEOF as we are being dumped into config.status via
+# an _ACEOF-heredoc.
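+#
+# A typical (illustrative) use in configure.ac, with the test suite in
+# `tests' and the executables built in `src':
+#   AC_CONFIG_TESTDIR([tests], [src])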
+AC_DEFUN([AC_CONFIG_TESTDIR],
+[AC_CONFIG_COMMANDS([$1/atconfig],
+[cat >$1/atconfig <<ATEOF
+@%:@ Configurable variable values for building test suites.
+@%:@ Generated by $[0].
+@%:@ Copyright (C) m4_PACKAGE_YEAR Free Software Foundation, Inc.
+
+# The test suite will define top_srcdir=$at_top_srcdir/../.. etc.
+at_testdir='$1'
+abs_builddir='$ac_abs_builddir'
+at_srcdir='$ac_srcdir'
+abs_srcdir='$ac_abs_srcdir'
+at_top_srcdir='$ac_top_srcdir'
+abs_top_srcdir='$ac_abs_top_srcdir'
+at_top_build_prefix='$ac_top_build_prefix'
+abs_top_builddir='$ac_abs_top_builddir'
+
+# Backward compatibility with Autotest <= 2.59b:
+at_top_builddir=\$at_top_build_prefix
+
+AUTOTEST_PATH='m4_default([$2], [$1])'
+
+SHELL=\${CONFIG_SHELL-'$SHELL'}
+m4_provide_if([AC_ERLANG_PATH_ERL], [
+ERL='$ERL'
+])dnl
+m4_provide_if([AC_ERLANG_PATH_ERLC], [
+ERLC='$ERLC'
+ERLCFLAGS='$ERLCFLAGS'
+])dnl
+ATEOF
+],
+[m4_provide_if([AC_ERLANG_PATH_ERL], [ERL="$ERL"
+])m4_provide_if([AC_ERLANG_PATH_ERLC], [ERLC="$ERLC"
+ERLCFLAGS="$ERLCFLAGS"
+])])])# AC_CONFIG_TESTDIR
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoupdate.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoupdate.m4
new file mode 100644
index 0000000..5d933f8
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/autoupdate.m4
@@ -0,0 +1,108 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Interface with autoupdate.
+
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2003, 2004, 2006, 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+## ---------------------------------- ##
+## Macros to define obsolete macros.  ##
+## ---------------------------------- ##
+
+
+# AU_DEFINE(NAME, CODE)
+# ---------------------
+# Define the macro NAME so that it expands to CODE only when
+# autoupdate is running.  This is achieved with traces in
+# autoupdate itself, so this macro expands to nothing.
+#
+m4_define([AU_DEFINE], [])
+
+# AU_DEFUN(NAME, NEW-CODE, [MESSAGE])
+# -----------------------------------
+# Declare that the macro NAME is now obsoleted, and should be replaced
+# by NEW-CODE.  Tell the user she should run autoupdate, and when
+# autoupdate is run, emit MESSAGE as a warning and include it in
+# the updated configure.ac file.
+#
+# Also define NAME as a macro whose code is NEW-CODE.
+#
+# This allows sharing the same code both for supporting obsoleted macros
+# and for updating a configure.ac.
+# See the end of `autoupdate.in' for a longer description.
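+# For example (the macro names and message below are illustrative):
+#   AU_DEFUN([AC_OLD_MACRO], [AC_NEW_MACRO([$1])],
+#            [use AC_NEW_MACRO directly instead])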
+m4_define([AU_DEFUN],
+[# This is what autoupdate's m4 run will expand.  It fires
+# the warning (with _au_warn_XXX), outputs it into the
+# updated configure.ac (with AC_DIAGNOSE), and then outputs
+# the replacement expansion.
+AU_DEFINE([$1],
+[m4_ifval([$3], [_au_warn_$1([$3])AC_DIAGNOSE([obsolete], [$3])d[]nl
+])dnl
+$2])
+
+# This is an auxiliary macro that is also run when
+# autoupdate runs m4.  It simply calls m4_warning, but
+# we need a wrapper so that each warning is emitted only
+# once.  We break the quoting in m4_warning's argument in
+# order to expand this macro's arguments, not AU_DEFUN's.
+AU_DEFINE([_au_warn_$1],
+[m4_warning($][@)dnl
+m4_define([_au_warn_$1], [])])
+
+# Finally, this is the expansion that is picked up by
+# autoconf.  It tells the user to run autoupdate, and
+# then outputs the replacement expansion.  We do not care
+# about autoupdate's warning because that contains
+# information on what to do *after* running autoupdate.
+AC_DEFUN([$1],
+	 [AC_DIAGNOSE([obsolete], [The macro `$1' is obsolete.
+You should run autoupdate.])dnl
+$2])])
+
+
+# AU_ALIAS(OLD-NAME, NEW-NAME)
+# ----------------------------
+# The OLD-NAME is no longer used; just use NEW-NAME instead.  There is
+# little difference from using AU_DEFUN, except that there is little
+# interest in running the test suite on both OLD-NAME and NEW-NAME.
+# This macro makes it possible to distinguish such cases.
+#
+# Do not use `defn' since then autoupdate would replace an old macro
+# call with the new macro body instead of the new macro call.
+#
+# Moreover, we have to take care that calls without parameters are
+# expanded to calls without parameters, not with one empty parameter.
+# This is not only an aesthetic improvement of autoupdate; it also
+# matters with poorly written macros which test for $# = 0.
+#
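+# A typical (illustrative) use:
+#   AU_ALIAS([AC_OLD_NAME], [AC_NEW_NAME])
+#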
+m4_define([AU_ALIAS],
+[AU_DEFUN([$1], _AU_ALIAS_BODY([$], [$2]))])
+
+# The body for the AU_DEFUN above should look like:
+#	[m4_if($#, 0, [NEW-NAME], [NEW-NAME($@)])]
+# Thus the helper macro is:
+m4_define([_AU_ALIAS_BODY], [[m4_if($1#, 0, [$2], [$2($1@)])]])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/c.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/c.m4
new file mode 100644
index 0000000..233644a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/c.m4
@@ -0,0 +1,2011 @@
+# This file is part of Autoconf.			-*- Autoconf -*-
+# Programming languages support.
+# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+# 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Akim Demaille, Paul Eggert,
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# Table of Contents:
+#
+# 1. Language selection
+#    and 2. routines to produce programs in a given language.
+#      1a. C   2a. C
+#      1b. C++
+#      1c. Objective C
+#      1d. Objective C++
+#
+# 3. Looking for a compiler
+#    And possibly the associated preprocessor.
+#      3a. C   3b. C++   3c. Objective C   3d. Objective C++
+#
+# 4. Compilers' characteristics.
+#      4a. C
+
+
+
+## ----------------------- ##
+## 1a/2a. The C language.  ##
+## ----------------------- ##
+
+
+# ------------------------ #
+# 1a. Language selection.  #
+# ------------------------ #
+
+# AC_LANG(C)
+# ----------
+# CFLAGS is not in ac_cpp because -g, -O, etc. are not valid cpp options.
+AC_LANG_DEFINE([C], [c], [C], [CC], [],
+[ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&AS_MESSAGE_LOG_FD'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&AS_MESSAGE_LOG_FD'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+])
+
+
+# AC_LANG_C
+# ---------
+AU_DEFUN([AC_LANG_C], [AC_LANG(C)])
+
+
+# ------------------------ #
+# 2a. Producing programs.  #
+# ------------------------ #
+
+
+# AC_LANG_CONFTEST(C)(BODY)
+# -------------------------
+# We can't use '#line $LINENO "configure"' here, since
+# Sun c89 (Sun WorkShop 6 update 2 C 5.3 Patch 111679-08 2002/05/09)
+# rejects $LINENO greater than 32767, and some configure scripts
+# are longer than 32767 lines.
+m4_define([AC_LANG_CONFTEST(C)],
+[cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$1
+_ACEOF])
+
+
+# AC_LANG_PROGRAM(C)([PROLOGUE], [BODY])
+# --------------------------------------
+m4_define([AC_LANG_PROGRAM(C)],
+[$1
+m4_ifdef([_AC_LANG_PROGRAM_C_F77_HOOKS], [_AC_LANG_PROGRAM_C_F77_HOOKS])[]dnl
+m4_ifdef([_AC_LANG_PROGRAM_C_FC_HOOKS], [_AC_LANG_PROGRAM_C_FC_HOOKS])[]dnl
+int
+main ()
+{
+dnl Do *not* indent the following line: there may be CPP directives.
+dnl Don't move the `;' right after for the same reason.
+$2
+  ;
+  return 0;
+}])
+
+
+# _AC_LANG_IO_PROGRAM(C)
+# ----------------------
+# Produce source that performs I/O, necessary for proper
+# cross-compiler detection.
+m4_define([_AC_LANG_IO_PROGRAM(C)],
+[AC_LANG_PROGRAM([@%:@include <stdio.h>],
+[FILE *f = fopen ("conftest.out", "w");
+ return ferror (f) || fclose (f) != 0;
+])])
+
+
+# AC_LANG_CALL(C)(PROLOGUE, FUNCTION)
+# -----------------------------------
+# Avoid conflicting decl of main.
+m4_define([AC_LANG_CALL(C)],
+[AC_LANG_PROGRAM([$1
+m4_if([$2], [main], ,
+[/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char $2 ();])], [return $2 ();])])
+
+
+# AC_LANG_FUNC_LINK_TRY(C)(FUNCTION)
+# ----------------------------------
+# Don't include <ctype.h> because on OSF/1 3.0 it includes
+# <sys/types.h> which includes <sys/select.h> which contains a
+# prototype for select.  Similarly for bzero.
+#
+# This test used to merely assign f=$1 in main(), but that was
+# optimized away by HP unbundled cc A.05.36 for ia64 under +O3,
+# presumably on the basis that there's no need to do that store if the
+# program is about to exit.  Conversely, the AIX linker optimizes an
+# unused external declaration that initializes f=$1.  So this test
+# program has both an external initialization of f, and a use of f in
+# main that affects the exit status.
+#
+m4_define([AC_LANG_FUNC_LINK_TRY(C)],
+[AC_LANG_PROGRAM(
+[/* Define $1 to an innocuous variant, in case <limits.h> declares $1.
+   For example, HP-UX 11i <limits.h> declares gettimeofday.  */
+#define $1 innocuous_$1
+
+/* System header to define __stub macros and hopefully few prototypes,
+    which can conflict with char $1 (); below.
+    Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+    <limits.h> exists even on freestanding compilers.  */
+
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+
+#undef $1
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char $1 ();
+/* The GNU C library defines this for functions which it implements
+    to always fail with ENOSYS.  Some functions are actually named
+    something starting with __ and the normal name is an alias.  */
+#if defined __stub_$1 || defined __stub___$1
+choke me
+#endif
+], [return $1 ();])])
+
+
+# AC_LANG_BOOL_COMPILE_TRY(C)(PROLOGUE, EXPRESSION)
+# -------------------------------------------------
+# Return a program that is valid if EXPRESSION is nonzero.
+# EXPRESSION must be an integer constant expression.
+# Be sure to use this array to avoid `unused' warnings, which are even
+# errors with `-Werror'.
+m4_define([AC_LANG_BOOL_COMPILE_TRY(C)],
+[AC_LANG_PROGRAM([$1], [static int test_array @<:@1 - 2 * !($2)@:>@;
+test_array @<:@0@:>@ = 0
+])])
+
+
+# AC_LANG_INT_SAVE(C)(PROLOGUE, EXPRESSION)
+# -----------------------------------------
+# We need `stdio.h' to open a `FILE' and `stdlib.h' for `exit'.
+# But we include them only after the EXPRESSION has been evaluated.
+m4_define([AC_LANG_INT_SAVE(C)],
+[AC_LANG_PROGRAM([$1
+static long int longval () { return $2; }
+static unsigned long int ulongval () { return $2; }
+@%:@include <stdio.h>
+@%:@include <stdlib.h>],
+[
+  FILE *f = fopen ("conftest.val", "w");
+  if (! f)
+    return 1;
+  if (($2) < 0)
+    {
+      long int i = longval ();
+      if (i != ($2))
+	return 1;
+      fprintf (f, "%ld", i);
+    }
+  else
+    {
+      unsigned long int i = ulongval ();
+      if (i != ($2))
+	return 1;
+      fprintf (f, "%lu", i);
+    }
+  /* Do not output a trailing newline, as this causes \r\n confusion
+     on some platforms.  */
+  return ferror (f) || fclose (f) != 0;
+])])
+
+
+
+## ---------------------- ##
+## 1b. The C++ language.  ##
+## ---------------------- ##
+
+
+# AC_LANG(C++)
+# ------------
+# CXXFLAGS is not in ac_cpp because -g, -O, etc. are not valid cpp options.
+AC_LANG_DEFINE([C++], [cxx], [CXX], [CXX], [C],
+[ac_ext=cpp
+ac_cpp='$CXXCPP $CPPFLAGS'
+ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&AS_MESSAGE_LOG_FD'
+ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&AS_MESSAGE_LOG_FD'
+ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
+])
+
+
+# AC_LANG_CPLUSPLUS
+# -----------------
+AU_DEFUN([AC_LANG_CPLUSPLUS], [AC_LANG(C++)])
+
+
+
+## ------------------------------ ##
+## 1c. The Objective C language.  ##
+## ------------------------------ ##
+
+
+# AC_LANG(Objective C)
+# --------------------
+AC_LANG_DEFINE([Objective C], [objc], [OBJC], [OBJC], [C],
+[ac_ext=m
+ac_cpp='$OBJCPP $CPPFLAGS'
+ac_compile='$OBJC -c $OBJCFLAGS $CPPFLAGS conftest.$ac_ext >&AS_MESSAGE_LOG_FD'
+ac_link='$OBJC -o conftest$ac_exeext $OBJCFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&AS_MESSAGE_LOG_FD'
+ac_compiler_gnu=$ac_cv_objc_compiler_gnu
+])
+
+
+# AC_LANG_OBJC
+# ------------
+AU_DEFUN([AC_LANG_OBJC], [AC_LANG(Objective C)])
+
+
+
+## -------------------------------- ##
+## 1d. The Objective C++ language.  ##
+## -------------------------------- ##
+
+
+# AC_LANG(Objective C++)
+# ----------------------
+AC_LANG_DEFINE([Objective C++], [objcxx], [OBJCXX], [OBJCXX], [C++],
+[ac_ext=mm
+ac_cpp='$OBJCXXCPP $CPPFLAGS'
+ac_compile='$OBJCXX -c $OBJCXXFLAGS $CPPFLAGS conftest.$ac_ext >&AS_MESSAGE_LOG_FD'
+ac_link='$OBJCXX -o conftest$ac_exeext $OBJCXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&AS_MESSAGE_LOG_FD'
+ac_compiler_gnu=$ac_cv_objcxx_compiler_gnu
+])
+
+
+
+## -------------------------------------------- ##
+## 3. Looking for Compilers and Preprocessors.  ##
+## -------------------------------------------- ##
+
+# -------------------- #
+# 3a. The C compiler.  #
+# -------------------- #
+
+
+# _AC_ARG_VAR_CPPFLAGS
+# --------------------
+# Document and register CPPFLAGS, which is used by
+# AC_PROG_{CC, CPP, CXX, CXXCPP, OBJC, OBJCPP, OBJCXX, OBJCXXCPP}.
+AC_DEFUN([_AC_ARG_VAR_CPPFLAGS],
+[AC_ARG_VAR([CPPFLAGS],
+	    [(Objective) C/C++ preprocessor flags, e.g. -I<include dir>
+	     if you have headers in a nonstandard directory <include dir>])])
+
+
+# _AC_ARG_VAR_LDFLAGS
+# -------------------
+# Document and register LDFLAGS, which is used by
+# AC_PROG_{CC, CXX, F77, FC, OBJC, OBJCXX}.
+AC_DEFUN([_AC_ARG_VAR_LDFLAGS],
+[AC_ARG_VAR([LDFLAGS],
+	    [linker flags, e.g. -L<lib dir> if you have libraries in a
+	     nonstandard directory <lib dir>])])
+
+
+# _AC_ARG_VAR_LIBS
+# ----------------
+# Document and register LIBS, which is used by
+# AC_PROG_{CC, CXX, F77, FC, OBJC, OBJCXX}.
+AC_DEFUN([_AC_ARG_VAR_LIBS],
+[AC_ARG_VAR([LIBS],
+	    [libraries to pass to the linker, e.g. -l<library>])])
+
+
+# AC_LANG_PREPROC(C)
+# ------------------
+# Find the C preprocessor.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_PREPROC(C)],
+[AC_REQUIRE([AC_PROG_CPP])])
+
+
+# _AC_PROG_PREPROC_WORKS_IFELSE(IF-WORKS, IF-NOT)
+# -----------------------------------------------
+# Check if $ac_cpp is a working preprocessor that can flag absent
+# includes either by the exit status or by warnings.
+# This macro is for all languages, not only C.
+AC_DEFUN([_AC_PROG_PREPROC_WORKS_IFELSE],
+[ac_preproc_ok=false
+for ac_[]_AC_LANG_ABBREV[]_preproc_warn_flag in '' yes
+do
+  # Use a header file that comes with gcc, so configuring glibc
+  # with a fresh cross-compiler works.
+  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+  # <limits.h> exists even on freestanding compilers.
+  # On the NeXT, cc -E runs the code through the compiler's parser,
+  # not just through cpp. "Syntax error" is here to catch this case.
+  _AC_PREPROC_IFELSE([AC_LANG_SOURCE([[@%:@ifdef __STDC__
+@%:@ include <limits.h>
+@%:@else
+@%:@ include <assert.h>
+@%:@endif
+		     Syntax error]])],
+		     [],
+		     [# Broken: fails on valid input.
+continue])
+
+  # OK, works on sane cases.  Now check whether nonexistent headers
+  # can be detected and how.
+  _AC_PREPROC_IFELSE([AC_LANG_SOURCE([[@%:@include <ac_nonexistent.h>]])],
+		     [# Broken: success on invalid input.
+continue],
+		     [# Passes both tests.
+ac_preproc_ok=:
+break])
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+AS_IF([$ac_preproc_ok], [$1], [$2])
+])# _AC_PROG_PREPROC_WORKS_IFELSE
+
+
+# AC_PROG_CPP
+# -----------
+# Find a working C preprocessor.
+# We shouldn't have to require AC_PROG_CC, but this is due to the concurrency
+# between the AC_LANG_COMPILER_REQUIRE family and that of AC_PROG_CC.
+AN_MAKEVAR([CPP], [AC_PROG_CPP])
+AN_PROGRAM([cpp], [AC_PROG_CPP])
+AC_DEFUN([AC_PROG_CPP],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_ARG_VAR([CPP],      [C preprocessor])dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+AC_LANG_PUSH(C)dnl
+AC_MSG_CHECKING([how to run the C preprocessor])
+# On Suns, sometimes $CPP names a directory.
+if test -n "$CPP" && test -d "$CPP"; then
+  CPP=
+fi
+if test -z "$CPP"; then
+  AC_CACHE_VAL([ac_cv_prog_CPP],
+  [dnl
+    # Double quotes because CPP needs to be expanded
+    for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
+    do
+      _AC_PROG_PREPROC_WORKS_IFELSE([break])
+    done
+    ac_cv_prog_CPP=$CPP
+  ])dnl
+  CPP=$ac_cv_prog_CPP
+else
+  ac_cv_prog_CPP=$CPP
+fi
+AC_MSG_RESULT([$CPP])
+_AC_PROG_PREPROC_WORKS_IFELSE([],
+		[AC_MSG_FAILURE([C preprocessor "$CPP" fails sanity check])])
+AC_SUBST(CPP)dnl
+AC_LANG_POP(C)dnl
+])# AC_PROG_CPP
+
+# AC_PROG_CPP_WERROR
+# ------------------
+# Treat warnings from the preprocessor as errors.
+AC_DEFUN([AC_PROG_CPP_WERROR],
+[AC_REQUIRE([AC_PROG_CPP])dnl
+ac_c_preproc_warn_flag=yes])# AC_PROG_CPP_WERROR
+
+# AC_LANG_COMPILER(C)
+# -------------------
+# Find the C compiler.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_COMPILER(C)],
+[AC_REQUIRE([AC_PROG_CC])])
+
+
+# ac_cv_prog_gcc
+# --------------
+# We used to name the cache variable this way.
+AU_DEFUN([ac_cv_prog_gcc],
+[ac_cv_c_compiler_gnu])
+
+
+# AC_PROG_CC([COMPILER ...])
+# --------------------------
+# COMPILER ... is a space separated list of C compilers to search for.
+# This just gives the user an opportunity to specify an alternative
+# search list for the C compiler.
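+# For example (the compiler list below is illustrative):
+#   AC_PROG_CC([clang gcc cc])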
+AN_MAKEVAR([CC],  [AC_PROG_CC])
+AN_PROGRAM([cc],  [AC_PROG_CC])
+AN_PROGRAM([gcc], [AC_PROG_CC])
+AC_DEFUN([AC_PROG_CC],
+[AC_LANG_PUSH(C)dnl
+AC_ARG_VAR([CC],     [C compiler command])dnl
+AC_ARG_VAR([CFLAGS], [C compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+_AC_ARG_VAR_LIBS()dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+m4_ifval([$1],
+      [AC_CHECK_TOOLS(CC, [$1])],
+[AC_CHECK_TOOL(CC, gcc)
+if test -z "$CC"; then
+  dnl Here we want:
+  dnl	AC_CHECK_TOOL(CC, cc)
+  dnl but without the check for a tool without the prefix.
+  dnl Until the check is removed from there, copy the code:
+  if test -n "$ac_tool_prefix"; then
+    AC_CHECK_PROG(CC, [${ac_tool_prefix}cc], [${ac_tool_prefix}cc])
+  fi
+fi
+if test -z "$CC"; then
+  AC_CHECK_PROG(CC, cc, cc, , , /usr/ucb/cc)
+fi
+if test -z "$CC"; then
+  AC_CHECK_TOOLS(CC, cl.exe)
+fi
+])
+
+test -z "$CC" && AC_MSG_FAILURE([no acceptable C compiler found in \$PATH])
+
+# Provide some information about the compiler.
+_AS_ECHO_LOG([checking for _AC_LANG compiler version])
+set X $ac_compile
+ac_compiler=$[2]
+for ac_option in --version -v -V -qversion; do
+  _AC_DO_LIMIT([$ac_compiler $ac_option >&AS_MESSAGE_LOG_FD])
+done
+
+m4_expand_once([_AC_COMPILER_EXEEXT])[]dnl
+m4_expand_once([_AC_COMPILER_OBJEXT])[]dnl
+_AC_LANG_COMPILER_GNU
+if test $ac_compiler_gnu = yes; then
+  GCC=yes
+else
+  GCC=
+fi
+_AC_PROG_CC_G
+_AC_PROG_CC_C89
+AC_LANG_POP(C)dnl
+])# AC_PROG_CC
+
+
+# _AC_PROG_CC_G
+# -------------
+# Check whether -g works, even if CFLAGS is set, in case the package
+# plays around with CFLAGS (such as to build both debugging and normal
+# versions of a library), tasteless as that idea is.
+# Don't consider -g to work if it generates warnings when plain compiles don't.
+m4_define([_AC_PROG_CC_G],
+[ac_test_CFLAGS=${CFLAGS+set}
+ac_save_CFLAGS=$CFLAGS
+AC_CACHE_CHECK(whether $CC accepts -g, ac_cv_prog_cc_g,
+  [ac_save_c_werror_flag=$ac_c_werror_flag
+   ac_c_werror_flag=yes
+   ac_cv_prog_cc_g=no
+   CFLAGS="-g"
+   _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+     [ac_cv_prog_cc_g=yes],
+     [CFLAGS=""
+      _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	[],
+	[ac_c_werror_flag=$ac_save_c_werror_flag
+	 CFLAGS="-g"
+	 _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	   [ac_cv_prog_cc_g=yes])])])
+   ac_c_werror_flag=$ac_save_c_werror_flag])
+if test "$ac_test_CFLAGS" = set; then
+  CFLAGS=$ac_save_CFLAGS
+elif test $ac_cv_prog_cc_g = yes; then
+  if test "$GCC" = yes; then
+    CFLAGS="-g -O2"
+  else
+    CFLAGS="-g"
+  fi
+else
+  if test "$GCC" = yes; then
+    CFLAGS="-O2"
+  else
+    CFLAGS=
+  fi
+fi[]dnl
+])# _AC_PROG_CC_G
+
+
+# AC_PROG_GCC_TRADITIONAL
+# -----------------------
+AC_DEFUN([AC_PROG_GCC_TRADITIONAL],
+[AC_REQUIRE([AC_PROG_CC])dnl
+if test $ac_cv_c_compiler_gnu = yes; then
+    AC_CACHE_CHECK(whether $CC needs -traditional,
+      ac_cv_prog_gcc_traditional,
+[  ac_pattern="Autoconf.*'x'"
+  AC_EGREP_CPP($ac_pattern, [#include <sgtty.h>
+Autoconf TIOCGETP],
+  ac_cv_prog_gcc_traditional=yes, ac_cv_prog_gcc_traditional=no)
+
+  if test $ac_cv_prog_gcc_traditional = no; then
+    AC_EGREP_CPP($ac_pattern, [#include <termio.h>
+Autoconf TCGETA],
+    ac_cv_prog_gcc_traditional=yes)
+  fi])
+  if test $ac_cv_prog_gcc_traditional = yes; then
+    CC="$CC -traditional"
+  fi
+fi
+])# AC_PROG_GCC_TRADITIONAL
+
+
+# AC_PROG_CC_C_O
+# --------------
+AC_DEFUN([AC_PROG_CC_C_O],
+[AC_REQUIRE([AC_PROG_CC])dnl
+if test "x$CC" != xcc; then
+  AC_MSG_CHECKING([whether $CC and cc understand -c and -o together])
+else
+  AC_MSG_CHECKING([whether cc understands -c and -o together])
+fi
+set dummy $CC; ac_cc=`AS_ECHO(["$[2]"]) |
+		      sed 's/[[^a-zA-Z0-9_]]/_/g;s/^[[0-9]]/_/'`
+AC_CACHE_VAL(ac_cv_prog_cc_${ac_cc}_c_o,
+[AC_LANG_CONFTEST([AC_LANG_PROGRAM([])])
+# Make sure it works both with $CC and with simple cc.
+# We do the test twice because some compilers refuse to overwrite an
+# existing .o file with -o, though they will create one.
+ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&AS_MESSAGE_LOG_FD'
+rm -f conftest2.*
+if _AC_DO_VAR(ac_try) &&
+   test -f conftest2.$ac_objext && _AC_DO_VAR(ac_try);
+then
+  eval ac_cv_prog_cc_${ac_cc}_c_o=yes
+  if test "x$CC" != xcc; then
+    # Test first that cc exists at all.
+    if _AC_DO_TOKENS(cc -c conftest.$ac_ext >&AS_MESSAGE_LOG_FD); then
+      ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&AS_MESSAGE_LOG_FD'
+      rm -f conftest2.*
+      if _AC_DO_VAR(ac_try) &&
+	 test -f conftest2.$ac_objext && _AC_DO_VAR(ac_try);
+      then
+	# cc works too.
+	:
+      else
+	# cc exists but doesn't like -o.
+	eval ac_cv_prog_cc_${ac_cc}_c_o=no
+      fi
+    fi
+  fi
+else
+  eval ac_cv_prog_cc_${ac_cc}_c_o=no
+fi
+rm -f core conftest*
+])dnl
+if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then
+  AC_MSG_RESULT([yes])
+else
+  AC_MSG_RESULT([no])
+  AC_DEFINE(NO_MINUS_C_MINUS_O, 1,
+	   [Define to 1 if your C compiler doesn't accept -c and -o together.])
+fi
+])# AC_PROG_CC_C_O
+
+
+
+# ---------------------- #
+# 3b. The C++ compiler.  #
+# ---------------------- #
+
+
+# AC_LANG_PREPROC(C++)
+# --------------------
+# Find the C++ preprocessor.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_PREPROC(C++)],
+[AC_REQUIRE([AC_PROG_CXXCPP])])
+
+
+# AC_PROG_CXXCPP
+# --------------
+# Find a working C++ preprocessor.
+# We shouldn't have to require AC_PROG_CC, but this is due to the concurrency
+# between the AC_LANG_COMPILER_REQUIRE family and that of AC_PROG_CXX.
+AC_DEFUN([AC_PROG_CXXCPP],
+[AC_REQUIRE([AC_PROG_CXX])dnl
+AC_ARG_VAR([CXXCPP],   [C++ preprocessor])dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+AC_LANG_PUSH(C++)dnl
+AC_MSG_CHECKING([how to run the C++ preprocessor])
+if test -z "$CXXCPP"; then
+  AC_CACHE_VAL(ac_cv_prog_CXXCPP,
+  [dnl
+    # Double quotes because CXXCPP needs to be expanded
+    for CXXCPP in "$CXX -E" "/lib/cpp"
+    do
+      _AC_PROG_PREPROC_WORKS_IFELSE([break])
+    done
+    ac_cv_prog_CXXCPP=$CXXCPP
+  ])dnl
+  CXXCPP=$ac_cv_prog_CXXCPP
+else
+  ac_cv_prog_CXXCPP=$CXXCPP
+fi
+AC_MSG_RESULT([$CXXCPP])
+_AC_PROG_PREPROC_WORKS_IFELSE([],
+	  [AC_MSG_FAILURE([C++ preprocessor "$CXXCPP" fails sanity check])])
+AC_SUBST(CXXCPP)dnl
+AC_LANG_POP(C++)dnl
+])# AC_PROG_CXXCPP
+
+
+# AC_LANG_COMPILER(C++)
+# ---------------------
+# Find the C++ compiler.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_COMPILER(C++)],
+[AC_REQUIRE([AC_PROG_CXX])])
+
+
+# ac_cv_prog_gxx
+# --------------
+# We used to name the cache variable this way.
+AU_DEFUN([ac_cv_prog_gxx],
+[ac_cv_cxx_compiler_gnu])
+
+
+# AC_PROG_CXX([LIST-OF-COMPILERS])
+# --------------------------------
+# LIST-OF-COMPILERS is a space separated list of C++ compilers to search
+# for (if not specified, a default list is used).  This just gives the
+# user an opportunity to specify an alternative search list for the C++
+# compiler.
+# aCC	HP-UX C++ compiler much better than `CC', so test before.
+# FCC   Fujitsu C++ compiler
+# KCC	KAI C++ compiler
+# RCC	Rational C++
+# xlC_r	AIX C Set++ (with support for reentrant code)
+# xlC	AIX C Set++
+AN_MAKEVAR([CXX],  [AC_PROG_CXX])
+AN_PROGRAM([CC],   [AC_PROG_CXX])
+AN_PROGRAM([c++],  [AC_PROG_CXX])
+AN_PROGRAM([g++],  [AC_PROG_CXX])
+AC_DEFUN([AC_PROG_CXX],
+[AC_LANG_PUSH(C++)dnl
+AC_ARG_VAR([CXX],      [C++ compiler command])dnl
+AC_ARG_VAR([CXXFLAGS], [C++ compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+_AC_ARG_VAR_LIBS()dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+_AC_ARG_VAR_PRECIOUS([CCC])dnl
+if test -z "$CXX"; then
+  if test -n "$CCC"; then
+    CXX=$CCC
+  else
+    AC_CHECK_TOOLS(CXX,
+		   [m4_default([$1],
+			       [g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC])],
+		   g++)
+  fi
+fi
+# Provide some information about the compiler.
+_AS_ECHO_LOG([checking for _AC_LANG compiler version])
+set X $ac_compile
+ac_compiler=$[2]
+for ac_option in --version -v -V -qversion; do
+  _AC_DO_LIMIT([$ac_compiler $ac_option >&AS_MESSAGE_LOG_FD])
+done
+
+m4_expand_once([_AC_COMPILER_EXEEXT])[]dnl
+m4_expand_once([_AC_COMPILER_OBJEXT])[]dnl
+_AC_LANG_COMPILER_GNU
+if test $ac_compiler_gnu = yes; then
+  GXX=yes
+else
+  GXX=
+fi
+_AC_PROG_CXX_G
+AC_LANG_POP(C++)dnl
+])# AC_PROG_CXX
+
+
+# _AC_PROG_CXX_G
+# --------------
+# Check whether -g works, even if CXXFLAGS is set, in case the package
+# plays around with CXXFLAGS (such as to build both debugging and
+# normal versions of a library), tasteless as that idea is.
+# Don't consider -g to work if it generates warnings when plain compiles don't.
+m4_define([_AC_PROG_CXX_G],
+[ac_test_CXXFLAGS=${CXXFLAGS+set}
+ac_save_CXXFLAGS=$CXXFLAGS
+AC_CACHE_CHECK(whether $CXX accepts -g, ac_cv_prog_cxx_g,
+  [ac_save_cxx_werror_flag=$ac_cxx_werror_flag
+   ac_cxx_werror_flag=yes
+   ac_cv_prog_cxx_g=no
+   CXXFLAGS="-g"
+   _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+     [ac_cv_prog_cxx_g=yes],
+     [CXXFLAGS=""
+      _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	[],
+	[ac_cxx_werror_flag=$ac_save_cxx_werror_flag
+	 CXXFLAGS="-g"
+	 _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	   [ac_cv_prog_cxx_g=yes])])])
+   ac_cxx_werror_flag=$ac_save_cxx_werror_flag])
+if test "$ac_test_CXXFLAGS" = set; then
+  CXXFLAGS=$ac_save_CXXFLAGS
+elif test $ac_cv_prog_cxx_g = yes; then
+  if test "$GXX" = yes; then
+    CXXFLAGS="-g -O2"
+  else
+    CXXFLAGS="-g"
+  fi
+else
+  if test "$GXX" = yes; then
+    CXXFLAGS="-O2"
+  else
+    CXXFLAGS=
+  fi
+fi[]dnl
+])# _AC_PROG_CXX_G
+
+
+# AC_PROG_CXX_C_O
+# ---------------
+# Test if the C++ compiler accepts the options `-c' and `-o'
+# simultaneously, and define `CXX_NO_MINUS_C_MINUS_O' if it does not.
+AC_DEFUN([AC_PROG_CXX_C_O],
+[AC_REQUIRE([AC_PROG_CXX])dnl
+AC_LANG_PUSH([C++])dnl
+AC_CACHE_CHECK([whether $CXX understands -c and -o together],
+	       [ac_cv_prog_cxx_c_o],
+[AC_LANG_CONFTEST([AC_LANG_PROGRAM([])])
+# We test twice because some compilers refuse to overwrite an existing
+# `.o' file with `-o', although they will create one.
+ac_try='$CXX $CXXFLAGS -c conftest.$ac_ext -o conftest2.$ac_objext >&AS_MESSAGE_LOG_FD'
+rm -f conftest2.*
+if _AC_DO_VAR(ac_try) &&
+     test -f conftest2.$ac_objext &&
+     _AC_DO_VAR(ac_try); then
+  ac_cv_prog_cxx_c_o=yes
+else
+  ac_cv_prog_cxx_c_o=no
+fi
+rm -f conftest*])
+if test $ac_cv_prog_cxx_c_o = no; then
+  AC_DEFINE(CXX_NO_MINUS_C_MINUS_O, 1,
+	    [Define to 1 if your C++ compiler doesn't accept
+	     -c and -o together.])
+fi
+AC_LANG_POP([C++])dnl
+])# AC_PROG_CXX_C_O
+
+
+
+# ------------------------------ #
+# 3c. The Objective C compiler.  #
+# ------------------------------ #
+
+
+# AC_LANG_PREPROC(Objective C)
+# ----------------------------
+# Find the Objective C preprocessor.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_PREPROC(Objective C)],
+[AC_REQUIRE([AC_PROG_OBJCPP])])
+
+
+# AC_PROG_OBJCPP
+# --------------
+# Find a working Objective C preprocessor.
+AC_DEFUN([AC_PROG_OBJCPP],
+[AC_REQUIRE([AC_PROG_OBJC])dnl
+AC_ARG_VAR([OBJCPP],   [Objective C preprocessor])dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+AC_LANG_PUSH(Objective C)dnl
+AC_MSG_CHECKING([how to run the Objective C preprocessor])
+if test -z "$OBJCPP"; then
+  AC_CACHE_VAL(ac_cv_prog_OBJCPP,
+  [dnl
+    # Double quotes because OBJCPP needs to be expanded
+    for OBJCPP in "$OBJC -E" "/lib/cpp"
+    do
+      _AC_PROG_PREPROC_WORKS_IFELSE([break])
+    done
+    ac_cv_prog_OBJCPP=$OBJCPP
+  ])dnl
+  OBJCPP=$ac_cv_prog_OBJCPP
+else
+  ac_cv_prog_OBJCPP=$OBJCPP
+fi
+AC_MSG_RESULT([$OBJCPP])
+_AC_PROG_PREPROC_WORKS_IFELSE([],
+	  [AC_MSG_FAILURE([Objective C preprocessor "$OBJCPP" fails sanity check])])
+AC_SUBST(OBJCPP)dnl
+AC_LANG_POP(Objective C)dnl
+])# AC_PROG_OBJCPP
+
+
+# AC_LANG_COMPILER(Objective C)
+# -----------------------------
+# Find the Objective C compiler.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_COMPILER(Objective C)],
+[AC_REQUIRE([AC_PROG_OBJC])])
+
+
+
+# AC_PROG_OBJC([LIST-OF-COMPILERS])
+# ---------------------------------
+# LIST-OF-COMPILERS is a space separated list of Objective C compilers to
+# search for (if not specified, a default list is used).  This just gives
+# the user an opportunity to specify an alternative search list for the
+# Objective C compiler.
+# objcc StepStone Objective-C compiler (also "standard" name for OBJC)
+# objc  David Stes' POC.  If you installed this, you likely want it.
+# cc    Native C compiler (for instance, Apple).
+# CC    You never know.
+AN_MAKEVAR([OBJC],  [AC_PROG_OBJC])
+AN_PROGRAM([objcc],  [AC_PROG_OBJC])
+AN_PROGRAM([objc],  [AC_PROG_OBJC])
+AC_DEFUN([AC_PROG_OBJC],
+[AC_LANG_PUSH(Objective C)dnl
+AC_ARG_VAR([OBJC],      [Objective C compiler command])dnl
+AC_ARG_VAR([OBJCFLAGS], [Objective C compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+_AC_ARG_VAR_LIBS()dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+_AC_ARG_VAR_PRECIOUS([OBJC])dnl
+AC_CHECK_TOOLS(OBJC,
+	       [m4_default([$1], [gcc objcc objc cc CC])],
+	       gcc)
+# Provide some information about the compiler.
+_AS_ECHO_LOG([checking for _AC_LANG compiler version])
+set X $ac_compile
+ac_compiler=$[2]
+for ac_option in --version -v -V -qversion; do
+  _AC_DO_LIMIT([$ac_compiler $ac_option >&AS_MESSAGE_LOG_FD])
+done
+
+m4_expand_once([_AC_COMPILER_EXEEXT])[]dnl
+m4_expand_once([_AC_COMPILER_OBJEXT])[]dnl
+_AC_LANG_COMPILER_GNU
+if test $ac_compiler_gnu = yes; then
+  GOBJC=yes
+else
+  GOBJC=
+fi
+_AC_PROG_OBJC_G
+AC_LANG_POP(Objective C)dnl
+])# AC_PROG_OBJC
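+
+# For example (a minimal sketch; the alternative compiler list is only
+# illustrative), a configure.ac that builds Objective C sources might
+# call:
+#
+#   AC_PROG_OBJC([clang gcc])
+#
+# and then use $OBJC and $OBJCFLAGS in its build rules.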
+
+
+# _AC_PROG_OBJC_G
+# ---------------
+# Check whether -g works, even if OBJCFLAGS is set, in case the package
+# plays around with OBJCFLAGS (such as to build both debugging and
+# normal versions of a library), tasteless as that idea is.
+# Don't consider -g to work if it generates warnings when plain compiles don't.
+m4_define([_AC_PROG_OBJC_G],
+[ac_test_OBJCFLAGS=${OBJCFLAGS+set}
+ac_save_OBJCFLAGS=$OBJCFLAGS
+AC_CACHE_CHECK(whether $OBJC accepts -g, ac_cv_prog_objc_g,
+  [ac_save_objc_werror_flag=$ac_objc_werror_flag
+   ac_objc_werror_flag=yes
+   ac_cv_prog_objc_g=no
+   OBJCFLAGS="-g"
+   _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+     [ac_cv_prog_objc_g=yes],
+     [OBJCFLAGS=""
+      _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	[],
+	[ac_objc_werror_flag=$ac_save_objc_werror_flag
+	 OBJCFLAGS="-g"
+	 _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	   [ac_cv_prog_objc_g=yes])])])
+   ac_objc_werror_flag=$ac_save_objc_werror_flag])
+if test "$ac_test_OBJCFLAGS" = set; then
+  OBJCFLAGS=$ac_save_OBJCFLAGS
+elif test $ac_cv_prog_objc_g = yes; then
+  if test "$GOBJC" = yes; then
+    OBJCFLAGS="-g -O2"
+  else
+    OBJCFLAGS="-g"
+  fi
+else
+  if test "$GOBJC" = yes; then
+    OBJCFLAGS="-O2"
+  else
+    OBJCFLAGS=
+  fi
+fi[]dnl
+])# _AC_PROG_OBJC_G
+
+
+
+# -------------------------------- #
+# 3d. The Objective C++ compiler.  #
+# -------------------------------- #
+
+
+# AC_LANG_PREPROC(Objective C++)
+# ------------------------------
+# Find the Objective C++ preprocessor.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_PREPROC(Objective C++)],
+[AC_REQUIRE([AC_PROG_OBJCXXCPP])])
+
+
+# AC_PROG_OBJCXXCPP
+# -----------------
+# Find a working Objective C++ preprocessor.
+AC_DEFUN([AC_PROG_OBJCXXCPP],
+[AC_REQUIRE([AC_PROG_OBJCXX])dnl
+AC_ARG_VAR([OBJCXXCPP],   [Objective C++ preprocessor])dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+AC_LANG_PUSH(Objective C++)dnl
+AC_MSG_CHECKING([how to run the Objective C++ preprocessor])
+if test -z "$OBJCXXCPP"; then
+  AC_CACHE_VAL(ac_cv_prog_OBJCXXCPP,
+  [dnl
+    # Double quotes because OBJCXXCPP needs to be expanded
+    for OBJCXXCPP in "$OBJCXX -E" "/lib/cpp"
+    do
+      _AC_PROG_PREPROC_WORKS_IFELSE([break])
+    done
+    ac_cv_prog_OBJCXXCPP=$OBJCXXCPP
+  ])dnl
+  OBJCXXCPP=$ac_cv_prog_OBJCXXCPP
+else
+  ac_cv_prog_OBJCXXCPP=$OBJCXXCPP
+fi
+AC_MSG_RESULT([$OBJCXXCPP])
+_AC_PROG_PREPROC_WORKS_IFELSE([],
+	  [AC_MSG_FAILURE([Objective C++ preprocessor "$OBJCXXCPP" fails sanity check])])
+AC_SUBST(OBJCXXCPP)dnl
+AC_LANG_POP(Objective C++)dnl
+])# AC_PROG_OBJCXXCPP
+
+
+# AC_LANG_COMPILER(Objective C++)
+# -------------------------------
+# Find the Objective C++ compiler.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_COMPILER(Objective C++)],
+[AC_REQUIRE([AC_PROG_OBJCXX])])
+
+
+
+# AC_PROG_OBJCXX([LIST-OF-COMPILERS])
+# -----------------------------------
+# LIST-OF-COMPILERS is a space separated list of Objective C++ compilers to
+# search for (if not specified, a default list is used).  This just gives
+# the user an opportunity to specify an alternative search list for the
+# Objective C++ compiler.
+# FIXME: this list is pure guesswork
+# objc++ StepStone Objective-C++ compiler (also "standard" name for OBJCXX)
+# objcxx David Stes' POC.  If you installed this, you likely want it.
+# c++    Native C++ compiler (for instance, Apple).
+# CXX    You never know.
+AN_MAKEVAR([OBJCXX],  [AC_PROG_OBJCXX])
+AN_PROGRAM([objcxx],  [AC_PROG_OBJCXX])
+AC_DEFUN([AC_PROG_OBJCXX],
+[AC_LANG_PUSH(Objective C++)dnl
+AC_ARG_VAR([OBJCXX],      [Objective C++ compiler command])dnl
+AC_ARG_VAR([OBJCXXFLAGS], [Objective C++ compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+_AC_ARG_VAR_LIBS()dnl
+_AC_ARG_VAR_CPPFLAGS()dnl
+_AC_ARG_VAR_PRECIOUS([OBJCXX])dnl
+AC_CHECK_TOOLS(OBJCXX,
+	       [m4_default([$1], [g++ objc++ objcxx c++ CXX])],
+	       g++)
+# Provide some information about the compiler.
+_AS_ECHO_LOG([checking for _AC_LANG compiler version])
+set X $ac_compile
+ac_compiler=$[2]
+for ac_option in --version -v -V -qversion; do
+  _AC_DO_LIMIT([$ac_compiler $ac_option >&AS_MESSAGE_LOG_FD])
+done
+
+m4_expand_once([_AC_COMPILER_EXEEXT])[]dnl
+m4_expand_once([_AC_COMPILER_OBJEXT])[]dnl
+_AC_LANG_COMPILER_GNU
+if test $ac_compiler_gnu = yes; then
+  GOBJCXX=yes
+else
+  GOBJCXX=
+fi
+_AC_PROG_OBJCXX_G
+AC_LANG_POP(Objective C++)dnl
+])# AC_PROG_OBJCXX
+
+
+# _AC_PROG_OBJCXX_G
+# -----------------
+# Check whether -g works, even if OBJCXXFLAGS is set, in case the package
+# plays around with OBJCXXFLAGS (such as to build both debugging and
+# normal versions of a library), tasteless as that idea is.
+# Don't consider -g to work if it generates warnings when plain compiles don't.
+m4_define([_AC_PROG_OBJCXX_G],
+[ac_test_OBJCXXFLAGS=${OBJCXXFLAGS+set}
+ac_save_OBJCXXFLAGS=$OBJCXXFLAGS
+AC_CACHE_CHECK(whether $OBJCXX accepts -g, ac_cv_prog_objcxx_g,
+  [ac_save_objcxx_werror_flag=$ac_objcxx_werror_flag
+   ac_objcxx_werror_flag=yes
+   ac_cv_prog_objcxx_g=no
+   OBJCXXFLAGS="-g"
+   _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+     [ac_cv_prog_objcxx_g=yes],
+     [OBJCXXFLAGS=""
+      _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	[],
+	[ac_objcxx_werror_flag=$ac_save_objcxx_werror_flag
+	 OBJCXXFLAGS="-g"
+	 _AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+	   [ac_cv_prog_objcxx_g=yes])])])
+   ac_objcxx_werror_flag=$ac_save_objcxx_werror_flag])
+if test "$ac_test_OBJCXXFLAGS" = set; then
+  OBJCXXFLAGS=$ac_save_OBJCXXFLAGS
+elif test $ac_cv_prog_objcxx_g = yes; then
+  if test "$GOBJCXX" = yes; then
+    OBJCXXFLAGS="-g -O2"
+  else
+    OBJCXXFLAGS="-g"
+  fi
+else
+  if test "$GOBJCXX" = yes; then
+    OBJCXXFLAGS="-O2"
+  else
+    OBJCXXFLAGS=
+  fi
+fi[]dnl
+])# _AC_PROG_OBJCXX_G
+
+
+
+## ------------------------------- ##
+## 4. Compilers' characteristics.  ##
+## ------------------------------- ##
+
+# -------------------------------- #
+# 4a. C compiler characteristics.  #
+# -------------------------------- #
+
+
+# _AC_PROG_CC_C89 ([ACTION-IF-AVAILABLE], [ACTION-IF-UNAVAILABLE])
+# ----------------------------------------------------------------
+# If the C compiler is not in ANSI C89 (ISO C90) mode by default, try
+# to add an option to output variable CC to make it so.  This macro
+# tries various options that select ANSI C89 on some system or
+# another.  It considers the compiler to be in ANSI C89 mode if it
+# handles function prototypes correctly.
+AC_DEFUN([_AC_PROG_CC_C89],
+[_AC_C_STD_TRY([c89],
+[[#include <stdarg.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+     char **p;
+     int i;
+{
+  return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+  char *s;
+  va_list v;
+  va_start (v,p);
+  s = g (p, va_arg (v,int));
+  va_end (v);
+  return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
+   function prototypes and stuff, but not '\xHH' hex character constants.
+   These don't provoke an error unfortunately, instead are silently treated
+   as 'x'.  The following induces an error, until -std is added to get
+   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
+   array size at least.  It's necessary to write '\x00'==0 to get something
+   that's true only with -std.  */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+   inside strings and character constants.  */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;]],
+[[return f (e, argv, 0) != argv[0]  ||  f (e, argv, 1) != argv[1];]],
+dnl Don't try gcc -ansi; that turns off useful extensions and
+dnl breaks some systems' header files.
+dnl AIX circa 2003	-qlanglvl=extc89
+dnl old AIX		-qlanglvl=ansi
+dnl Ultrix, OSF/1, Tru64	-std
+dnl HP-UX 10.20 and later	-Ae
+dnl HP-UX older versions	-Aa -D_HPUX_SOURCE
+dnl SVR4			-Xc -D__EXTENSIONS__
+[-qlanglvl=extc89 -qlanglvl=ansi -std \
+	-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"], [$1], [$2])[]dnl
+])# _AC_PROG_CC_C89
+
+
+# _AC_C_STD_TRY(STANDARD, TEST-PROLOGUE, TEST-BODY, OPTION-LIST,
+#		ACTION-IF-AVAILABLE, ACTION-IF-UNAVAILABLE)
+# --------------------------------------------------------------
+# Check whether the C compiler accepts features of STANDARD (e.g. `c89', `c99')
+# by trying to compile a program of TEST-PROLOGUE and TEST-BODY.  If this fails,
+# try again with each compiler option in the space-separated OPTION-LIST; if one
+# helps, append it to CC.  If eventually successful, run ACTION-IF-AVAILABLE,
+# else ACTION-IF-UNAVAILABLE.
+AC_DEFUN([_AC_C_STD_TRY],
+[AC_MSG_CHECKING([for $CC option to accept ISO ]m4_translit($1, [c], [C]))
+AC_CACHE_VAL(ac_cv_prog_cc_$1,
+[ac_cv_prog_cc_$1=no
+ac_save_CC=$CC
+AC_LANG_CONFTEST([AC_LANG_PROGRAM([$2], [$3])])
+for ac_arg in '' $4
+do
+  CC="$ac_save_CC $ac_arg"
+  _AC_COMPILE_IFELSE([], [ac_cv_prog_cc_$1=$ac_arg])
+  test "x$ac_cv_prog_cc_$1" != "xno" && break
+done
+rm -f conftest.$ac_ext
+CC=$ac_save_CC
+])# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_$1" in
+  x)
+    AC_MSG_RESULT([none needed]) ;;
+  xno)
+    AC_MSG_RESULT([unsupported]) ;;
+  *)
+    CC="$CC $ac_cv_prog_cc_$1"
+    AC_MSG_RESULT([$ac_cv_prog_cc_$1]) ;;
+esac
+AS_IF([test "x$ac_cv_prog_cc_$1" != xno], [$5], [$6])
+])# _AC_C_STD_TRY
+
+
+# _AC_PROG_CC_C99 ([ACTION-IF-AVAILABLE], [ACTION-IF-UNAVAILABLE])
+# ----------------------------------------------------------------
+# If the C compiler is not in ISO C99 mode by default, try to add an
+# option to output variable CC to make it so.  This macro tries
+# various options that select ISO C99 on some system or another.  It
+# considers the compiler to be in ISO C99 mode if it handles _Bool,
+# // comments, flexible array members, inline, long long int, mixed
+# code and declarations, named initialization of structs, restrict,
+# va_copy, varargs macros, variable declarations in for loops and
+# variable length arrays.
+AC_DEFUN([_AC_PROG_CC_C99],
+[_AC_C_STD_TRY([c99],
+[[#include <stdarg.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <wchar.h>
+#include <stdio.h>
+
+// Check varargs macros.  These examples are taken from C99 6.10.3.5.
+#define debug(...) fprintf (stderr, __VA_ARGS__)
+#define showlist(...) puts (#__VA_ARGS__)
+#define report(test,...) ((test) ? puts (#test) : printf (__VA_ARGS__))
+static void
+test_varargs_macros (void)
+{
+  int x = 1234;
+  int y = 5678;
+  debug ("Flag");
+  debug ("X = %d\n", x);
+  showlist (The first, second, and third items.);
+  report (x>y, "x is %d but y is %d", x, y);
+}
+
+// Check long long types.
+#define BIG64 18446744073709551615ull
+#define BIG32 4294967295ul
+#define BIG_OK (BIG64 / BIG32 == 4294967297ull && BIG64 % BIG32 == 0)
+#if !BIG_OK
+  your preprocessor is broken;
+#endif
+#if BIG_OK
+#else
+  your preprocessor is broken;
+#endif
+static long long int bignum = -9223372036854775807LL;
+static unsigned long long int ubignum = BIG64;
+
+struct incomplete_array
+{
+  int datasize;
+  double data[];
+};
+
+struct named_init {
+  int number;
+  const wchar_t *name;
+  double average;
+};
+
+typedef const char *ccp;
+
+static inline int
+test_restrict (ccp restrict text)
+{
+  // See if C++-style comments work.
+  // Iterate through items via the restricted pointer.
+  // Also check for declarations in for loops.
+  for (unsigned int i = 0; *(text+i) != '\0'; ++i)
+    continue;
+  return 0;
+}
+
+// Check varargs and va_copy.
+static void
+test_varargs (const char *format, ...)
+{
+  va_list args;
+  va_start (args, format);
+  va_list args_copy;
+  va_copy (args_copy, args);
+
+  const char *str;
+  int number;
+  float fnumber;
+
+  while (*format)
+    {
+      switch (*format++)
+	{
+	case 's': // string
+	  str = va_arg (args_copy, const char *);
+	  break;
+	case 'd': // int
+	  number = va_arg (args_copy, int);
+	  break;
+	case 'f': // float
+	  fnumber = va_arg (args_copy, double);
+	  break;
+	default:
+	  break;
+	}
+    }
+  va_end (args_copy);
+  va_end (args);
+}
+]],
+[[
+  // Check bool.
+  _Bool success = false;
+
+  // Check restrict.
+  if (test_restrict ("String literal") == 0)
+    success = true;
+  char *restrict newvar = "Another string";
+
+  // Check varargs.
+  test_varargs ("s, d' f .", "string", 65, 34.234);
+  test_varargs_macros ();
+
+  // Check flexible array members.
+  struct incomplete_array *ia =
+    malloc (sizeof (struct incomplete_array) + (sizeof (double) * 10));
+  ia->datasize = 10;
+  for (int i = 0; i < ia->datasize; ++i)
+    ia->data[i] = i * 1.234;
+
+  // Check named initializers.
+  struct named_init ni = {
+    .number = 34,
+    .name = L"Test wide string",
+    .average = 543.34343,
+  };
+
+  ni.number = 58;
+
+  int dynamic_array[ni.number];
+  dynamic_array[ni.number - 1] = 543;
+
+  // work around unused variable warnings
+  return (!success || bignum == 0LL || ubignum == 0uLL || newvar[0] == 'x'
+	  || dynamic_array[ni.number - 1] != 543);
+]],
+dnl Try
+dnl GCC		-std=gnu99 (unused restrictive modes: -std=c99 -std=iso9899:1999)
+dnl AIX		-qlanglvl=extc99 (unused restrictive mode: -qlanglvl=stdc99)
+dnl HP cc	-AC99
+dnl Intel ICC	-std=c99, -c99 (deprecated)
+dnl IRIX	-c99
+dnl Solaris	-xc99=all (Forte Developer 7 C mishandles -xc99 on Solaris 9,
+dnl		as it incorrectly assumes C99 semantics for library functions)
+dnl Tru64	-c99
+dnl with extended modes being tried first.
+[[-std=gnu99 -std=c99 -c99 -AC99 -xc99=all -qlanglvl=extc99]], [$1], [$2])[]dnl
+])# _AC_PROG_CC_C99
+
+
+# AC_PROG_CC_C89
+# --------------
+AC_DEFUN([AC_PROG_CC_C89],
+[ AC_REQUIRE([AC_PROG_CC])dnl
+  _AC_PROG_CC_C89
+])
+
+
+# AC_PROG_CC_C99
+# --------------
+AC_DEFUN([AC_PROG_CC_C99],
+[ AC_REQUIRE([AC_PROG_CC])dnl
+  _AC_PROG_CC_C99
+])
+
+
+# AC_PROG_CC_STDC
+# ---------------
+AC_DEFUN([AC_PROG_CC_STDC],
+[ AC_REQUIRE([AC_PROG_CC])dnl
+  AS_CASE([$ac_cv_prog_cc_stdc],
+    [no], [ac_cv_prog_cc_c99=no; ac_cv_prog_cc_c89=no],
+	  [_AC_PROG_CC_C99([ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c99],
+	     [_AC_PROG_CC_C89([ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c89],
+			      [ac_cv_prog_cc_stdc=no])])])
+  AC_MSG_CHECKING([for $CC option to accept ISO Standard C])
+  AC_CACHE_VAL([ac_cv_prog_cc_stdc], [])
+  AS_CASE([$ac_cv_prog_cc_stdc],
+    [no], [AC_MSG_RESULT([unsupported])],
+    [''], [AC_MSG_RESULT([none needed])],
+	  [AC_MSG_RESULT([$ac_cv_prog_cc_stdc])])
+])
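+
+# For example (a minimal sketch; the warning text is hypothetical), a
+# configure.ac that prefers C99 but tolerates C89 might do:
+#
+#   AC_PROG_CC
+#   AC_PROG_CC_STDC
+#   if test "$ac_cv_prog_cc_stdc" = no; then
+#     AC_MSG_WARN([no ISO C compiler mode found])
+#   fi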
+
+
+# AC_C_BACKSLASH_A
+# ----------------
+AC_DEFUN([AC_C_BACKSLASH_A],
+[
+  AC_CACHE_CHECK([whether backslash-a works in strings], ac_cv_c_backslash_a,
+   [AC_COMPILE_IFELSE([AC_LANG_PROGRAM([],
+     [[
+#if '\a' == 'a'
+      syntax error;
+#endif
+      char buf['\a' == 'a' ? -1 : 1];
+      buf[0] = '\a';
+      return buf[0] != "\a"[0];
+     ]])],
+     [ac_cv_c_backslash_a=yes],
+     [ac_cv_c_backslash_a=no])])
+  if test $ac_cv_c_backslash_a = yes; then
+    AC_DEFINE(HAVE_C_BACKSLASH_A, 1,
+      [Define if backslash-a works in C strings.])
+  fi
+])
+
+
+# AC_C_CROSS
+# ----------
+# Has been merged into AC_PROG_CC.
+AU_DEFUN([AC_C_CROSS], [])
+
+
+# AC_C_CHAR_UNSIGNED
+# ------------------
+AC_DEFUN([AC_C_CHAR_UNSIGNED],
+[AH_VERBATIM([__CHAR_UNSIGNED__],
+[/* Define to 1 if type `char' is unsigned and you are not using gcc.  */
+#ifndef __CHAR_UNSIGNED__
+# undef __CHAR_UNSIGNED__
+#endif])dnl
+AC_CACHE_CHECK(whether char is unsigned, ac_cv_c_char_unsigned,
+[AC_COMPILE_IFELSE([AC_LANG_BOOL_COMPILE_TRY([AC_INCLUDES_DEFAULT([])],
+					     [((char) -1) < 0])],
+		   ac_cv_c_char_unsigned=no, ac_cv_c_char_unsigned=yes)])
+if test $ac_cv_c_char_unsigned = yes && test "$GCC" != yes; then
+  AC_DEFINE(__CHAR_UNSIGNED__)
+fi
+])# AC_C_CHAR_UNSIGNED
+
+
+# AC_C_BIGENDIAN ([ACTION-IF-TRUE], [ACTION-IF-FALSE], [ACTION-IF-UNKNOWN],
+#                 [ACTION-IF-UNIVERSAL])
+# -------------------------------------------------------------------------
+AC_DEFUN([AC_C_BIGENDIAN],
+[AH_VERBATIM([WORDS_BIGENDIAN],
+[/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
+   significant byte first (like Motorola and SPARC, unlike Intel). */
+#if defined AC_APPLE_UNIVERSAL_BUILD
+# if defined __BIG_ENDIAN__
+#  define WORDS_BIGENDIAN 1
+# endif
+#else
+# ifndef WORDS_BIGENDIAN
+#  undef WORDS_BIGENDIAN
+# endif
+#endif])dnl
+ AC_CACHE_CHECK([whether byte ordering is bigendian], [ac_cv_c_bigendian],
+   [ac_cv_c_bigendian=unknown
+    # See if we're dealing with a universal compiler.
+    AC_COMPILE_IFELSE(
+	 [AC_LANG_SOURCE(
+	    [[#ifndef __APPLE_CC__
+	       not a universal capable compiler
+	     #endif
+	     typedef int dummy;
+	    ]])],
+	 [
+	# Check for potential -arch flags.  It is not universal unless
+	# there are at least two -arch flags with different values.
+	ac_arch=
+	ac_prev=
+	for ac_word in $CC $CFLAGS $CPPFLAGS $LDFLAGS; do
+	 if test -n "$ac_prev"; then
+	   case $ac_word in
+	     i?86 | x86_64 | ppc | ppc64)
+	       if test -z "$ac_arch" || test "$ac_arch" = "$ac_word"; then
+		 ac_arch=$ac_word
+	       else
+		 ac_cv_c_bigendian=universal
+		 break
+	       fi
+	       ;;
+	   esac
+	   ac_prev=
+	 elif test "x$ac_word" = "x-arch"; then
+	   ac_prev=arch
+	 fi
+       done])
+    if test $ac_cv_c_bigendian = unknown; then
+      # See if sys/param.h defines the BYTE_ORDER macro.
+      AC_COMPILE_IFELSE(
+	[AC_LANG_PROGRAM(
+	   [[#include <sys/types.h>
+	     #include <sys/param.h>
+	   ]],
+	   [[#if ! (defined BYTE_ORDER && defined BIG_ENDIAN \
+		     && defined LITTLE_ENDIAN && BYTE_ORDER && BIG_ENDIAN \
+		     && LITTLE_ENDIAN)
+	      bogus endian macros
+	     #endif
+	   ]])],
+	[# It does; now see whether it defined to BIG_ENDIAN or not.
+	 AC_COMPILE_IFELSE(
+	   [AC_LANG_PROGRAM(
+	      [[#include <sys/types.h>
+		#include <sys/param.h>
+	      ]],
+	      [[#if BYTE_ORDER != BIG_ENDIAN
+		 not big endian
+		#endif
+	      ]])],
+	   [ac_cv_c_bigendian=yes],
+	   [ac_cv_c_bigendian=no])])
+    fi
+    if test $ac_cv_c_bigendian = unknown; then
+      # See if <limits.h> defines _LITTLE_ENDIAN or _BIG_ENDIAN (e.g., Solaris).
+      AC_COMPILE_IFELSE(
+	[AC_LANG_PROGRAM(
+	   [[#include <limits.h>
+	   ]],
+	   [[#if ! (defined _LITTLE_ENDIAN || defined _BIG_ENDIAN)
+	      bogus endian macros
+	     #endif
+	   ]])],
+	[# It does; now see whether it defined to _BIG_ENDIAN or not.
+	 AC_COMPILE_IFELSE(
+	   [AC_LANG_PROGRAM(
+	      [[#include <limits.h>
+	      ]],
+	      [[#ifndef _BIG_ENDIAN
+		 not big endian
+		#endif
+	      ]])],
+	   [ac_cv_c_bigendian=yes],
+	   [ac_cv_c_bigendian=no])])
+    fi
+    if test $ac_cv_c_bigendian = unknown; then
+      # Compile a test program.
+      AC_RUN_IFELSE(
+	[AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+	   [[
+	     /* Are we little or big endian?  From Harbison&Steele.  */
+	     union
+	     {
+	       long int l;
+	       char c[sizeof (long int)];
+	     } u;
+	     u.l = 1;
+	     return u.c[sizeof (long int) - 1] == 1;
+	   ]])],
+	[ac_cv_c_bigendian=no],
+	[ac_cv_c_bigendian=yes],
+	[# Try to guess by grepping values from an object file.
+	 AC_COMPILE_IFELSE(
+	   [AC_LANG_PROGRAM(
+	      [[short int ascii_mm[] =
+		  { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
+		short int ascii_ii[] =
+		  { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
+		int use_ascii (int i) {
+		  return ascii_mm[i] + ascii_ii[i];
+		}
+		short int ebcdic_ii[] =
+		  { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
+		short int ebcdic_mm[] =
+		  { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
+		int use_ebcdic (int i) {
+		  return ebcdic_mm[i] + ebcdic_ii[i];
+		}
+		extern int foo;
+	      ]],
+	      [[return use_ascii (foo) == use_ebcdic (foo);]])],
+	   [if grep BIGenDianSyS conftest.$ac_objext >/dev/null; then
+	      ac_cv_c_bigendian=yes
+	    fi
+	    if grep LiTTleEnDian conftest.$ac_objext >/dev/null ; then
+	      if test "$ac_cv_c_bigendian" = unknown; then
+		ac_cv_c_bigendian=no
+	      else
+		# finding both strings is unlikely to happen, but who knows?
+		ac_cv_c_bigendian=unknown
+	      fi
+	    fi])])
+    fi])
+ case $ac_cv_c_bigendian in #(
+   yes)
+     m4_default([$1],
+       [AC_DEFINE([WORDS_BIGENDIAN], 1)]);; #(
+   no)
+     $2 ;; #(
+   universal)
+dnl Note that AC_APPLE_UNIVERSAL_BUILD sorts less than WORDS_BIGENDIAN;
+dnl this is a necessity for proper config header operation.  Warn if
+dnl the user did not specify a config header but is relying on the
+dnl default behavior for universal builds.
+     m4_default([$4],
+       [AC_CONFIG_COMMANDS_PRE([m4_ifset([AH_HEADER], [],
+	 [AC_DIAGNOSE([obsolete],
+	   [AC_C_BIGENDIAN should be used with AC_CONFIG_HEADERS])])])dnl
+	AC_DEFINE([AC_APPLE_UNIVERSAL_BUILD],1,
+	  [Define if building universal (internal helper macro)])])
+     ;; #(
+   *)
+     m4_default([$3],
+       [AC_MSG_ERROR([unknown endianness
+ presetting ac_cv_c_bigendian=no (or yes) will help])]) ;;
+ esac
+])# AC_C_BIGENDIAN
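+
+# For example (a minimal sketch; the MY_BIG_ENDIAN define is
+# hypothetical), a configure.ac can override the default actions:
+#
+#   AC_C_BIGENDIAN(
+#     [AC_DEFINE([MY_BIG_ENDIAN], [1], [host is big-endian])],
+#     [],
+#     [AC_MSG_ERROR([cannot determine host endianness])])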
+
+
+# AC_C_INLINE
+# -----------
+# Do nothing if the compiler accepts the inline keyword.
+# Otherwise define inline to __inline__ or __inline if one of those work,
+# otherwise define inline to be empty.
+#
+# HP C version B.11.11.04 doesn't allow a typedef as the return value for an
+# inline function, only builtin types.
+#
+AN_IDENTIFIER([inline], [AC_C_INLINE])
+AC_DEFUN([AC_C_INLINE],
+[AC_CACHE_CHECK([for inline], ac_cv_c_inline,
+[ac_cv_c_inline=no
+for ac_kw in inline __inline__ __inline; do
+  AC_COMPILE_IFELSE([AC_LANG_SOURCE(
+[#ifndef __cplusplus
+typedef int foo_t;
+static $ac_kw foo_t static_foo () {return 0; }
+$ac_kw foo_t foo () {return 0; }
+#endif
+])],
+		    [ac_cv_c_inline=$ac_kw])
+  test "$ac_cv_c_inline" != no && break
+done
+])
+AH_VERBATIM([inline],
+[/* Define to `__inline__' or `__inline' if that's what the C compiler
+   calls it, or to nothing if 'inline' is not supported under any name.  */
+#ifndef __cplusplus
+#undef inline
+#endif])
+case $ac_cv_c_inline in
+  inline | yes) ;;
+  *)
+    case $ac_cv_c_inline in
+      no) ac_val=;;
+      *) ac_val=$ac_cv_c_inline;;
+    esac
+    cat >>confdefs.h <<_ACEOF
+#ifndef __cplusplus
+#define inline $ac_val
+#endif
+_ACEOF
+    ;;
+esac
+])# AC_C_INLINE
+
+
+# AC_C_CONST
+# ----------
+AC_DEFUN([AC_C_CONST],
+[AC_CACHE_CHECK([for an ANSI C-conforming const], ac_cv_c_const,
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM([],
+[[/* FIXME: Include the comments suggested by Paul. */
+#ifndef __cplusplus
+  /* Ultrix mips cc rejects this.  */
+  typedef int charset[2];
+  const charset cs;
+  /* SunOS 4.1.1 cc rejects this.  */
+  char const *const *pcpcc;
+  char **ppc;
+  /* NEC SVR4.0.2 mips cc rejects this.  */
+  struct point {int x, y;};
+  static struct point const zero = {0,0};
+  /* AIX XL C 1.02.0.0 rejects this.
+     It does not let you subtract one const X* pointer from another in
+     an arm of an if-expression whose if-part is not a constant
+     expression */
+  const char *g = "string";
+  pcpcc = &g + (g ? g-g : 0);
+  /* HPUX 7.0 cc rejects these. */
+  ++pcpcc;
+  ppc = (char**) pcpcc;
+  pcpcc = (char const *const *) ppc;
+  { /* SCO 3.2v4 cc rejects this.  */
+    char *t;
+    char const *s = 0 ? (char *) 0 : (char const *) 0;
+
+    *t++ = 0;
+    if (s) return 0;
+  }
+  { /* Someone thinks the Sun supposedly-ANSI compiler will reject this.  */
+    int x[] = {25, 17};
+    const int *foo = &x[0];
+    ++foo;
+  }
+  { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */
+    typedef const int *iptr;
+    iptr p = 0;
+    ++p;
+  }
+  { /* AIX XL C 1.02.0.0 rejects this saying
+       "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */
+    struct s { int j; const int *ap[3]; };
+    struct s *b; b->j = 5;
+  }
+  { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */
+    const int foo = 10;
+    if (!foo) return 0;
+  }
+  return !cs[0] && !zero.x;
+#endif
+]])],
+		   [ac_cv_c_const=yes],
+		   [ac_cv_c_const=no])])
+if test $ac_cv_c_const = no; then
+  AC_DEFINE(const,,
+	    [Define to empty if `const' does not conform to ANSI C.])
+fi
+])# AC_C_CONST
+
+
+# AC_C_RESTRICT
+# -------------
+# based on acx_restrict.m4, from the GNU Autoconf Macro Archive at:
+# http://autoconf-archive.cryp.to/acx_restrict.html
+#
+# Determine whether the C/C++ compiler supports the "restrict" keyword
+# introduced in ANSI C99, or an equivalent.  Define "restrict" to the alternate
+# spelling, if any; these are more likely to work in both C and C++ compilers of
+# the same family, and in the presence of varying compiler options.  If only
+# plain "restrict" works, do nothing.  Here are some variants:
+# - GCC supports both __restrict and __restrict__
+# - older DEC Alpha C compilers support only __restrict
+# - _Restrict is the only spelling accepted by Sun WorkShop 6 update 2 C
+# Otherwise, define "restrict" to be empty.
+AN_IDENTIFIER([restrict], [AC_C_RESTRICT])
+AC_DEFUN([AC_C_RESTRICT],
+[AC_CACHE_CHECK([for C/C++ restrict keyword], ac_cv_c_restrict,
+  [ac_cv_c_restrict=no
+   # The order here caters to the fact that C++ does not require restrict.
+   for ac_kw in __restrict __restrict__ _Restrict restrict; do
+     AC_COMPILE_IFELSE([AC_LANG_PROGRAM(
+      [[typedef int * int_ptr;
+	int foo (int_ptr $ac_kw ip) {
+	return ip[0];
+       }]],
+      [[int s[1];
+	int * $ac_kw t = s;
+	t[0] = 0;
+	return foo(t)]])],
+      [ac_cv_c_restrict=$ac_kw])
+     test "$ac_cv_c_restrict" != no && break
+   done
+  ])
+ AH_VERBATIM([restrict],
+[/* Define to the equivalent of the C99 'restrict' keyword, or to
+   nothing if this is not supported.  Do not define if restrict is
+   supported directly.  */
+#undef restrict
+/* Work around a bug in Sun C++: it does not support _Restrict or
+   __restrict__, even though the corresponding Sun C compiler ends up with
+   "#define restrict _Restrict" or "#define restrict __restrict__" in the
+   previous line.  Perhaps some future version of Sun C++ will work with
+   restrict; if so, hopefully it defines __RESTRICT like Sun C does.  */
+#if defined __SUNPRO_CC && !defined __RESTRICT
+# define _Restrict
+# define __restrict__
+#endif])
+ case $ac_cv_c_restrict in
+   restrict) ;;
+   no) AC_DEFINE([restrict], []) ;;
+   *)  AC_DEFINE_UNQUOTED([restrict], [$ac_cv_c_restrict]) ;;
+ esac
+])# AC_C_RESTRICT
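+
+# For example (a minimal sketch), after
+#
+#   AC_PROG_CC
+#   AC_C_RESTRICT
+#
+# sources that include config.h can use `restrict' freely: config.h maps
+# it to the spelling the compiler accepts, or to nothing if none works.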
+
+
+# AC_C_VOLATILE
+# -------------
+# Note that, unlike const, #defining volatile to be the empty string can
+# actually turn a correct program into an incorrect one, since removing
+# uses of volatile actually grants the compiler permission to perform
+# optimizations that could break the user's code.  So, do not #define
+# volatile away unless it is really necessary to allow the user's code
+# to compile cleanly.  Benign compiler failures should be tolerated.
+AC_DEFUN([AC_C_VOLATILE],
+[AC_CACHE_CHECK([for working volatile], ac_cv_c_volatile,
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM([], [
+volatile int x;
+int * volatile y = (int *) 0;
+return !x && !y;])],
+		   [ac_cv_c_volatile=yes],
+		   [ac_cv_c_volatile=no])])
+if test $ac_cv_c_volatile = no; then
+  AC_DEFINE(volatile,,
+	    [Define to empty if the keyword `volatile' does not work.
+	     Warning: valid code using `volatile' can become incorrect
+	     without.  Disable with care.])
+fi
+])# AC_C_VOLATILE
+
+
+# AC_C_STRINGIZE
+# --------------
+# Checks whether the preprocessor supports the ANSI `#' stringizing
+# operator, i.e. turning a macro argument into a string literal.
+# Defines HAVE_STRINGIZE if it does.
+AC_DEFUN([AC_C_STRINGIZE],
+[AC_CACHE_CHECK([for preprocessor stringizing operator],
+		[ac_cv_c_stringize],
+[AC_EGREP_CPP([@%:@teststring],
+	      [@%:@define x(y) #y
+
+char *s = x(teststring);],
+	      [ac_cv_c_stringize=no],
+	      [ac_cv_c_stringize=yes])])
+if test $ac_cv_c_stringize = yes; then
+  AC_DEFINE(HAVE_STRINGIZE, 1,
+	    [Define to 1 if cpp supports the ANSI @%:@ stringizing operator.])
+fi
+])# AC_C_STRINGIZE
+
+
+# AC_C_PROTOTYPES
+# ---------------
+# Check if the C compiler supports function prototypes, including the
+# case where it needs extra options to do so.
+AC_DEFUN([AC_C_PROTOTYPES],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_MSG_CHECKING([for function prototypes])
+if test "$ac_cv_prog_cc_c89" != no; then
+  AC_MSG_RESULT([yes])
+  AC_DEFINE(PROTOTYPES, 1,
+	    [Define to 1 if the C compiler supports function prototypes.])
+  AC_DEFINE(__PROTOTYPES, 1,
+	    [Define like PROTOTYPES; this can be used by system headers.])
+else
+  AC_MSG_RESULT([no])
+fi
+])# AC_C_PROTOTYPES
+
+
+# AC_C_FLEXIBLE_ARRAY_MEMBER
+# --------------------------
+# Check whether the C compiler supports flexible array members.
+AC_DEFUN([AC_C_FLEXIBLE_ARRAY_MEMBER],
+[
+  AC_CACHE_CHECK([for flexible array members],
+    ac_cv_c_flexmember,
+    [AC_COMPILE_IFELSE(
+       [AC_LANG_PROGRAM(
+	  [[#include <stdlib.h>
+	    #include <stdio.h>
+	    #include <stddef.h>
+	    struct s { int n; double d[]; };]],
+	  [[int m = getchar ();
+	    struct s *p = malloc (offsetof (struct s, d)
+				  + m * sizeof (double));
+	    p->d[0] = 0.0;
+	    return p->d != (double *) NULL;]])],
+       [ac_cv_c_flexmember=yes],
+       [ac_cv_c_flexmember=no])])
+  if test $ac_cv_c_flexmember = yes; then
+    AC_DEFINE([FLEXIBLE_ARRAY_MEMBER], [],
+      [Define to nothing if C supports flexible array members, and to
+       1 if it does not.  That way, with a declaration like `struct s
+       { int n; double d@<:@FLEXIBLE_ARRAY_MEMBER@:>@; };', the struct hack
+       can be used with pre-C99 compilers.
+       When computing the size of such an object, don't use 'sizeof (struct s)'
+       as it overestimates the size.  Use 'offsetof (struct s, d)' instead.
+       Don't use 'offsetof (struct s, d@<:@0@:>@)', as this doesn't work with
+       MSVC and with C++ compilers.])
+  else
+    AC_DEFINE([FLEXIBLE_ARRAY_MEMBER], 1)
+  fi
+])
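+
+# For example (a minimal sketch), after calling AC_C_FLEXIBLE_ARRAY_MEMBER
+# a header that includes config.h can declare
+#
+#   struct s { int n; double d[FLEXIBLE_ARRAY_MEMBER]; };
+#
+# and allocate instances with `offsetof (struct s, d) + count * sizeof
+# (double)', as described in the AC_DEFINE help text above.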
+
+
+# AC_C_VARARRAYS
+# --------------
+# Check whether the C compiler supports variable-length arrays.
+AC_DEFUN([AC_C_VARARRAYS],
+[
+  AC_CACHE_CHECK([for variable-length arrays],
+    ac_cv_c_vararrays,
+    [AC_COMPILE_IFELSE(
+       [AC_LANG_PROGRAM([],
+	  [[static int x; char a[++x]; a[sizeof a - 1] = 0; return a[0];]])],
+       [ac_cv_c_vararrays=yes],
+       [ac_cv_c_vararrays=no])])
+  if test $ac_cv_c_vararrays = yes; then
+    AC_DEFINE([HAVE_C_VARARRAYS], 1,
+      [Define to 1 if C supports variable-length arrays.])
+  fi
+])
+
+
+# AC_C_TYPEOF
+# -----------
+# Check if the C compiler supports GCC's typeof syntax.
+# The test case provokes incompatibilities in the Sun C compilers
+# (both Solaris 8 and Solaris 10).
+AC_DEFUN([AC_C_TYPEOF],
+[
+  AC_CACHE_CHECK([for typeof syntax and keyword spelling], ac_cv_c_typeof,
+    [ac_cv_c_typeof=no
+     for ac_kw in typeof __typeof__ no; do
+       test $ac_kw = no && break
+       AC_COMPILE_IFELSE([AC_LANG_PROGRAM([],
+	 [[
+	   int value;
+	   typedef struct {
+		   char a [1
+			   + ! (($ac_kw (value))
+				(($ac_kw (value)) 0 < ($ac_kw (value)) -1
+				 ? ($ac_kw (value)) - 1
+				 : ~ (~ ($ac_kw (value)) 0
+				      << sizeof ($ac_kw (value)))))]; }
+	      ac__typeof_type_;
+	   return
+	     (! ((void) ((ac__typeof_type_ *) 0), 0));
+	 ]])],
+	 [ac_cv_c_typeof=$ac_kw])
+       test $ac_cv_c_typeof != no && break
+     done])
+  if test $ac_cv_c_typeof != no; then
+    AC_DEFINE([HAVE_TYPEOF], 1,
+      [Define to 1 if typeof works with your compiler.])
+    if test $ac_cv_c_typeof != typeof; then
+      AC_DEFINE_UNQUOTED([typeof], [$ac_cv_c_typeof],
+	[Define to __typeof__ if your compiler spells it that way.])
+    fi
+  fi
+])
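+
+# For example (a minimal sketch), a configure.ac can simply call
+#
+#   AC_PROG_CC
+#   AC_C_TYPEOF
+#
+# after which sources may test HAVE_TYPEOF and use `typeof', which is
+# remapped to __typeof__ when only that spelling is accepted.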
+
+
+# _AC_LANG_OPENMP
+# ---------------
+# Expands to some language dependent source code for testing the presence of
+# OpenMP.
+AC_DEFUN([_AC_LANG_OPENMP],
+[AC_LANG_SOURCE([_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])])
+
+# _AC_LANG_OPENMP(C)
+# ------------------
+m4_define([_AC_LANG_OPENMP(C)],
+[
+#ifndef _OPENMP
+ choke me
+#endif
+#include <omp.h>
+int main () { return omp_get_num_threads (); }
+])
+
+# _AC_LANG_OPENMP(C++)
+# --------------------
+m4_copy([_AC_LANG_OPENMP(C)], [_AC_LANG_OPENMP(C++)])
+
+# _AC_LANG_OPENMP(Fortran 77)
+# ---------------------------
+m4_define([_AC_LANG_OPENMP(Fortran 77)],
+[AC_LANG_FUNC_LINK_TRY([omp_get_num_threads])])
+
+# _AC_LANG_OPENMP(Fortran)
+# ------------------------
+m4_copy([_AC_LANG_OPENMP(Fortran 77)], [_AC_LANG_OPENMP(Fortran)])
+
+# AC_OPENMP
+# ---------
+# Check which options need to be passed to the C compiler to support OpenMP.
+# Set the OPENMP_CFLAGS / OPENMP_CXXFLAGS / OPENMP_FFLAGS variable to these
+# options.
+# The options are necessary at compile time (so the #pragmas are understood)
+# and at link time (so the appropriate library is linked with).
+# This macro takes care to not produce redundant options if $CC $CFLAGS already
+# supports OpenMP. It also is careful to not pass options to compilers that
+# misinterpret them; for example, most compilers accept "-openmp" and create
+# an output file called 'penmp' rather than activating OpenMP support.
+AC_DEFUN([AC_OPENMP],
+[
+  OPENMP_[]_AC_LANG_PREFIX[]FLAGS=
+  AC_ARG_ENABLE([openmp],
+    [AS_HELP_STRING([--disable-openmp], [do not use OpenMP])])
+  if test "$enable_openmp" != no; then
+    AC_CACHE_CHECK([for $[]_AC_CC[] option to support OpenMP],
+      [ac_cv_prog_[]_AC_LANG_ABBREV[]_openmp],
+      [AC_LINK_IFELSE([_AC_LANG_OPENMP],
+	 [ac_cv_prog_[]_AC_LANG_ABBREV[]_openmp='none needed'],
+	 [ac_cv_prog_[]_AC_LANG_ABBREV[]_openmp='unsupported'
+	  dnl Try these flags:
+	  dnl   GCC >= 4.2           -fopenmp
+	  dnl   SunPRO C             -xopenmp
+	  dnl   Intel C              -openmp
+	  dnl   SGI C, PGI C         -mp
+	  dnl   Tru64 Compaq C       -omp
+	  dnl   IBM C (AIX, Linux)   -qsmp=omp
+	  dnl If in this loop a compiler is passed an option that it doesn't
+	  dnl understand or that it misinterprets, the AC_LINK_IFELSE test
+	  dnl will fail (since we know that it failed without the option),
+	  dnl therefore the loop will continue searching for an option, and
+	  dnl no output file called 'penmp' or 'mp' is created.
+	  for ac_option in -fopenmp -xopenmp -openmp -mp -omp -qsmp=omp; do
+	    ac_save_[]_AC_LANG_PREFIX[]FLAGS=$[]_AC_LANG_PREFIX[]FLAGS
+	    _AC_LANG_PREFIX[]FLAGS="$[]_AC_LANG_PREFIX[]FLAGS $ac_option"
+	    AC_LINK_IFELSE([_AC_LANG_OPENMP],
+	      [ac_cv_prog_[]_AC_LANG_ABBREV[]_openmp=$ac_option])
+	    _AC_LANG_PREFIX[]FLAGS=$ac_save_[]_AC_LANG_PREFIX[]FLAGS
+	    if test "$ac_cv_prog_[]_AC_LANG_ABBREV[]_openmp" != unsupported; then
+	      break
+	    fi
+	  done])])
+    case $ac_cv_prog_[]_AC_LANG_ABBREV[]_openmp in #(
+      "none needed" | unsupported)
+	;; #(
+      *)
+	OPENMP_[]_AC_LANG_PREFIX[]FLAGS=$ac_cv_prog_[]_AC_LANG_ABBREV[]_openmp ;;
+    esac
+  fi
+  AC_SUBST([OPENMP_]_AC_LANG_PREFIX[FLAGS])
+])
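+
+# For example (a minimal sketch), a C package can enable OpenMP where
+# available with
+#
+#   AC_PROG_CC
+#   AC_OPENMP
+#   CFLAGS="$CFLAGS $OPENMP_CFLAGS"
+#
+# leaving CFLAGS untouched when OpenMP is unsupported or disabled via
+# --disable-openmp.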
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/erlang.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/erlang.m4
new file mode 100644
index 0000000..bcdbb75
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/erlang.m4
@@ -0,0 +1,320 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Erlang/OTP language support.
+# Copyright (C) 2006, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Romain Lenglet.
+
+
+# Table of Contents:
+#
+# 0. Utility macros
+#
+# 1. Language selection
+#    and routines to produce programs in a given language.
+#
+# 2. Producing programs in a given language.
+#
+# 3. Looking for a compiler
+#    And possibly the associated preprocessor.
+
+
+
+## ------------------- ##
+## 0. Utility macros.  ##
+## ------------------- ##
+
+
+# AC_ERLANG_PATH_ERLC([VALUE-IF-NOT-FOUND], [PATH])
+# -------------------------------------------------
+AC_DEFUN([AC_ERLANG_PATH_ERLC],
+[AC_ARG_VAR([ERLC], [Erlang/OTP compiler command [autodetected]])dnl
+if test -n "$ERLC"; then
+    AC_MSG_CHECKING([for erlc])
+    AC_MSG_RESULT([$ERLC])
+else
+    AC_PATH_TOOL(ERLC, erlc, [$1], [$2])
+fi
+AC_ARG_VAR([ERLCFLAGS], [Erlang/OTP compiler flags [none]])dnl
+])
+
+
+# AC_ERLANG_NEED_ERLC([PATH])
+# ---------------------------
+AC_DEFUN([AC_ERLANG_NEED_ERLC],
+[AC_ERLANG_PATH_ERLC([not found], [$1])
+if test "$ERLC" = "not found"; then
+    AC_MSG_ERROR([Erlang/OTP compiler (erlc) not found but required])
+fi
+])
+
+
+# AC_ERLANG_PATH_ERL([VALUE-IF-NOT-FOUND], [PATH])
+# ------------------------------------------------
+AC_DEFUN([AC_ERLANG_PATH_ERL],
+[AC_ARG_VAR([ERL], [Erlang/OTP interpreter command [autodetected]])dnl
+if test -n "$ERL"; then
+    AC_MSG_CHECKING([for erl])
+    AC_MSG_RESULT([$ERL])
+else
+    AC_PATH_TOOL(ERL, erl, [$1], [$2])[]dnl
+fi
+])
+
+
+# AC_ERLANG_NEED_ERL([PATH])
+# --------------------------
+AC_DEFUN([AC_ERLANG_NEED_ERL],
+[AC_ERLANG_PATH_ERL([not found], [$1])
+if test "$ERL" = "not found"; then
+    AC_MSG_ERROR([Erlang/OTP interpreter (erl) not found but required])
+fi
+])
+
+
+
+## ----------------------- ##
+## 1. Language selection.  ##
+## ----------------------- ##
+
+
+# AC_LANG(Erlang)
+# ---------------
+AC_LANG_DEFINE([Erlang], [erl], [ERL], [ERLC], [],
+[ac_ext=erl
+ac_compile='$ERLC $ERLCFLAGS -b beam conftest.$ac_ext >&AS_MESSAGE_LOG_FD'
+ac_link='$ERLC $ERLCFLAGS -b beam conftest.$ac_ext >&AS_MESSAGE_LOG_FD && echo "[#]!/bin/sh" > conftest$ac_exeext && AS_ECHO(["\"$ERL\" -run conftest start -run init stop -noshell"]) >> conftest$ac_exeext && chmod +x conftest$ac_exeext'
+])
+
+
+
+# AC_LANG_ERLANG
+# --------------
+AU_DEFUN([AC_LANG_ERLANG], [AC_LANG(Erlang)])
+
+
+
+## ----------------------- ##
+## 2. Producing programs.  ##
+## ----------------------- ##
+
+
+# AC_LANG_PROGRAM(Erlang)([PROLOGUE], [BODY])
+# -------------------------------------------
+m4_define([AC_LANG_PROGRAM(Erlang)],
+[[-module(conftest).
+-export([start/0]).]]
+[$1
+start() ->
+$2
+.
+])
+
+
+# _AC_LANG_NULL_PROGRAM(Erlang)
+# -----------------------------
+# Produce source that does nothing.
+m4_define([_AC_LANG_NULL_PROGRAM(Erlang)],
+[AC_LANG_PROGRAM([], [halt(0)])])
+
+
+# _AC_LANG_IO_PROGRAM(Erlang)
+# ---------------------------
+# Produce source that performs I/O.
+m4_define([_AC_LANG_IO_PROGRAM(Erlang)],
+[AC_LANG_PROGRAM([], [dnl
+   ReturnValue = case file:write_file("conftest.out", "") of
+       {error, _} -> 1;
+       ok -> 0
+   end,
+   halt(ReturnValue)])])
+
+
+## -------------------------------------------- ##
+## 3. Looking for Compilers and Preprocessors.  ##
+## -------------------------------------------- ##
+
+
+# AC_LANG_PREPROC(Erlang)
+# -----------------------
+# Find the Erlang preprocessor.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_PREPROC(Erlang)],
+[m4_warn([syntax],
+	 [$0: No preprocessor defined for ]_AC_LANG)])
+
+# AC_LANG_COMPILER(Erlang)
+# ------------------------
+# Find the Erlang compiler.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_COMPILER(Erlang)],
+[AC_REQUIRE([AC_ERLANG_PATH_ERLC])])
+
+
+# AC_ERLANG_CHECK_LIB(LIBRARY, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# ----------------------------------------------------------------------
+# Macro for checking whether an Erlang library is installed and for
+# determining its version.
+AC_DEFUN([AC_ERLANG_CHECK_LIB],
+[AC_REQUIRE([AC_ERLANG_PATH_ERLC])[]dnl
+AC_REQUIRE([AC_ERLANG_PATH_ERL])[]dnl
+AC_CACHE_CHECK([for Erlang/OTP '$1' library subdirectory],
+    [ac_cv_erlang_lib_dir_$1],
+    [AC_LANG_PUSH(Erlang)[]dnl
+     AC_RUN_IFELSE(
+	[AC_LANG_PROGRAM([], [dnl
+	    ReturnValue = case code:lib_dir("[$1]") of
+	    {error, bad_name} ->
+		file:write_file("conftest.out", "not found\n"),
+		1;
+	    LibDir ->
+		file:write_file("conftest.out", LibDir),
+		0
+	    end,
+	    halt(ReturnValue)])],
+	[ac_cv_erlang_lib_dir_$1=`cat conftest.out`
+	 rm -f conftest.out],
+	[if test ! -f conftest.out; then
+	     AC_MSG_FAILURE([test Erlang program execution failed])
+	 else
+	     ac_cv_erlang_lib_dir_$1="not found"
+	     rm -f conftest.out
+	 fi])
+     AC_LANG_POP(Erlang)[]dnl
+    ])
+AC_CACHE_CHECK([for Erlang/OTP '$1' library version],
+    [ac_cv_erlang_lib_ver_$1],
+    [AS_IF([test "$ac_cv_erlang_lib_dir_$1" = "not found"],
+	[ac_cv_erlang_lib_ver_$1="not found"],
+	[ac_cv_erlang_lib_ver_$1=`AS_ECHO(["$ac_cv_erlang_lib_dir_$1"]) | sed -n -e 's,^.*-\([[^/-]]*\)$,\1,p'`])[]dnl
+    ])
+AC_SUBST([ERLANG_LIB_DIR_$1], [$ac_cv_erlang_lib_dir_$1])
+AC_SUBST([ERLANG_LIB_VER_$1], [$ac_cv_erlang_lib_ver_$1])
+AS_IF([test "$ac_cv_erlang_lib_dir_$1" = "not found"], [$3], [$2])
+])# AC_ERLANG_CHECK_LIB
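+
+# For example (a minimal sketch; the error text is hypothetical), a
+# configure.ac can require the OTP `stdlib' application:
+#
+#   AC_ERLANG_CHECK_LIB([stdlib], [],
+#     [AC_MSG_ERROR([Erlang/OTP stdlib library not found])])
+#
+# which also substitutes ERLANG_LIB_DIR_stdlib and ERLANG_LIB_VER_stdlib.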
+
+
+# AC_ERLANG_SUBST_ROOT_DIR
+# ------------------------
+# Determines the Erlang/OTP root directory.
+AC_DEFUN([AC_ERLANG_SUBST_ROOT_DIR],
+[AC_REQUIRE([AC_ERLANG_NEED_ERLC])[]dnl
+AC_REQUIRE([AC_ERLANG_NEED_ERL])[]dnl
+AC_CACHE_CHECK([for Erlang/OTP root directory],
+    [ac_cv_erlang_root_dir],
+    [AC_LANG_PUSH(Erlang)[]dnl
+     AC_RUN_IFELSE(
+	[AC_LANG_PROGRAM([], [dnl
+	    RootDir = code:root_dir(),
+	    file:write_file("conftest.out", RootDir),
+	    ReturnValue = 0,
+	    halt(ReturnValue)])],
+	[ac_cv_erlang_root_dir=`cat conftest.out`
+	 rm -f conftest.out],
+	[rm -f conftest.out
+	 AC_MSG_FAILURE([test Erlang program execution failed])])
+     AC_LANG_POP(Erlang)[]dnl
+    ])
+AC_SUBST([ERLANG_ROOT_DIR], [$ac_cv_erlang_root_dir])
+])# AC_ERLANG_SUBST_ROOT_DIR
+
+
+# AC_ERLANG_SUBST_LIB_DIR
+# -----------------------
+AC_DEFUN([AC_ERLANG_SUBST_LIB_DIR],
+[AC_REQUIRE([AC_ERLANG_NEED_ERLC])[]dnl
+AC_REQUIRE([AC_ERLANG_NEED_ERL])[]dnl
+AC_CACHE_CHECK([for Erlang/OTP library base directory],
+    [ac_cv_erlang_lib_dir],
+    [AC_LANG_PUSH(Erlang)[]dnl
+     AC_RUN_IFELSE(
+	[AC_LANG_PROGRAM([], [dnl
+	    LibDir = code:lib_dir(),
+	    file:write_file("conftest.out", LibDir),
+	    ReturnValue = 0,
+	    halt(ReturnValue)])],
+	[ac_cv_erlang_lib_dir=`cat conftest.out`
+	 rm -f conftest.out],
+	[rm -f conftest.out
+	 AC_MSG_FAILURE([test Erlang program execution failed])])
+     AC_LANG_POP(Erlang)[]dnl
+    ])
+AC_SUBST([ERLANG_LIB_DIR], [$ac_cv_erlang_lib_dir])
+])# AC_ERLANG_SUBST_LIB_DIR
+
+
+# AC_ERLANG_SUBST_INSTALL_LIB_DIR
+# -------------------------------
+# The directories into which Erlang/OTP packages are installed are kept
+# separate from the directories reported by the Erlang/OTP installation
+# that is used for building.
+AC_DEFUN([AC_ERLANG_SUBST_INSTALL_LIB_DIR],
+[AC_MSG_CHECKING([for Erlang/OTP library installation base directory])
+AC_ARG_VAR([ERLANG_INSTALL_LIB_DIR],
+    [Erlang/OTP library installation base directory [LIBDIR/erlang/lib]])
+if test -n "$ERLANG_INSTALL_LIB_DIR"; then
+    AC_MSG_RESULT([$ERLANG_INSTALL_LIB_DIR])
+else
+    AC_SUBST([ERLANG_INSTALL_LIB_DIR], ['${libdir}/erlang/lib'])
+    AC_MSG_RESULT([$libdir/erlang/lib])
+fi
+])# AC_ERLANG_SUBST_INSTALL_LIB_DIR
+
+
+# AC_ERLANG_SUBST_INSTALL_LIB_SUBDIR(PACKAGE_TARNAME, PACKAGE_VERSION)
+# --------------------------------------------------------------------
+AC_DEFUN([AC_ERLANG_SUBST_INSTALL_LIB_SUBDIR],
+[AC_REQUIRE([AC_ERLANG_SUBST_INSTALL_LIB_DIR])[]dnl
+AC_MSG_CHECKING([for Erlang/OTP '$1' library installation subdirectory])
+AC_ARG_VAR([ERLANG_INSTALL_LIB_DIR_$1],
+    [Erlang/OTP '$1' library installation subdirectory
+       [ERLANG_INSTALL_LIB_DIR/$1-$2]])
+if test -n "$ERLANG_INSTALL_LIB_DIR_$1"; then
+    AC_MSG_RESULT([$ERLANG_INSTALL_LIB_DIR_$1])
+else
+    AC_SUBST([ERLANG_INSTALL_LIB_DIR_$1], ['${ERLANG_INSTALL_LIB_DIR}/$1-$2'])
+    AC_MSG_RESULT([$ERLANG_INSTALL_LIB_DIR/$1-$2])
+fi
+])# AC_ERLANG_SUBST_INSTALL_LIB_SUBDIR
+
+
+# AC_ERLANG_SUBST_ERTS_VER
+# ------------------------
+# Determines the Erlang runtime system version.
+AC_DEFUN([AC_ERLANG_SUBST_ERTS_VER],
+[AC_REQUIRE([AC_ERLANG_NEED_ERLC])[]dnl
+AC_REQUIRE([AC_ERLANG_NEED_ERL])[]dnl
+AC_CACHE_CHECK([for Erlang/OTP ERTS version],
+    [ac_cv_erlang_erts_ver],
+    [AC_LANG_PUSH([Erlang])[]dnl
+     AC_RUN_IFELSE(
+	[AC_LANG_PROGRAM([], [dnl
+	    Version = erlang:system_info(version),
+	    file:write_file("conftest.out", Version),
+	    ReturnValue = 0,
+	    halt(ReturnValue)])],
+	[ac_cv_erlang_erts_ver=`cat conftest.out`
+	 rm -f conftest.out],
+	[rm -f conftest.out
+	 AC_MSG_FAILURE([test Erlang program execution failed])])
+     AC_LANG_POP([Erlang])[]dnl
+    ])
+AC_SUBST([ERLANG_ERTS_VER], [$ac_cv_erlang_erts_ver])
+])# AC_ERLANG_SUBST_ERTS_VER
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/fortran.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/fortran.m4
new file mode 100644
index 0000000..82a7a5d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/fortran.m4
@@ -0,0 +1,1345 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Fortran languages support.
+# Copyright (C) 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# Table of Contents:
+#
+# Preamble
+#
+# 0. Utility macros
+#
+# 1. Language selection
+#    and routines to produce programs in a given language.
+#
+# 2. Producing programs in a given language.
+#
+# 3. Looking for a compiler
+#    And possibly the associated preprocessor.
+#
+# 4. Compilers' characteristics.
+
+
+
+## ---------- ##
+## Preamble.  ##
+## ---------- ##
+
+# Fortran vs. Fortran 77:
+#   This file contains macros for both "Fortran 77" and "Fortran", where
+# the former is the "classic" autoconf Fortran interface and is intended
+# for legacy F77 codes, while the latter is intended to support newer Fortran
+# dialects.  Fortran 77 uses environment variables F77, FFLAGS, and FLIBS,
+# while Fortran uses FC, FCFLAGS, and FCLIBS.  For each user-callable AC_*
+# macro, there is generally both an F77 and an FC version, where both versions
+# share the same _AC_*_FC_* backend.  This backend macro requires that
+# the appropriate language be AC_LANG_PUSH'ed, and uses _AC_LANG_ABBREV and
+# _AC_LANG_PREFIX in order to name cache and environment variables, etc.
+
+
+
+## ------------------- ##
+## 0. Utility macros.  ##
+## ------------------- ##
+
+
+# _AC_LIST_MEMBER_IF(ELEMENT, LIST, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# ---------------------------------------------------------------------------
+#
+# Processing the elements of a list is tedious in shell programming,
+# as lists tend to be implemented as space delimited strings.
+#
+# This macro searches LIST for ELEMENT, and executes ACTION-IF-FOUND
+# if ELEMENT is a member of LIST, otherwise it executes
+# ACTION-IF-NOT-FOUND.
+AC_DEFUN([_AC_LIST_MEMBER_IF],
+dnl Do some sanity checking of the arguments.
+[m4_if([$1], , [m4_fatal([$0: missing argument 1])],
+      [$2], , [m4_fatal([$0: missing argument 2])])]dnl
+[  ac_exists=false
+  for ac_i in $2; do
+    if test x"$1" = x"$ac_i"; then
+      ac_exists=true
+      break
+    fi
+  done
+
+  AS_IF([test x"$ac_exists" = xtrue], [$3], [$4])[]dnl
+])# _AC_LIST_MEMBER_IF
+
+
+# _AC_LINKER_OPTION(LINKER-OPTIONS, SHELL-VARIABLE)
+# -------------------------------------------------
+#
+# Specifying options to the compiler (whether it be the C, C++ or
+# Fortran 77 compiler) that are meant for the linker is compiler
+# dependent.  This macro lets you give options to the compiler that
+# are meant for the linker in a portable, compiler-independent way.
+#
+# This macro takes two arguments: a list of linker options that the
+# compiler should pass to the linker (LINKER-OPTIONS) and the name of
+# a shell variable (SHELL-VARIABLE).  The list of linker options are
+# appended to the shell variable in a compiler-dependent way.
+#
+# For example, if the selected language is C, then this:
+#
+#   _AC_LINKER_OPTION([-R /usr/local/lib/foo], foo_LDFLAGS)
+#
+# will expand into this if the selected C compiler is gcc:
+#
+#   foo_LDFLAGS="-Xlinker -R -Xlinker /usr/local/lib/foo"
+#
+# otherwise, it will expand into this:
+#
+#   foo_LDFLAGS"-R /usr/local/lib/foo"
+#
+# You are encouraged to add support for compilers that this macro
+# doesn't currently support.
+# FIXME: Get rid of this macro.
+AC_DEFUN([_AC_LINKER_OPTION],
+[if test "$ac_compiler_gnu" = yes; then
+  for ac_link_opt in $1; do
+    $2="[$]$2 -Xlinker $ac_link_opt"
+  done
+else
+  $2="[$]$2 $1"
+fi[]dnl
+])# _AC_LINKER_OPTION
+
+
+
+## ------------------------ ##
+## 1a. Language selection.  ##
+## ------------------------ ##
+
+
+# AC_LANG(Fortran 77)
+# -------------------
+AC_LANG_DEFINE([Fortran 77], [f77], [F], [F77], [],
+[ac_ext=f
+ac_compile='$F77 -c $FFLAGS conftest.$ac_ext >&AS_MESSAGE_LOG_FD'
+ac_link='$F77 -o conftest$ac_exeext $FFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&AS_MESSAGE_LOG_FD'
+ac_compiler_gnu=$ac_cv_f77_compiler_gnu
+])
+
+
+# AC_LANG_FORTRAN77
+# -----------------
+AU_DEFUN([AC_LANG_FORTRAN77], [AC_LANG(Fortran 77)])
+
+
+# _AC_FORTRAN_ASSERT
+# ------------------
+# Current language must be Fortran or Fortran 77.
+m4_defun([_AC_FORTRAN_ASSERT],
+[m4_if(_AC_LANG, [Fortran], [],
+       [m4_if(_AC_LANG, [Fortran 77], [],
+	      [m4_fatal([$0: current language is not Fortran: ] _AC_LANG)])])])
+
+
+# _AC_FC
+# ------
+# Return F77 or FC, depending upon the language.
+AC_DEFUN([_AC_FC],
+[_AC_FORTRAN_ASSERT()dnl
+AC_LANG_CASE([Fortran 77], [F77],
+	     [Fortran],    [FC])])
+
+
+
+## ----------------------- ##
+## 2. Producing programs.  ##
+## ----------------------- ##
+
+
+# AC_LANG_PROGRAM(Fortran 77)([PROLOGUE], [BODY])
+# -----------------------------------------------
+# Yes, we discard the PROLOGUE.
+m4_define([AC_LANG_PROGRAM(Fortran 77)],
+[m4_ifval([$1],
+       [m4_warn([syntax], [$0: ignoring PROLOGUE: $1])])dnl
+      program main
+$2
+      end])
+
+
+# _AC_LANG_IO_PROGRAM(Fortran 77)
+# -------------------------------
+# Produce source that performs I/O.
+m4_define([_AC_LANG_IO_PROGRAM(Fortran 77)],
+[AC_LANG_PROGRAM([],
+[dnl
+      open(unit=9,file='conftest.out')
+      close(unit=9)
+])])
+
+
+# AC_LANG_CALL(Fortran 77)(PROLOGUE, FUNCTION)
+# --------------------------------------------
+# FIXME: This is a guess, help!
+m4_define([AC_LANG_CALL(Fortran 77)],
+[AC_LANG_PROGRAM([$1],
+[      call $2])])
+
+
+# AC_LANG_FUNC_LINK_TRY(Fortran 77)(FUNCTION)
+# -------------------------------------------
+m4_define([AC_LANG_FUNC_LINK_TRY(Fortran 77)],
+[AC_LANG_PROGRAM([],
+[      call $1])])
+
+## ------------------------ ##
+## 1b. Language selection.  ##
+## ------------------------ ##
+
+
+# AC_LANG(Fortran)
+# ----------------
+AC_LANG_DEFINE([Fortran], [fc], [FC], [FC], [Fortran 77],
+[ac_ext=${ac_fc_srcext-f}
+ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&AS_MESSAGE_LOG_FD'
+ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&AS_MESSAGE_LOG_FD'
+ac_compiler_gnu=$ac_cv_fc_compiler_gnu
+])
+
+
+## -------------------------------------------- ##
+## 3. Looking for Compilers and Preprocessors.  ##
+## -------------------------------------------- ##
+
+
+# AC_LANG_PREPROC(Fortran 77)
+# ---------------------------
+# Find the Fortran 77 preprocessor.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_PREPROC(Fortran 77)],
+[m4_warn([syntax],
+	 [$0: No preprocessor defined for ]_AC_LANG)])
+
+# AC_LANG_PREPROC(Fortran)
+# ------------------------
+# Find the Fortran preprocessor.  Must be AC_DEFUN'd to be AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_PREPROC(Fortran)],
+[m4_warn([syntax],
+	 [$0: No preprocessor defined for ]_AC_LANG)])
+
+
+# AC_LANG_COMPILER(Fortran 77)
+# ----------------------------
+# Find the Fortran 77 compiler.  Must be AC_DEFUN'd to be
+# AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_COMPILER(Fortran 77)],
+[AC_REQUIRE([AC_PROG_F77])])
+
+# AC_LANG_COMPILER(Fortran)
+# -------------------------
+# Find the Fortran compiler.  Must be AC_DEFUN'd to be
+# AC_REQUIRE'able.
+AC_DEFUN([AC_LANG_COMPILER(Fortran)],
+[AC_REQUIRE([AC_PROG_FC])])
+
+
+# ac_cv_prog_g77
+# --------------
+# We used to name the cache variable this way.
+AU_DEFUN([ac_cv_prog_g77],
+[ac_cv_f77_compiler_gnu])
+
+
+# _AC_FC_DIALECT_YEAR([DIALECT])
+# ------------------------------
+# Given a Fortran DIALECT, which is Fortran [YY]YY or simply [YY]YY,
+# convert to a 4-digit year.  The dialect must be one of Fortran 77,
+# 90, 95, or 2000, currently.  If DIALECT is simply Fortran or the
+# empty string, returns the empty string.
+AC_DEFUN([_AC_FC_DIALECT_YEAR],
+[m4_case(m4_bpatsubsts(m4_tolower([$1]), [fortran],[], [ *],[]),
+	 [77],[1977], [1977],[1977],
+	 [90],[1990], [1990],[1990],
+	 [95],[1995], [1995],[1995],
+	 [2000],[2000],
+	 [],[],
+	 [m4_fatal([unknown Fortran dialect])])])
+
+
+# _AC_PROG_FC([DIALECT], [COMPILERS...])
+# --------------------------------------
+# DIALECT is a Fortran dialect, given by Fortran [YY]YY or simply [YY]YY,
+# and must be one of those supported by _AC_FC_DIALECT_YEAR
+#
+# If DIALECT is supplied, then we search for compilers of that dialect
+# first, and then later dialects.  Otherwise, we search for compilers
+# of the newest dialect first, and then earlier dialects in increasing age.
+# This search order is necessarily imperfect because the dialect cannot
+# always be inferred from the compiler name.
+#
+# Known compilers:
+#  f77/f90/f95: generic compiler names
+#  g77: GNU Fortran 77 compiler
+#  gfortran: GNU Fortran 95+ compiler (released in gcc 4.0)
+#  g95: original gcc-based f95 compiler (gfortran is a fork)
+#  ftn: native Fortran 95 compiler on Cray X1
+#  cf77: native F77 compiler under older Crays (prefer over fort77)
+#  fort77: native F77 compiler under HP-UX (and some older Crays)
+#  frt: Fujitsu F77 compiler
+#  pgf77/pgf90/pghpf/pgf95/pgfortran: Portland Group F77/F90/F95 compilers
+#  xlf/xlf90/xlf95: IBM (AIX) F77/F90/F95 compilers
+#    Prefer xlf9x to the generic names because they do not reject files
+#    with extension `.f'.
+#  lf95: Lahey-Fujitsu F95 compiler
+#  fl32: Microsoft Fortran 77 "PowerStation" compiler
+#  af77: Apogee F77 compiler for Intergraph hardware running CLIX
+#  epcf90: "Edinburgh Portable Compiler" F90
+#  fort: Compaq (now HP) Fortran 90/95 compiler for Tru64 and Linux/Alpha
+#  ifort, previously ifc: Intel Fortran 95 compiler for Linux/x86
+#  efc: Intel Fortran 95 compiler for IA64
+m4_define([_AC_F95_FC], [gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn])
+m4_define([_AC_F90_FC], [xlf90 f90 pgf90 pghpf epcf90])
+m4_define([_AC_F77_FC], [g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77])
+AC_DEFUN([_AC_PROG_FC],
+[_AC_FORTRAN_ASSERT()dnl
+AC_CHECK_TOOLS([]_AC_FC[],
+      m4_default([$2],
+	m4_case(_AC_FC_DIALECT_YEAR([$1]),
+		[1995], [_AC_F95_FC],
+		[1990], [_AC_F90_FC _AC_F95_FC],
+		[1977], [_AC_F77_FC _AC_F90_FC _AC_F95_FC],
+		[_AC_F95_FC _AC_F90_FC _AC_F77_FC])))
+
+# Provide some information about the compiler.
+_AS_ECHO_LOG([checking for _AC_LANG compiler version])
+set X $ac_compile
+ac_compiler=$[2]
+for ac_option in --version -v -V -qversion; do
+  _AC_DO_LIMIT([$ac_compiler $ac_option >&AS_MESSAGE_LOG_FD])
+done
+rm -f a.out
+
+m4_expand_once([_AC_COMPILER_EXEEXT])[]dnl
+m4_expand_once([_AC_COMPILER_OBJEXT])[]dnl
+# If we don't use `.F' as extension, the preprocessor is not run on the
+# input file.  (Note that this only needs to work for GNU compilers.)
+ac_save_ext=$ac_ext
+ac_ext=F
+_AC_LANG_COMPILER_GNU
+ac_ext=$ac_save_ext
+_AC_PROG_FC_G
+])# _AC_PROG_FC
+
+
+# AC_PROG_F77([COMPILERS...])
+# ---------------------------
+# COMPILERS is a space separated list of Fortran 77 compilers to search
+# for.  See also _AC_PROG_FC.
+AC_DEFUN([AC_PROG_F77],
+[AC_LANG_PUSH(Fortran 77)dnl
+AC_ARG_VAR([F77],    [Fortran 77 compiler command])dnl
+AC_ARG_VAR([FFLAGS], [Fortran 77 compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+_AC_ARG_VAR_LIBS()dnl
+_AC_PROG_FC([Fortran 77], [$1])
+if test $ac_compiler_gnu = yes; then
+  G77=yes
+else
+  G77=
+fi
+AC_LANG_POP(Fortran 77)dnl
+])# AC_PROG_F77
+
+
+# AC_PROG_FC([COMPILERS...], [DIALECT])
+# -------------------------------------
+# COMPILERS is a space separated list of Fortran compilers to search
+# for, and [DIALECT] is an optional dialect.  See also _AC_PROG_FC.
+AC_DEFUN([AC_PROG_FC],
+[AC_LANG_PUSH(Fortran)dnl
+AC_ARG_VAR([FC],    [Fortran compiler command])dnl
+AC_ARG_VAR([FCFLAGS], [Fortran compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+_AC_ARG_VAR_LIBS()dnl
+_AC_PROG_FC([$2], [$1])
+AC_LANG_POP(Fortran)dnl
+])# AC_PROG_FC
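+
+# For illustration, a minimal configure.ac fragment showing how the two
+# entry points above are typically invoked (the compiler list and dialect
+# argument below are only examples, not requirements):
+#
+#   AC_INIT([example], [1.0])
+#   # Pick any Fortran compiler, preferring the newest dialects:
+#   AC_PROG_FC
+#   # Or restrict the search and ask for a Fortran 90 dialect:
+#   # AC_PROG_FC([gfortran ifort], [90])
+#   # A legacy Fortran 77 compiler can be located separately:
+#   AC_PROG_F77
+#   AC_OUTPUT
+#
+# On success FC/FCFLAGS (or F77/FFLAGS) are set and substituted as
+# described above.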
+
+
+# _AC_PROG_FC_G
+# -------------
+# Check whether -g works, even if F[C]FLAGS is set, in case the package
+# plays around with F[C]FLAGS (such as to build both debugging and normal
+# versions of a library), tasteless as that idea is.
+m4_define([_AC_PROG_FC_G],
+[_AC_FORTRAN_ASSERT()dnl
+ac_test_[]_AC_LANG_PREFIX[]FLAGS=${[]_AC_LANG_PREFIX[]FLAGS+set}
+ac_save_[]_AC_LANG_PREFIX[]FLAGS=$[]_AC_LANG_PREFIX[]FLAGS
+_AC_LANG_PREFIX[]FLAGS=
+AC_CACHE_CHECK(whether $[]_AC_FC[] accepts -g, ac_cv_prog_[]_AC_LANG_ABBREV[]_g,
+[_AC_LANG_PREFIX[]FLAGS=-g
+_AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+[ac_cv_prog_[]_AC_LANG_ABBREV[]_g=yes],
+[ac_cv_prog_[]_AC_LANG_ABBREV[]_g=no])
+])
+if test "$ac_test_[]_AC_LANG_PREFIX[]FLAGS" = set; then
+  _AC_LANG_PREFIX[]FLAGS=$ac_save_[]_AC_LANG_PREFIX[]FLAGS
+elif test $ac_cv_prog_[]_AC_LANG_ABBREV[]_g = yes; then
+  if test "x$ac_cv_[]_AC_LANG_ABBREV[]_compiler_gnu" = xyes; then
+    _AC_LANG_PREFIX[]FLAGS="-g -O2"
+  else
+    _AC_LANG_PREFIX[]FLAGS="-g"
+  fi
+else
+  if test "x$ac_cv_[]_AC_LANG_ABBREV[]_compiler_gnu" = xyes; then
+    _AC_LANG_PREFIX[]FLAGS="-O2"
+  else
+    _AC_LANG_PREFIX[]FLAGS=
+  fi
+fi[]dnl
+])# _AC_PROG_FC_G
+
+
+# _AC_PROG_FC_C_O
+# ---------------
+# Test if the Fortran compiler accepts the options `-c' and `-o'
+# simultaneously, and define `[F77/FC]_NO_MINUS_C_MINUS_O' if it does not.
+#
+# The usefulness of this macro is questionable, as I can't really see
+# why anyone would use it.  The only reason I include it is for
+# completeness, since a similar test exists for the C compiler.
+#
+# FIXME: it seems like we could merge the C/C++/Fortran versions of this.
+AC_DEFUN([_AC_PROG_FC_C_O],
+[_AC_FORTRAN_ASSERT()dnl
+AC_CACHE_CHECK([whether $[]_AC_FC[] understands -c and -o together],
+	       [ac_cv_prog_[]_AC_LANG_ABBREV[]_c_o],
+[AC_LANG_CONFTEST([AC_LANG_PROGRAM([])])
+# We test twice because some compilers refuse to overwrite an existing
+# `.o' file with `-o', although they will create one.
+ac_try='$[]_AC_FC[] $[]_AC_LANG_PREFIX[]FLAGS -c conftest.$ac_ext -o conftest2.$ac_objext >&AS_MESSAGE_LOG_FD'
+rm -f conftest2.*
+if _AC_DO_VAR(ac_try) &&
+     test -f conftest2.$ac_objext &&
+     _AC_DO_VAR(ac_try); then
+  ac_cv_prog_[]_AC_LANG_ABBREV[]_c_o=yes
+else
+  ac_cv_prog_[]_AC_LANG_ABBREV[]_c_o=no
+fi
+rm -f conftest*])
+if test $ac_cv_prog_[]_AC_LANG_ABBREV[]_c_o = no; then
+  AC_DEFINE([]_AC_FC[]_NO_MINUS_C_MINUS_O, 1,
+	    [Define to 1 if your Fortran compiler doesn't accept
+	     -c and -o together.])
+fi
+])# _AC_PROG_FC_C_O
+
+
+# AC_PROG_F77_C_O
+# ---------------
+AC_DEFUN([AC_PROG_F77_C_O],
+[AC_REQUIRE([AC_PROG_F77])dnl
+AC_LANG_PUSH(Fortran 77)dnl
+_AC_PROG_FC_C_O
+AC_LANG_POP(Fortran 77)dnl
+])# AC_PROG_F77_C_O
+
+
+# AC_PROG_FC_C_O
+# --------------
+AC_DEFUN([AC_PROG_FC_C_O],
+[AC_REQUIRE([AC_PROG_FC])dnl
+AC_LANG_PUSH(Fortran)dnl
+_AC_PROG_FC_C_O
+AC_LANG_POP(Fortran)dnl
+])# AC_PROG_FC_C_O
+
+
+
+## ------------------------------- ##
+## 4. Compilers' characteristics.  ##
+## ------------------------------- ##
+
+
+# _AC_PROG_FC_V_OUTPUT([FLAG = $ac_cv_prog_{f77/fc}_v])
+# -----------------------------------------------------
+# Link a trivial Fortran program, compiling with a verbose output FLAG
+# (whose default value, $ac_cv_prog_{f77/fc}_v, is computed by
+# _AC_PROG_FC_V), and return the output in $ac_{f77/fc}_v_output.  This
+# output is processed in the way expected by _AC_FC_LIBRARY_LDFLAGS,
+# so that any link flags that are echoed by the compiler appear as
+# space-separated items.
+AC_DEFUN([_AC_PROG_FC_V_OUTPUT],
+[_AC_FORTRAN_ASSERT()dnl
+AC_LANG_CONFTEST([AC_LANG_PROGRAM([])])
+
+# Compile and link our simple test program by passing a flag (argument
+# 1 to this macro) to the Fortran compiler in order to get
+# "verbose" output that we can then parse for the Fortran linker
+# flags.
+ac_save_[]_AC_LANG_PREFIX[]FLAGS=$[]_AC_LANG_PREFIX[]FLAGS
+_AC_LANG_PREFIX[]FLAGS="$[]_AC_LANG_PREFIX[]FLAGS m4_default([$1], [$ac_cv_prog_[]_AC_LANG_ABBREV[]_v])"
+eval "set x $ac_link"
+shift
+_AS_ECHO_LOG([$[*]])
+# gfortran 4.3 outputs lines setting COLLECT_GCC_OPTIONS, COMPILER_PATH,
+# LIBRARY_PATH; skip all such settings.
+ac_[]_AC_LANG_ABBREV[]_v_output=`eval $ac_link AS_MESSAGE_LOG_FD>&1 2>&1 |
+  sed '/^Driving:/d; /^Configured with:/d;
+      '"/^[[_$as_cr_Letters]][[_$as_cr_alnum]]*=/d"`
+AS_ECHO(["$ac_[]_AC_LANG_ABBREV[]_v_output"]) >&AS_MESSAGE_LOG_FD
+_AC_LANG_PREFIX[]FLAGS=$ac_save_[]_AC_LANG_PREFIX[]FLAGS
+
+rm -rf conftest*
+
+# On HP/UX there is a line like: "LPATH is: /foo:/bar:/baz" where
+# /foo, /bar, and /baz are search directories for the Fortran linker.
+# Here, we change these into -L/foo -L/bar -L/baz (and put it first):
+ac_[]_AC_LANG_ABBREV[]_v_output="`echo $ac_[]_AC_LANG_ABBREV[]_v_output |
+	grep 'LPATH is:' |
+	sed 's|.*LPATH is\(: *[[^ ]]*\).*|\1|;s|: */| -L/|g'` $ac_[]_AC_LANG_ABBREV[]_v_output"
+
+# FIXME: we keep getting bitten by quoted arguments; a more general fix
+#        that detects unbalanced quotes in FLIBS should be implemented
+#        and (ugh) tested at some point.
+case $ac_[]_AC_LANG_ABBREV[]_v_output in
+  # If we are using xlf then replace all the commas with spaces.
+  *xlfentry*)
+    ac_[]_AC_LANG_ABBREV[]_v_output=`echo $ac_[]_AC_LANG_ABBREV[]_v_output | sed 's/,/ /g'` ;;
+
+  # With Intel ifc, ignore the quoted -mGLOB_options_string stuff (quoted
+  # $LIBS confuse us, and the libraries appear later in the output anyway).
+  *mGLOB_options_string*)
+    ac_[]_AC_LANG_ABBREV[]_v_output=`echo $ac_[]_AC_LANG_ABBREV[]_v_output | sed 's/"-mGLOB[[^"]]*"/ /g'` ;;
+
+  # Portland Group compiler has singly- or doubly-quoted -cmdline argument
+  # Singly-quoted arguments were reported for versions 5.2-4 and 6.0-4.
+  # Doubly-quoted arguments were reported for "PGF90/x86 Linux/x86 5.0-2".
+  *-cmdline\ * | *-ignore\ * | *-def\ *)
+    ac_[]_AC_LANG_ABBREV[]_v_output=`echo $ac_[]_AC_LANG_ABBREV[]_v_output | sed "\
+	s/-cmdline  *'[[^']]*'/ /g; s/-cmdline  *\"[[^\"]]*\"/ /g
+	s/-ignore  *'[[^']]*'/ /g; s/-ignore  *\"[[^\"]]*\"/ /g
+	s/-def  *'[[^']]*'/ /g; s/-def  *\"[[^\"]]*\"/ /g"` ;;
+
+  # If we are using Cray Fortran then delete quotes.
+  *cft90*)
+    ac_[]_AC_LANG_ABBREV[]_v_output=`echo $ac_[]_AC_LANG_ABBREV[]_v_output | sed 's/"//g'` ;;
+esac
+
+])# _AC_PROG_FC_V_OUTPUT
+
+
+# _AC_PROG_FC_V
+# -------------
+#
+# Determine the flag that causes the Fortran compiler to print
+# information about library and object files (normally -v).
+# Needed for _AC_FC_LIBRARY_LDFLAGS.
+# Some compilers don't accept -v (Lahey: -verbose, xlf: -V, Fujitsu: -###)
+AC_DEFUN([_AC_PROG_FC_V],
+[_AC_FORTRAN_ASSERT()dnl
+AC_CACHE_CHECK([how to get verbose linking output from $[]_AC_FC[]],
+		[ac_cv_prog_[]_AC_LANG_ABBREV[]_v],
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM()],
+[ac_cv_prog_[]_AC_LANG_ABBREV[]_v=
+# Try some options frequently used for verbose output
+for ac_verb in -v -verbose --verbose -V -\#\#\#; do
+  _AC_PROG_FC_V_OUTPUT($ac_verb)
+  # look for -l* and *.a constructs in the output
+  for ac_arg in $ac_[]_AC_LANG_ABBREV[]_v_output; do
+     case $ac_arg in
+	[[\\/]]*.a | ?:[[\\/]]*.a | -[[lLRu]]*)
+	  ac_cv_prog_[]_AC_LANG_ABBREV[]_v=$ac_verb
+	  break 2 ;;
+     esac
+  done
+done
+if test -z "$ac_cv_prog_[]_AC_LANG_ABBREV[]_v"; then
+   AC_MSG_WARN([cannot determine how to obtain linking information from $[]_AC_FC[]])
+fi],
+		  [AC_MSG_WARN([compilation failed])])
+])])# _AC_PROG_FC_V
+
+
+# _AC_FC_LIBRARY_LDFLAGS
+# ----------------------
+#
+# Determine the linker flags (e.g. "-L" and "-l") for the Fortran
+# intrinsic and runtime libraries that are required to successfully
+# link a Fortran program or shared library.  The output variable
+# FLIBS/FCLIBS is set to these flags.
+#
+# This macro is intended to be used in those situations when it is
+# necessary to mix, e.g., C++ and Fortran source code into a single
+# program or shared library.
+#
+# For example, if object files from a C++ and Fortran compiler must
+# be linked together, then the C++ compiler/linker must be used for
+# linking (since special C++-ish things need to happen at link time
+# like calling global constructors, instantiating templates, enabling
+# exception support, etc.).
+#
+# However, the Fortran intrinsic and runtime libraries must be
+# linked in as well, but the C++ compiler/linker doesn't know how to
+# add these Fortran libraries.  Hence, the macro
+# "AC_F77_LIBRARY_LDFLAGS" was created to determine these Fortran
+# libraries.
+#
+# This macro was packaged in its current form by Matthew D. Langston.
+# However, nearly all of this macro came from the "OCTAVE_FLIBS" macro
+# in "octave-2.0.13/aclocal.m4", and full credit should go to John
+# W. Eaton for writing this extremely useful macro.  Thank you John.
+AC_DEFUN([_AC_FC_LIBRARY_LDFLAGS],
+[_AC_FORTRAN_ASSERT()dnl
+_AC_PROG_FC_V
+AC_CACHE_CHECK([for _AC_LANG libraries of $[]_AC_FC[]], ac_cv_[]_AC_LANG_ABBREV[]_libs,
+[if test "x$[]_AC_LANG_PREFIX[]LIBS" != "x"; then
+  ac_cv_[]_AC_LANG_ABBREV[]_libs="$[]_AC_LANG_PREFIX[]LIBS" # Let the user override the test.
+else
+
+_AC_PROG_FC_V_OUTPUT
+
+ac_cv_[]_AC_LANG_ABBREV[]_libs=
+
+# Save positional arguments (if any)
+ac_save_positional="$[@]"
+
+set X $ac_[]_AC_LANG_ABBREV[]_v_output
+while test $[@%:@] != 1; do
+  shift
+  ac_arg=$[1]
+  case $ac_arg in
+	[[\\/]]*.a | ?:[[\\/]]*.a)
+	  _AC_LIST_MEMBER_IF($ac_arg, $ac_cv_[]_AC_LANG_ABBREV[]_libs, ,
+	      ac_cv_[]_AC_LANG_ABBREV[]_libs="$ac_cv_[]_AC_LANG_ABBREV[]_libs $ac_arg")
+	  ;;
+	-bI:*)
+	  _AC_LIST_MEMBER_IF($ac_arg, $ac_cv_[]_AC_LANG_ABBREV[]_libs, ,
+	     [_AC_LINKER_OPTION([$ac_arg], ac_cv_[]_AC_LANG_ABBREV[]_libs)])
+	  ;;
+	  # Ignore these flags.
+	-lang* | -lcrt*.o | -lc | -lgcc* | -lSystem | -libmil | -little \
+	  |-LANG:=* | -LIST:* | -LNO:* | -link)
+	  ;;
+	-lkernel32)
+	  test x"$CYGWIN" != xyes && ac_cv_[]_AC_LANG_ABBREV[]_libs="$ac_cv_[]_AC_LANG_ABBREV[]_libs $ac_arg"
+	  ;;
+	-[[LRuYz]])
+	  # These flags, when seen by themselves, take an argument.
+	  # We remove the space between option and argument and re-iterate
+	  # unless we find an empty arg or a new option (starting with -)
+	  case $[2] in
+	     "" | -*);;
+	     *)
+		ac_arg="$ac_arg$[2]"
+		shift; shift
+		set X $ac_arg "$[@]"
+		;;
+	  esac
+	  ;;
+	-YP,*)
+	  for ac_j in `AS_ECHO(["$ac_arg"]) | sed -e 's/-YP,/-L/;s/:/ -L/g'`; do
+	    _AC_LIST_MEMBER_IF($ac_j, $ac_cv_[]_AC_LANG_ABBREV[]_libs, ,
+			       [ac_arg="$ac_arg $ac_j"
+			       ac_cv_[]_AC_LANG_ABBREV[]_libs="$ac_cv_[]_AC_LANG_ABBREV[]_libs $ac_j"])
+	  done
+	  ;;
+	-[[lLR]]*)
+	  _AC_LIST_MEMBER_IF($ac_arg, $ac_cv_[]_AC_LANG_ABBREV[]_libs, ,
+			     ac_cv_[]_AC_LANG_ABBREV[]_libs="$ac_cv_[]_AC_LANG_ABBREV[]_libs $ac_arg")
+	  ;;
+	-zallextract*| -zdefaultextract)
+	  ac_cv_[]_AC_LANG_ABBREV[]_libs="$ac_cv_[]_AC_LANG_ABBREV[]_libs $ac_arg"
+	  ;;
+	  # Ignore everything else.
+  esac
+done
+# restore positional arguments
+set X $ac_save_positional; shift
+
+# We only consider "LD_RUN_PATH" on Solaris systems.  If this is seen,
+# then we insist that the "run path" must be an absolute path (i.e. it
+# must begin with a "/").
+case `(uname -sr) 2>/dev/null` in
+   "SunOS 5"*)
+      ac_ld_run_path=`AS_ECHO(["$ac_[]_AC_LANG_ABBREV[]_v_output"]) |
+			sed -n 's,^.*LD_RUN_PATH *= *\(/[[^ ]]*\).*$,-R\1,p'`
+      test "x$ac_ld_run_path" != x &&
+	_AC_LINKER_OPTION([$ac_ld_run_path], ac_cv_[]_AC_LANG_ABBREV[]_libs)
+      ;;
+esac
+fi # test "x$[]_AC_LANG_PREFIX[]LIBS" = "x"
+])
+[]_AC_LANG_PREFIX[]LIBS="$ac_cv_[]_AC_LANG_ABBREV[]_libs"
+AC_SUBST([]_AC_LANG_PREFIX[]LIBS)
+])# _AC_FC_LIBRARY_LDFLAGS
+
+
+# AC_F77_LIBRARY_LDFLAGS
+# ----------------------
+AC_DEFUN([AC_F77_LIBRARY_LDFLAGS],
+[AC_REQUIRE([AC_PROG_F77])dnl
+AC_LANG_PUSH(Fortran 77)dnl
+_AC_FC_LIBRARY_LDFLAGS
+AC_LANG_POP(Fortran 77)dnl
+])# AC_F77_LIBRARY_LDFLAGS
+
+
+# AC_FC_LIBRARY_LDFLAGS
+# ---------------------
+AC_DEFUN([AC_FC_LIBRARY_LDFLAGS],
+[AC_REQUIRE([AC_PROG_FC])dnl
+AC_LANG_PUSH(Fortran)dnl
+_AC_FC_LIBRARY_LDFLAGS
+AC_LANG_POP(Fortran)dnl
+])# AC_FC_LIBRARY_LDFLAGS
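+
+# A sketch of typical use: when C or C++ objects must be linked against
+# Fortran objects with the C/C++ driver, the flags collected here are
+# passed along via the FLIBS/FCLIBS output variable.  An illustrative
+# configure.ac fragment (the project layout is hypothetical):
+#
+#   AC_PROG_CXX
+#   AC_PROG_FC
+#   AC_FC_LIBRARY_LDFLAGS
+#
+# The resulting FCLIBS is then referenced at link time, e.g.
+# "mixed_LDADD = $(FCLIBS)" in an Automake makefile.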
+
+
+# _AC_FC_DUMMY_MAIN([ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# -----------------------------------------------------------
+#
+# Detect name of dummy main routine required by the Fortran libraries
+# (if any), and define {F77,FC}_DUMMY_MAIN to this name (which should be
+# used for a dummy declaration, if it is defined).  On some systems,
+# linking a C program to the Fortran library does not work unless you
+# supply a dummy function called something like MAIN__.
+#
+# Execute ACTION-IF-NOT-FOUND if no way of successfully linking a C
+# program with the {F77,FC} libs is found; default to exiting with an error
+# message.  Execute ACTION-IF-FOUND if a dummy routine name is needed
+# and found or if it is not needed (default to defining {F77,FC}_DUMMY_MAIN
+# when needed).
+#
+# What is technically happening is that the Fortran libraries provide
+# their own main() function, which usually initializes Fortran I/O and
+# similar stuff, and then calls MAIN__, which is the entry point of
+# your program.  Usually, a C program will override this with its own
+# main() routine, but the linker sometimes complains if you don't
+# provide a dummy (never-called) MAIN__ routine anyway.
+#
+# Of course, programs that want to allow Fortran subroutines to do
+# I/O, etcetera, should call their main routine MAIN__() (or whatever)
+# instead of main().  A separate autoconf test (_AC_FC_MAIN) checks
+# for the routine to use in this case (since the semantics of the test
+# are slightly different).  To link to e.g. purely numerical
+# libraries, this is normally not necessary, however, and most C/C++
+# programs are reluctant to turn over so much control to Fortran.  =)
+#
+# The name variants we check for are (in order):
+#   MAIN__ (g77, MAIN__ required on some systems; IRIX, MAIN__ optional)
+#   MAIN_, __main (SunOS)
+#   MAIN _MAIN __MAIN main_ main__ _main (we follow DDD and try these too)
+AC_DEFUN([_AC_FC_DUMMY_MAIN],
+[_AC_FORTRAN_ASSERT()dnl
+m4_define(_AC_LANG_PROGRAM_C_[]_AC_FC[]_HOOKS,
+[#ifdef ]_AC_FC[_DUMMY_MAIN
+]AC_LANG_CASE([Fortran], [#ifndef FC_DUMMY_MAIN_EQ_F77])
+[#  ifdef __cplusplus
+     extern "C"
+#  endif
+   int ]_AC_FC[_DUMMY_MAIN() { return 1; }
+]AC_LANG_CASE([Fortran], [#endif])
+[#endif
+])
+AC_CACHE_CHECK([for dummy main to link with _AC_LANG libraries],
+	       ac_cv_[]_AC_LANG_ABBREV[]_dummy_main,
+[ac_[]_AC_LANG_ABBREV[]_dm_save_LIBS=$LIBS
+ LIBS="$LIBS $[]_AC_LANG_PREFIX[]LIBS"
+ ac_fortran_dm_var=[]_AC_FC[]_DUMMY_MAIN
+ AC_LANG_PUSH(C)dnl
+
+ # First, try linking without a dummy main:
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([], [])],
+		[ac_cv_fortran_dummy_main=none],
+		[ac_cv_fortran_dummy_main=unknown])
+
+ if test $ac_cv_fortran_dummy_main = unknown; then
+   for ac_func in MAIN__ MAIN_ __main MAIN _MAIN __MAIN main_ main__ _main; do
+     AC_LINK_IFELSE([AC_LANG_PROGRAM([[@%:@define $ac_fortran_dm_var $ac_func]])],
+		    [ac_cv_fortran_dummy_main=$ac_func; break])
+   done
+ fi
+ AC_LANG_POP(C)dnl
+ ac_cv_[]_AC_LANG_ABBREV[]_dummy_main=$ac_cv_fortran_dummy_main
+ rm -rf conftest*
+ LIBS=$ac_[]_AC_LANG_ABBREV[]_dm_save_LIBS
+])
+[]_AC_FC[]_DUMMY_MAIN=$ac_cv_[]_AC_LANG_ABBREV[]_dummy_main
+AS_IF([test "$[]_AC_FC[]_DUMMY_MAIN" != unknown],
+      [m4_default([$1],
+[if test $[]_AC_FC[]_DUMMY_MAIN != none; then
+  AC_DEFINE_UNQUOTED([]_AC_FC[]_DUMMY_MAIN, $[]_AC_FC[]_DUMMY_MAIN,
+		     [Define to dummy `main' function (if any) required to
+		      link to the Fortran libraries.])
+  if test "x$ac_cv_fc_dummy_main" = "x$ac_cv_f77_dummy_main"; then
+	AC_DEFINE([FC_DUMMY_MAIN_EQ_F77], 1,
+		  [Define if F77 and FC dummy `main' functions are identical.])
+  fi
+fi])],
+      [m4_default([$2],
+	    [AC_MSG_FAILURE([linking to Fortran libraries from C fails])])])
+])# _AC_FC_DUMMY_MAIN
+
+
+# AC_F77_DUMMY_MAIN
+# -----------------
+AC_DEFUN([AC_F77_DUMMY_MAIN],
+[AC_REQUIRE([AC_F77_LIBRARY_LDFLAGS])dnl
+AC_LANG_PUSH(Fortran 77)dnl
+_AC_FC_DUMMY_MAIN($@)
+AC_LANG_POP(Fortran 77)dnl
+])# AC_F77_DUMMY_MAIN
+
+
+# AC_FC_DUMMY_MAIN
+# ----------------
+AC_DEFUN([AC_FC_DUMMY_MAIN],
+[AC_REQUIRE([AC_FC_LIBRARY_LDFLAGS])dnl
+AC_LANG_PUSH(Fortran)dnl
+_AC_FC_DUMMY_MAIN($@)
+AC_LANG_POP(Fortran)dnl
+])# AC_FC_DUMMY_MAIN
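+
+# A sketch of typical use (macro order matters; the library LDFLAGS macro
+# is AC_REQUIREd first):
+#
+#   AC_PROG_CC
+#   AC_PROG_FC
+#   AC_FC_DUMMY_MAIN
+#
+# C sources that link against the Fortran run-time then guard a dummy
+# definition with the detected name, mirroring the
+# _AC_LANG_PROGRAM_C_FC_HOOKS text above:
+#
+#   #ifdef FC_DUMMY_MAIN
+#   #  ifdef __cplusplus
+#        extern "C"
+#   #  endif
+#      int FC_DUMMY_MAIN() { return 1; }
+#   #endif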
+
+
+# _AC_FC_MAIN
+# -----------
+# Define {F77,FC}_MAIN to name of alternate main() function for use with
+# the Fortran libraries.  (Typically, the libraries may define their
+# own main() to initialize I/O, etcetera, that then call your own
+# routine called MAIN__ or whatever.)  See _AC_FC_DUMMY_MAIN, above.
+# If no such alternate name is found, just define {F77,FC}_MAIN to main.
+#
+AC_DEFUN([_AC_FC_MAIN],
+[_AC_FORTRAN_ASSERT()dnl
+AC_CACHE_CHECK([for alternate main to link with _AC_LANG libraries],
+	       ac_cv_[]_AC_LANG_ABBREV[]_main,
+[ac_[]_AC_LANG_ABBREV[]_m_save_LIBS=$LIBS
+ LIBS="$LIBS $[]_AC_LANG_PREFIX[]LIBS"
+ ac_fortran_dm_var=[]_AC_FC[]_DUMMY_MAIN
+ AC_LANG_PUSH(C)dnl
+ ac_cv_fortran_main="main" # default entry point name
+ for ac_func in MAIN__ MAIN_ __main MAIN _MAIN __MAIN main_ main__ _main; do
+   AC_LINK_IFELSE([AC_LANG_PROGRAM([@%:@ifdef FC_DUMMY_MAIN_EQ_F77
+@%:@  undef F77_DUMMY_MAIN
+@%:@  undef FC_DUMMY_MAIN
+@%:@else
+@%:@  undef $ac_fortran_dm_var
+@%:@endif
+@%:@define main $ac_func])],
+		  [ac_cv_fortran_main=$ac_func; break])
+ done
+ AC_LANG_POP(C)dnl
+ ac_cv_[]_AC_LANG_ABBREV[]_main=$ac_cv_fortran_main
+ rm -rf conftest*
+ LIBS=$ac_[]_AC_LANG_ABBREV[]_m_save_LIBS
+])
+AC_DEFINE_UNQUOTED([]_AC_FC[]_MAIN, $ac_cv_[]_AC_LANG_ABBREV[]_main,
+		   [Define to alternate name for `main' routine that is
+		    called from a `main' in the Fortran libraries.])
+])# _AC_FC_MAIN
+
+
+# AC_F77_MAIN
+# -----------
+AC_DEFUN([AC_F77_MAIN],
+[AC_REQUIRE([AC_F77_LIBRARY_LDFLAGS])dnl
+AC_LANG_PUSH(Fortran 77)dnl
+_AC_FC_MAIN
+AC_LANG_POP(Fortran 77)dnl
+])# AC_F77_MAIN
+
+
+# AC_FC_MAIN
+# ----------
+AC_DEFUN([AC_FC_MAIN],
+[AC_REQUIRE([AC_FC_LIBRARY_LDFLAGS])dnl
+AC_LANG_PUSH(Fortran)dnl
+_AC_FC_MAIN
+AC_LANG_POP(Fortran)dnl
+])# AC_FC_MAIN
+
+
+# __AC_FC_NAME_MANGLING
+# ---------------------
+# Test for the name mangling scheme used by the Fortran compiler.
+#
+# Sets ac_cv_{f77,fc}_mangling. The value contains three fields, separated
+# by commas:
+#
+# lower case / upper case:
+#    case translation of the Fortran symbols
+# underscore / no underscore:
+#    whether the compiler appends "_" to symbol names
+# extra underscore / no extra underscore:
+#    whether the compiler appends an extra "_" to symbol names already
+#    containing at least one underscore
+#
+AC_DEFUN([__AC_FC_NAME_MANGLING],
+[_AC_FORTRAN_ASSERT()dnl
+AC_CACHE_CHECK([for _AC_LANG name-mangling scheme],
+	       ac_cv_[]_AC_LANG_ABBREV[]_mangling,
+[AC_COMPILE_IFELSE(
+[[      subroutine foobar()
+      return
+      end
+      subroutine foo_bar()
+      return
+      end]],
+[mv conftest.$ac_objext cfortran_test.$ac_objext
+
+  ac_save_LIBS=$LIBS
+  LIBS="cfortran_test.$ac_objext $LIBS $[]_AC_LANG_PREFIX[]LIBS"
+
+  AC_LANG_PUSH(C)dnl
+  ac_success=no
+  for ac_foobar in foobar FOOBAR; do
+    for ac_underscore in "" "_"; do
+      ac_func="$ac_foobar$ac_underscore"
+      AC_LINK_IFELSE([AC_LANG_CALL([], [$ac_func])],
+		     [ac_success=yes; break 2])
+    done
+  done
+  AC_LANG_POP(C)dnl
+
+  if test "$ac_success" = "yes"; then
+     case $ac_foobar in
+	foobar)
+	   ac_case=lower
+	   ac_foo_bar=foo_bar
+	   ;;
+	FOOBAR)
+	   ac_case=upper
+	   ac_foo_bar=FOO_BAR
+	   ;;
+     esac
+
+     AC_LANG_PUSH(C)dnl
+     ac_success_extra=no
+     for ac_extra in "" "_"; do
+	ac_func="$ac_foo_bar$ac_underscore$ac_extra"
+	AC_LINK_IFELSE([AC_LANG_CALL([], [$ac_func])],
+		       [ac_success_extra=yes; break])
+     done
+     AC_LANG_POP(C)dnl
+
+     if test "$ac_success_extra" = "yes"; then
+	ac_cv_[]_AC_LANG_ABBREV[]_mangling="$ac_case case"
+	if test -z "$ac_underscore"; then
+	   ac_cv_[]_AC_LANG_ABBREV[]_mangling="$ac_cv_[]_AC_LANG_ABBREV[]_mangling, no underscore"
+	else
+	   ac_cv_[]_AC_LANG_ABBREV[]_mangling="$ac_cv_[]_AC_LANG_ABBREV[]_mangling, underscore"
+	fi
+	if test -z "$ac_extra"; then
+	   ac_cv_[]_AC_LANG_ABBREV[]_mangling="$ac_cv_[]_AC_LANG_ABBREV[]_mangling, no extra underscore"
+	else
+	   ac_cv_[]_AC_LANG_ABBREV[]_mangling="$ac_cv_[]_AC_LANG_ABBREV[]_mangling, extra underscore"
+	fi
+      else
+	ac_cv_[]_AC_LANG_ABBREV[]_mangling="unknown"
+      fi
+  else
+     ac_cv_[]_AC_LANG_ABBREV[]_mangling="unknown"
+  fi
+
+  LIBS=$ac_save_LIBS
+  rm -rf conftest*
+  rm -f cfortran_test*],
+  [AC_MSG_FAILURE([cannot compile a simple Fortran program])])
+])
+])# __AC_FC_NAME_MANGLING
+
+# The replacement is empty.
+AU_DEFUN([AC_F77_NAME_MANGLING], [])
+
+
+# _AC_F77_NAME_MANGLING
+# ---------------------
+AC_DEFUN([_AC_F77_NAME_MANGLING],
+[AC_REQUIRE([AC_F77_LIBRARY_LDFLAGS])dnl
+AC_REQUIRE([AC_F77_DUMMY_MAIN])dnl
+AC_LANG_PUSH(Fortran 77)dnl
+__AC_FC_NAME_MANGLING
+AC_LANG_POP(Fortran 77)dnl
+])# _AC_F77_NAME_MANGLING
+
+
+# _AC_FC_NAME_MANGLING
+# --------------------
+AC_DEFUN([_AC_FC_NAME_MANGLING],
+[AC_REQUIRE([AC_FC_LIBRARY_LDFLAGS])dnl
+AC_REQUIRE([AC_FC_DUMMY_MAIN])dnl
+AC_LANG_PUSH(Fortran)dnl
+__AC_FC_NAME_MANGLING
+AC_LANG_POP(Fortran)dnl
+])# _AC_FC_NAME_MANGLING
+
+
+# _AC_FC_WRAPPERS
+# ---------------
+# Defines C macros {F77,FC}_FUNC(name,NAME) and {F77,FC}_FUNC_(name,NAME) to
+# properly mangle the names of C identifiers, and C identifiers with
+# underscores, respectively, so that they match the name mangling
+# scheme used by the Fortran compiler.
+AC_DEFUN([_AC_FC_WRAPPERS],
+[_AC_FORTRAN_ASSERT()dnl
+AH_TEMPLATE(_AC_FC[_FUNC],
+    [Define to a macro mangling the given C identifier (in lower and upper
+     case), which must not contain underscores, for linking with Fortran.])dnl
+AH_TEMPLATE(_AC_FC[_FUNC_],
+    [As ]_AC_FC[_FUNC, but for C identifiers containing underscores.])dnl
+case $ac_cv_[]_AC_LANG_ABBREV[]_mangling in
+  "lower case, no underscore, no extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [name])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [name]) ;;
+  "lower case, no underscore, extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [name])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [name [##] _]) ;;
+  "lower case, underscore, no extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [name [##] _])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [name [##] _]) ;;
+  "lower case, underscore, extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [name [##] _])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [name [##] __]) ;;
+  "upper case, no underscore, no extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [NAME])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [NAME]) ;;
+  "upper case, no underscore, extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [NAME])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [NAME [##] _]) ;;
+  "upper case, underscore, no extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [NAME [##] _])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [NAME [##] _]) ;;
+  "upper case, underscore, extra underscore")
+	  AC_DEFINE(_AC_FC[_FUNC(name,NAME)],  [NAME [##] _])
+	  AC_DEFINE(_AC_FC[_FUNC_(name,NAME)], [NAME [##] __]) ;;
+  *)
+	  AC_MSG_WARN([unknown Fortran name-mangling scheme])
+	  ;;
+esac
+])# _AC_FC_WRAPPERS
+
+
+# AC_F77_WRAPPERS
+# ---------------
+AC_DEFUN([AC_F77_WRAPPERS],
+[AC_REQUIRE([_AC_F77_NAME_MANGLING])dnl
+AC_LANG_PUSH(Fortran 77)dnl
+_AC_FC_WRAPPERS
+AC_LANG_POP(Fortran 77)dnl
+])# AC_F77_WRAPPERS
+
+
+# AC_FC_WRAPPERS
+# --------------
+AC_DEFUN([AC_FC_WRAPPERS],
+[AC_REQUIRE([_AC_FC_NAME_MANGLING])dnl
+AC_LANG_PUSH(Fortran)dnl
+_AC_FC_WRAPPERS
+AC_LANG_POP(Fortran)dnl
+])# AC_FC_WRAPPERS
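+
+# A sketch of typical use, showing only the configure.ac side:
+#
+#   AC_PROG_FC
+#   AC_FC_WRAPPERS
+#
+# A C translation unit that includes the generated config header can then
+# refer to a Fortran subroutine `foobar' (no underscores) as
+# FC_FUNC(foobar,FOOBAR) and to `foo_bar' as FC_FUNC_(foo_bar,FOO_BAR),
+# whatever case/underscore scheme the compiler turns out to use.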
+
+
+# _AC_FC_FUNC(NAME, [SHELLVAR = NAME])
+# ------------------------------------
+# For a Fortran subroutine of given NAME, define a shell variable
+# $SHELLVAR to the Fortran-mangled name.  If the SHELLVAR
+# argument is not supplied, it defaults to NAME.
+AC_DEFUN([_AC_FC_FUNC],
+[_AC_FORTRAN_ASSERT()dnl
+case $ac_cv_[]_AC_LANG_ABBREV[]_mangling in
+  upper*) ac_val="m4_toupper([$1])" ;;
+  lower*) ac_val="m4_tolower([$1])" ;;
+  *)      ac_val="unknown" ;;
+esac
+case $ac_cv_[]_AC_LANG_ABBREV[]_mangling in *," underscore"*) ac_val="$ac_val"_ ;; esac
+m4_if(m4_index([$1],[_]),-1,[],
+[case $ac_cv_[]_AC_LANG_ABBREV[]_mangling in *," extra underscore"*) ac_val="$ac_val"_ ;; esac
+])
+m4_default([$2],[$1])="$ac_val"
+])# _AC_FC_FUNC
+
+
+# AC_F77_FUNC(NAME, [SHELLVAR = NAME])
+# ------------------------------------
+AC_DEFUN([AC_F77_FUNC],
+[AC_REQUIRE([_AC_F77_NAME_MANGLING])dnl
+AC_LANG_PUSH(Fortran 77)dnl
+_AC_FC_FUNC([$1],[$2])
+AC_LANG_POP(Fortran 77)dnl
+])# AC_F77_FUNC
+
+
+# AC_FC_FUNC(NAME, [SHELLVAR = NAME])
+# -----------------------------------
+AC_DEFUN([AC_FC_FUNC],
+[AC_REQUIRE([_AC_FC_NAME_MANGLING])dnl
+AC_LANG_PUSH(Fortran)dnl
+_AC_FC_FUNC([$1],[$2])
+AC_LANG_POP(Fortran)dnl
+])# AC_FC_FUNC
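+
+# A sketch of typical use: compute a single mangled name into a shell
+# variable (the variable name below is arbitrary):
+#
+#   AC_FC_FUNC([foobar], [foobar_mangled])
+#   AC_MSG_NOTICE([Fortran symbol for foobar is $foobar_mangled])
+#
+# Depending on the detected scheme, $foobar_mangled ends up as, e.g.,
+# "foobar", "foobar_", or "FOOBAR".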
+
+
+# AC_FC_SRCEXT(EXT, [ACTION-IF-SUCCESS], [ACTION-IF-FAILURE])
+# -----------------------------------------------------------
+# Set the source-code extension used in Fortran (FC) tests to EXT (which
+# defaults to f).  Also, look for any additional FCFLAGS needed
+# to allow this extension, and store them in the output variable
+# FCFLAGS_<EXT> (e.g. FCFLAGS_f90 for EXT=f90).  If successful,
+# call ACTION-IF-SUCCESS.  If unable to compile source code with EXT,
+# call ACTION-IF-FAILURE, which defaults to failing with an error
+# message.
+#
+# (The flags for the current source-code extension, if any, are stored in
+# $ac_fcflags_srcext and used automatically in subsequent autoconf tests.)
+#
+# For ordinary extensions like f90, etcetera, the modified FCFLAGS
+# are currently needed for IBM's xlf* and Intel's ifc (grrr).  Unfortunately,
+# xlf* will only take flags to recognize one extension at a time, so if the
+# user wants to compile multiple extensions (.f90 and .f95, say), she
+# will need to use the FCFLAGS_F90 and FCFLAGS_F95 individually rather
+# than just adding them all to FCFLAGS, for example.
+#
+# Also, for Intel's ifc compiler (which does not accept .f95 by default in
+# some versions), the $FCFLAGS_<EXT> variable *must* go immediately before
+# the source file on the command line, unlike other $FCFLAGS.  Ugh.
+AC_DEFUN([AC_FC_SRCEXT],
+[AC_LANG_PUSH(Fortran)dnl
+AC_CACHE_CHECK([for Fortran flag to compile .$1 files],
+		ac_cv_fc_srcext_$1,
+[ac_ext=$1
+ac_fcflags_srcext_save=$ac_fcflags_srcext
+ac_fcflags_srcext=
+ac_cv_fc_srcext_$1=unknown
+for ac_flag in none -qsuffix=f=$1 -Tf; do
+  test "x$ac_flag" != xnone && ac_fcflags_srcext="$ac_flag"
+  AC_COMPILE_IFELSE([AC_LANG_PROGRAM()], [ac_cv_fc_srcext_$1=$ac_flag; break])
+done
+rm -f conftest.$ac_objext conftest.$1
+ac_fcflags_srcext=$ac_fcflags_srcext_save
+])
+if test "x$ac_cv_fc_srcext_$1" = xunknown; then
+  m4_default([$3],[AC_MSG_ERROR([Fortran could not compile .$1 files])])
+else
+  ac_fc_srcext=$1
+  if test "x$ac_cv_fc_srcext_$1" = xnone; then
+    ac_fcflags_srcext=""
+    FCFLAGS_[]$1[]=""
+  else
+    ac_fcflags_srcext=$ac_cv_fc_srcext_$1
+    FCFLAGS_[]$1[]=$ac_cv_fc_srcext_$1
+  fi
+  AC_SUBST(FCFLAGS_[]$1)
+  $2
+fi
+AC_LANG_POP(Fortran)dnl
+])# AC_FC_SRCEXT
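+
+# A sketch of typical use: switch subsequent Fortran tests to .f90
+# sources and capture any flag the compiler needs for that extension:
+#
+#   AC_PROG_FC
+#   AC_FC_SRCEXT([f90])
+#
+# Afterwards $ac_fc_srcext is "f90", $ac_fcflags_srcext holds the flag
+# (possibly empty), and FCFLAGS_f90 is substituted for use in makefiles,
+# where it should be placed immediately before the source file for the
+# compilers noted above.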
+
+
+# AC_FC_FREEFORM([ACTION-IF-SUCCESS], [ACTION-IF-FAILURE = FAILURE])
+# ------------------------------------------------------------------
+# Look for a compiler flag to make the Fortran (FC) compiler accept
+# free-format source code, and add it to FCFLAGS.  Call
+# ACTION-IF-SUCCESS (defaults to nothing) if successful (i.e. can
+# compile free-format source) and ACTION-IF-FAILURE (defaults to
+# failing with an error message) if not.  (Defined via DEFUN_ONCE to
+# prevent flag from being added to FCFLAGS multiple times.)
+#
+# The known flags are:
+#        -ffree-form: GNU g77, gfortran
+#         -FR, -free: Intel compiler (icc, ecc, ifort)
+#              -free: Compaq compiler (fort), Sun compiler (f95)
+#             -qfree: IBM compiler (xlf)
+# -Mfree, -Mfreeform: Portland Group compiler
+#          -freeform: SGI compiler
+#            -f free: Absoft Fortran
+#       +source=free: HP Fortran
+#              -nfix: Lahey/Fujitsu Fortran
+# We try to test the "more popular" flags first, by some prejudiced
+# notion of popularity.
+AC_DEFUN_ONCE([AC_FC_FREEFORM],
+[AC_LANG_PUSH([Fortran])dnl
+AC_CACHE_CHECK([for Fortran flag needed to accept free-form source],
+	       [ac_cv_fc_freeform],
+[ac_cv_fc_freeform=unknown
+ac_fc_freeform_FCFLAGS_save=$FCFLAGS
+for ac_flag in none -ffree-form -FR -free -qfree -Mfree -Mfreeform \
+	       -freeform "-f free" +source=free -nfix
+do
+  test "x$ac_flag" != xnone && FCFLAGS="$ac_fc_freeform_FCFLAGS_save $ac_flag"
+dnl Use @&t@ below to ensure that editors don't turn 8+ spaces into tab.
+  AC_COMPILE_IFELSE([[
+  program freeform
+       ! FIXME: how to best confuse non-freeform compilers?
+       print *, 'Hello ', &
+     @&t@     'world.'
+       end]],
+		    [ac_cv_fc_freeform=$ac_flag; break])
+done
+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext
+FCFLAGS=$ac_fc_freeform_FCFLAGS_save
+])
+if test "x$ac_cv_fc_freeform" = xunknown; then
+  m4_default([$2],
+	     [AC_MSG_ERROR([Fortran does not accept free-form source], 77)])
+else
+  if test "x$ac_cv_fc_freeform" != xnone; then
+    FCFLAGS="$FCFLAGS $ac_cv_fc_freeform"
+  fi
+  $1
+fi
+AC_LANG_POP([Fortran])dnl
+])# AC_FC_FREEFORM
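+
+# A sketch of typical use: request free-form support after selecting the
+# source extension the project actually uses:
+#
+#   AC_PROG_FC
+#   AC_FC_SRCEXT([f90])
+#   AC_FC_FREEFORM
+#
+# Any flag found (e.g. -ffree-form) is appended to FCFLAGS; if none is
+# needed, FCFLAGS is left alone.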
+
+
+# AC_FC_FIXEDFORM([ACTION-IF-SUCCESS], [ACTION-IF-FAILURE = FAILURE])
+# ------------------------------------------------------------------
+# Look for a compiler flag to make the Fortran (FC) compiler accept
+# fixed-format source code, and add it to FCFLAGS.  Call
+# ACTION-IF-SUCCESS (defaults to nothing) if successful (i.e. can
+# compile fixed-format source) and ACTION-IF-FAILURE (defaults to
+# failing with an error message) if not.  (Defined via DEFUN_ONCE to
+# prevent flag from being added to FCFLAGS multiple times.)
+#
+# The known flags are:
+#       -ffixed-form: GNU g77, gfortran
+#             -fixed: Intel compiler (ifort), Sun compiler (f95)
+#            -qfixed: IBM compiler (xlf*)
+#            -Mfixed: Portland Group compiler
+#         -fixedform: SGI compiler
+#           -f fixed: Absoft Fortran
+#      +source=fixed: HP Fortran
+#              -fix: Lahey/Fujitsu Fortran
+# Since compilers may accept fixed form based on file name extension,
+# but users may want to use it with others as well, call AC_FC_SRCEXT
+# with the respective source extension before calling this macro.
+AC_DEFUN_ONCE([AC_FC_FIXEDFORM],
+[AC_LANG_PUSH([Fortran])dnl
+AC_CACHE_CHECK([for Fortran flag needed to accept fixed-form source],
+	       [ac_cv_fc_fixedform],
+[ac_cv_fc_fixedform=unknown
+ac_fc_fixedform_FCFLAGS_save=$FCFLAGS
+for ac_flag in none -ffixed-form -fixed -qfixed -Mfixed -fixedform "-f fixed" \
+	       +source=fixed -fix
+do
+  test "x$ac_flag" != xnone && FCFLAGS="$ac_fc_fixedform_FCFLAGS_save $ac_flag"
+  AC_COMPILE_IFELSE([[
+C     This comment should confuse free-form compilers.
+      program main
+      end]],
+		    [ac_cv_fc_fixedform=$ac_flag; break])
+done
+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext
+FCFLAGS=$ac_fc_fixedform_FCFLAGS_save
+])
+if test "x$ac_cv_fc_fixedform" = xunknown; then
+  m4_default([$2],
+	     [AC_MSG_ERROR([Fortran does not accept fixed-form source], 77)])
+else
+  if test "x$ac_cv_fc_fixedform" != xnone; then
+    FCFLAGS="$FCFLAGS $ac_cv_fc_fixedform"
+  fi
+  $1
+fi
+AC_LANG_POP([Fortran])dnl
+])# AC_FC_FIXEDFORM
+
+
+# AC_FC_LINE_LENGTH([LENGTH], [ACTION-IF-SUCCESS],
+#		    [ACTION-IF-FAILURE = FAILURE])
+# ------------------------------------------------
+# Look for a compiler flag to make the Fortran (FC) compiler accept long lines
+# in the current (free- or fixed-format) source code, and add it to FCFLAGS.
+# The optional LENGTH may be 80, 132 (default), or `unlimited' for longer
+# lines.  Note that line lengths above 254 columns are not portable, and some
+# compilers (hello ifort) do not accept more than 132 columns at least for
+# fixed format.  Call ACTION-IF-SUCCESS (defaults to nothing) if successful
+# (i.e. can compile long source lines) and ACTION-IF-FAILURE (defaults
+# to failing with an error message) if not.  (Defined via DEFUN_ONCE to
+# prevent flag from being added to FCFLAGS multiple times.)
+# You should call AC_FC_FREEFORM or AC_FC_FIXEDFORM to set the desired format
+# prior to using this macro.
+#
+# The known flags are:
+# -f{free,fixed}-line-length-N with N 72, 80, 132, or 0 or none for none.
+# -ffree-line-length-none: GNU gfortran
+#       -qfixed=132 80 72: IBM compiler (xlf)
+#                -Mextend: Cray
+#            -132 -80 -72: Intel compiler (ifort)
+#                          Needs to come before -extend_source because ifort
+#                          accepts that as well with an optional parameter and
+#                          doesn't fail but only warns about unknown arguments.
+#          -extend_source: SGI compiler
+#     -W NN (132, 80, 72): Absoft Fortran
+#          +extend_source: HP Fortran (254 in either form, default is 72 fixed,
+#			   132 free)
+#                   -wide: Lahey/Fujitsu Fortran (255 cols in fixed form)
+#                      -e: Sun Fortran compiler (132 characters)
+AC_DEFUN_ONCE([AC_FC_LINE_LENGTH],
+[AC_LANG_PUSH([Fortran])dnl
+m4_case(m4_default([$1], [132]),
+  [unlimited], [ac_fc_line_len_string=unlimited
+	               ac_fc_line_len=0
+                       ac_fc_line_length_test='
+      subroutine longer_than_132(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,'\
+'arg9,arg10,arg11,arg12,arg13,arg14,arg15,arg16,arg17,arg18,arg19)'],
+  [132],            [ac_fc_line_len=132
+		       ac_fc_line_length_test='
+      subroutine longer_than_80(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9,'\
+'arg10)'],
+  [80],             [ac_fc_line_len=80
+		       ac_fc_line_length_test='
+      subroutine longer_than_72(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9)'],
+  [m4_warning([Invalid length argument `$1'])])
+: ${ac_fc_line_len_string=$ac_fc_line_len}
+AC_CACHE_CHECK(
+[for Fortran flag needed to accept $ac_fc_line_len_string column source lines],
+	       [ac_cv_fc_line_length],
+[ac_cv_fc_line_length=unknown
+ac_fc_line_length_FCFLAGS_save=$FCFLAGS
+for ac_flag in none \
+	       -ffree-line-length-none -ffixed-line-length-none \
+	       -ffree-line-length-$ac_fc_line_len \
+	       -ffixed-line-length-$ac_fc_line_len \
+	       -qfixed=$ac_fc_line_len -Mextend \
+	       -$ac_fc_line_len -extend_source \
+	       "-W $ac_fc_line_len" +extend_source -wide -e
+do
+  test "x$ac_flag" != xnone && FCFLAGS="$ac_fc_line_length_FCFLAGS_save $ac_flag"
+  AC_COMPILE_IFELSE([[$ac_fc_line_length_test
+      end subroutine]],
+		    [ac_cv_fc_line_length=$ac_flag; break])
+done
+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext
+FCFLAGS=$ac_fc_line_length_FCFLAGS_save
+])
+if test "x$ac_cv_fc_line_length" = xunknown; then
+  m4_default([$3],
+	     [AC_MSG_ERROR([Fortran does not accept long source lines], 77)])
+else
+  if test "x$ac_cv_fc_line_length" != xnone; then
+    FCFLAGS="$FCFLAGS $ac_cv_fc_line_length"
+  fi
+  $2
+fi
+AC_LANG_POP([Fortran])dnl
+])# AC_FC_LINE_LENGTH
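+
+# A sketch of typical use: allow unlimited line length in free-form code
+# (the LENGTH argument may also be 80 or 132, the default):
+#
+#   AC_PROG_FC
+#   AC_FC_FREEFORM
+#   AC_FC_LINE_LENGTH([unlimited])
+#
+# Any flag found (e.g. -ffree-line-length-none for gfortran) is appended
+# to FCFLAGS.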
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/functions.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/functions.m4
new file mode 100644
index 0000000..f2048bf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/functions.m4
@@ -0,0 +1,2034 @@
+# This file is part of Autoconf.			-*- Autoconf -*-
+# Checking for functions.
+# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,
+# 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# Table of contents
+#
+# 1. Generic tests for functions.
+# 2. Functions to check with AC_CHECK_FUNCS
+# 3. Tests for specific functions.
+
+
+## -------------------------------- ##
+## 1. Generic tests for functions.  ##
+## -------------------------------- ##
+
+# _AC_CHECK_FUNC_BODY
+# -------------------
+# Shell function body for AC_CHECK_FUNC.
+m4_define([_AC_CHECK_FUNC_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AC_CACHE_CHECK([for $[]2], [$[]3],
+  [AC_LINK_IFELSE([AC_LANG_FUNC_LINK_TRY($[]2)],
+		  [AS_VAR_SET([$[]3], [yes])],
+		  [AS_VAR_SET([$[]3], [no])])])
+  AS_LINENO_POP
+])# _AC_CHECK_FUNC_BODY
+
+
+# AC_CHECK_FUNC(FUNCTION, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# -----------------------------------------------------------------
+# Check whether FUNCTION links in the current language.  Set the cache
+# variable ac_cv_func_FUNCTION accordingly, then execute
+# ACTION-IF-FOUND or ACTION-IF-NOT-FOUND.
+AC_DEFUN([AC_CHECK_FUNC],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_check_func],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_check_func],
+    [LINENO FUNC VAR],
+    [Tests whether FUNC exists, setting the cache variable VAR accordingly])],
+  [_$0_BODY])]dnl
+[AS_VAR_PUSHDEF([ac_var], [ac_cv_func_$1])]dnl
+[ac_fn_[]_AC_LANG_ABBREV[]_check_func "$LINENO" "$1" "ac_var"
+AS_VAR_IF([ac_var], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_var])])# AC_CHECK_FUNC
+
+
+# _AH_CHECK_FUNC(FUNCTION)
+# ------------------------
+# Prepare the autoheader snippet for FUNCTION.
+m4_define([_AH_CHECK_FUNC],
+[AH_TEMPLATE(AS_TR_CPP([HAVE_$1]),
+  [Define to 1 if you have the `$1' function.])])
+
+
+# AC_CHECK_FUNCS(FUNCTION..., [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# ---------------------------------------------------------------------
+# Check for each whitespace-separated FUNCTION, and perform
+# ACTION-IF-FOUND or ACTION-IF-NOT-FOUND for each function.
+# Additionally, make the preprocessor definition HAVE_FUNCTION
+# available for each found function.  Either ACTION may include
+# `break' to stop the search.
+AC_DEFUN([AC_CHECK_FUNCS],
+[m4_map_args_w([$1], [_AH_CHECK_FUNC(], [)])]dnl
+[AS_FOR([AC_func], [ac_func], [$1],
+[AC_CHECK_FUNC(AC_func,
+	       [AC_DEFINE_UNQUOTED(AS_TR_CPP([HAVE_]AC_func)) $2],
+	       [$3])dnl])
+])# AC_CHECK_FUNCS
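+
+# A sketch of typical use: probe for a handful of optional functions and
+# act on the result (the function names are drawn from the list below and
+# are only examples):
+#
+#   AC_CHECK_FUNCS([gettimeofday strdup])
+#   AC_CHECK_FUNC([socket], [],
+#     [AC_CHECK_LIB([socket], [socket])])
+#
+# Each found function yields both a cache variable (ac_cv_func_strdup)
+# and a preprocessor symbol (HAVE_STRDUP) for use in the sources.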
+
+
+# _AC_CHECK_FUNC_ONCE(FUNCTION)
+# -----------------------------
+# Check for a single FUNCTION once.
+m4_define([_AC_CHECK_FUNC_ONCE],
+[_AH_CHECK_FUNC([$1])AC_DEFUN([_AC_Func_$1],
+  [m4_divert_text([INIT_PREPARE], [AS_VAR_APPEND([ac_func_list], [" $1"])])
+_AC_FUNCS_EXPANSION])AC_REQUIRE([_AC_Func_$1])])
+
+# AC_CHECK_FUNCS_ONCE(FUNCTION...)
+# --------------------------------
+# Add each whitespace-separated name in FUNCTION to the list of functions
+# to check once.
+AC_DEFUN([AC_CHECK_FUNCS_ONCE],
+[m4_map_args_w([$1], [_AC_CHECK_FUNC_ONCE(], [)])])
+
+m4_define([_AC_FUNCS_EXPANSION],
+[
+  m4_divert_text([DEFAULTS], [ac_func_list=])
+  AC_CHECK_FUNCS([$ac_func_list])
+  m4_define([_AC_FUNCS_EXPANSION], [])
+])
+
+
+# _AC_REPLACE_FUNC(FUNCTION)
+# --------------------------
+# If FUNCTION exists, define HAVE_FUNCTION; else add FUNCTION.c
+# to the list of library objects.  FUNCTION must be literal.
+m4_define([_AC_REPLACE_FUNC],
+[AC_CHECK_FUNC([$1],
+  [_AH_CHECK_FUNC([$1])AC_DEFINE(AS_TR_CPP([HAVE_$1]))],
+  [_AC_LIBOBJ([$1])AC_LIBSOURCE([$1.c])])])
+
+# AC_REPLACE_FUNCS(FUNCTION...)
+# -----------------------------
+# For each FUNCTION in the whitespace separated list, perform the
+# equivalent of AC_CHECK_FUNC, then call AC_LIBOBJ if the function
+# was not found.
+AC_DEFUN([AC_REPLACE_FUNCS],
+[_$0(m4_flatten([$1]))])
+
+m4_define([_AC_REPLACE_FUNCS],
+[AS_LITERAL_IF([$1],
+[m4_map_args_w([$1], [_AC_REPLACE_FUNC(], [)
+])],
+[AC_CHECK_FUNCS([$1],
+  [_AH_CHECK_FUNC([$ac_func])AC_DEFINE(AS_TR_CPP([HAVE_$ac_func]))],
+  [_AC_LIBOBJ([$ac_func])])])])
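+
+# A sketch of typical use: supply portable replacements for functions
+# that may be missing (the directory and file names are illustrative; the
+# project must actually ship memchr.c and strdup.c there):
+#
+#   AC_CONFIG_LIBOBJ_DIR([lib])
+#   AC_REPLACE_FUNCS([memchr strdup])
+#
+# Missing functions are added to LIBOBJS, which the makefile then links
+# in (e.g. via $(LIBOBJS) in an Automake *_LDADD variable).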
+
+
+# AC_TRY_LINK_FUNC(FUNC, ACTION-IF-FOUND, ACTION-IF-NOT-FOUND)
+# ------------------------------------------------------------
+# Try to link a program that calls FUNC, handling GCC builtins.  If
+# the link succeeds, execute ACTION-IF-FOUND; otherwise, execute
+# ACTION-IF-NOT-FOUND.
+AC_DEFUN([AC_TRY_LINK_FUNC],
+[AC_LINK_IFELSE([AC_LANG_CALL([], [$1])], [$2], [$3])])
+
+
+# AU::AC_FUNC_CHECK
+# -----------------
+AU_ALIAS([AC_FUNC_CHECK], [AC_CHECK_FUNC])
+
+
+# AU::AC_HAVE_FUNCS
+# -----------------
+AU_ALIAS([AC_HAVE_FUNCS], [AC_CHECK_FUNCS])
+
+
+
+
+## ------------------------------------------- ##
+## 2. Functions to check with AC_CHECK_FUNCS.  ##
+## ------------------------------------------- ##
+
+AN_FUNCTION([__argz_count],            [AC_CHECK_FUNCS])
+AN_FUNCTION([__argz_next],             [AC_CHECK_FUNCS])
+AN_FUNCTION([__argz_stringify],        [AC_CHECK_FUNCS])
+AN_FUNCTION([__fpending],              [AC_CHECK_FUNCS])
+AN_FUNCTION([acl],                     [AC_CHECK_FUNCS])
+AN_FUNCTION([alarm],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([atexit],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([btowc],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([bzero],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([clock_gettime],           [AC_CHECK_FUNCS])
+AN_FUNCTION([doprnt],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([dup2],                    [AC_CHECK_FUNCS])
+AN_FUNCTION([endgrent],                [AC_CHECK_FUNCS])
+AN_FUNCTION([endpwent],                [AC_CHECK_FUNCS])
+AN_FUNCTION([euidaccess],              [AC_CHECK_FUNCS])
+AN_FUNCTION([fchdir],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([fdatasync],               [AC_CHECK_FUNCS])
+AN_FUNCTION([fesetround],              [AC_CHECK_FUNCS])
+AN_FUNCTION([floor],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([fs_stat_dev],             [AC_CHECK_FUNCS])
+AN_FUNCTION([ftime],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([ftruncate],               [AC_CHECK_FUNCS])
+AN_FUNCTION([getcwd],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([getdelim],                [AC_CHECK_FUNCS])
+AN_FUNCTION([gethostbyaddr],           [AC_CHECK_FUNCS])
+AN_FUNCTION([gethostbyname],           [AC_CHECK_FUNCS])
+AN_FUNCTION([gethostname],             [AC_CHECK_FUNCS])
+AN_FUNCTION([gethrtime],               [AC_CHECK_FUNCS])
+AN_FUNCTION([getmntent],               [AC_CHECK_FUNCS])
+AN_FUNCTION([getmntinfo],              [AC_CHECK_FUNCS])
+AN_FUNCTION([getpagesize],             [AC_CHECK_FUNCS])
+AN_FUNCTION([getpass],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([getspnam],                [AC_CHECK_FUNCS])
+AN_FUNCTION([gettimeofday],            [AC_CHECK_FUNCS])
+AN_FUNCTION([getusershell],            [AC_CHECK_FUNCS])
+AN_FUNCTION([hasmntopt],               [AC_CHECK_FUNCS])
+AN_FUNCTION([inet_ntoa],               [AC_CHECK_FUNCS])
+AN_FUNCTION([isascii],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([iswprint],                [AC_CHECK_FUNCS])
+AN_FUNCTION([lchown],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([listmntent],              [AC_CHECK_FUNCS])
+AN_FUNCTION([localeconv],              [AC_CHECK_FUNCS])
+AN_FUNCTION([localtime_r],             [AC_CHECK_FUNCS])
+AN_FUNCTION([mblen],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([mbrlen],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([memchr],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([memmove],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([mempcpy],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([memset],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([mkdir],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([mkfifo],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([modf],                    [AC_CHECK_FUNCS])
+AN_FUNCTION([munmap],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([next_dev],                [AC_CHECK_FUNCS])
+AN_FUNCTION([nl_langinfo],             [AC_CHECK_FUNCS])
+AN_FUNCTION([pathconf],                [AC_CHECK_FUNCS])
+AN_FUNCTION([pow],                     [AC_CHECK_FUNCS])
+AN_FUNCTION([pstat_getdynamic],        [AC_CHECK_FUNCS])
+AN_FUNCTION([putenv],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([re_comp],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([realpath],                [AC_CHECK_FUNCS])
+AN_FUNCTION([regcmp],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([regcomp],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([resolvepath],             [AC_CHECK_FUNCS])
+AN_FUNCTION([rint],                    [AC_CHECK_FUNCS])
+AN_FUNCTION([rmdir],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([rpmatch],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([select],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([setenv],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([sethostname],             [AC_CHECK_FUNCS])
+AN_FUNCTION([setlocale],               [AC_CHECK_FUNCS])
+AN_FUNCTION([socket],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([sqrt],                    [AC_CHECK_FUNCS])
+AN_FUNCTION([stime],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([stpcpy],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([strcasecmp],              [AC_CHECK_FUNCS])
+AN_FUNCTION([strchr],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([strcspn],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([strdup],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([strerror],                [AC_CHECK_FUNCS])
+AN_FUNCTION([strncasecmp],             [AC_CHECK_FUNCS])
+AN_FUNCTION([strndup],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([strpbrk],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([strrchr],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([strspn],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([strstr],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([strtol],                  [AC_CHECK_FUNCS])
+AN_FUNCTION([strtoul],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([strtoull],                [AC_CHECK_FUNCS])
+AN_FUNCTION([strtoumax],               [AC_CHECK_FUNCS])
+AN_FUNCTION([strverscmp],              [AC_CHECK_FUNCS])
+AN_FUNCTION([sysinfo],                 [AC_CHECK_FUNCS])
+AN_FUNCTION([tzset],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([uname],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([utime],                   [AC_CHECK_FUNCS])
+AN_FUNCTION([utmpname],                [AC_CHECK_FUNCS])
+AN_FUNCTION([utmpxname],               [AC_CHECK_FUNCS])
+AN_FUNCTION([wcwidth],                 [AC_CHECK_FUNCS])
+
+
+AN_FUNCTION([dcgettext],    [AM_GNU_GETTEXT])
+AN_FUNCTION([getwd],        [warn: getwd is deprecated, use getcwd instead])
+
+
+## --------------------------------- ##
+## 3. Tests for specific functions.  ##
+## --------------------------------- ##
+
+
+# The macros are sorted:
+#
+# 1. AC_FUNC_* macros are sorted by alphabetical order.
+#
+# 2. Helping macros such as _AC_LIBOBJ_* are before the macro that
+#    uses it.
+#
+# 3. Obsolete macros are right after the modern macro.
+
+
+
+# _AC_LIBOBJ_ALLOCA
+# -----------------
+# Set up the LIBOBJ replacement of `alloca'.  Well, not exactly
+# AC_LIBOBJ since we actually set the output variable `ALLOCA'.
+# Nevertheless, for Automake, AC_LIBSOURCES it.
+m4_define([_AC_LIBOBJ_ALLOCA],
+[# The SVR3 libPW and SVR4 libucb both contain incompatible functions
+# that cause trouble.  Some versions do not even contain alloca or
+# contain a buggy version.  If you still want to use their alloca,
+# use ar to extract alloca.o from them instead of compiling alloca.c.
+AC_LIBSOURCES(alloca.c)
+AC_SUBST([ALLOCA], [\${LIBOBJDIR}alloca.$ac_objext])dnl
+AC_DEFINE(C_ALLOCA, 1, [Define to 1 if using `alloca.c'.])
+
+AC_CACHE_CHECK(whether `alloca.c' needs Cray hooks, ac_cv_os_cray,
+[AC_EGREP_CPP(webecray,
+[#if defined CRAY && ! defined CRAY2
+webecray
+#else
+wenotbecray
+#endif
+], ac_cv_os_cray=yes, ac_cv_os_cray=no)])
+if test $ac_cv_os_cray = yes; then
+  for ac_func in _getb67 GETB67 getb67; do
+    AC_CHECK_FUNC($ac_func,
+		  [AC_DEFINE_UNQUOTED(CRAY_STACKSEG_END, $ac_func,
+				      [Define to one of `_getb67', `GETB67',
+				       `getb67' for Cray-2 and Cray-YMP
+				       systems. This function is required for
+				       `alloca.c' support on those systems.])
+    break])
+  done
+fi
+
+AC_CACHE_CHECK([stack direction for C alloca],
+	       [ac_cv_c_stack_direction],
+[AC_RUN_IFELSE([AC_LANG_SOURCE(
+[AC_INCLUDES_DEFAULT
+int
+find_stack_direction ()
+{
+  static char *addr = 0;
+  auto char dummy;
+  if (addr == 0)
+    {
+      addr = &dummy;
+      return find_stack_direction ();
+    }
+  else
+    return (&dummy > addr) ? 1 : -1;
+}
+
+int
+main ()
+{
+  return find_stack_direction () < 0;
+}])],
+	       [ac_cv_c_stack_direction=1],
+	       [ac_cv_c_stack_direction=-1],
+	       [ac_cv_c_stack_direction=0])])
+AH_VERBATIM([STACK_DIRECTION],
+[/* If using the C implementation of alloca, define if you know the
+   direction of stack growth for your system; otherwise it will be
+   automatically deduced at runtime.
+	STACK_DIRECTION > 0 => grows toward higher addresses
+	STACK_DIRECTION < 0 => grows toward lower addresses
+	STACK_DIRECTION = 0 => direction of growth unknown */
+@%:@undef STACK_DIRECTION])dnl
+AC_DEFINE_UNQUOTED(STACK_DIRECTION, $ac_cv_c_stack_direction)
+])# _AC_LIBOBJ_ALLOCA
+
+
+# AC_FUNC_ALLOCA
+# --------------
+AN_FUNCTION([alloca], [AC_FUNC_ALLOCA])
+AN_HEADER([alloca.h], [AC_FUNC_ALLOCA])
+AC_DEFUN([AC_FUNC_ALLOCA],
+[AC_REQUIRE([AC_TYPE_SIZE_T])]dnl
+[# The Ultrix 4.2 mips builtin alloca declared by alloca.h only works
+# for constant arguments.  Useless!
+AC_CACHE_CHECK([for working alloca.h], ac_cv_working_alloca_h,
+[AC_LINK_IFELSE(
+       [AC_LANG_PROGRAM([[@%:@include <alloca.h>]],
+			[[char *p = (char *) alloca (2 * sizeof (int));
+			  if (p) return 0;]])],
+		[ac_cv_working_alloca_h=yes],
+		[ac_cv_working_alloca_h=no])])
+if test $ac_cv_working_alloca_h = yes; then
+  AC_DEFINE(HAVE_ALLOCA_H, 1,
+	    [Define to 1 if you have <alloca.h> and it should be used
+	     (not on Ultrix).])
+fi
+
+AC_CACHE_CHECK([for alloca], ac_cv_func_alloca_works,
+[AC_LINK_IFELSE([AC_LANG_PROGRAM(
+[[#ifdef __GNUC__
+# define alloca __builtin_alloca
+#else
+# ifdef _MSC_VER
+#  include <malloc.h>
+#  define alloca _alloca
+# else
+#  ifdef HAVE_ALLOCA_H
+#   include <alloca.h>
+#  else
+#   ifdef _AIX
+ #pragma alloca
+#   else
+#    ifndef alloca /* predefined by HP cc +Olibcalls */
+void *alloca (size_t);
+#    endif
+#   endif
+#  endif
+# endif
+#endif
+]],                               [[char *p = (char *) alloca (1);
+				    if (p) return 0;]])],
+		[ac_cv_func_alloca_works=yes],
+		[ac_cv_func_alloca_works=no])])
+
+if test $ac_cv_func_alloca_works = yes; then
+  AC_DEFINE(HAVE_ALLOCA, 1,
+	    [Define to 1 if you have `alloca', as a function or macro.])
+else
+  _AC_LIBOBJ_ALLOCA
+fi
+])# AC_FUNC_ALLOCA
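+
+# A sketch of typical use: a single call in configure.ac,
+#
+#   AC_FUNC_ALLOCA
+#
+# defines HAVE_ALLOCA_H/HAVE_ALLOCA when alloca works, and otherwise
+# arranges for alloca.c to be compiled via the ALLOCA output variable,
+# which a makefile can add to its link line (e.g. prog_LDADD = $(ALLOCA)
+# in Automake; the program name here is made up).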
+
+
+# AU::AC_ALLOCA
+# -------------
+AU_ALIAS([AC_ALLOCA], [AC_FUNC_ALLOCA])
+
+
+# AC_FUNC_CHOWN
+# -------------
+# Determine whether chown accepts arguments of -1 for uid and gid.
+AN_FUNCTION([chown], [AC_FUNC_CHOWN])
+AC_DEFUN([AC_FUNC_CHOWN],
+[AC_REQUIRE([AC_TYPE_UID_T])dnl
+AC_CHECK_HEADERS(unistd.h)
+AC_CACHE_CHECK([for working chown], ac_cv_func_chown_works,
+[AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT
+#include <fcntl.h>
+],
+[[  char *f = "conftest.chown";
+  struct stat before, after;
+
+  if (creat (f, 0600) < 0)
+    return 1;
+  if (stat (f, &before) < 0)
+    return 1;
+  if (chown (f, (uid_t) -1, (gid_t) -1) == -1)
+    return 1;
+  if (stat (f, &after) < 0)
+    return 1;
+  return ! (before.st_uid == after.st_uid && before.st_gid == after.st_gid);
+]])],
+	       [ac_cv_func_chown_works=yes],
+	       [ac_cv_func_chown_works=no],
+	       [ac_cv_func_chown_works=no])
+rm -f conftest.chown
+])
+if test $ac_cv_func_chown_works = yes; then
+  AC_DEFINE(HAVE_CHOWN, 1,
+	    [Define to 1 if your system has a working `chown' function.])
+fi
+])# AC_FUNC_CHOWN
+
+
+# AC_FUNC_CLOSEDIR_VOID
+# ---------------------
+# Check whether closedir returns void, and #define CLOSEDIR_VOID in
+# that case.
+AC_DEFUN([AC_FUNC_CLOSEDIR_VOID],
+[AC_REQUIRE([AC_HEADER_DIRENT])dnl
+AC_CACHE_CHECK([whether closedir returns void],
+	       [ac_cv_func_closedir_void],
+[AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT
+#include <$ac_header_dirent>
+#ifndef __cplusplus
+int closedir ();
+#endif
+],
+				[[return closedir (opendir (".")) != 0;]])],
+	       [ac_cv_func_closedir_void=no],
+	       [ac_cv_func_closedir_void=yes],
+	       [ac_cv_func_closedir_void=yes])])
+if test $ac_cv_func_closedir_void = yes; then
+  AC_DEFINE(CLOSEDIR_VOID, 1,
+	    [Define to 1 if the `closedir' function returns void instead
+	     of `int'.])
+fi
+])
+
+
+# AC_FUNC_ERROR_AT_LINE
+# ---------------------
+AN_FUNCTION([error],         [AC_FUNC_ERROR_AT_LINE])
+AN_FUNCTION([error_at_line], [AC_FUNC_ERROR_AT_LINE])
+AC_DEFUN([AC_FUNC_ERROR_AT_LINE],
+[AC_LIBSOURCES([error.h, error.c])dnl
+AC_CACHE_CHECK([for error_at_line], ac_cv_lib_error_at_line,
+[AC_LINK_IFELSE([AC_LANG_PROGRAM([#include <error.h>],
+				 [error_at_line (0, 0, "", 0, "an error occurred");])],
+		[ac_cv_lib_error_at_line=yes],
+		[ac_cv_lib_error_at_line=no])])
+if test $ac_cv_lib_error_at_line = no; then
+  AC_LIBOBJ(error)
+fi
+])
+
+
+# AU::AM_FUNC_ERROR_AT_LINE
+# -------------------------
+AU_ALIAS([AM_FUNC_ERROR_AT_LINE], [AC_FUNC_ERROR_AT_LINE])
+
+
+# _AC_FUNC_FNMATCH_IF(STANDARD = GNU | POSIX, CACHE_VAR, IF-TRUE, IF-FALSE)
+# -------------------------------------------------------------------------
+# If a STANDARD compliant fnmatch is found, run IF-TRUE, otherwise
+# IF-FALSE.  Use CACHE_VAR.
+AC_DEFUN([_AC_FUNC_FNMATCH_IF],
+[AC_CACHE_CHECK(
+   [for working $1 fnmatch],
+   [$2],
+  [# Some versions of Solaris, SCO, and the GNU C Library
+   # have a broken or incompatible fnmatch.
+   # So we run a test program.  If we are cross-compiling, take no chance.
+   # Thanks to John Oleynick, Franc,ois Pinard, and Paul Eggert for this test.
+   AC_RUN_IFELSE(
+      [AC_LANG_PROGRAM(
+	 [#include <fnmatch.h>
+#	   define y(a, b, c) (fnmatch (a, b, c) == 0)
+#	   define n(a, b, c) (fnmatch (a, b, c) == FNM_NOMATCH)
+	 ],
+	 [return
+	   (!(y ("a*", "abc", 0)
+	      && n ("d*/*1", "d/s/1", FNM_PATHNAME)
+	      && y ("a\\\\bc", "abc", 0)
+	      && n ("a\\\\bc", "abc", FNM_NOESCAPE)
+	      && y ("*x", ".x", 0)
+	      && n ("*x", ".x", FNM_PERIOD)
+	      && m4_if([$1], [GNU],
+		   [y ("xxXX", "xXxX", FNM_CASEFOLD)
+		    && y ("a++(x|yy)b", "a+xyyyyxb", FNM_EXTMATCH)
+		    && n ("d*/*1", "d/s/1", FNM_FILE_NAME)
+		    && y ("*", "x", FNM_FILE_NAME | FNM_LEADING_DIR)
+		    && y ("x*", "x/y/z", FNM_FILE_NAME | FNM_LEADING_DIR)
+		    && y ("*c*", "c/x", FNM_FILE_NAME | FNM_LEADING_DIR)],
+		   1)));])],
+      [$2=yes],
+      [$2=no],
+      [$2=cross])])
+AS_IF([test $$2 = yes], [$3], [$4])
+])# _AC_FUNC_FNMATCH_IF
+
+
+# AC_FUNC_FNMATCH
+# ---------------
+AC_DEFUN([AC_FUNC_FNMATCH],
+[_AC_FUNC_FNMATCH_IF([POSIX], [ac_cv_func_fnmatch_works],
+		     [AC_DEFINE([HAVE_FNMATCH], 1,
+		     [Define to 1 if your system has a working POSIX `fnmatch'
+		      function.])])
+])# AC_FUNC_FNMATCH
+
+
+# _AC_LIBOBJ_FNMATCH
+# ------------------
+# Prepare the replacement of fnmatch.
+AC_DEFUN([_AC_LIBOBJ_FNMATCH],
+[AC_REQUIRE([AC_C_CONST])dnl
+AC_REQUIRE([AC_FUNC_ALLOCA])dnl
+AC_REQUIRE([AC_TYPE_MBSTATE_T])dnl
+AC_CHECK_DECLS([getenv])
+AC_CHECK_FUNCS([btowc mbsrtowcs mempcpy wmempcpy])
+AC_CHECK_HEADERS([wchar.h wctype.h])
+AC_LIBOBJ([fnmatch])
+AC_CONFIG_LINKS([$ac_config_libobj_dir/fnmatch.h:$ac_config_libobj_dir/fnmatch_.h])
+AC_DEFINE(fnmatch, rpl_fnmatch,
+	  [Define to rpl_fnmatch if the replacement function should be used.])
+])# _AC_LIBOBJ_FNMATCH
+
+
+# AC_REPLACE_FNMATCH
+# ------------------
+AC_DEFUN([AC_REPLACE_FNMATCH],
+[_AC_FUNC_FNMATCH_IF([POSIX], [ac_cv_func_fnmatch_works],
+		     [rm -f "$ac_config_libobj_dir/fnmatch.h"],
+		     [_AC_LIBOBJ_FNMATCH])
+])# AC_REPLACE_FNMATCH
+
+
+# AC_FUNC_FNMATCH_GNU
+# -------------------
+AC_DEFUN([AC_FUNC_FNMATCH_GNU],
+[AC_REQUIRE([AC_GNU_SOURCE])
+_AC_FUNC_FNMATCH_IF([GNU], [ac_cv_func_fnmatch_gnu],
+		    [rm -f "$ac_config_libobj_dir/fnmatch.h"],
+		    [_AC_LIBOBJ_FNMATCH])
+])# AC_FUNC_FNMATCH_GNU
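+
+# Usage sketch: a package wanting the replacement machinery above calls
+# AC_REPLACE_FNMATCH (POSIX) or AC_FUNC_FNMATCH_GNU and then includes
+# <fnmatch.h> unconditionally; when the system fnmatch is unusable,
+# _AC_LIBOBJ_FNMATCH links fnmatch_.h into place as fnmatch.h and maps
+# fnmatch to rpl_fnmatch, so the same source compiles either way
+# (provided the libobj directory is on the include path):
+#
+#   dnl configure.ac
+#   AC_REPLACE_FNMATCH
+#
+#   /* C source; name is a placeholder */
+#   #include <fnmatch.h>
+#   int is_c_file = (fnmatch ("*.c", name, FNM_PERIOD) == 0);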
+
+
+# AU::AM_FUNC_FNMATCH
+# AU::fp_FUNC_FNMATCH
+# -------------------
+AU_ALIAS([AM_FUNC_FNMATCH], [AC_FUNC_FNMATCH])
+AU_ALIAS([fp_FUNC_FNMATCH], [AC_FUNC_FNMATCH])
+
+
+# AC_FUNC_FSEEKO
+# --------------
+AN_FUNCTION([ftello], [AC_FUNC_FSEEKO])
+AN_FUNCTION([fseeko], [AC_FUNC_FSEEKO])
+AC_DEFUN([AC_FUNC_FSEEKO],
+[_AC_SYS_LARGEFILE_MACRO_VALUE(_LARGEFILE_SOURCE, 1,
+   [ac_cv_sys_largefile_source],
+   [Define to 1 to make fseeko visible on some hosts (e.g. glibc 2.2).],
+   [[#include <sys/types.h> /* for off_t */
+     #include <stdio.h>]],
+   [[int (*fp) (FILE *, off_t, int) = fseeko;
+     return fseeko (stdin, 0, 0) && fp (stdin, 0, 0);]])
+
+# We used to try defining _XOPEN_SOURCE=500 too, to work around a bug
+# in glibc 2.1.3, but that breaks too many other things.
+# If you want fseeko and ftello with glibc, upgrade to a fixed glibc.
+if test $ac_cv_sys_largefile_source != unknown; then
+  AC_DEFINE(HAVE_FSEEKO, 1,
+    [Define to 1 if fseeko (and presumably ftello) exists and is declared.])
+fi
+])# AC_FUNC_FSEEKO
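+
+# Minimal usage sketch in C (config.h must be included before <stdio.h>
+# so that the _LARGEFILE_SOURCE define above takes effect):
+#
+#   #ifdef HAVE_FSEEKO
+#     if (fseeko (fp, offset, SEEK_SET) != 0)
+#       return -1;
+#   #else
+#     if (fseek (fp, (long) offset, SEEK_SET) != 0)
+#       return -1;
+#   #endif
+#
+# (fp and offset are placeholders for a FILE * and an off_t value.)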
+
+
+# AC_FUNC_GETGROUPS
+# -----------------
+# Try to find `getgroups', and check that it works.
+# When cross-compiling, assume getgroups is broken.
+AN_FUNCTION([getgroups], [AC_FUNC_GETGROUPS])
+AC_DEFUN([AC_FUNC_GETGROUPS],
+[AC_REQUIRE([AC_TYPE_GETGROUPS])dnl
+AC_REQUIRE([AC_TYPE_SIZE_T])dnl
+AC_CHECK_FUNC(getgroups)
+
+# If we don't yet have getgroups, see if it's in -lbsd.
+# This is reported to be necessary on an ITOS 3000WS running SEIUX 3.1.
+ac_save_LIBS=$LIBS
+if test $ac_cv_func_getgroups = no; then
+  AC_CHECK_LIB(bsd, getgroups, [GETGROUPS_LIB=-lbsd])
+fi
+
+# Run the program to test the functionality of the system-supplied
+# getgroups function only if there is such a function.
+if test $ac_cv_func_getgroups = yes; then
+  AC_CACHE_CHECK([for working getgroups], ac_cv_func_getgroups_works,
+   [AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+      [[/* On Ultrix 4.3, getgroups (0, 0) always fails.  */
+       return getgroups (0, 0) == -1;]])],
+		  [ac_cv_func_getgroups_works=yes],
+		  [ac_cv_func_getgroups_works=no],
+		  [ac_cv_func_getgroups_works=no])
+   ])
+else
+  ac_cv_func_getgroups_works=no
+fi
+if test $ac_cv_func_getgroups_works = yes; then
+  AC_DEFINE(HAVE_GETGROUPS, 1,
+	    [Define to 1 if your system has a working `getgroups' function.])
+fi
+LIBS=$ac_save_LIBS
+])# AC_FUNC_GETGROUPS
+
+
+# _AC_LIBOBJ_GETLOADAVG
+# ---------------------
+# Set up the AC_LIBOBJ replacement of `getloadavg'.
+m4_define([_AC_LIBOBJ_GETLOADAVG],
+[AC_LIBOBJ(getloadavg)
+AC_DEFINE(C_GETLOADAVG, 1, [Define to 1 if using `getloadavg.c'.])
+# Figure out what our getloadavg.c needs.
+ac_have_func=no
+AC_CHECK_HEADER(sys/dg_sys_info.h,
+[ac_have_func=yes
+ AC_DEFINE(DGUX, 1, [Define to 1 for DGUX with <sys/dg_sys_info.h>.])
+ AC_CHECK_LIB(dgc, dg_sys_info)])
+
+AC_CHECK_HEADER(locale.h)
+AC_CHECK_FUNCS(setlocale)
+
+# We cannot check for <dwarf.h>, because Solaris 2 does not use dwarf (it
+# uses stabs), but it is still SVR4.  We cannot check for <elf.h> because
+# Irix 4.0.5F has the header but not the library.
+if test $ac_have_func = no && test "$ac_cv_lib_elf_elf_begin" = yes \
+    && test "$ac_cv_lib_kvm_kvm_open" = yes; then
+  ac_have_func=yes
+  AC_DEFINE(SVR4, 1, [Define to 1 on System V Release 4.])
+fi
+
+if test $ac_have_func = no; then
+  AC_CHECK_HEADER(inq_stats/cpustats.h,
+  [ac_have_func=yes
+   AC_DEFINE(UMAX, 1, [Define to 1 for Encore UMAX.])
+   AC_DEFINE(UMAX4_3, 1,
+	     [Define to 1 for Encore UMAX 4.3 that has <inq_status/cpustats.h>
+	      instead of <sys/cpustats.h>.])])
+fi
+
+if test $ac_have_func = no; then
+  AC_CHECK_HEADER(sys/cpustats.h,
+  [ac_have_func=yes; AC_DEFINE(UMAX)])
+fi
+
+if test $ac_have_func = no; then
+  AC_CHECK_HEADERS(mach/mach.h)
+fi
+
+AC_CHECK_HEADERS(nlist.h,
+[AC_CHECK_MEMBERS([struct nlist.n_un.n_name],
+		  [AC_DEFINE(NLIST_NAME_UNION, 1,
+			     [Define to 1 if your `struct nlist' has an
+			      `n_un' member.  Obsolete, depend on
+			      `HAVE_STRUCT_NLIST_N_UN_N_NAME])], [],
+		  [@%:@include <nlist.h>])
+])dnl
+])# _AC_LIBOBJ_GETLOADAVG
+
+
+# AC_FUNC_GETLOADAVG
+# ------------------
+AC_DEFUN([AC_FUNC_GETLOADAVG],
+[ac_have_func=no # yes means we've found a way to get the load average.
+
+# Make sure getloadavg.c is where it belongs, at configure-time.
+test -f "$srcdir/$ac_config_libobj_dir/getloadavg.c" ||
+  AC_MSG_ERROR([$srcdir/$ac_config_libobj_dir/getloadavg.c is missing])
+
+ac_save_LIBS=$LIBS
+
+# Check for getloadavg, but be sure not to touch the cache variable.
+(AC_CHECK_FUNC(getloadavg, exit 0, exit 1)) && ac_have_func=yes
+
+# On HPUX9, an unprivileged user can get load averages through this function.
+AC_CHECK_FUNCS(pstat_getdynamic)
+
+# Solaris has libkstat which does not require root.
+AC_CHECK_LIB(kstat, kstat_open)
+test $ac_cv_lib_kstat_kstat_open = yes && ac_have_func=yes
+
+# Some systems with -lutil have (and need) -lkvm as well, some do not.
+# On Solaris, -lkvm requires nlist from -lelf, so check that first
+# to get the right answer into the cache.
+# For kstat on solaris, we need libelf to force the definition of SVR4 below.
+if test $ac_have_func = no; then
+  AC_CHECK_LIB(elf, elf_begin, LIBS="-lelf $LIBS")
+fi
+if test $ac_have_func = no; then
+  AC_CHECK_LIB(kvm, kvm_open, LIBS="-lkvm $LIBS")
+  # Check for the 4.4BSD definition of getloadavg.
+  AC_CHECK_LIB(util, getloadavg,
+    [LIBS="-lutil $LIBS" ac_have_func=yes ac_cv_func_getloadavg_setgid=yes])
+fi
+
+if test $ac_have_func = no; then
+  # There is a commonly available library for RS/6000 AIX.
+  # Since it is not a standard part of AIX, it might be installed locally.
+  ac_getloadavg_LIBS=$LIBS
+  LIBS="-L/usr/local/lib $LIBS"
+  AC_CHECK_LIB(getloadavg, getloadavg,
+	       [LIBS="-lgetloadavg $LIBS"], [LIBS=$ac_getloadavg_LIBS])
+fi
+
+# Make sure it is really in the library, if we think we found it,
+# otherwise set up the replacement function.
+AC_CHECK_FUNCS(getloadavg, [],
+	       [_AC_LIBOBJ_GETLOADAVG])
+
+# Some definitions of getloadavg require that the program be installed setgid.
+AC_CACHE_CHECK(whether getloadavg requires setgid,
+	       ac_cv_func_getloadavg_setgid,
+[AC_EGREP_CPP([Yowza Am I SETGID yet],
+[#include "$srcdir/$ac_config_libobj_dir/getloadavg.c"
+#ifdef LDAV_PRIVILEGED
+Yowza Am I SETGID yet
+@%:@endif],
+	      ac_cv_func_getloadavg_setgid=yes,
+	      ac_cv_func_getloadavg_setgid=no)])
+if test $ac_cv_func_getloadavg_setgid = yes; then
+  NEED_SETGID=true
+  AC_DEFINE(GETLOADAVG_PRIVILEGED, 1,
+	    [Define to 1 if the `getloadavg' function needs to be run setuid
+	     or setgid.])
+else
+  NEED_SETGID=false
+fi
+AC_SUBST(NEED_SETGID)dnl
+
+if test $ac_cv_func_getloadavg_setgid = yes; then
+  AC_CACHE_CHECK(group of /dev/kmem, ac_cv_group_kmem,
+[ # On Solaris, /dev/kmem is a symlink.  Get info on the real file.
+  ac_ls_output=`ls -lgL /dev/kmem 2>/dev/null`
+  # If we got an error (system does not support symlinks), try without -L.
+  test -z "$ac_ls_output" && ac_ls_output=`ls -lg /dev/kmem`
+  ac_cv_group_kmem=`AS_ECHO(["$ac_ls_output"]) \
+    | sed -ne ['s/[	 ][	 ]*/ /g;
+	       s/^.[sSrwx-]* *[0-9]* *\([^0-9]*\)  *.*/\1/;
+	       / /s/.* //;p;']`
+])
+  AC_SUBST(KMEM_GROUP, $ac_cv_group_kmem)dnl
+fi
+if test "x$ac_save_LIBS" = x; then
+  GETLOADAVG_LIBS=$LIBS
+else
+  GETLOADAVG_LIBS=`AS_ECHO(["$LIBS"]) | sed "s|$ac_save_LIBS||"`
+fi
+LIBS=$ac_save_LIBS
+
+AC_SUBST(GETLOADAVG_LIBS)dnl
+])# AC_FUNC_GETLOADAVG
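+
+# Usage sketch: GETLOADAVG_LIBS, NEED_SETGID and KMEM_GROUP are
+# AC_SUBSTed above, so a package typically links the result and lets its
+# install rules honor the setgid requirement, e.g. (illustrative
+# Makefile.am fragment; prog is a placeholder):
+#
+#   prog_LDADD = $(LIBOBJS) $(GETLOADAVG_LIBS)
+#
+# NEED_SETGID and KMEM_GROUP are meant for an install rule that makes
+# the program setgid to the /dev/kmem group when GETLOADAVG_PRIVILEGED
+# is defined; the exact rule is left to the package.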
+
+
+# AU::AC_GETLOADAVG
+# -----------------
+AU_ALIAS([AC_GETLOADAVG], [AC_FUNC_GETLOADAVG])
+
+
+# AC_FUNC_GETMNTENT
+# -----------------
+AN_FUNCTION([getmntent], [AC_FUNC_GETMNTENT])
+AC_DEFUN([AC_FUNC_GETMNTENT],
+[# getmntent is in the standard C library on UNICOS, in -lsun on Irix 4,
+# -lseq on Dynix/PTX, -lgen on Unixware.
+AC_SEARCH_LIBS(getmntent, [sun seq gen],
+	       [ac_cv_func_getmntent=yes
+		AC_DEFINE([HAVE_GETMNTENT], 1,
+			  [Define to 1 if you have the `getmntent' function.])],
+	       [ac_cv_func_getmntent=no])
+])
+
+
+# AC_FUNC_GETPGRP
+# ---------------
+# Figure out whether getpgrp requires zero arguments.
+AC_DEFUN([AC_FUNC_GETPGRP],
+[AC_CACHE_CHECK(whether getpgrp requires zero arguments,
+ ac_cv_func_getpgrp_void,
+[# Use it with a single arg.
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT], [getpgrp (0);])],
+		  [ac_cv_func_getpgrp_void=no],
+		  [ac_cv_func_getpgrp_void=yes])
+])
+if test $ac_cv_func_getpgrp_void = yes; then
+  AC_DEFINE(GETPGRP_VOID, 1,
+	    [Define to 1 if the `getpgrp' function requires zero arguments.])
+fi
+])# AC_FUNC_GETPGRP
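+
+# Minimal usage sketch in C, guarding on the symbol defined above:
+#
+#   #ifdef GETPGRP_VOID
+#     pgrp = getpgrp ();
+#   #else
+#     pgrp = getpgrp (0);
+#   #endif
+#
+# (pgrp is a placeholder for the caller's pid_t variable.)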
+
+
+# AC_FUNC_LSTAT_FOLLOWS_SLASHED_SYMLINK
+# -------------------------------------
+# When cross-compiling, be pessimistic so we will end up using the
+# replacement version of lstat that checks for trailing slashes and
+# calls lstat a second time when necessary.
+AN_FUNCTION([lstat], [AC_FUNC_LSTAT_FOLLOWS_SLASHED_SYMLINK])
+AC_DEFUN([AC_FUNC_LSTAT_FOLLOWS_SLASHED_SYMLINK],
+[AC_CACHE_CHECK(
+       [whether lstat correctly handles trailing slash],
+       [ac_cv_func_lstat_dereferences_slashed_symlink],
+[rm -f conftest.sym conftest.file
+echo >conftest.file
+if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then
+  AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+    [struct stat sbuf;
+     /* Linux will dereference the symlink and fail, as required by POSIX.
+	That is better in the sense that it means we will not
+	have to compile and use the lstat wrapper.  */
+     return lstat ("conftest.sym/", &sbuf) == 0;])],
+		[ac_cv_func_lstat_dereferences_slashed_symlink=yes],
+		[ac_cv_func_lstat_dereferences_slashed_symlink=no],
+		[ac_cv_func_lstat_dereferences_slashed_symlink=no])
+else
+  # If the `ln -s' command failed, then we probably don't even
+  # have an lstat function.
+  ac_cv_func_lstat_dereferences_slashed_symlink=no
+fi
+rm -f conftest.sym conftest.file
+])
+
+test $ac_cv_func_lstat_dereferences_slashed_symlink = yes &&
+  AC_DEFINE_UNQUOTED([LSTAT_FOLLOWS_SLASHED_SYMLINK], [1],
+		     [Define to 1 if `lstat' dereferences a symlink specified
+		      with a trailing slash.])
+
+if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then
+  AC_LIBOBJ([lstat])
+fi
+])
+
+
+# _AC_FUNC_MALLOC_IF(IF-WORKS, IF-NOT)
+# ------------------------------------
+# If `malloc (0)' is properly handled, run IF-WORKS, otherwise, IF-NOT.
+AC_DEFUN([_AC_FUNC_MALLOC_IF],
+[AC_REQUIRE([AC_HEADER_STDC])dnl
+AC_CHECK_HEADERS(stdlib.h)
+AC_CACHE_CHECK([for GNU libc compatible malloc], ac_cv_func_malloc_0_nonnull,
+[AC_RUN_IFELSE(
+[AC_LANG_PROGRAM(
+[[#if defined STDC_HEADERS || defined HAVE_STDLIB_H
+# include <stdlib.h>
+#else
+char *malloc ();
+#endif
+]],
+		 [return ! malloc (0);])],
+	       [ac_cv_func_malloc_0_nonnull=yes],
+	       [ac_cv_func_malloc_0_nonnull=no],
+	       [ac_cv_func_malloc_0_nonnull=no])])
+AS_IF([test $ac_cv_func_malloc_0_nonnull = yes], [$1], [$2])
+])# _AC_FUNC_MALLOC_IF
+
+
+# AC_FUNC_MALLOC
+# --------------
+# Report whether `malloc (0)' is properly handled, and replace malloc if
+# needed.
+AN_FUNCTION([malloc], [AC_FUNC_MALLOC])
+AC_DEFUN([AC_FUNC_MALLOC],
+[_AC_FUNC_MALLOC_IF(
+  [AC_DEFINE([HAVE_MALLOC], 1,
+	     [Define to 1 if your system has a GNU libc compatible `malloc'
+	      function, and to 0 otherwise.])],
+  [AC_DEFINE([HAVE_MALLOC], 0)
+   AC_LIBOBJ(malloc)
+   AC_DEFINE([malloc], [rpl_malloc],
+      [Define to rpl_malloc if the replacement function should be used.])])
+])# AC_FUNC_MALLOC
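+
+# Usage sketch: when the replacement branch is taken, AC_LIBOBJ(malloc)
+# expects the package to ship a malloc.c providing rpl_malloc.  A minimal
+# version, relying on config.h defining malloc to rpl_malloc as above:
+#
+#   #include <config.h>
+#   #undef malloc
+#   #include <sys/types.h>
+#
+#   void *malloc (size_t);
+#
+#   /* Treat a request for 0 bytes as a request for 1, like glibc.  */
+#   void *
+#   rpl_malloc (size_t n)
+#   {
+#     if (n == 0)
+#       n = 1;
+#     return malloc (n);
+#   }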
+
+
+# AC_FUNC_MBRTOWC
+# ---------------
+AN_FUNCTION([mbrtowc], [AC_FUNC_MBRTOWC])
+AC_DEFUN([AC_FUNC_MBRTOWC],
+[
+  AC_CACHE_CHECK([whether mbrtowc and mbstate_t are properly declared],
+    ac_cv_func_mbrtowc,
+    [AC_LINK_IFELSE(
+       [AC_LANG_PROGRAM(
+	    [[@%:@include <wchar.h>]],
+	    [[wchar_t wc;
+	      char const s[] = "";
+	      size_t n = 1;
+	      mbstate_t state;
+	      return ! (sizeof state && (mbrtowc) (&wc, s, n, &state));]])],
+       ac_cv_func_mbrtowc=yes,
+       ac_cv_func_mbrtowc=no)])
+  if test $ac_cv_func_mbrtowc = yes; then
+    AC_DEFINE([HAVE_MBRTOWC], 1,
+      [Define to 1 if mbrtowc and mbstate_t are properly declared.])
+  fi
+])
+
+
+# AC_FUNC_MEMCMP
+# --------------
+AC_DEFUN([AC_FUNC_MEMCMP],
+[AC_CACHE_CHECK([for working memcmp], ac_cv_func_memcmp_working,
+[AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT], [[
+  /* Some versions of memcmp are not 8-bit clean.  */
+  char c0 = '\100', c1 = '\200', c2 = '\201';
+  if (memcmp(&c0, &c2, 1) >= 0 || memcmp(&c1, &c2, 1) >= 0)
+    return 1;
+
+  /* The Next x86 OpenStep bug shows up only when comparing 16 bytes
+     or more and with at least one buffer not starting on a 4-byte boundary.
+     William Lewis provided this test program.   */
+  {
+    char foo[21];
+    char bar[21];
+    int i;
+    for (i = 0; i < 4; i++)
+      {
+	char *a = foo + i;
+	char *b = bar + i;
+	strcpy (a, "--------01111111");
+	strcpy (b, "--------10000000");
+	if (memcmp (a, b, 16) >= 0)
+	  return 1;
+      }
+    return 0;
+  }
+]])],
+	       [ac_cv_func_memcmp_working=yes],
+	       [ac_cv_func_memcmp_working=no],
+	       [ac_cv_func_memcmp_working=no])])
+test $ac_cv_func_memcmp_working = no && AC_LIBOBJ([memcmp])
+])# AC_FUNC_MEMCMP
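+
+# Note on consuming the replacement: AC_LIBOBJ([memcmp]), like the other
+# AC_LIBOBJ calls in this file, arranges for memcmp.$ac_objext to be
+# added to the LIBOBJS output variable, so the package ships a memcmp.c
+# and links the objects in, e.g. (illustrative fragment; prog is a
+# placeholder):
+#
+#   prog_LDADD = $(LIBOBJS)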
+
+
+# AC_FUNC_MKTIME
+# --------------
+AN_FUNCTION([mktime], [AC_FUNC_MKTIME])
+AC_DEFUN([AC_FUNC_MKTIME],
+[AC_REQUIRE([AC_HEADER_TIME])dnl
+AC_CHECK_HEADERS_ONCE(sys/time.h unistd.h)
+AC_CHECK_FUNCS_ONCE(alarm)
+AC_CACHE_CHECK([for working mktime], ac_cv_func_working_mktime,
+[AC_RUN_IFELSE([AC_LANG_SOURCE(
+[[/* Test program from Paul Eggert and Tony Leneis.  */
+#ifdef TIME_WITH_SYS_TIME
+# include <sys/time.h>
+# include <time.h>
+#else
+# ifdef HAVE_SYS_TIME_H
+#  include <sys/time.h>
+# else
+#  include <time.h>
+# endif
+#endif
+
+#include <limits.h>
+#include <stdlib.h>
+
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif
+
+#ifndef HAVE_ALARM
+# define alarm(X) /* empty */
+#endif
+
+/* Work around redefinition to rpl_putenv by other config tests.  */
+#undef putenv
+
+static time_t time_t_max;
+static time_t time_t_min;
+
+/* Values we'll use to set the TZ environment variable.  */
+static const char *tz_strings[] = {
+  (const char *) 0, "TZ=GMT0", "TZ=JST-9",
+  "TZ=EST+3EDT+2,M10.1.0/00:00:00,M2.3.0/00:00:00"
+};
+#define N_STRINGS (sizeof (tz_strings) / sizeof (tz_strings[0]))
+
+/* Return 0 if mktime fails to convert a date in the spring-forward gap.
+   Based on a problem report from Andreas Jaeger.  */
+static int
+spring_forward_gap ()
+{
+  /* glibc (up to about 1998-10-07) failed this test. */
+  struct tm tm;
+
+  /* Use the portable POSIX.1 specification "TZ=PST8PDT,M4.1.0,M10.5.0"
+     instead of "TZ=America/Vancouver" in order to detect the bug even
+     on systems that don't support the Olson extension, or don't have the
+     full zoneinfo tables installed.  */
+  putenv ((char*) "TZ=PST8PDT,M4.1.0,M10.5.0");
+
+  tm.tm_year = 98;
+  tm.tm_mon = 3;
+  tm.tm_mday = 5;
+  tm.tm_hour = 2;
+  tm.tm_min = 0;
+  tm.tm_sec = 0;
+  tm.tm_isdst = -1;
+  return mktime (&tm) != (time_t) -1;
+}
+
+static int
+mktime_test1 (time_t now)
+{
+  struct tm *lt;
+  return ! (lt = localtime (&now)) || mktime (lt) == now;
+}
+
+static int
+mktime_test (time_t now)
+{
+  return (mktime_test1 (now)
+	  && mktime_test1 ((time_t) (time_t_max - now))
+	  && mktime_test1 ((time_t) (time_t_min + now)));
+}
+
+static int
+irix_6_4_bug ()
+{
+  /* Based on code from Ariel Faigon.  */
+  struct tm tm;
+  tm.tm_year = 96;
+  tm.tm_mon = 3;
+  tm.tm_mday = 0;
+  tm.tm_hour = 0;
+  tm.tm_min = 0;
+  tm.tm_sec = 0;
+  tm.tm_isdst = -1;
+  mktime (&tm);
+  return tm.tm_mon == 2 && tm.tm_mday == 31;
+}
+
+static int
+bigtime_test (int j)
+{
+  struct tm tm;
+  time_t now;
+  tm.tm_year = tm.tm_mon = tm.tm_mday = tm.tm_hour = tm.tm_min = tm.tm_sec = j;
+  now = mktime (&tm);
+  if (now != (time_t) -1)
+    {
+      struct tm *lt = localtime (&now);
+      if (! (lt
+	     && lt->tm_year == tm.tm_year
+	     && lt->tm_mon == tm.tm_mon
+	     && lt->tm_mday == tm.tm_mday
+	     && lt->tm_hour == tm.tm_hour
+	     && lt->tm_min == tm.tm_min
+	     && lt->tm_sec == tm.tm_sec
+	     && lt->tm_yday == tm.tm_yday
+	     && lt->tm_wday == tm.tm_wday
+	     && ((lt->tm_isdst < 0 ? -1 : 0 < lt->tm_isdst)
+		  == (tm.tm_isdst < 0 ? -1 : 0 < tm.tm_isdst))))
+	return 0;
+    }
+  return 1;
+}
+
+static int
+year_2050_test ()
+{
+  /* The correct answer for 2050-02-01 00:00:00 in Pacific time,
+     ignoring leap seconds.  */
+  unsigned long int answer = 2527315200UL;
+
+  struct tm tm;
+  time_t t;
+  tm.tm_year = 2050 - 1900;
+  tm.tm_mon = 2 - 1;
+  tm.tm_mday = 1;
+  tm.tm_hour = tm.tm_min = tm.tm_sec = 0;
+  tm.tm_isdst = -1;
+
+  /* Use the portable POSIX.1 specification "TZ=PST8PDT,M4.1.0,M10.5.0"
+     instead of "TZ=America/Vancouver" in order to detect the bug even
+     on systems that don't support the Olson extension, or don't have the
+     full zoneinfo tables installed.  */
+  putenv ((char*) "TZ=PST8PDT,M4.1.0,M10.5.0");
+
+  t = mktime (&tm);
+
+  /* Check that the result is either a failure, or close enough
+     to the correct answer that we can assume the discrepancy is
+     due to leap seconds.  */
+  return (t == (time_t) -1
+	  || (0 < t && answer - 120 <= t && t <= answer + 120));
+}
+
+int
+main ()
+{
+  time_t t, delta;
+  int i, j;
+
+  /* This test makes some buggy mktime implementations loop.
+     Give up after 60 seconds; a mktime slower than that
+     isn't worth using anyway.  */
+  alarm (60);
+
+  for (;;)
+    {
+      t = (time_t_max << 1) + 1;
+      if (t <= time_t_max)
+	break;
+      time_t_max = t;
+    }
+  time_t_min = - ((time_t) ~ (time_t) 0 == (time_t) -1) - time_t_max;
+
+  delta = time_t_max / 997; /* a suitable prime number */
+  for (i = 0; i < N_STRINGS; i++)
+    {
+      if (tz_strings[i])
+	putenv ((char*) tz_strings[i]);
+
+      for (t = 0; t <= time_t_max - delta; t += delta)
+	if (! mktime_test (t))
+	  return 1;
+      if (! (mktime_test ((time_t) 1)
+	     && mktime_test ((time_t) (60 * 60))
+	     && mktime_test ((time_t) (60 * 60 * 24))))
+	return 1;
+
+      for (j = 1; ; j <<= 1)
+	if (! bigtime_test (j))
+	  return 1;
+	else if (INT_MAX / 2 < j)
+	  break;
+      if (! bigtime_test (INT_MAX))
+	return 1;
+    }
+  return ! (irix_6_4_bug () && spring_forward_gap () && year_2050_test ());
+}]])],
+	       [ac_cv_func_working_mktime=yes],
+	       [ac_cv_func_working_mktime=no],
+	       [ac_cv_func_working_mktime=no])])
+if test $ac_cv_func_working_mktime = no; then
+  AC_LIBOBJ([mktime])
+fi
+])# AC_FUNC_MKTIME
+
+
+# AU::AM_FUNC_MKTIME
+# ------------------
+AU_ALIAS([AM_FUNC_MKTIME], [AC_FUNC_MKTIME])
+
+
+# AC_FUNC_MMAP
+# ------------
+AN_FUNCTION([mmap], [AC_FUNC_MMAP])
+AC_DEFUN([AC_FUNC_MMAP],
+[AC_CHECK_HEADERS_ONCE([stdlib.h unistd.h sys/param.h])
+AC_CHECK_FUNCS([getpagesize])
+AC_CACHE_CHECK([for working mmap], [ac_cv_func_mmap_fixed_mapped],
+[AC_RUN_IFELSE([AC_LANG_SOURCE([AC_INCLUDES_DEFAULT]
+[[/* malloc might have been renamed as rpl_malloc. */
+#undef malloc
+
+/* Thanks to Mike Haertel and Jim Avera for this test.
+   Here is a matrix of mmap possibilities:
+	mmap private not fixed
+	mmap private fixed at somewhere currently unmapped
+	mmap private fixed at somewhere already mapped
+	mmap shared not fixed
+	mmap shared fixed at somewhere currently unmapped
+	mmap shared fixed at somewhere already mapped
+   For private mappings, we should verify that changes cannot be read()
+   back from the file, nor mmap's back from the file at a different
+   address.  (There have been systems where private was not correctly
+   implemented like the infamous i386 svr4.0, and systems where the
+   VM page cache was not coherent with the file system buffer cache
+   like early versions of FreeBSD and possibly contemporary NetBSD.)
+   For shared mappings, we should conversely verify that changes get
+   propagated back to all the places they're supposed to be.
+
+   Grep wants private fixed already mapped.
+   The main things grep needs to know about mmap are:
+   * does it exist and is it safe to write into the mmap'd area
+   * how to use it (BSD variants)  */
+
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#if !defined STDC_HEADERS && !defined HAVE_STDLIB_H
+char *malloc ();
+#endif
+
+/* This mess was copied from the GNU getpagesize.h.  */
+#ifndef HAVE_GETPAGESIZE
+# ifdef _SC_PAGESIZE
+#  define getpagesize() sysconf(_SC_PAGESIZE)
+# else /* no _SC_PAGESIZE */
+#  ifdef HAVE_SYS_PARAM_H
+#   include <sys/param.h>
+#   ifdef EXEC_PAGESIZE
+#    define getpagesize() EXEC_PAGESIZE
+#   else /* no EXEC_PAGESIZE */
+#    ifdef NBPG
+#     define getpagesize() NBPG * CLSIZE
+#     ifndef CLSIZE
+#      define CLSIZE 1
+#     endif /* no CLSIZE */
+#    else /* no NBPG */
+#     ifdef NBPC
+#      define getpagesize() NBPC
+#     else /* no NBPC */
+#      ifdef PAGESIZE
+#       define getpagesize() PAGESIZE
+#      endif /* PAGESIZE */
+#     endif /* no NBPC */
+#    endif /* no NBPG */
+#   endif /* no EXEC_PAGESIZE */
+#  else /* no HAVE_SYS_PARAM_H */
+#   define getpagesize() 8192	/* punt totally */
+#  endif /* no HAVE_SYS_PARAM_H */
+# endif /* no _SC_PAGESIZE */
+
+#endif /* no HAVE_GETPAGESIZE */
+
+int
+main ()
+{
+  char *data, *data2, *data3;
+  const char *cdata2;
+  int i, pagesize;
+  int fd, fd2;
+
+  pagesize = getpagesize ();
+
+  /* First, make a file with some known garbage in it. */
+  data = (char *) malloc (pagesize);
+  if (!data)
+    return 1;
+  for (i = 0; i < pagesize; ++i)
+    *(data + i) = rand ();
+  umask (0);
+  fd = creat ("conftest.mmap", 0600);
+  if (fd < 0)
+    return 2;
+  if (write (fd, data, pagesize) != pagesize)
+    return 3;
+  close (fd);
+
+  /* Next, check that the tail of a page is zero-filled.  File must have
+     non-zero length, otherwise we risk SIGBUS for entire page.  */
+  fd2 = open ("conftest.txt", O_RDWR | O_CREAT | O_TRUNC, 0600);
+  if (fd2 < 0)
+    return 4;
+  cdata2 = "";
+  if (write (fd2, cdata2, 1) != 1)
+    return 5;
+  data2 = (char *) mmap (0, pagesize, PROT_READ | PROT_WRITE, MAP_SHARED, fd2, 0L);
+  if (data2 == MAP_FAILED)
+    return 6;
+  for (i = 0; i < pagesize; ++i)
+    if (*(data2 + i))
+      return 7;
+  close (fd2);
+  if (munmap (data2, pagesize))
+    return 8;
+
+  /* Next, try to mmap the file at a fixed address which already has
+     something else allocated at it.  If we can, also make sure that
+     we see the same garbage.  */
+  fd = open ("conftest.mmap", O_RDWR);
+  if (fd < 0)
+    return 9;
+  if (data2 != mmap (data2, pagesize, PROT_READ | PROT_WRITE,
+		     MAP_PRIVATE | MAP_FIXED, fd, 0L))
+    return 10;
+  for (i = 0; i < pagesize; ++i)
+    if (*(data + i) != *(data2 + i))
+      return 11;
+
+  /* Finally, make sure that changes to the mapped area do not
+     percolate back to the file as seen by read().  (This is a bug on
+     some variants of i386 svr4.0.)  */
+  for (i = 0; i < pagesize; ++i)
+    *(data2 + i) = *(data2 + i) + 1;
+  data3 = (char *) malloc (pagesize);
+  if (!data3)
+    return 12;
+  if (read (fd, data3, pagesize) != pagesize)
+    return 13;
+  for (i = 0; i < pagesize; ++i)
+    if (*(data + i) != *(data3 + i))
+      return 14;
+  close (fd);
+  return 0;
+}]])],
+	       [ac_cv_func_mmap_fixed_mapped=yes],
+	       [ac_cv_func_mmap_fixed_mapped=no],
+	       [ac_cv_func_mmap_fixed_mapped=no])])
+if test $ac_cv_func_mmap_fixed_mapped = yes; then
+  AC_DEFINE([HAVE_MMAP], [1],
+	    [Define to 1 if you have a working `mmap' system call.])
+fi
+rm -f conftest.mmap conftest.txt
+])# AC_FUNC_MMAP
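+
+# Minimal usage sketch in C, gating the mmap path on the symbol above:
+#
+#   #ifdef HAVE_MMAP
+#     buf = mmap (0, length, PROT_READ, MAP_SHARED, fd, 0);
+#   #else
+#     buf = read_whole_file (fd, length);
+#   #endif
+#
+# (buf, length, fd and read_whole_file are placeholders; only the
+# HAVE_MMAP guard is the point.)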
+
+
+# AU::AC_MMAP
+# -----------
+AU_ALIAS([AC_MMAP], [AC_FUNC_MMAP])
+
+
+# AC_FUNC_OBSTACK
+# ---------------
+# Ensure obstack support.  Yeah, this is not exactly a `FUNC' check.
+AN_FUNCTION([obstack_init], [AC_FUNC_OBSTACK])
+AN_IDENTIFIER([obstack],    [AC_FUNC_OBSTACK])
+AC_DEFUN([AC_FUNC_OBSTACK],
+[AC_LIBSOURCES([obstack.h, obstack.c])dnl
+AC_CACHE_CHECK([for obstacks], ac_cv_func_obstack,
+[AC_LINK_IFELSE(
+    [AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT
+		      [@%:@include "obstack.h"]],
+		     [[struct obstack mem;
+		       @%:@define obstack_chunk_alloc malloc
+		       @%:@define obstack_chunk_free free
+		       obstack_init (&mem);
+		       obstack_free (&mem, 0);]])],
+		[ac_cv_func_obstack=yes],
+		[ac_cv_func_obstack=no])])
+if test $ac_cv_func_obstack = yes; then
+  AC_DEFINE(HAVE_OBSTACK, 1, [Define to 1 if libc includes obstacks.])
+else
+  AC_LIBOBJ(obstack)
+fi
+])# AC_FUNC_OBSTACK
+
+
+# AU::AM_FUNC_OBSTACK
+# -------------------
+AU_ALIAS([AM_FUNC_OBSTACK], [AC_FUNC_OBSTACK])
+
+
+
+# _AC_FUNC_REALLOC_IF(IF-WORKS, IF-NOT)
+# -------------------------------------
+# If `realloc (0, 0)' is properly handled, run IF-WORKS, otherwise, IF-NOT.
+AC_DEFUN([_AC_FUNC_REALLOC_IF],
+[AC_REQUIRE([AC_HEADER_STDC])dnl
+AC_CHECK_HEADERS(stdlib.h)
+AC_CACHE_CHECK([for GNU libc compatible realloc], ac_cv_func_realloc_0_nonnull,
+[AC_RUN_IFELSE(
+[AC_LANG_PROGRAM(
+[[#if defined STDC_HEADERS || defined HAVE_STDLIB_H
+# include <stdlib.h>
+#else
+char *realloc ();
+#endif
+]],
+		 [return ! realloc (0, 0);])],
+	       [ac_cv_func_realloc_0_nonnull=yes],
+	       [ac_cv_func_realloc_0_nonnull=no],
+	       [ac_cv_func_realloc_0_nonnull=no])])
+AS_IF([test $ac_cv_func_realloc_0_nonnull = yes], [$1], [$2])
+])# _AC_FUNC_REALLOC_IF
+
+
+# AC_FUNC_REALLOC
+# ---------------
+# Report whether `realloc (0, 0)' is properly handled, and replace realloc if
+# needed.
+AN_FUNCTION([realloc], [AC_FUNC_REALLOC])
+AC_DEFUN([AC_FUNC_REALLOC],
+[_AC_FUNC_REALLOC_IF(
+  [AC_DEFINE([HAVE_REALLOC], 1,
+	     [Define to 1 if your system has a GNU libc compatible `realloc'
+	      function, and to 0 otherwise.])],
+  [AC_DEFINE([HAVE_REALLOC], 0)
+   AC_LIBOBJ([realloc])
+   AC_DEFINE([realloc], [rpl_realloc],
+      [Define to rpl_realloc if the replacement function should be used.])])
+])# AC_FUNC_REALLOC
+
+
+# AC_FUNC_SELECT_ARGTYPES
+# -----------------------
+# Determine the correct type to be passed to each of the `select'
+# function's arguments, and define those types in `SELECT_TYPE_ARG1',
+# `SELECT_TYPE_ARG234', and `SELECT_TYPE_ARG5'.
+AC_DEFUN([AC_FUNC_SELECT_ARGTYPES],
+[AC_CHECK_HEADERS(sys/select.h sys/socket.h)
+AC_CACHE_CHECK([types of arguments for select],
+[ac_cv_func_select_args],
+[for ac_arg234 in 'fd_set *' 'int *' 'void *'; do
+ for ac_arg1 in 'int' 'size_t' 'unsigned long int' 'unsigned int'; do
+  for ac_arg5 in 'struct timeval *' 'const struct timeval *'; do
+   AC_COMPILE_IFELSE(
+       [AC_LANG_PROGRAM(
+[AC_INCLUDES_DEFAULT
+#ifdef HAVE_SYS_SELECT_H
+# include <sys/select.h>
+#endif
+#ifdef HAVE_SYS_SOCKET_H
+# include <sys/socket.h>
+#endif
+],
+			[extern int select ($ac_arg1,
+					    $ac_arg234, $ac_arg234, $ac_arg234,
+					    $ac_arg5);])],
+	      [ac_cv_func_select_args="$ac_arg1,$ac_arg234,$ac_arg5"; break 3])
+  done
+ done
+done
+# Provide a safe default value.
+: "${ac_cv_func_select_args=int,int *,struct timeval *}"
+])
+ac_save_IFS=$IFS; IFS=','
+set dummy `echo "$ac_cv_func_select_args" | sed 's/\*/\*/g'`
+IFS=$ac_save_IFS
+shift
+AC_DEFINE_UNQUOTED(SELECT_TYPE_ARG1, $[1],
+		   [Define to the type of arg 1 for `select'.])
+AC_DEFINE_UNQUOTED(SELECT_TYPE_ARG234, ($[2]),
+		   [Define to the type of args 2, 3 and 4 for `select'.])
+AC_DEFINE_UNQUOTED(SELECT_TYPE_ARG5, ($[3]),
+		   [Define to the type of arg 5 for `select'.])
+rm -f conftest*
+])# AC_FUNC_SELECT_ARGTYPES
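+
+# Minimal usage sketch in C: SELECT_TYPE_ARG234 and SELECT_TYPE_ARG5 are
+# defined above with their own parentheses, so they are written directly
+# in front of the argument, while SELECT_TYPE_ARG1 needs an explicit
+# cast:
+#
+#   n = select ((SELECT_TYPE_ARG1) (maxfd + 1),
+#               SELECT_TYPE_ARG234 &readfds,
+#               SELECT_TYPE_ARG234 NULL,
+#               SELECT_TYPE_ARG234 NULL,
+#               SELECT_TYPE_ARG5 &timeout);
+#
+# (n, maxfd, readfds and timeout are placeholders for the caller's
+# values.)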
+
+
+# AC_FUNC_SETPGRP
+# ---------------
+AC_DEFUN([AC_FUNC_SETPGRP],
+[AC_CACHE_CHECK(whether setpgrp takes no argument, ac_cv_func_setpgrp_void,
+[AC_RUN_IFELSE(
+[AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+[/* If this system has a BSD-style setpgrp which takes arguments,
+  setpgrp(1, 1) will fail with ESRCH and return -1, in that case
+  exit successfully. */
+  return setpgrp (1,1) != -1;])],
+	       [ac_cv_func_setpgrp_void=no],
+	       [ac_cv_func_setpgrp_void=yes],
+	       [AC_MSG_ERROR([cannot check setpgrp when cross compiling])])])
+if test $ac_cv_func_setpgrp_void = yes; then
+  AC_DEFINE(SETPGRP_VOID, 1,
+	    [Define to 1 if the `setpgrp' function takes no argument.])
+fi
+])# AC_FUNC_SETPGRP
+
+
+# _AC_FUNC_STAT(STAT | LSTAT)
+# ---------------------------
+# Determine whether stat or lstat has the bug that it succeeds when
+# given the zero-length file name argument.  The stat and lstat from
+# SunOS4.1.4 and the Hurd (as of 1998-11-01) do this.
+#
+# If it does, then define HAVE_STAT_EMPTY_STRING_BUG (or
+# HAVE_LSTAT_EMPTY_STRING_BUG) and arrange to compile the wrapper
+# function.
+m4_define([_AC_FUNC_STAT],
+[AC_REQUIRE([AC_FUNC_LSTAT_FOLLOWS_SLASHED_SYMLINK])dnl
+AC_CACHE_CHECK([whether $1 accepts an empty string],
+	       [ac_cv_func_$1_empty_string_bug],
+[AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+[[struct stat sbuf;
+  return $1 ("", &sbuf) == 0;]])],
+	    [ac_cv_func_$1_empty_string_bug=no],
+	    [ac_cv_func_$1_empty_string_bug=yes],
+	    [ac_cv_func_$1_empty_string_bug=yes])])
+if test $ac_cv_func_$1_empty_string_bug = yes; then
+  AC_LIBOBJ([$1])
+  AC_DEFINE_UNQUOTED(AS_TR_CPP([HAVE_$1_EMPTY_STRING_BUG]), 1,
+		     [Define to 1 if `$1' has the bug that it succeeds when
+		      given the zero-length file name argument.])
+fi
+])# _AC_FUNC_STAT
+
+
+# AC_FUNC_STAT & AC_FUNC_LSTAT
+# ----------------------------
+AC_DEFUN([AC_FUNC_STAT],  [_AC_FUNC_STAT(stat)])
+AC_DEFUN([AC_FUNC_LSTAT], [_AC_FUNC_STAT(lstat)])
+
+
+# _AC_LIBOBJ_STRTOD
+# -----------------
+m4_define([_AC_LIBOBJ_STRTOD],
+[AC_LIBOBJ(strtod)
+AC_CHECK_FUNC(pow)
+if test $ac_cv_func_pow = no; then
+  AC_CHECK_LIB(m, pow,
+	       [POW_LIB=-lm],
+	       [AC_MSG_WARN([cannot find library containing definition of pow])])
+fi
+])# _AC_LIBOBJ_STRTOD
+
+
+# AC_FUNC_STRTOD
+# --------------
+AN_FUNCTION([strtod], [AC_FUNC_STRTOD])
+AC_DEFUN([AC_FUNC_STRTOD],
+[AC_SUBST(POW_LIB)dnl
+AC_CACHE_CHECK(for working strtod, ac_cv_func_strtod,
+[AC_RUN_IFELSE([AC_LANG_SOURCE([[
+]AC_INCLUDES_DEFAULT[
+#ifndef strtod
+double strtod ();
+#endif
+int
+main()
+{
+  {
+    /* Some versions of Linux strtod mis-parse strings with leading '+'.  */
+    char *string = " +69";
+    char *term;
+    double value;
+    value = strtod (string, &term);
+    if (value != 69 || term != (string + 4))
+      return 1;
+  }
+
+  {
+    /* Under Solaris 2.4, strtod returns the wrong value for the
+       terminating character under some conditions.  */
+    char *string = "NaN";
+    char *term;
+    strtod (string, &term);
+    if (term != string && *(term - 1) == 0)
+      return 1;
+  }
+  return 0;
+}
+]])],
+	       ac_cv_func_strtod=yes,
+	       ac_cv_func_strtod=no,
+	       ac_cv_func_strtod=no)])
+if test $ac_cv_func_strtod = no; then
+  _AC_LIBOBJ_STRTOD
+fi
+])
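+
+# Usage sketch: the replacement strtod.c may need pow(), which is why
+# POW_LIB is substituted above; a package typically links it alongside
+# the replacement objects, e.g. (illustrative fragment; prog is a
+# placeholder):
+#
+#   prog_LDADD = $(LIBOBJS) $(POW_LIB)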
+
+
+# AC_FUNC_STRTOLD
+# ---------------
+AC_DEFUN([AC_FUNC_STRTOLD],
+[
+  AC_CACHE_CHECK([whether strtold conforms to C99],
+    [ac_cv_func_strtold],
+    [AC_COMPILE_IFELSE(
+       [AC_LANG_PROGRAM(
+	  [[/* On HP-UX before 11.23, strtold returns a struct instead of
+		long double.  Reject implementations like that, by requiring
+		compatibility with the C99 prototype.  */
+#	     include <stdlib.h>
+	     static long double (*p) (char const *, char **) = strtold;
+	     static long double
+	     test (char const *nptr, char **endptr)
+	     {
+	       long double r;
+	       r = strtold (nptr, endptr);
+	       return r;
+	     }]],
+	   [[return test ("1.0", NULL) != 1 || p ("1.0", NULL) != 1;]])],
+       [ac_cv_func_strtold=yes],
+       [ac_cv_func_strtold=no])])
+  if test $ac_cv_func_strtold = yes; then
+    AC_DEFINE([HAVE_STRTOLD], 1,
+      [Define to 1 if strtold exists and conforms to C99.])
+  fi
+])# AC_FUNC_STRTOLD
+
+
+# AU::AM_FUNC_STRTOD
+# ------------------
+AU_ALIAS([AM_FUNC_STRTOD], [AC_FUNC_STRTOD])
+
+
+# AC_FUNC_STRERROR_R
+# ------------------
+AN_FUNCTION([strerror_r], [AC_FUNC_STRERROR_R])
+AC_DEFUN([AC_FUNC_STRERROR_R],
+[AC_CHECK_DECLS([strerror_r])
+AC_CHECK_FUNCS([strerror_r])
+AC_CACHE_CHECK([whether strerror_r returns char *],
+	       ac_cv_func_strerror_r_char_p,
+   [
+    ac_cv_func_strerror_r_char_p=no
+    if test $ac_cv_have_decl_strerror_r = yes; then
+      AC_COMPILE_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+	[[
+	  char buf[100];
+	  char x = *strerror_r (0, buf, sizeof buf);
+	  char *p = strerror_r (0, buf, sizeof buf);
+	  return !p || x;
+	]])],
+			ac_cv_func_strerror_r_char_p=yes)
+    else
+      # strerror_r is not declared.  Choose between
+      # systems that have relatively inaccessible declarations for the
+      # function.  BeOS and DEC UNIX 4.0 fall in this category, but the
+      # former has a strerror_r that returns char*, while the latter
+      # has a strerror_r that returns `int'.
+      # This test should segfault on the DEC system.
+      AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT
+	extern char *strerror_r ();],
+	[[char buf[100];
+	  char x = *strerror_r (0, buf, sizeof buf);
+	  return ! isalpha (x);]])],
+		    ac_cv_func_strerror_r_char_p=yes, , :)
+    fi
+  ])
+if test $ac_cv_func_strerror_r_char_p = yes; then
+  AC_DEFINE([STRERROR_R_CHAR_P], 1,
+	    [Define to 1 if strerror_r returns char *.])
+fi
+])# AC_FUNC_STRERROR_R
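+
+# Minimal usage sketch in C, combining HAVE_STRERROR_R (from
+# AC_CHECK_FUNCS above) with STRERROR_R_CHAR_P (describe_errno is a
+# placeholder wrapper name):
+#
+#   static const char *
+#   describe_errno (int err, char *buf, size_t buflen)
+#   {
+#   #if HAVE_STRERROR_R
+#   # if STRERROR_R_CHAR_P
+#     return strerror_r (err, buf, buflen);          /* GNU flavor */
+#   # else
+#     return strerror_r (err, buf, buflen) == 0      /* POSIX flavor */
+#            ? buf : "unknown error";
+#   # endif
+#   #else
+#     return strerror (err);
+#   #endif
+#   }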
+
+
+# AC_FUNC_STRFTIME
+# ----------------
+AC_DEFUN([AC_FUNC_STRFTIME],
+[AC_CHECK_FUNCS(strftime, [],
+[# strftime is in -lintl on SCO UNIX.
+AC_CHECK_LIB(intl, strftime,
+	     [AC_DEFINE(HAVE_STRFTIME)
+LIBS="-lintl $LIBS"])])dnl
+])# AC_FUNC_STRFTIME
+
+
+# AC_FUNC_STRNLEN
+# ---------------
+AN_FUNCTION([strnlen], [AC_FUNC_STRNLEN])
+AC_DEFUN([AC_FUNC_STRNLEN],
+[AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])dnl
+AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles
+AC_CACHE_CHECK([for working strnlen], ac_cv_func_strnlen_working,
+[AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT], [[
+#define S "foobar"
+#define S_LEN (sizeof S - 1)
+
+  /* At least one implementation is buggy: that of AIX 4.3 would
+     give strnlen (S, 1) == 3.  */
+
+  int i;
+  for (i = 0; i < S_LEN + 1; ++i)
+    {
+      int expected = i <= S_LEN ? i : S_LEN;
+      if (strnlen (S, i) != expected)
+	return 1;
+    }
+  return 0;
+]])],
+	       [ac_cv_func_strnlen_working=yes],
+	       [ac_cv_func_strnlen_working=no],
+	       [# Guess no on AIX systems, yes otherwise.
+		case "$host_os" in
+		  aix*) ac_cv_func_strnlen_working=no;;
+		  *)    ac_cv_func_strnlen_working=yes;;
+		esac])])
+test $ac_cv_func_strnlen_working = no && AC_LIBOBJ([strnlen])
+])# AC_FUNC_STRNLEN
+
+
+# AC_FUNC_SETVBUF_REVERSED
+# ------------------------
+AC_DEFUN([AC_FUNC_SETVBUF_REVERSED],
+[AC_DIAGNOSE([obsolete],
+[The macro `$0' is obsolete.  Remove it and all references to SETVBUF_REVERSED.])dnl
+AC_CACHE_VAL([ac_cv_func_setvbuf_reversed], [ac_cv_func_setvbuf_reversed=no])
+])# AC_FUNC_SETVBUF_REVERSED
+
+
+# AU::AC_SETVBUF_REVERSED
+# -----------------------
+AU_ALIAS([AC_SETVBUF_REVERSED], [AC_FUNC_SETVBUF_REVERSED])
+
+
+# AC_FUNC_STRCOLL
+# ---------------
+AN_FUNCTION([strcoll], [AC_FUNC_STRCOLL])
+AC_DEFUN([AC_FUNC_STRCOLL],
+[AC_CACHE_CHECK(for working strcoll, ac_cv_func_strcoll_works,
+[AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+  [[return (strcoll ("abc", "def") >= 0 ||
+	 strcoll ("ABC", "DEF") >= 0 ||
+	 strcoll ("123", "456") >= 0)]])],
+	       ac_cv_func_strcoll_works=yes,
+	       ac_cv_func_strcoll_works=no,
+	       ac_cv_func_strcoll_works=no)])
+if test $ac_cv_func_strcoll_works = yes; then
+  AC_DEFINE(HAVE_STRCOLL, 1,
+	    [Define to 1 if you have the `strcoll' function and it is properly
+	     defined.])
+fi
+])# AC_FUNC_STRCOLL
+
+
+# AU::AC_STRCOLL
+# --------------
+AU_ALIAS([AC_STRCOLL], [AC_FUNC_STRCOLL])
+
+
+# AC_FUNC_UTIME_NULL
+# ------------------
+AC_DEFUN([AC_FUNC_UTIME_NULL],
+[AC_CHECK_HEADERS_ONCE(utime.h)
+AC_CACHE_CHECK(whether utime accepts a null argument, ac_cv_func_utime_null,
+[rm -f conftest.data; >conftest.data
+# Sequent interprets utime(file, 0) to mean use start of epoch.  Wrong.
+AC_RUN_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT
+	       #ifdef HAVE_UTIME_H
+	       # include <utime.h>
+	       #endif],
+[[struct stat s, t;
+  return ! (stat ("conftest.data", &s) == 0
+	    && utime ("conftest.data", 0) == 0
+	    && stat ("conftest.data", &t) == 0
+	    && t.st_mtime >= s.st_mtime
+	    && t.st_mtime - s.st_mtime < 120);]])],
+	      ac_cv_func_utime_null=yes,
+	      ac_cv_func_utime_null=no,
+	      ac_cv_func_utime_null='guessing yes')])
+if test "x$ac_cv_func_utime_null" != xno; then
+  ac_cv_func_utime_null=yes
+  AC_DEFINE(HAVE_UTIME_NULL, 1,
+	    [Define to 1 if `utime(file, NULL)' sets file's timestamp to the
+	     present.])
+fi
+rm -f conftest.data
+])# AC_FUNC_UTIME_NULL
+
+
+# AU::AC_UTIME_NULL
+# -----------------
+AU_ALIAS([AC_UTIME_NULL], [AC_FUNC_UTIME_NULL])
+
+
+# AC_FUNC_FORK
+# ------------
+AN_FUNCTION([fork],  [AC_FUNC_FORK])
+AN_FUNCTION([vfork], [AC_FUNC_FORK])
+AC_DEFUN([AC_FUNC_FORK],
+[AC_REQUIRE([AC_TYPE_PID_T])dnl
+AC_CHECK_HEADERS(vfork.h)
+AC_CHECK_FUNCS(fork vfork)
+if test "x$ac_cv_func_fork" = xyes; then
+  _AC_FUNC_FORK
+else
+  ac_cv_func_fork_works=$ac_cv_func_fork
+fi
+if test "x$ac_cv_func_fork_works" = xcross; then
+  case $host in
+    *-*-amigaos* | *-*-msdosdjgpp*)
+      # Override, as these systems have only a dummy fork() stub
+      ac_cv_func_fork_works=no
+      ;;
+    *)
+      ac_cv_func_fork_works=yes
+      ;;
+  esac
+  AC_MSG_WARN([result $ac_cv_func_fork_works guessed because of cross compilation])
+fi
+ac_cv_func_vfork_works=$ac_cv_func_vfork
+if test "x$ac_cv_func_vfork" = xyes; then
+  _AC_FUNC_VFORK
+fi;
+if test "x$ac_cv_func_fork_works" = xcross; then
+  ac_cv_func_vfork_works=$ac_cv_func_vfork
+  AC_MSG_WARN([result $ac_cv_func_vfork_works guessed because of cross compilation])
+fi
+
+if test "x$ac_cv_func_vfork_works" = xyes; then
+  AC_DEFINE(HAVE_WORKING_VFORK, 1, [Define to 1 if `vfork' works.])
+else
+  AC_DEFINE(vfork, fork, [Define as `fork' if `vfork' does not work.])
+fi
+if test "x$ac_cv_func_fork_works" = xyes; then
+  AC_DEFINE(HAVE_WORKING_FORK, 1, [Define to 1 if `fork' works.])
+fi
+])# AC_FUNC_FORK
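+
+# Minimal usage sketch in C: because vfork is #defined to fork above when
+# it is missing or broken, callers may use vfork unconditionally once the
+# headers are in place:
+#
+#   #include <config.h>
+#   #include <unistd.h>
+#   #ifdef HAVE_VFORK_H
+#   # include <vfork.h>
+#   #endif
+#
+#   /* inside some function */
+#   pid_t pid = vfork ();  /* becomes fork () unless HAVE_WORKING_VFORK */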
+
+
+# _AC_FUNC_FORK
+# -------------
+AC_DEFUN([_AC_FUNC_FORK],
+  [AC_CACHE_CHECK(for working fork, ac_cv_func_fork_works,
+    [AC_RUN_IFELSE(
+      [AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+	[
+	  /* By Ruediger Kuhlmann. */
+	  return fork () < 0;
+	])],
+      [ac_cv_func_fork_works=yes],
+      [ac_cv_func_fork_works=no],
+      [ac_cv_func_fork_works=cross])])]
+)# _AC_FUNC_FORK
+
+
+# _AC_FUNC_VFORK
+# --------------
+AC_DEFUN([_AC_FUNC_VFORK],
+[AC_CACHE_CHECK(for working vfork, ac_cv_func_vfork_works,
+[AC_RUN_IFELSE([AC_LANG_SOURCE([[/* Thanks to Paul Eggert for this test.  */
+]AC_INCLUDES_DEFAULT[
+#include <sys/wait.h>
+#ifdef HAVE_VFORK_H
+# include <vfork.h>
+#endif
+/* On some sparc systems, changes by the child to local and incoming
+   argument registers are propagated back to the parent.  The compiler
+   is told about this with #include <vfork.h>, but some compilers
+   (e.g. gcc -O) don't grok <vfork.h>.  Test for this by using a
+   static variable whose address is put into a register that is
+   clobbered by the vfork.  */
+static void
+#ifdef __cplusplus
+sparc_address_test (int arg)
+# else
+sparc_address_test (arg) int arg;
+#endif
+{
+  static pid_t child;
+  if (!child) {
+    child = vfork ();
+    if (child < 0) {
+      perror ("vfork");
+      _exit(2);
+    }
+    if (!child) {
+      arg = getpid();
+      write(-1, "", 0);
+      _exit (arg);
+    }
+  }
+}
+
+int
+main ()
+{
+  pid_t parent = getpid ();
+  pid_t child;
+
+  sparc_address_test (0);
+
+  child = vfork ();
+
+  if (child == 0) {
+    /* Here is another test for sparc vfork register problems.  This
+       test uses lots of local variables, at least as many local
+       variables as main has allocated so far including compiler
+       temporaries.  4 locals are enough for gcc 1.40.3 on a Solaris
+       4.1.3 sparc, but we use 8 to be safe.  A buggy compiler should
+       reuse the register of parent for one of the local variables,
+       since it will think that parent can't possibly be used any more
+       in this routine.  Assigning to the local variable will thus
+       munge parent in the parent process.  */
+    pid_t
+      p = getpid(), p1 = getpid(), p2 = getpid(), p3 = getpid(),
+      p4 = getpid(), p5 = getpid(), p6 = getpid(), p7 = getpid();
+    /* Convince the compiler that p..p7 are live; otherwise, it might
+       use the same hardware register for all 8 local variables.  */
+    if (p != p1 || p != p2 || p != p3 || p != p4
+	|| p != p5 || p != p6 || p != p7)
+      _exit(1);
+
+    /* On some systems (e.g. IRIX 3.3), vfork doesn't separate parent
+       from child file descriptors.  If the child closes a descriptor
+       before it execs or exits, this munges the parent's descriptor
+       as well.  Test for this by closing stdout in the child.  */
+    _exit(close(fileno(stdout)) != 0);
+  } else {
+    int status;
+    struct stat st;
+
+    while (wait(&status) != child)
+      ;
+    return (
+	 /* Was there some problem with vforking?  */
+	 child < 0
+
+	 /* Did the child fail?  (This shouldn't happen.)  */
+	 || status
+
+	 /* Did the vfork/compiler bug occur?  */
+	 || parent != getpid()
+
+	 /* Did the file descriptor bug occur?  */
+	 || fstat(fileno(stdout), &st) != 0
+	 );
+  }
+}]])],
+	    [ac_cv_func_vfork_works=yes],
+	    [ac_cv_func_vfork_works=no],
+	    [ac_cv_func_vfork_works=cross])])
+])# _AC_FUNC_VFORK
+
+
+# AU::AC_FUNC_VFORK
+# -----------------
+AU_ALIAS([AC_FUNC_VFORK], [AC_FUNC_FORK])
+
+# AU::AC_VFORK
+# ------------
+AU_ALIAS([AC_VFORK], [AC_FUNC_FORK])
+
+
+# AC_FUNC_VPRINTF
+# ---------------
+# Why the heck is it that _doprnt does not define HAVE__DOPRNT???
+# That would be the logical name!
+AC_DEFUN([AC_FUNC_VPRINTF],
+[AC_CHECK_FUNCS(vprintf, []
+[AC_CHECK_FUNC(_doprnt,
+	       [AC_DEFINE(HAVE_DOPRNT, 1,
+			  [Define to 1 if you don't have `vprintf' but do have
+			  `_doprnt.'])])])
+])
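+
+# Minimal usage sketch in C (HAVE_VPRINTF comes from the AC_CHECK_FUNCS
+# call above, HAVE_DOPRNT from the fallback define; fmt and args stand
+# for a format string and a va_list):
+#
+#   #ifdef HAVE_VPRINTF
+#     vfprintf (stderr, fmt, args);
+#   #elif defined HAVE_DOPRNT
+#     _doprnt (fmt, args, stderr);
+#   #endif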
+
+
+# AU::AC_VPRINTF
+# --------------
+AU_ALIAS([AC_VPRINTF], [AC_FUNC_VPRINTF])
+
+
+# AC_FUNC_WAIT3
+# -------------
+# Don't bother too hard maintaining this macro, as it is obsolete.
+# We don't AU define it, since we don't have any alternative to propose,
+# any invocation should be removed, and the code adjusted.
+AN_FUNCTION([wait3], [AC_FUNC_WAIT3])
+AC_DEFUN([AC_FUNC_WAIT3],
+[AC_DIAGNOSE([obsolete],
+[$0: `wait3' has been removed from POSIX.
+Remove this `AC_FUNC_WAIT3' and adjust your code to use `waitpid' instead.])dnl
+AC_CACHE_CHECK([for wait3 that fills in rusage],
+	       [ac_cv_func_wait3_rusage],
+[AC_RUN_IFELSE([AC_LANG_SOURCE(
+[AC_INCLUDES_DEFAULT[
+#include <sys/time.h>
+#include <sys/resource.h>
+#include <sys/wait.h>
+/* HP-UX has wait3 but does not fill in rusage at all.  */
+int
+main ()
+{
+  struct rusage r;
+  int i;
+  /* Use a field that we can force nonzero --
+     voluntary context switches.
+     For systems like NeXT and OSF/1 that don't set it,
+     also use the system CPU time.  And page faults (I/O) for Linux.  */
+  r.ru_nvcsw = 0;
+  r.ru_stime.tv_sec = 0;
+  r.ru_stime.tv_usec = 0;
+  r.ru_majflt = r.ru_minflt = 0;
+  switch (fork ())
+    {
+    case 0: /* Child.  */
+      sleep(1); /* Give up the CPU.  */
+      _exit(0);
+      break;
+    case -1: /* What can we do?  */
+      _exit(0);
+      break;
+    default: /* Parent.  */
+      wait3(&i, 0, &r);
+      /* Avoid "text file busy" from rm on fast HP-UX machines.  */
+      sleep(2);
+      return (r.ru_nvcsw == 0 && r.ru_majflt == 0 && r.ru_minflt == 0
+	      && r.ru_stime.tv_sec == 0 && r.ru_stime.tv_usec == 0);
+    }
+}]])],
+	       [ac_cv_func_wait3_rusage=yes],
+	       [ac_cv_func_wait3_rusage=no],
+	       [ac_cv_func_wait3_rusage=no])])
+if test $ac_cv_func_wait3_rusage = yes; then
+  AC_DEFINE(HAVE_WAIT3, 1,
+	    [Define to 1 if you have the `wait3' system call.
+	     Deprecated, you should no longer depend upon `wait3'.])
+fi
+])# AC_FUNC_WAIT3
+
+
+# AU::AC_WAIT3
+# ------------
+AU_ALIAS([AC_WAIT3], [AC_FUNC_WAIT3])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/general.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/general.m4
new file mode 100644
index 0000000..59c47ab
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/general.m4
@@ -0,0 +1,3082 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Parameterized macros.
+m4_define([_AC_COPYRIGHT_YEARS], [
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+Foundation, Inc.
+])
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+## ---------------- ##
+## The diversions.  ##
+## ---------------- ##
+
+
+# We heavily use m4's diversions both for the initializations and for
+# required macros (see AC_REQUIRE), because in both cases we have to
+# emit, high up in `configure', something which is discovered late.
+#
+# KILL is only used to suppress output.
+#
+# The layers of `configure'.  We let m4 undivert them by itself, when
+# it reaches the end of `configure.ac'.
+#
+# - BINSH
+#   #! /bin/sh
+# - HEADER-REVISION
+#   Sent by AC_REVISION
+# - HEADER-COMMENT
+#   Purpose of the script.
+# - HEADER-COPYRIGHT
+#   Copyright notice(s)
+# - M4SH-INIT
+#   Initialization of bottom layers.
+#
+# - DEFAULTS
+#   early initializations (defaults)
+# - PARSE_ARGS
+#   initialization code, option handling loop.
+#
+# - HELP_BEGIN
+#   Handling `configure --help'.
+# - HELP_CANON
+#   Help msg for AC_CANONICAL_*
+# - HELP_ENABLE
+#   Help msg from AC_ARG_ENABLE.
+# - HELP_WITH
+#   Help msg from AC_ARG_WITH.
+# - HELP_VAR
+#   Help msg from AC_ARG_VAR.
+# - HELP_VAR_END
+#   A small paragraph on the use of the variables.
+# - HELP_END
+#   Tail of the handling of --help.
+#
+# - VERSION_BEGIN
+#   Head of the handling of --version.
+# - VERSION_FSF
+#   FSF copyright notice for --version.
+# - VERSION_USER
+#   User copyright notice for --version.
+# - VERSION_END
+#   Tail of the handling of --version.
+#
+# - SHELL_FN
+#   Shell functions.
+#
+# - INIT_PREPARE
+#   Tail of initialization code.
+#
+# - BODY
+#   the tests and output code
+#
+
+
+# _m4_divert(DIVERSION-NAME)
+# --------------------------
+# Convert a diversion name into its number.  Otherwise, return
+# DIVERSION-NAME which is supposed to be an actual diversion number.
+# Of course it would be nicer to use m4_case here, instead of zillions
+# of little macros, but it then takes twice as long to run `autoconf'!
+#
+# From M4sugar:
+#    -1. KILL
+# 10000. GROW
+#
+# From M4sh:
+#    0. BINSH
+#    1. HEADER-REVISION
+#    2. HEADER-COMMENT
+#    3. HEADER-COPYRIGHT
+#    4. M4SH-INIT
+# 1000. BODY
+m4_define([_m4_divert(DEFAULTS)],        10)
+m4_define([_m4_divert(PARSE_ARGS)],      20)
+
+m4_define([_m4_divert(HELP_BEGIN)],     100)
+m4_define([_m4_divert(HELP_CANON)],     101)
+m4_define([_m4_divert(HELP_ENABLE)],    102)
+m4_define([_m4_divert(HELP_WITH)],      103)
+m4_define([_m4_divert(HELP_VAR)],       104)
+m4_define([_m4_divert(HELP_VAR_END)],   105)
+m4_define([_m4_divert(HELP_END)],       106)
+
+m4_define([_m4_divert(VERSION_BEGIN)],  200)
+m4_define([_m4_divert(VERSION_FSF)],    201)
+m4_define([_m4_divert(VERSION_USER)],   202)
+m4_define([_m4_divert(VERSION_END)],    203)
+
+m4_define([_m4_divert(SHELL_FN)],       250)
+
+m4_define([_m4_divert(INIT_PREPARE)],   300)
+
+
+
+# AC_DIVERT_PUSH(DIVERSION-NAME)
+# AC_DIVERT_POP
+# ------------------------------
+m4_copy([m4_divert_push],[AC_DIVERT_PUSH])
+m4_copy([m4_divert_pop], [AC_DIVERT_POP])
+
+
+
+## ------------------------------------ ##
+## Defining/requiring Autoconf macros.  ##
+## ------------------------------------ ##
+
+
+# AC_DEFUN(NAME, EXPANSION)
+# AC_DEFUN_ONCE(NAME, EXPANSION)
+# AC_BEFORE(THIS-MACRO-NAME, CALLED-MACRO-NAME)
+# AC_REQUIRE(STRING)
+# AC_PROVIDE(MACRO-NAME)
+# AC_PROVIDE_IFELSE(MACRO-NAME, IF-PROVIDED, IF-NOT-PROVIDED)
+# -----------------------------------------------------------
+m4_copy([m4_defun],       [AC_DEFUN])
+m4_copy([m4_defun_once],  [AC_DEFUN_ONCE])
+m4_copy([m4_before],      [AC_BEFORE])
+m4_copy([m4_require],     [AC_REQUIRE])
+m4_copy([m4_provide],     [AC_PROVIDE])
+m4_copy([m4_provide_if],  [AC_PROVIDE_IFELSE])
+
+
+# AC_OBSOLETE(THIS-MACRO-NAME, [SUGGESTION])
+# ------------------------------------------
+m4_define([AC_OBSOLETE],
+[AC_DIAGNOSE([obsolete], [$1 is obsolete$2])])
+
+
+
+## ----------------------------- ##
+## Implementing shell functions. ##
+## ----------------------------- ##
+
+
+# AC_REQUIRE_SHELL_FN(NAME-TO-CHECK, COMMENT, BODY, [DIVERSION = SHELL_FN]
+# ------------------------------------------------------------------------
+# Same as AS_REQUIRE_SHELL_FN except that the default diversion comes
+# later in the script (speeding up configure --help and --version).
+AC_DEFUN([AC_REQUIRE_SHELL_FN],
+[AS_REQUIRE_SHELL_FN([$1], [$2], [$3], m4_default_quoted([$4], [SHELL_FN]))])
+
+
+
+## ----------------------------- ##
+## Implementing Autoconf loops.  ##
+## ----------------------------- ##
+
+
+# AU::AC_FOREACH(VARIABLE, LIST, EXPRESSION)
+# ------------------------------------------
+AU_DEFUN([AC_FOREACH], [[m4_foreach_w($@)]])
+AC_DEFUN([AC_FOREACH], [m4_foreach_w($@)dnl
+AC_DIAGNOSE([obsolete], [The macro `AC_FOREACH' is obsolete.
+You should run autoupdate.])])
+
+
+
+## ----------------------------------- ##
+## Helping macros to display strings.  ##
+## ----------------------------------- ##
+
+
+# AU::AC_HELP_STRING(LHS, RHS, [COLUMN])
+# --------------------------------------
+AU_ALIAS([AC_HELP_STRING], [AS_HELP_STRING])
+
+
+
+## ---------------------------------------------- ##
+## Information on the package being Autoconf'ed.  ##
+## ---------------------------------------------- ##
+
+
+# It is suggested that the macros in this section appear before
+# AC_INIT in `configure.ac'.  Nevertheless, this is just stylistic,
+# and from the implementation point of view, AC_INIT *must* be expanded
+# beforehand: it puts data in diversions which must appear before the
+# data provided by the macros of this section.
+
+# The solution is to require AC_INIT in each of these macros.  AC_INIT
+# has the needed magic so that it can't be expanded twice.
+
+# _AC_INIT_LITERAL(STRING)
+# ------------------------
+# Reject STRING if it contains newline, or if it cannot be used as-is
+# in single-quoted strings, double-quoted strings, and quoted and
+# unquoted here-docs.
+m4_define([_AC_INIT_LITERAL],
+[m4_if(m4_index(m4_translit([[$1]], [
+""], ['']), ['])AS_LITERAL_HEREDOC_IF([$1], [-]), [-1-], [],
+  [m4_warn([syntax], [AC_INIT: not a literal: $1])])])
+
+# _AC_INIT_PACKAGE(PACKAGE-NAME, VERSION, BUG-REPORT, [TARNAME], [URL])
+# ---------------------------------------------------------------------
+m4_define([_AC_INIT_PACKAGE],
+[_AC_INIT_LITERAL([$1])
+_AC_INIT_LITERAL([$2])
+_AC_INIT_LITERAL([$3])
+m4_ifndef([AC_PACKAGE_NAME],
+	  [m4_define([AC_PACKAGE_NAME],     [$1])])
+m4_ifndef([AC_PACKAGE_TARNAME],
+	  [m4_define([AC_PACKAGE_TARNAME],
+		     m4_default([$4],
+				[m4_bpatsubst(m4_tolower(m4_bpatsubst([[$1]],
+								     [GNU ])),
+				 [[^_abcdefghijklmnopqrstuvwxyz0123456789]],
+				 [-])]))])
+m4_ifndef([AC_PACKAGE_VERSION],
+	  [m4_define([AC_PACKAGE_VERSION],   [$2])])
+m4_ifndef([AC_PACKAGE_STRING],
+	  [m4_define([AC_PACKAGE_STRING],    [$1 $2])])
+m4_ifndef([AC_PACKAGE_BUGREPORT],
+	  [m4_define([AC_PACKAGE_BUGREPORT], [$3])])
+m4_ifndef([AC_PACKAGE_URL],
+	  [m4_define([AC_PACKAGE_URL],
+  m4_if([$5], [], [m4_if(m4_index([$1], [GNU ]), [0],
+	  [[http://www.gnu.org/software/]m4_defn([AC_PACKAGE_TARNAME])[/]])],
+	[[$5]]))])
+])
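+
+# Illustrative expansion (argument values made up for the example): when
+# AC_INIT passes its arguments here as
+#
+#   AC_INIT([GNU Example], [1.0], [bug-example@example.org])
+#
+# the defaults computed above become
+#
+#   AC_PACKAGE_NAME      GNU Example
+#   AC_PACKAGE_TARNAME   example   (leading `GNU ' stripped, lowercased,
+#                                   remaining odd characters mapped to -)
+#   AC_PACKAGE_VERSION   1.0
+#   AC_PACKAGE_STRING    GNU Example 1.0
+#   AC_PACKAGE_BUGREPORT bug-example@example.org
+#   AC_PACKAGE_URL       http://www.gnu.org/software/example/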
+
+
+# AC_COPYRIGHT(TEXT, [VERSION-DIVERSION = VERSION_USER],
+#              [FILTER = m4_newline])
+# ------------------------------------------------------
+# Emit TEXT, a copyright notice, in the top of `configure' and in
+# --version output.  Macros in TEXT are evaluated once.  Process
+# the --version output through FILTER (m4_newline, m4_do, and
+# m4_copyright_condense are common filters).
+m4_define([AC_COPYRIGHT],
+[AS_COPYRIGHT([$1])[]]dnl
+[m4_divert_text(m4_default_quoted([$2], [VERSION_USER]),
+[m4_default([$3], [m4_newline])([$1])])])# AC_COPYRIGHT
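+#
+# Typical use in `configure.ac' (the notice text is a placeholder):
+#
+#   AC_COPYRIGHT([Copyright (C) 2016 Example Project Authors])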
+
+
+# AC_REVISION(REVISION-INFO)
+# --------------------------
+# The second quote in the translit is just to cope with font-lock-mode
+# which sees the opening of a string.
+m4_define([AC_REVISION],
+[m4_divert_text([HEADER-REVISION],
+		[@%:@ From __file__ m4_translit([$1], [$""]).])dnl
+])
+
+
+
+
+## ---------------------------------------- ##
+## Requirements over the Autoconf version.  ##
+## ---------------------------------------- ##
+
+
+# AU::AC_PREREQ(VERSION)
+# ----------------------
+# Update this `AC_PREREQ' statement to require the current version of
+# Autoconf.  But fail if ever this autoupdate is too old.
+#
+# Note that `m4_defn([m4_PACKAGE_VERSION])' below are expanded before
+# calling `AU_DEFUN', i.e., it is hard coded.  Otherwise it would be
+# quite complex for autoupdate to import the value of
+# `m4_PACKAGE_VERSION'.  We could `AU_DEFUN' `m4_PACKAGE_VERSION', but
+# this would replace all its occurrences with the current version of
+# Autoconf, which is certainly not what the user intended.
+AU_DEFUN([AC_PREREQ],
+[m4_version_prereq([$1])[]dnl
+[AC_PREREQ(]]m4_dquote(m4_dquote(m4_defn([m4_PACKAGE_VERSION])))[[)]])
+
+
+# AC_PREREQ(VERSION)
+# ------------------
+# Complain and exit if the Autoconf version is less than VERSION.
+m4_undefine([AC_PREREQ])
+m4_copy([m4_version_prereq], [AC_PREREQ])
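+#
+# In `configure.ac' this is normally one of the very first lines; the
+# version number shown here is only an example:
+#
+#   AC_PREREQ([2.69])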
+
+
+# AC_AUTOCONF_VERSION
+# -------------------
+# The current version of Autoconf parsing this file.
+m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])
+
+
+
+
+
+## ---------------- ##
+## Initialization.  ##
+## ---------------- ##
+
+
+# All the following macros are used by AC_INIT.  Ideally, they should
+# be presented in the order in which they are output.  Please help us
+# sort them, or at least do not augment the entropy.
+
+
+# _AC_INIT_NOTICE
+# ---------------
+# Provide useful headers; override the HEADER-COMMENT created by M4sh.
+m4_define([_AC_INIT_NOTICE],
+[m4_cleardivert([HEADER-COMMENT])]dnl
+[m4_divert_text([HEADER-COMMENT],
+[@%:@ Guess values for system-dependent variables and create Makefiles.
+@%:@ Generated by m4_PACKAGE_STRING[]dnl
+m4_ifset([AC_PACKAGE_STRING], [ for AC_PACKAGE_STRING]).])
+
+m4_ifset([AC_PACKAGE_BUGREPORT],
+	 [m4_divert_text([HEADER-COMMENT],
+			 [@%:@
+@%:@ Report bugs to <AC_PACKAGE_BUGREPORT>.])])
+])
+
+
+# _AC_INIT_COPYRIGHT
+# ------------------
+# We dump to VERSION_FSF to make sure we are inserted before the
+# user copyrights, and after the setup of the --version handling.
+m4_define([_AC_INIT_COPYRIGHT],
+[AC_COPYRIGHT(m4_defn([_AC_COPYRIGHT_YEARS]), [VERSION_FSF], [
+m4_copyright_condense])dnl
+AC_COPYRIGHT(
+[This configure script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it.],
+	     [VERSION_FSF], [m4_echo])])
+
+
+# File Descriptors
+# ----------------
+# Set up the file descriptors used by `configure'.
+# File descriptor usage:
+# 0 standard input (/dev/null)
+# 1 file creation
+# 2 errors and warnings
+# AS_MESSAGE_LOG_FD compiler messages saved in config.log
+# AS_MESSAGE_FD checking for... messages and results
+# AS_ORIGINAL_STDIN_FD original standard input (still open)
+#
+# stdin is /dev/null because checks that run programs may
+# inadvertently run interactive ones, which would stop configuration
+# until someone typed an EOF.
+m4_define([AS_MESSAGE_FD], 6)
+m4_define([AS_ORIGINAL_STDIN_FD], 7)
+# That's how they used to be named.
+AU_ALIAS([AC_FD_CC],  [AS_MESSAGE_LOG_FD])
+AU_ALIAS([AC_FD_MSG], [AS_MESSAGE_FD])
+
+
+# _AC_INIT_DEFAULTS
+# -----------------
+# Values whose defaults can be set from `configure.ac'.
+# `/bin/machine' is used in `glibcbug'.  The others are used in config.*
+m4_define([_AC_INIT_DEFAULTS],
+[m4_divert_push([DEFAULTS])dnl
+
+test -n "$DJDIR" || exec AS_ORIGINAL_STDIN_FD<&0 </dev/null
+exec AS_MESSAGE_FD>&1
+
+# Name of the host.
+# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
+# so uname gets run too.
+ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
+
+#
+# Initializations.
+#
+ac_default_prefix=/usr/local
+ac_clean_files=
+ac_config_libobj_dir=.
+LIB@&t@OBJS=
+cross_compiling=no
+subdirs=
+MFLAGS=
+MAKEFLAGS=
+AC_SUBST([SHELL])dnl
+AC_SUBST([PATH_SEPARATOR])dnl
+
+# Identity of this package.
+AC_SUBST([PACKAGE_NAME],
+	 [m4_ifdef([AC_PACKAGE_NAME],      ['AC_PACKAGE_NAME'])])dnl
+AC_SUBST([PACKAGE_TARNAME],
+	 [m4_ifdef([AC_PACKAGE_TARNAME],   ['AC_PACKAGE_TARNAME'])])dnl
+AC_SUBST([PACKAGE_VERSION],
+	 [m4_ifdef([AC_PACKAGE_VERSION],   ['AC_PACKAGE_VERSION'])])dnl
+AC_SUBST([PACKAGE_STRING],
+	 [m4_ifdef([AC_PACKAGE_STRING],    ['AC_PACKAGE_STRING'])])dnl
+AC_SUBST([PACKAGE_BUGREPORT],
+	 [m4_ifdef([AC_PACKAGE_BUGREPORT], ['AC_PACKAGE_BUGREPORT'])])dnl
+AC_SUBST([PACKAGE_URL],
+	 [m4_ifdef([AC_PACKAGE_URL],       ['AC_PACKAGE_URL'])])dnl
+
+m4_divert_pop([DEFAULTS])dnl
+m4_wrap_lifo([m4_divert_text([DEFAULTS],
+[ac_subst_vars='m4_set_dump([_AC_SUBST_VARS], m4_newline)'
+ac_subst_files='m4_ifdef([_AC_SUBST_FILES], [m4_defn([_AC_SUBST_FILES])])'
+ac_user_opts='
+enable_option_checking
+m4_ifdef([_AC_USER_OPTS], [m4_defn([_AC_USER_OPTS])
+])'
+m4_ifdef([_AC_PRECIOUS_VARS],
+  [_AC_ARG_VAR_STORE[]dnl
+   _AC_ARG_VAR_VALIDATE[]dnl
+   ac_precious_vars='m4_defn([_AC_PRECIOUS_VARS])'])
+m4_ifdef([_AC_LIST_SUBDIRS],
+  [ac_subdirs_all='m4_defn([_AC_LIST_SUBDIRS])'])dnl
+])])dnl
+])# _AC_INIT_DEFAULTS
+
+
+# AC_PREFIX_DEFAULT(PREFIX)
+# -------------------------
+AC_DEFUN([AC_PREFIX_DEFAULT],
+[m4_divert_text([DEFAULTS], [ac_default_prefix=$1])])
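+#
+# Example use in `configure.ac' (the prefix shown is illustrative):
+#
+#   AC_PREFIX_DEFAULT([/opt/example])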
+
+
+# AC_PREFIX_PROGRAM(PROGRAM)
+# --------------------------
+# Guess the value for the `prefix' variable by looking for
+# the argument program along PATH and taking the parent of its directory.
+# Example: if the argument is `gcc' and we find /usr/local/gnu/bin/gcc,
+# set `prefix' to /usr/local/gnu.
+# This comes too late to find a site file based on the prefix,
+# and it might use a cached value for the path.
+# No big loss, I think, since most configures don't use this macro anyway.
+AC_DEFUN([AC_PREFIX_PROGRAM],
+[if test "x$prefix" = xNONE; then
+dnl We reimplement AC_MSG_CHECKING (mostly) to avoid the ... in the middle.
+  _AS_ECHO_N([checking for prefix by ])
+  AC_PATH_PROG(ac_prefix_program, [$1])
+  if test -n "$ac_prefix_program"; then
+    prefix=`AS_DIRNAME(["$ac_prefix_program"])`
+    prefix=`AS_DIRNAME(["$prefix"])`
+  fi
+fi
+])# AC_PREFIX_PROGRAM
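+#
+# Continuing the example in the comment above, a package that wants its
+# default prefix to follow an installed `gcc' would write:
+#
+#   AC_PREFIX_PROGRAM([gcc])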
+
+
+# AC_CONFIG_SRCDIR([UNIQUE-FILE-IN-SOURCE-DIR])
+# ---------------------------------------------
+# UNIQUE-FILE-IN-SOURCE-DIR is a file name unique to this package,
+# relative to the directory that configure is in, which we can look
+# for to find out if srcdir is correct.
+AC_DEFUN([AC_CONFIG_SRCDIR],
+[m4_divert_text([DEFAULTS], [ac_unique_file="$1"])])
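+#
+# Example use in `configure.ac' (the file name stands for any file that
+# really exists in the package):
+#
+#   AC_CONFIG_SRCDIR([src/main.c])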
+
+
+# _AC_INIT_DIRCHECK
+# -----------------
+# Set ac_pwd, and sanity-check it and the source and installation directories.
+#
+# (This macro is AC_REQUIREd by _AC_INIT_SRCDIR, so it has to be AC_DEFUNed.)
+#
+AC_DEFUN([_AC_INIT_DIRCHECK],
+[m4_divert_push([PARSE_ARGS])dnl
+
+ac_pwd=`pwd` && test -n "$ac_pwd" &&
+ac_ls_di=`ls -di .` &&
+ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
+  AC_MSG_ERROR([working directory cannot be determined])
+test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
+  AC_MSG_ERROR([pwd does not report name of working directory])
+
+m4_divert_pop([PARSE_ARGS])dnl
+])# _AC_INIT_DIRCHECK
+
+# _AC_INIT_SRCDIR
+# ---------------
+# Compute `srcdir' based on `$ac_unique_file'.
+#
+# (We have to AC_DEFUN it, since we use AC_REQUIRE.)
+#
+AC_DEFUN([_AC_INIT_SRCDIR],
+[AC_REQUIRE([_AC_INIT_DIRCHECK])dnl
+m4_divert_push([PARSE_ARGS])dnl
+
+# Find the source files, if location was not specified.
+if test -z "$srcdir"; then
+  ac_srcdir_defaulted=yes
+  # Try the directory containing this script, then the parent directory.
+  ac_confdir=`AS_DIRNAME(["$as_myself"])`
+  srcdir=$ac_confdir
+  if test ! -r "$srcdir/$ac_unique_file"; then
+    srcdir=..
+  fi
+else
+  ac_srcdir_defaulted=no
+fi
+if test ! -r "$srcdir/$ac_unique_file"; then
+  test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
+  AC_MSG_ERROR([cannot find sources ($ac_unique_file) in $srcdir])
+fi
+ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
+ac_abs_confdir=`(
+	cd "$srcdir" && test -r "./$ac_unique_file" || AC_MSG_ERROR([$ac_msg])
+	pwd)`
+# When building in place, set srcdir=.
+if test "$ac_abs_confdir" = "$ac_pwd"; then
+  srcdir=.
+fi
+# Remove unnecessary trailing slashes from srcdir.
+# Double slashes in file names in object file debugging info
+# mess up M-x gdb in Emacs.
+case $srcdir in
+*/) srcdir=`expr "X$srcdir" : 'X\(.*[[^/]]\)' \| "X$srcdir" : 'X\(.*\)'`;;
+esac
+m4_divert_pop([PARSE_ARGS])dnl
+])# _AC_INIT_SRCDIR
+
+
+# _AC_INIT_PARSE_ARGS
+# -------------------
+m4_define([_AC_INIT_PARSE_ARGS],
+[m4_divert_push([PARSE_ARGS])dnl
+
+# Initialize some variables set by options.
+ac_init_help=
+ac_init_version=false
+ac_unrecognized_opts=
+ac_unrecognized_sep=
+# The variables have the same names as the options, with
+# dashes changed to underlines.
+cache_file=/dev/null
+AC_SUBST(exec_prefix, NONE)dnl
+no_create=
+no_recursion=
+AC_SUBST(prefix, NONE)dnl
+program_prefix=NONE
+program_suffix=NONE
+AC_SUBST(program_transform_name, [s,x,x,])dnl
+silent=
+site=
+srcdir=
+verbose=
+x_includes=NONE
+x_libraries=NONE
+
+# Installation directory options.
+# These are left unexpanded so users can "make install exec_prefix=/foo"
+# and all the variables that are supposed to be based on exec_prefix
+# by default will actually change.
+# Use braces instead of parens because sh, perl, etc. also accept them.
+# (The list follows the same order as the GNU Coding Standards.)
+AC_SUBST([bindir],         ['${exec_prefix}/bin'])dnl
+AC_SUBST([sbindir],        ['${exec_prefix}/sbin'])dnl
+AC_SUBST([libexecdir],     ['${exec_prefix}/libexec'])dnl
+AC_SUBST([datarootdir],    ['${prefix}/share'])dnl
+AC_SUBST([datadir],        ['${datarootdir}'])dnl
+AC_SUBST([sysconfdir],     ['${prefix}/etc'])dnl
+AC_SUBST([sharedstatedir], ['${prefix}/com'])dnl
+AC_SUBST([localstatedir],  ['${prefix}/var'])dnl
+AC_SUBST([includedir],     ['${prefix}/include'])dnl
+AC_SUBST([oldincludedir],  ['/usr/include'])dnl
+AC_SUBST([docdir],         [m4_ifset([AC_PACKAGE_TARNAME],
+				     ['${datarootdir}/doc/${PACKAGE_TARNAME}'],
+				     ['${datarootdir}/doc/${PACKAGE}'])])dnl
+AC_SUBST([infodir],        ['${datarootdir}/info'])dnl
+AC_SUBST([htmldir],        ['${docdir}'])dnl
+AC_SUBST([dvidir],         ['${docdir}'])dnl
+AC_SUBST([pdfdir],         ['${docdir}'])dnl
+AC_SUBST([psdir],          ['${docdir}'])dnl
+AC_SUBST([libdir],         ['${exec_prefix}/lib'])dnl
+AC_SUBST([localedir],      ['${datarootdir}/locale'])dnl
+AC_SUBST([mandir],         ['${datarootdir}/man'])dnl
+
+ac_prev=
+ac_dashdash=
+for ac_option
+do
+  # If the previous option needs an argument, assign it.
+  if test -n "$ac_prev"; then
+    eval $ac_prev=\$ac_option
+    ac_prev=
+    continue
+  fi
+
+  case $ac_option in
+  *=?*) ac_optarg=`expr "X$ac_option" : '[[^=]]*=\(.*\)'` ;;
+  *=)   ac_optarg= ;;
+  *)    ac_optarg=yes ;;
+  esac
+
+  # Accept the important Cygnus configure options, so we can diagnose typos.
+
+  case $ac_dashdash$ac_option in
+  --)
+    ac_dashdash=yes ;;
+
+  -bindir | --bindir | --bindi | --bind | --bin | --bi)
+    ac_prev=bindir ;;
+  -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
+    bindir=$ac_optarg ;;
+
+  -build | --build | --buil | --bui | --bu)
+    ac_prev=build_alias ;;
+  -build=* | --build=* | --buil=* | --bui=* | --bu=*)
+    build_alias=$ac_optarg ;;
+
+  -cache-file | --cache-file | --cache-fil | --cache-fi \
+  | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
+    ac_prev=cache_file ;;
+  -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
+  | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
+    cache_file=$ac_optarg ;;
+
+  --config-cache | -C)
+    cache_file=config.cache ;;
+
+  -datadir | --datadir | --datadi | --datad)
+    ac_prev=datadir ;;
+  -datadir=* | --datadir=* | --datadi=* | --datad=*)
+    datadir=$ac_optarg ;;
+
+  -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
+  | --dataroo | --dataro | --datar)
+    ac_prev=datarootdir ;;
+  -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
+  | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
+    datarootdir=$ac_optarg ;;
+
+  _AC_INIT_PARSE_ENABLE([disable])
+
+  -docdir | --docdir | --docdi | --doc | --do)
+    ac_prev=docdir ;;
+  -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
+    docdir=$ac_optarg ;;
+
+  -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
+    ac_prev=dvidir ;;
+  -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
+    dvidir=$ac_optarg ;;
+
+  _AC_INIT_PARSE_ENABLE([enable])
+
+  -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
+  | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
+  | --exec | --exe | --ex)
+    ac_prev=exec_prefix ;;
+  -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
+  | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
+  | --exec=* | --exe=* | --ex=*)
+    exec_prefix=$ac_optarg ;;
+
+  -gas | --gas | --ga | --g)
+    # Obsolete; use --with-gas.
+    with_gas=yes ;;
+
+  -help | --help | --hel | --he | -h)
+    ac_init_help=long ;;
+  -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
+    ac_init_help=recursive ;;
+  -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
+    ac_init_help=short ;;
+
+  -host | --host | --hos | --ho)
+    ac_prev=host_alias ;;
+  -host=* | --host=* | --hos=* | --ho=*)
+    host_alias=$ac_optarg ;;
+
+  -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
+    ac_prev=htmldir ;;
+  -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
+  | --ht=*)
+    htmldir=$ac_optarg ;;
+
+  -includedir | --includedir | --includedi | --included | --include \
+  | --includ | --inclu | --incl | --inc)
+    ac_prev=includedir ;;
+  -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
+  | --includ=* | --inclu=* | --incl=* | --inc=*)
+    includedir=$ac_optarg ;;
+
+  -infodir | --infodir | --infodi | --infod | --info | --inf)
+    ac_prev=infodir ;;
+  -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
+    infodir=$ac_optarg ;;
+
+  -libdir | --libdir | --libdi | --libd)
+    ac_prev=libdir ;;
+  -libdir=* | --libdir=* | --libdi=* | --libd=*)
+    libdir=$ac_optarg ;;
+
+  -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
+  | --libexe | --libex | --libe)
+    ac_prev=libexecdir ;;
+  -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
+  | --libexe=* | --libex=* | --libe=*)
+    libexecdir=$ac_optarg ;;
+
+  -localedir | --localedir | --localedi | --localed | --locale)
+    ac_prev=localedir ;;
+  -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
+    localedir=$ac_optarg ;;
+
+  -localstatedir | --localstatedir | --localstatedi | --localstated \
+  | --localstate | --localstat | --localsta | --localst | --locals)
+    ac_prev=localstatedir ;;
+  -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
+  | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
+    localstatedir=$ac_optarg ;;
+
+  -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
+    ac_prev=mandir ;;
+  -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
+    mandir=$ac_optarg ;;
+
+  -nfp | --nfp | --nf)
+    # Obsolete; use --without-fp.
+    with_fp=no ;;
+
+  -no-create | --no-create | --no-creat | --no-crea | --no-cre \
+  | --no-cr | --no-c | -n)
+    no_create=yes ;;
+
+  -no-recursion | --no-recursion | --no-recursio | --no-recursi \
+  | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
+    no_recursion=yes ;;
+
+  -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
+  | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
+  | --oldin | --oldi | --old | --ol | --o)
+    ac_prev=oldincludedir ;;
+  -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
+  | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
+  | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
+    oldincludedir=$ac_optarg ;;
+
+  -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
+    ac_prev=prefix ;;
+  -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
+    prefix=$ac_optarg ;;
+
+  -program-prefix | --program-prefix | --program-prefi | --program-pref \
+  | --program-pre | --program-pr | --program-p)
+    ac_prev=program_prefix ;;
+  -program-prefix=* | --program-prefix=* | --program-prefi=* \
+  | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
+    program_prefix=$ac_optarg ;;
+
+  -program-suffix | --program-suffix | --program-suffi | --program-suff \
+  | --program-suf | --program-su | --program-s)
+    ac_prev=program_suffix ;;
+  -program-suffix=* | --program-suffix=* | --program-suffi=* \
+  | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
+    program_suffix=$ac_optarg ;;
+
+  -program-transform-name | --program-transform-name \
+  | --program-transform-nam | --program-transform-na \
+  | --program-transform-n | --program-transform- \
+  | --program-transform | --program-transfor \
+  | --program-transfo | --program-transf \
+  | --program-trans | --program-tran \
+  | --progr-tra | --program-tr | --program-t)
+    ac_prev=program_transform_name ;;
+  -program-transform-name=* | --program-transform-name=* \
+  | --program-transform-nam=* | --program-transform-na=* \
+  | --program-transform-n=* | --program-transform-=* \
+  | --program-transform=* | --program-transfor=* \
+  | --program-transfo=* | --program-transf=* \
+  | --program-trans=* | --program-tran=* \
+  | --progr-tra=* | --program-tr=* | --program-t=*)
+    program_transform_name=$ac_optarg ;;
+
+  -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
+    ac_prev=pdfdir ;;
+  -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
+    pdfdir=$ac_optarg ;;
+
+  -psdir | --psdir | --psdi | --psd | --ps)
+    ac_prev=psdir ;;
+  -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
+    psdir=$ac_optarg ;;
+
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil)
+    silent=yes ;;
+
+  -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
+    ac_prev=sbindir ;;
+  -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
+  | --sbi=* | --sb=*)
+    sbindir=$ac_optarg ;;
+
+  -sharedstatedir | --sharedstatedir | --sharedstatedi \
+  | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
+  | --sharedst | --shareds | --shared | --share | --shar \
+  | --sha | --sh)
+    ac_prev=sharedstatedir ;;
+  -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
+  | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
+  | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
+  | --sha=* | --sh=*)
+    sharedstatedir=$ac_optarg ;;
+
+  -site | --site | --sit)
+    ac_prev=site ;;
+  -site=* | --site=* | --sit=*)
+    site=$ac_optarg ;;
+
+  -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
+    ac_prev=srcdir ;;
+  -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
+    srcdir=$ac_optarg ;;
+
+  -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
+  | --syscon | --sysco | --sysc | --sys | --sy)
+    ac_prev=sysconfdir ;;
+  -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
+  | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
+    sysconfdir=$ac_optarg ;;
+
+  -target | --target | --targe | --targ | --tar | --ta | --t)
+    ac_prev=target_alias ;;
+  -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
+    target_alias=$ac_optarg ;;
+
+  -v | -verbose | --verbose | --verbos | --verbo | --verb)
+    verbose=yes ;;
+
+  -version | --version | --versio | --versi | --vers | -V)
+    ac_init_version=: ;;
+
+  _AC_INIT_PARSE_ENABLE([with])
+
+  _AC_INIT_PARSE_ENABLE([without])
+
+  --x)
+    # Obsolete; use --with-x.
+    with_x=yes ;;
+
+  -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
+  | --x-incl | --x-inc | --x-in | --x-i)
+    ac_prev=x_includes ;;
+  -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
+  | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
+    x_includes=$ac_optarg ;;
+
+  -x-libraries | --x-libraries | --x-librarie | --x-librari \
+  | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
+    ac_prev=x_libraries ;;
+  -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
+  | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
+    x_libraries=$ac_optarg ;;
+
+  -*) AC_MSG_ERROR([unrecognized option: `$ac_option'
+Try `$[0] --help' for more information])
+    ;;
+
+  *=*)
+    ac_envvar=`expr "x$ac_option" : 'x\([[^=]]*\)='`
+    # Reject names that are not valid shell variable names.
+    case $ac_envvar in #(
+      '' | [[0-9]]* | *[[!_$as_cr_alnum]]* )
+      AC_MSG_ERROR([invalid variable name: `$ac_envvar']) ;;
+    esac
+    eval $ac_envvar=\$ac_optarg
+    export $ac_envvar ;;
+
+  *)
+    # FIXME: should be removed in autoconf 3.0.
+    AC_MSG_WARN([you should use --build, --host, --target])
+    expr "x$ac_option" : "[.*[^-._$as_cr_alnum]]" >/dev/null &&
+      AC_MSG_WARN([invalid host type: $ac_option])
+    : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
+    ;;
+
+  esac
+done
+
+if test -n "$ac_prev"; then
+  ac_option=--`echo $ac_prev | sed 's/_/-/g'`
+  AC_MSG_ERROR([missing argument to $ac_option])
+fi
+
+if test -n "$ac_unrecognized_opts"; then
+  case $enable_option_checking in
+    no) ;;
+    fatal) AC_MSG_ERROR([unrecognized options: $ac_unrecognized_opts]) ;;
+    *)     AC_MSG_WARN( [unrecognized options: $ac_unrecognized_opts]) ;;
+  esac
+fi
+
+# Check all directory arguments for consistency.
+for ac_var in	exec_prefix prefix bindir sbindir libexecdir datarootdir \
+		datadir sysconfdir sharedstatedir localstatedir includedir \
+		oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
+		libdir localedir mandir
+do
+  eval ac_val=\$$ac_var
+  # Remove trailing slashes.
+  case $ac_val in
+    */ )
+      ac_val=`expr "X$ac_val" : 'X\(.*[[^/]]\)' \| "X$ac_val" : 'X\(.*\)'`
+      eval $ac_var=\$ac_val;;
+  esac
+  # Be sure to have absolute directory names.
+  case $ac_val in
+    [[\\/$]]* | ?:[[\\/]]* )  continue;;
+    NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
+  esac
+  AC_MSG_ERROR([expected an absolute directory name for --$ac_var: $ac_val])
+done
+
+# There might be people who depend on the old broken behavior: `$host'
+# used to hold the argument of --host etc.
+# FIXME: To remove some day.
+build=$build_alias
+host=$host_alias
+target=$target_alias
+
+# FIXME: To remove some day.
+if test "x$host_alias" != x; then
+  if test "x$build_alias" = x; then
+    cross_compiling=maybe
+    AC_MSG_WARN([if you wanted to set the --build type, don't use --host.
+    If a cross compiler is detected then cross compile mode will be used])
+  elif test "x$build_alias" != "x$host_alias"; then
+    cross_compiling=yes
+  fi
+fi
+
+ac_tool_prefix=
+test -n "$host_alias" && ac_tool_prefix=$host_alias-
+
+test "$silent" = yes && exec AS_MESSAGE_FD>/dev/null
+
+m4_divert_pop([PARSE_ARGS])dnl
+])# _AC_INIT_PARSE_ARGS
+
+
+# _AC_INIT_PARSE_ENABLE(OPTION-NAME)
+# ----------------------------------
+# A trivial front-end for _AC_INIT_PARSE_ENABLE2.
+#
+m4_define([_AC_INIT_PARSE_ENABLE],
+[m4_bmatch([$1], [^with],
+	   [_AC_INIT_PARSE_ENABLE2([$1], [with])],
+	   [_AC_INIT_PARSE_ENABLE2([$1], [enable])])])
+
+
+# _AC_INIT_PARSE_ENABLE2(OPTION-NAME, POSITIVE-NAME)
+# --------------------------------------------------
+# Handle an `--enable' or a `--with' option.
+#
+# OPTION-NAME is `enable', `disable', `with', or `without'.
+# POSITIVE-NAME is the corresponding positive variant, i.e. `enable' or `with'.
+#
+# Positive variant of the option is recognized by the condition
+#	OPTION-NAME == POSITIVE-NAME .
+#
+m4_define([_AC_INIT_PARSE_ENABLE2],
+[-$1-* | --$1-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*$1-\(m4_if([$1], [$2], [[[^=]]], [.])*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : "[.*[^-+._$as_cr_alnum]]" >/dev/null &&
+      AC_MSG_ERROR(
+	[invalid ]m4_if([$2], [with], [package], [feature])[ name: $ac_useropt])
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`AS_ECHO(["$ac_useropt"]) | sed 's/[[-+.]]/_/g'`
+    case $ac_user_opts in
+      *"
+"$2_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--$1-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval $2_$ac_useropt=m4_if([$1], [$2], [\$ac_optarg], [no]) ;;dnl
+])
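+#
+# A worked illustration of the resulting shell assignments (the option
+# names are chosen only for the example):
+#
+#   --enable-threads=posix         ->  enable_threads=posix
+#   --disable-dependency-tracking  ->  enable_dependency_tracking=no
+#   --with-gnu-ld                  ->  with_gnu_ld=yes
+#   --without-zlib                 ->  with_zlib=no
+#
+# Note that `-', `+' and `.' in the option name are mapped to `_' in the
+# corresponding shell variable.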
+
+
+# _AC_INIT_HELP
+# -------------
+# Handle the `configure --help' message.
+m4_define([_AC_INIT_HELP],
+[m4_divert_push([HELP_BEGIN])dnl
+
+#
+# Report the --help message.
+#
+if test "$ac_init_help" = "long"; then
+  # Omit some internal or obsolete options to make the list less imposing.
+  # This message is too long to be a string in the A/UX 3.1 sh.
+  cat <<_ACEOF
+\`configure' configures m4_ifset([AC_PACKAGE_STRING],
+			[AC_PACKAGE_STRING],
+			[this package]) to adapt to many kinds of systems.
+
+Usage: $[0] [[OPTION]]... [[VAR=VALUE]]...
+
+[To assign environment variables (e.g., CC, CFLAGS...), specify them as
+VAR=VALUE.  See below for descriptions of some of the useful variables.
+
+Defaults for the options are specified in brackets.
+
+Configuration:
+  -h, --help              display this help and exit
+      --help=short        display options specific to this package
+      --help=recursive    display the short help of all the included packages
+  -V, --version           display version information and exit
+  -q, --quiet, --silent   do not print \`checking ...' messages
+      --cache-file=FILE   cache test results in FILE [disabled]
+  -C, --config-cache      alias for \`--cache-file=config.cache'
+  -n, --no-create         do not create output files
+      --srcdir=DIR        find the sources in DIR [configure dir or \`..']
+
+Installation directories:
+]AS_HELP_STRING([--prefix=PREFIX],
+  [install architecture-independent files in PREFIX [$ac_default_prefix]])
+AS_HELP_STRING([--exec-prefix=EPREFIX],
+  [install architecture-dependent files in EPREFIX [PREFIX]])[
+
+By default, \`make install' will install all the files in
+\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc.  You can specify
+an installation prefix other than \`$ac_default_prefix' using \`--prefix',
+for instance \`--prefix=\$HOME'.
+
+For better control, use the options below.
+
+Fine tuning of the installation directories:
+  --bindir=DIR            user executables [EPREFIX/bin]
+  --sbindir=DIR           system admin executables [EPREFIX/sbin]
+  --libexecdir=DIR        program executables [EPREFIX/libexec]
+  --sysconfdir=DIR        read-only single-machine data [PREFIX/etc]
+  --sharedstatedir=DIR    modifiable architecture-independent data [PREFIX/com]
+  --localstatedir=DIR     modifiable single-machine data [PREFIX/var]
+  --libdir=DIR            object code libraries [EPREFIX/lib]
+  --includedir=DIR        C header files [PREFIX/include]
+  --oldincludedir=DIR     C header files for non-gcc [/usr/include]
+  --datarootdir=DIR       read-only arch.-independent data root [PREFIX/share]
+  --datadir=DIR           read-only architecture-independent data [DATAROOTDIR]
+  --infodir=DIR           info documentation [DATAROOTDIR/info]
+  --localedir=DIR         locale-dependent data [DATAROOTDIR/locale]
+  --mandir=DIR            man documentation [DATAROOTDIR/man]
+]AS_HELP_STRING([--docdir=DIR],
+  [documentation root ]@<:@DATAROOTDIR/doc/m4_ifset([AC_PACKAGE_TARNAME],
+    [AC_PACKAGE_TARNAME], [PACKAGE])@:>@)[
+  --htmldir=DIR           html documentation [DOCDIR]
+  --dvidir=DIR            dvi documentation [DOCDIR]
+  --pdfdir=DIR            pdf documentation [DOCDIR]
+  --psdir=DIR             ps documentation [DOCDIR]
+_ACEOF
+
+  cat <<\_ACEOF]
+m4_divert_pop([HELP_BEGIN])dnl
+dnl The order of the diversions here is
+dnl - HELP_BEGIN
+dnl   which may be extended by extra generic options such as with X or
+dnl   AC_ARG_PROGRAM.  Displayed only in long --help.
+dnl
+dnl - HELP_CANON
+dnl   Support for cross compilation (--build, --host and --target).
+dnl   Display only in long --help.
+dnl
+dnl - HELP_ENABLE
+dnl   which starts with the trailer of the HELP_BEGIN, HELP_CANON section,
+dnl   then implements the header of the non generic options.
+dnl
+dnl - HELP_WITH
+dnl
+dnl - HELP_VAR
+dnl
+dnl - HELP_VAR_END
+dnl
+dnl - HELP_END
+dnl   initialized below, in which we dump the trailer (handling of the
+dnl   recursion for instance).
+m4_divert_push([HELP_ENABLE])dnl
+_ACEOF
+fi
+
+if test -n "$ac_init_help"; then
+m4_ifset([AC_PACKAGE_STRING],
+[  case $ac_init_help in
+     short | recursive ) echo "Configuration of AC_PACKAGE_STRING:";;
+   esac])
+  cat <<\_ACEOF
+m4_divert_pop([HELP_ENABLE])dnl
+m4_divert_push([HELP_END])dnl
+
+Report bugs to m4_ifset([AC_PACKAGE_BUGREPORT], [<AC_PACKAGE_BUGREPORT>],
+  [the package provider]).dnl
+m4_ifdef([AC_PACKAGE_NAME], [m4_ifset([AC_PACKAGE_URL], [
+AC_PACKAGE_NAME home page: <AC_PACKAGE_URL>.])dnl
+m4_if(m4_index(m4_defn([AC_PACKAGE_NAME]), [GNU ]), [0], [
+General help using GNU software: <http://www.gnu.org/gethelp/>.])])
+_ACEOF
+ac_status=$?
+fi
+
+if test "$ac_init_help" = "recursive"; then
+  # If there are subdirs, report their specific --help.
+  for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
+    test -d "$ac_dir" ||
+      { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
+      continue
+    _AC_SRCDIRS(["$ac_dir"])
+    cd "$ac_dir" || { ac_status=$?; continue; }
+    # Check for guessed configure.
+    if test -f "$ac_srcdir/configure.gnu"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure.gnu" --help=recursive
+    elif test -f "$ac_srcdir/configure"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure" --help=recursive
+    else
+      AC_MSG_WARN([no configuration information is in $ac_dir])
+    fi || ac_status=$?
+    cd "$ac_pwd" || { ac_status=$?; break; }
+  done
+fi
+
+test -n "$ac_init_help" && exit $ac_status
+m4_divert_pop([HELP_END])dnl
+])# _AC_INIT_HELP
+
+
+# _AC_INIT_VERSION
+# ----------------
+# Handle the `configure --version' message.
+m4_define([_AC_INIT_VERSION],
+[m4_divert_text([VERSION_BEGIN],
+[if $ac_init_version; then
+  cat <<\_ACEOF
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])configure[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+generated by m4_PACKAGE_STRING])
+m4_divert_text([VERSION_END],
+[_ACEOF
+  exit
+fi])dnl
+])# _AC_INIT_VERSION
+
+
+# _AC_INIT_CONFIG_LOG
+# -------------------
+# Initialize the config.log file descriptor and write header to it.
+m4_define([_AC_INIT_CONFIG_LOG],
+[m4_divert_text([INIT_PREPARE],
+[m4_define([AS_MESSAGE_LOG_FD], 5)dnl
+cat >config.log <<_ACEOF
+This file contains any messages produced by compilers while
+running configure, to aid debugging if configure makes a mistake.
+
+It was created by m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])dnl
+$as_me[]m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]), which was
+generated by m4_PACKAGE_STRING.  Invocation command line was
+
+  $ $[0] $[@]
+
+_ACEOF
+exec AS_MESSAGE_LOG_FD>>config.log
+AS_UNAME >&AS_MESSAGE_LOG_FD
+
+cat >&AS_MESSAGE_LOG_FD <<_ACEOF
+
+
+m4_text_box([Core tests.])
+
+_ACEOF
+])])# _AC_INIT_CONFIG_LOG
+
+
+# _AC_INIT_PREPARE
+# ----------------
+# Called by AC_INIT to build the preamble of the `configure' script.
+# 1. Trap and clean up various tmp files.
+# 2. Set up the file descriptors and output files.
+# 3. Remember the options given to `configure' for `config.status --recheck'.
+# 4. Initiate confdefs.h.
+# 5. Load the site and cache files.
+m4_define([_AC_INIT_PREPARE],
+[m4_divert_push([INIT_PREPARE])dnl
+
+# Keep a trace of the command line.
+# Strip out --no-create and --no-recursion so they do not pile up.
+# Strip out --silent because we don't want to record it for future runs.
+# Also quote any args containing shell meta-characters.
+# Make two passes to allow for proper duplicate-argument suppression.
+ac_configure_args=
+ac_configure_args0=
+ac_configure_args1=
+ac_must_keep_next=false
+for ac_pass in 1 2
+do
+  for ac_arg
+  do
+    case $ac_arg in
+    -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
+    -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+    | -silent | --silent | --silen | --sile | --sil)
+      continue ;;
+    *\'*)
+      ac_arg=`AS_ECHO(["$ac_arg"]) | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    case $ac_pass in
+    1) AS_VAR_APPEND([ac_configure_args0], [" '$ac_arg'"]) ;;
+    2)
+      AS_VAR_APPEND([ac_configure_args1], [" '$ac_arg'"])
+dnl If trying to remove duplicates, be sure to (i) keep the *last*
+dnl value (e.g. --prefix=1 --prefix=2 --prefix=1 might keep 2 only),
+dnl and (ii) not to strip long options (--prefix foo --prefix bar might
+dnl give --prefix foo bar).
+      if test $ac_must_keep_next = true; then
+	ac_must_keep_next=false # Got value, back to normal.
+      else
+	case $ac_arg in
+dnl Use broad patterns, as arguments that would have already made configure
+dnl exit don't matter.
+	  *=* | --config-cache | -C | -disable-* | --disable-* \
+	  | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
+	  | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
+	  | -with-* | --with-* | -without-* | --without-* | --x)
+	    case "$ac_configure_args0 " in
+	      "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
+	    esac
+	    ;;
+	  -* ) ac_must_keep_next=true ;;
+	esac
+      fi
+      AS_VAR_APPEND([ac_configure_args], [" '$ac_arg'"])
+      ;;
+    esac
+  done
+done
+AS_UNSET(ac_configure_args0)
+AS_UNSET(ac_configure_args1)
+
+# When interrupted or exit'd, cleanup temporary files, and complete
+# config.log.  We remove comments because anyway the quotes in there
+# would cause problems or look ugly.
+# WARNING: Use '\'' to represent an apostrophe within the trap.
+# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
+trap 'exit_status=$?
+  # Save into config.log some information that might help in debugging.
+  {
+    echo
+
+    AS_BOX([Cache variables.])
+    echo
+    m4_bpatsubsts(m4_defn([_AC_CACHE_DUMP]),
+		  [^ *\(#.*\)?
+],                [],
+		  ['], ['\\''])
+    echo
+
+    AS_BOX([Output variables.])
+    echo
+    for ac_var in $ac_subst_vars
+    do
+      eval ac_val=\$$ac_var
+      case $ac_val in
+      *\'\''*) ac_val=`AS_ECHO(["$ac_val"]) | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+      esac
+      AS_ECHO(["$ac_var='\''$ac_val'\''"])
+    done | sort
+    echo
+
+    if test -n "$ac_subst_files"; then
+      AS_BOX([File substitutions.])
+      echo
+      for ac_var in $ac_subst_files
+      do
+	eval ac_val=\$$ac_var
+	case $ac_val in
+	*\'\''*) ac_val=`AS_ECHO(["$ac_val"]) | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+	esac
+	AS_ECHO(["$ac_var='\''$ac_val'\''"])
+      done | sort
+      echo
+    fi
+
+    if test -s confdefs.h; then
+      AS_BOX([confdefs.h.])
+      echo
+      cat confdefs.h
+      echo
+    fi
+    test "$ac_signal" != 0 &&
+      AS_ECHO(["$as_me: caught signal $ac_signal"])
+    AS_ECHO(["$as_me: exit $exit_status"])
+  } >&AS_MESSAGE_LOG_FD
+  rm -f core *.core core.conftest.* &&
+    rm -f -r conftest* confdefs* conf$[$]* $ac_clean_files &&
+    exit $exit_status
+' 0
+for ac_signal in 1 2 13 15; do
+  trap 'ac_signal='$ac_signal'; AS_EXIT([1])' $ac_signal
+done
+ac_signal=0
+
+# confdefs.h avoids OS command line length limits that DEFS can exceed.
+rm -f -r conftest* confdefs.h
+
+dnl AIX cpp loses on an empty file, NextStep 3.3 (patch 3) loses on a file
+dnl containing less than 14 bytes (including the newline).
+AS_ECHO(["/* confdefs.h */"]) > confdefs.h
+
+# Predefined preprocessor variables.
+AC_DEFINE_UNQUOTED([PACKAGE_NAME], ["$PACKAGE_NAME"],
+		   [Define to the full name of this package.])dnl
+AC_DEFINE_UNQUOTED([PACKAGE_TARNAME], ["$PACKAGE_TARNAME"],
+		   [Define to the one symbol short name of this package.])dnl
+AC_DEFINE_UNQUOTED([PACKAGE_VERSION], ["$PACKAGE_VERSION"],
+		   [Define to the version of this package.])dnl
+AC_DEFINE_UNQUOTED([PACKAGE_STRING], ["$PACKAGE_STRING"],
+		   [Define to the full name and version of this package.])dnl
+AC_DEFINE_UNQUOTED([PACKAGE_BUGREPORT], ["$PACKAGE_BUGREPORT"],
+		   [Define to the address where bug reports for this package
+		    should be sent.])dnl
+AC_DEFINE_UNQUOTED([PACKAGE_URL], ["$PACKAGE_URL"],
+		   [Define to the home page for this package.])
+
+# Let the site file select an alternate cache file if it wants to.
+AC_SITE_LOAD
+AC_CACHE_LOAD
+m4_divert_pop([INIT_PREPARE])dnl
+])# _AC_INIT_PREPARE
+
+
+# AU::AC_INIT([UNIQUE-FILE-IN-SOURCE-DIR])
+# ----------------------------------------
+# This macro is used only for Autoupdate.
+AU_DEFUN([AC_INIT],
+[m4_ifval([$2], [[AC_INIT($@)]],
+	  [m4_ifval([$1],
+[[AC_INIT]
+AC_CONFIG_SRCDIR([$1])], [[AC_INIT]])])[]dnl
+])
+
+
+# AC_INIT(PACKAGE, VERSION, [BUG-REPORT], [TARNAME], [URL])
+# ----------------------------------------------------------
+# Include the user macro files, prepare the diversions, and output the
+# preamble of the `configure' script.
+#
+# If BUG-REPORT is omitted, do without (unless the user previously
+# defined the m4 macro AC_PACKAGE_BUGREPORT).  If TARNAME is omitted,
+# use PACKAGE to seed it.  If URL is omitted, use
+# `http://www.gnu.org/software/TARNAME/' if PACKAGE begins with `GNU',
+# otherwise, do without.
+#
+# Note that the order is important: first initialize, then set the
+# AC_CONFIG_SRCDIR.
+m4_define([AC_INIT],
+[# Forbidden tokens and exceptions.
+m4_pattern_forbid([^_?A[CHUM]_])
+m4_pattern_forbid([_AC_])
+m4_pattern_forbid([^LIBOBJS$],
+		  [do not use LIBOBJS directly, use AC_LIBOBJ (see section `AC_LIBOBJ vs LIBOBJS'])
+# Actually reserved by M4sh.
+m4_pattern_allow([^AS_FLAGS$])
+AS_INIT[]dnl
+AS_PREPARE[]dnl
+m4_divert_push([KILL])
+m4_ifval([$2], [_AC_INIT_PACKAGE($@)])
+_AC_INIT_DEFAULTS
+_AC_INIT_PARSE_ARGS
+_AC_INIT_DIRCHECK
+_AC_INIT_SRCDIR
+_AC_INIT_HELP
+_AC_INIT_VERSION
+_AC_INIT_CONFIG_LOG
+_AC_INIT_PREPARE
+_AC_INIT_NOTICE
+_AC_INIT_COPYRIGHT
+m4_divert_text([SHELL_FN], [
+m4_text_box([Autoconf initialization.])])
+m4_divert_pop
+m4_ifval([$2], , [m4_ifval([$1], [AC_CONFIG_SRCDIR([$1])])])dnl
+dnl
+dnl Substitute for predefined variables.
+AC_SUBST([DEFS])dnl
+AC_SUBST([ECHO_C])dnl
+AC_SUBST([ECHO_N])dnl
+AC_SUBST([ECHO_T])dnl
+AC_SUBST([LIBS])dnl
+_AC_ARG_VAR_PRECIOUS([build_alias])AC_SUBST([build_alias])dnl
+_AC_ARG_VAR_PRECIOUS([host_alias])AC_SUBST([host_alias])dnl
+_AC_ARG_VAR_PRECIOUS([target_alias])AC_SUBST([target_alias])dnl
+dnl
+AC_LANG_PUSH(C)
+])
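+#
+# A minimal `configure.ac' built on this macro might begin as follows (the
+# package details are placeholders):
+#
+#   AC_PREREQ([2.69])
+#   AC_INIT([Example Package], [1.0], [bugs@example.org])
+#   AC_CONFIG_SRCDIR([src/main.c])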
+
+
+
+
+## ------------------------------------------------------------- ##
+## Selecting optional features, working with optional software.  ##
+## ------------------------------------------------------------- ##
+
+# AC_PRESERVE_HELP_ORDER
+# ----------------------
+# Emit help strings in the order given, rather than grouping all --enable-FOO
+# and all --with-BAR.
+AC_DEFUN([AC_PRESERVE_HELP_ORDER],
+[m4_divert_once([HELP_ENABLE], [[
+Optional Features and Packages:
+  --disable-option-checking  ignore unrecognized --enable/--with options
+  --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
+  --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]
+  --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
+  --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)]])
+m4_define([_m4_divert(HELP_ENABLE)],    _m4_divert(HELP_WITH))
+])# AC_PRESERVE_HELP_ORDER
+
+# _AC_ENABLE_IF(OPTION, FEATURE, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# -------------------------------------------------------------------
+# Common code for AC_ARG_ENABLE and AC_ARG_WITH.
+# OPTION is either "enable" or "with".
+#
+m4_define([_AC_ENABLE_IF],
+[@%:@ Check whether --$1-$2 was given.
+_AC_ENABLE_IF_ACTION([$1], m4_translit([$2], [-+.], [___]), [$3], [$4])
+])
+
+m4_define([_AC_ENABLE_IF_ACTION],
+[m4_append_uniq([_AC_USER_OPTS], [$1_$2], [
+])dnl
+AS_IF([test "${$1_$2+set}" = set], [$1val=$$1_$2; $3], [$4])dnl
+])
+
+# AC_ARG_ENABLE(FEATURE, HELP-STRING, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ------------------------------------------------------------------------
+AC_DEFUN([AC_ARG_ENABLE],
+[AC_PROVIDE_IFELSE([AC_PRESERVE_HELP_ORDER],
+[],
+[m4_divert_once([HELP_ENABLE], [[
+Optional Features:
+  --disable-option-checking  ignore unrecognized --enable/--with options
+  --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
+  --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]]])])dnl
+m4_divert_once([HELP_ENABLE], [$2])dnl
+_AC_ENABLE_IF([enable], [$1], [$3], [$4])dnl
+])# AC_ARG_ENABLE
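+#
+# Example use in `configure.ac' (the feature name and actions are
+# illustrative):
+#
+#   AC_ARG_ENABLE([debug],
+#     [AS_HELP_STRING([--enable-debug], [turn on debugging output])],
+#     [enable_debug=$enableval],
+#     [enable_debug=no])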
+
+
+AU_DEFUN([AC_ENABLE],
+[AC_ARG_ENABLE([$1], [  --enable-$1], [$2], [$3])])
+
+
+# AC_ARG_WITH(PACKAGE, HELP-STRING, ACTION-IF-TRUE, [ACTION-IF-FALSE])
+# --------------------------------------------------------------------
+AC_DEFUN([AC_ARG_WITH],
+[AC_PROVIDE_IFELSE([AC_PRESERVE_HELP_ORDER],
+[],
+[m4_divert_once([HELP_WITH], [[
+Optional Packages:
+  --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
+  --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)]])])
+m4_divert_once([HELP_WITH], [$2])dnl
+_AC_ENABLE_IF([with], [$1], [$3], [$4])dnl
+])# AC_ARG_WITH
+
+AU_DEFUN([AC_WITH],
+[AC_ARG_WITH([$1], [  --with-$1], [$2], [$3])])
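+#
+# Example use of AC_ARG_WITH in `configure.ac' (the package name and
+# actions are illustrative):
+#
+#   AC_ARG_WITH([readline],
+#     [AS_HELP_STRING([--with-readline], [use the readline library])],
+#     [with_readline=$withval],
+#     [with_readline=check])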
+
+# AC_DISABLE_OPTION_CHECKING
+# --------------------------
+AC_DEFUN([AC_DISABLE_OPTION_CHECKING],
+[m4_divert_once([DEFAULTS], [enable_option_checking=no])
+])# AC_DISABLE_OPTION_CHECKING
+
+
+## ----------------------------------------- ##
+## Remembering variables for reconfiguring.  ##
+## ----------------------------------------- ##
+
+
+# AC_ARG_VAR(VARNAME, DOCUMENTATION)
+# ----------------------------------
+# Register VARNAME as a precious variable, and document it in
+# `configure --help' (but only once).
+AC_DEFUN([AC_ARG_VAR],
+[m4_divert_once([HELP_VAR], [[
+Some influential environment variables:]])dnl
+m4_divert_once([HELP_VAR_END], [[
+Use these variables to override the choices made by `configure' or to help
+it to find libraries and programs with nonstandard names/locations.]])dnl
+m4_expand_once([m4_divert_text([HELP_VAR],
+			       [AS_HELP_STRING([$1], [$2], [              ])])],
+	       [$0($1)])dnl
+AC_SUBST([$1])dnl
+_AC_ARG_VAR_PRECIOUS([$1])dnl
+])# AC_ARG_VAR
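+#
+# Example use in `configure.ac' (the variable name is illustrative):
+#
+#   AC_ARG_VAR([PKG_CONFIG], [path to the pkg-config utility])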
+
+
+# _AC_ARG_VAR_PRECIOUS(VARNAME)
+# -----------------------------
+# Declare VARNAME is precious.
+m4_define([_AC_ARG_VAR_PRECIOUS],
+[m4_append_uniq([_AC_PRECIOUS_VARS], [$1], [
+])dnl
+])
+
+
+# _AC_ARG_VAR_STORE
+# -----------------
+# We try to diagnose when precious variables have changed.  To do this,
+# make two early snapshots (after the option processing to take
+# explicit variables into account) of those variables: one (ac_env_)
+# which represents the current run, and a second (ac_cv_env_) which,
+# at the first run, will be saved in the cache.  As an exception to
+# the cache mechanism, loading the cache overrides these variables
+# (non-`ac_cv_env_' cache values are assigned only to variables that
+# are still unset).
+#
+# In subsequent runs, after having loaded the cache, compare
+# ac_cv_env_foo against ac_env_foo.  See _AC_ARG_VAR_VALIDATE.
+m4_define([_AC_ARG_VAR_STORE],
+[m4_divert_text([PARSE_ARGS],
+[for ac_var in $ac_precious_vars; do
+  eval ac_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_env_${ac_var}_value=\$${ac_var}
+  eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_cv_env_${ac_var}_value=\$${ac_var}
+done])dnl
+])
+
+
+# _AC_ARG_VAR_VALIDATE
+# --------------------
+# The precious variables are saved twice at the beginning of
+# configure.  E.g., PRECIOUS is saved as `ac_env_PRECIOUS_set' and
+# `ac_env_PRECIOUS_value' on the one hand and `ac_cv_env_PRECIOUS_set'
+# and `ac_cv_env_PRECIOUS_value' on the other hand.
+#
+# Now the cache has just been loaded, so `ac_cv_env_' represents the
+# content of the cached values, while `ac_env_' represents that of the
+# current values.
+#
+# So we check that `ac_env_' and `ac_cv_env_' are consistent.  If
+# they aren't, die.
+m4_define([_AC_ARG_VAR_VALIDATE],
+[m4_divert_text([INIT_PREPARE],
+[# Check that the precious variables saved in the cache have kept the same
+# value.
+ac_cache_corrupted=false
+for ac_var in $ac_precious_vars; do
+  eval ac_old_set=\$ac_cv_env_${ac_var}_set
+  eval ac_new_set=\$ac_env_${ac_var}_set
+  eval ac_old_val=\$ac_cv_env_${ac_var}_value
+  eval ac_new_val=\$ac_env_${ac_var}_value
+  case $ac_old_set,$ac_new_set in
+    set,)
+      AS_MESSAGE([error: `$ac_var' was set to `$ac_old_val' in the previous run], 2)
+      ac_cache_corrupted=: ;;
+    ,set)
+      AS_MESSAGE([error: `$ac_var' was not set in the previous run], 2)
+      ac_cache_corrupted=: ;;
+    ,);;
+    *)
+      if test "x$ac_old_val" != "x$ac_new_val"; then
+	# differences in whitespace do not lead to failure.
+	ac_old_val_w=`echo x $ac_old_val`
+	ac_new_val_w=`echo x $ac_new_val`
+	if test "$ac_old_val_w" != "$ac_new_val_w"; then
+	  AS_MESSAGE([error: `$ac_var' has changed since the previous run:], 2)
+	  ac_cache_corrupted=:
+	else
+	  AS_MESSAGE([warning: ignoring whitespace changes in `$ac_var' since the previous run:], 2)
+	  eval $ac_var=\$ac_old_val
+	fi
+	AS_MESSAGE([  former value:  `$ac_old_val'], 2)
+	AS_MESSAGE([  current value: `$ac_new_val'], 2)
+      fi;;
+  esac
+  # Pass precious variables to config.status.
+  if test "$ac_new_set" = set; then
+    case $ac_new_val in
+    *\'*) ac_arg=$ac_var=`AS_ECHO(["$ac_new_val"]) | sed "s/'/'\\\\\\\\''/g"` ;;
+    *) ac_arg=$ac_var=$ac_new_val ;;
+    esac
+    case " $ac_configure_args " in
+      *" '$ac_arg' "*) ;; # Avoid dups.  Use of quotes ensures accuracy.
+      *) AS_VAR_APPEND([ac_configure_args], [" '$ac_arg'"]) ;;
+    esac
+  fi
+done
+if $ac_cache_corrupted; then
+  AS_MESSAGE([error: in `$ac_pwd':], 2)
+  AS_MESSAGE([error: changes in the environment can compromise the build], 2)
+  AS_ERROR([run `make distclean' and/or `rm $cache_file' and start over])
+fi])dnl
+])# _AC_ARG_VAR_VALIDATE
+
+
+
+
+
+## ---------------------------- ##
+## Transforming program names.  ##
+## ---------------------------- ##
+
+
+# AC_ARG_PROGRAM
+# --------------
+# This macro is expanded only once, to prevent `foo' from ending up
+# installed as `ggfoo' (i.e., with the prefix applied twice).
+AC_DEFUN_ONCE([AC_ARG_PROGRAM],
+[dnl Document the options.
+m4_divert_push([HELP_BEGIN])dnl
+
+Program names:
+  --program-prefix=PREFIX            prepend PREFIX to installed program names
+  --program-suffix=SUFFIX            append SUFFIX to installed program names
+  --program-transform-name=PROGRAM   run sed PROGRAM on installed program names
+m4_divert_pop([HELP_BEGIN])dnl
+test "$program_prefix" != NONE &&
+  program_transform_name="s&^&$program_prefix&;$program_transform_name"
+# Use a double $ so make ignores it.
+test "$program_suffix" != NONE &&
+  program_transform_name="s&\$&$program_suffix&;$program_transform_name"
+# Double any \ or $.
+# The default was `s,x,x'; remove it if it is useless.
+[ac_script='s/[\\$]/&&/g;s/;s,x,x,$//']
+program_transform_name=`AS_ECHO(["$program_transform_name"]) | sed "$ac_script"`
+])# AC_ARG_PROGRAM
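+#
+# A worked illustration (the values are examples only): running
+#
+#   ./configure --program-prefix=g --program-suffix=-5
+#
+# leaves program_transform_name holding `s&$$&-5&;s&^&g&' (with `$' doubled
+# for make), so an installed program `foo' ends up named `gfoo-5'.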
+
+
+
+
+
+## ------------------------- ##
+## Finding auxiliary files.  ##
+## ------------------------- ##
+
+
+# AC_CONFIG_AUX_DIR(DIR)
+# ----------------------
+# Find install-sh, config.sub, config.guess, and Cygnus configure
+# in directory DIR.  These are auxiliary files used in configuration.
+# DIR can be either absolute or relative to $srcdir.
+AC_DEFUN([AC_CONFIG_AUX_DIR],
+[AC_CONFIG_AUX_DIRS($1 "$srcdir"/$1)])
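+#
+# Example use in `configure.ac' (`build-aux' is a common choice, not a
+# requirement):
+#
+#   AC_CONFIG_AUX_DIR([build-aux])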
+
+
+# AC_CONFIG_AUX_DIR_DEFAULT
+# -------------------------
+# The default is `$srcdir' or `$srcdir/..' or `$srcdir/../..'.
+# There's no need to call this macro explicitly; just AC_REQUIRE it.
+AC_DEFUN([AC_CONFIG_AUX_DIR_DEFAULT],
+[AC_CONFIG_AUX_DIRS("$srcdir" "$srcdir/.." "$srcdir/../..")])
+
+
+# AC_CONFIG_AUX_DIRS(DIR ...)
+# ---------------------------
+# Internal subroutine.
+# Search for the configuration auxiliary files in directory list $1.
+# We look only for install-sh, so users of AC_PROG_INSTALL
+# do not automatically need to distribute the other auxiliary files.
+AC_DEFUN([AC_CONFIG_AUX_DIRS],
+[ac_aux_dir=
+for ac_dir in $1; do
+  if test -f "$ac_dir/install-sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install-sh -c"
+    break
+  elif test -f "$ac_dir/install.sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install.sh -c"
+    break
+  elif test -f "$ac_dir/shtool"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/shtool install -c"
+    break
+  fi
+done
+if test -z "$ac_aux_dir"; then
+  AC_MSG_ERROR([cannot find install-sh, install.sh, or shtool in $1])
+fi
+
+# These three variables are undocumented and unsupported,
+# and are intended to be withdrawn in a future Autoconf release.
+# They can cause serious problems if a builder's source tree is in a directory
+# whose full name contains unusual characters.
+ac_config_guess="$SHELL $ac_aux_dir/config.guess"  # Please don't use this var.
+ac_config_sub="$SHELL $ac_aux_dir/config.sub"  # Please don't use this var.
+ac_configure="$SHELL $ac_aux_dir/configure"  # Please don't use this var.
+
+AC_PROVIDE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+])# AC_CONFIG_AUX_DIRS
+
+
+
+
+## ------------------------ ##
+## Finding aclocal macros.  ##
+## ------------------------ ##
+
+
+# AC_CONFIG_MACRO_DIR(DIR)
+# ------------------------
+# Declare directory containing additional macros for aclocal.
+AC_DEFUN([AC_CONFIG_MACRO_DIR], [])
+
+
+
+## --------------------- ##
+## Requiring aux files.  ##
+## --------------------- ##
+
+# AC_REQUIRE_AUX_FILE(FILE)
+# -------------------------
+# This macro does nothing; it is a hook to be read with `autoconf --trace'.
+# It announces that FILE is required in the auxdir.
+m4_define([AC_REQUIRE_AUX_FILE],
+[AS_LITERAL_WORD_IF([$1], [],
+	       [m4_fatal([$0: requires a literal argument])])])
+
+
+
+## ----------------------------------- ##
+## Getting the canonical system type.  ##
+## ----------------------------------- ##
+
+# The inputs are:
+#    configure --host=HOST --target=TARGET --build=BUILD
+#
+# The rules are:
+# 1. Build defaults to the current platform, as determined by config.guess.
+# 2. Host defaults to build.
+# 3. Target defaults to host.
+
+
+# _AC_CANONICAL_SPLIT(THING)
+# --------------------------
+# Generate the variables THING, THING_{alias cpu vendor os}.
+m4_define([_AC_CANONICAL_SPLIT],
+[case $ac_cv_$1 in
+*-*-*) ;;
+*) AC_MSG_ERROR([invalid value of canonical $1]);;
+esac
+AC_SUBST([$1], [$ac_cv_$1])dnl
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_$1
+shift
+AC_SUBST([$1_cpu], [$[1]])dnl
+AC_SUBST([$1_vendor], [$[2]])dnl
+shift; shift
+[# Remember, the first character of IFS is used to create $]*,
+# except with old shells:
+$1_os=$[*]
+IFS=$ac_save_IFS
+case $$1_os in *\ *) $1_os=`echo "$$1_os" | sed 's/ /-/g'`;; esac
+AC_SUBST([$1_os])dnl
+])# _AC_CANONICAL_SPLIT
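+#
+# A worked illustration for THING = `host' and a canonical triplet of
+# `x86_64-pc-linux-gnu' (the triplet is an example value):
+#
+#   host=x86_64-pc-linux-gnu
+#   host_cpu=x86_64  host_vendor=pc  host_os=linux-gnu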
+
+
+# AC_CANONICAL_BUILD
+# ------------------
+AC_DEFUN_ONCE([AC_CANONICAL_BUILD],
+[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_REQUIRE_AUX_FILE([config.sub])dnl
+AC_REQUIRE_AUX_FILE([config.guess])dnl
+m4_divert_once([HELP_CANON],
+[[
+System types:
+  --build=BUILD     configure for building on BUILD [guessed]]])dnl
+# Make sure we can run config.sub.
+$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
+  AC_MSG_ERROR([cannot run $SHELL $ac_aux_dir/config.sub])
+
+AC_CACHE_CHECK([build system type], [ac_cv_build],
+[ac_build_alias=$build_alias
+test "x$ac_build_alias" = x &&
+  ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
+test "x$ac_build_alias" = x &&
+  AC_MSG_ERROR([cannot guess build type; you must specify one])
+ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
+  AC_MSG_ERROR([$SHELL $ac_aux_dir/config.sub $ac_build_alias failed])
+])
+_AC_CANONICAL_SPLIT(build)
+])# AC_CANONICAL_BUILD
+
+
+# AC_CANONICAL_HOST
+# -----------------
+AC_DEFUN_ONCE([AC_CANONICAL_HOST],
+[AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_divert_once([HELP_CANON],
+[[  --host=HOST       cross-compile to build programs to run on HOST [BUILD]]])dnl
+AC_CACHE_CHECK([host system type], [ac_cv_host],
+[if test "x$host_alias" = x; then
+  ac_cv_host=$ac_cv_build
+else
+  ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
+    AC_MSG_ERROR([$SHELL $ac_aux_dir/config.sub $host_alias failed])
+fi
+])
+_AC_CANONICAL_SPLIT([host])
+])# AC_CANONICAL_HOST
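+#
+# Example use in `configure.ac' (the case branches and the SHLIB_EXT
+# variable are illustrative):
+#
+#   AC_CANONICAL_HOST
+#   case $host_os in
+#     darwin*) SHLIB_EXT=dylib ;;
+#     *)       SHLIB_EXT=so ;;
+#   esac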
+
+
+# AC_CANONICAL_TARGET
+# -------------------
+AC_DEFUN_ONCE([AC_CANONICAL_TARGET],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_BEFORE([$0], [AC_ARG_PROGRAM])dnl
+m4_divert_once([HELP_CANON],
+[[  --target=TARGET   configure for building compilers for TARGET [HOST]]])dnl
+AC_CACHE_CHECK([target system type], [ac_cv_target],
+[if test "x$target_alias" = x; then
+  ac_cv_target=$ac_cv_host
+else
+  ac_cv_target=`$SHELL "$ac_aux_dir/config.sub" $target_alias` ||
+    AC_MSG_ERROR([$SHELL $ac_aux_dir/config.sub $target_alias failed])
+fi
+])
+_AC_CANONICAL_SPLIT([target])
+
+# The aliases save the names the user supplied, while $host etc.
+# will get canonicalized.
+test -n "$target_alias" &&
+  test "$program_prefix$program_suffix$program_transform_name" = \
+    NONENONEs,x,x, &&
+  program_prefix=${target_alias}-[]dnl
+])# AC_CANONICAL_TARGET
+
+
+AU_ALIAS([AC_CANONICAL_SYSTEM], [AC_CANONICAL_TARGET])
+
+
+# AU::AC_VALIDATE_CACHED_SYSTEM_TUPLE([CMD])
+# ------------------------------------------
+# If the cache file is inconsistent with the current host,
+# target and build system types, execute CMD or print a default
+# error message.  Now handled via _AC_ARG_VAR_PRECIOUS.
+AU_DEFUN([AC_VALIDATE_CACHED_SYSTEM_TUPLE], [])
+
+
+## ---------------------- ##
+## Caching test results.  ##
+## ---------------------- ##
+
+
+# AC_SITE_LOAD
+# ------------
+# Look for site- or system-specific initialization scripts.
+m4_define([AC_SITE_LOAD],
+[# Prefer an explicitly selected file to automatically selected ones.
+ac_site_file1=NONE
+ac_site_file2=NONE
+if test -n "$CONFIG_SITE"; then
+  # We do not want a PATH search for config.site.
+  case $CONFIG_SITE in @%:@((
+    -*)  ac_site_file1=./$CONFIG_SITE;;
+    */*) ac_site_file1=$CONFIG_SITE;;
+    *)   ac_site_file1=./$CONFIG_SITE;;
+  esac
+elif test "x$prefix" != xNONE; then
+  ac_site_file1=$prefix/share/config.site
+  ac_site_file2=$prefix/etc/config.site
+else
+  ac_site_file1=$ac_default_prefix/share/config.site
+  ac_site_file2=$ac_default_prefix/etc/config.site
+fi
+for ac_site_file in "$ac_site_file1" "$ac_site_file2"
+do
+  test "x$ac_site_file" = xNONE && continue
+  if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
+    AC_MSG_NOTICE([loading site script $ac_site_file])
+    sed 's/^/| /' "$ac_site_file" >&AS_MESSAGE_LOG_FD
+    . "$ac_site_file" \
+      || AC_MSG_FAILURE([failed to load site script $ac_site_file])
+  fi
+done
+])
+
+
+# AC_CACHE_LOAD
+# -------------
+m4_define([AC_CACHE_LOAD],
+[if test -r "$cache_file"; then
+  # Some versions of bash will fail to source /dev/null (special files
+  # actually), so we avoid doing that.  DJGPP emulates it as a regular file.
+  if test /dev/null != "$cache_file" && test -f "$cache_file"; then
+    AC_MSG_NOTICE([loading cache $cache_file])
+    case $cache_file in
+      [[\\/]]* | ?:[[\\/]]* ) . "$cache_file";;
+      *)                      . "./$cache_file";;
+    esac
+  fi
+else
+  AC_MSG_NOTICE([creating cache $cache_file])
+  >$cache_file
+fi
+])# AC_CACHE_LOAD
+
+
+# _AC_CACHE_DUMP
+# --------------
+# Dump the cache to stdout.  It must work correctly when run in a pipe.
+m4_define([_AC_CACHE_DUMP],
+[# The following way of writing the cache mishandles newlines in values,
+# but we know of no workaround that is simple, portable, and efficient.
+# So, we kill variables containing newlines.
+# Ultrix sh set writes to stderr and can't be redirected directly,
+# and sets the high bit in the cache file unless we assign to the vars.
+(
+  for ac_var in `(set) 2>&1 | sed -n ['s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p']`; do
+    eval ac_val=\$$ac_var
+    case $ac_val in #(
+    *${as_nl}*)
+      case $ac_var in #(
+      *_cv_*) AC_MSG_WARN([cache variable $ac_var contains a newline]) ;;
+      esac
+      case $ac_var in #(
+      _ | IFS | as_nl) ;; #(
+      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+      *) AS_UNSET([$ac_var]) ;;
+      esac ;;
+    esac
+  done
+
+  (set) 2>&1 |
+    case $as_nl`(ac_space=' '; set) 2>&1` in #(
+    *${as_nl}ac_space=\ *)
+      # `set' does not quote correctly, so add quotes: double-quote
+      # substitution turns \\\\ into \\, and sed turns \\ into \.
+      sed -n \
+	["s/'/'\\\\''/g;
+	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"]
+      ;; #(
+    *)
+      # `set' quotes correctly as required by POSIX, so do not add quotes.
+      sed -n ["/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"]
+      ;;
+    esac |
+    sort
+)dnl
+])# _AC_CACHE_DUMP
+
+
+# AC_CACHE_SAVE
+# -------------
+# Save the cache.
+# Allow a site initialization script to override cache values.
+m4_define([AC_CACHE_SAVE],
+[cat >confcache <<\_ACEOF
+# This file is a shell script that caches the results of configure
+# tests run on this system so they can be shared between configure
+# scripts and configure runs, see configure's option --config-cache.
+# It is not useful on other systems.  If it contains results you don't
+# want to keep, you may remove or edit it.
+#
+# config.status only pays attention to the cache file if you give it
+# the --recheck option to rerun configure.
+#
+# `ac_cv_env_foo' variables (set or unset) will be overridden when
+# loading this file, other *unset* `ac_cv_foo' will be assigned the
+# following values.
+
+_ACEOF
+
+_AC_CACHE_DUMP() |
+  sed ['
+     /^ac_cv_env_/b end
+     t clear
+     :clear
+     s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
+     t end
+     s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
+     :end'] >>confcache
+if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
+  if test -w "$cache_file"; then
+    if test "x$cache_file" != "x/dev/null"; then
+      AC_MSG_NOTICE([updating cache $cache_file])
+      if test ! -f "$cache_file" || test -h "$cache_file"; then
+	cat confcache >"$cache_file"
+      else
+dnl Try to update the cache file atomically even on different mount points;
+dnl at the same time, avoid filename limitation issues in the common case.
+        case $cache_file in #(
+        */* | ?:*)
+	  mv -f confcache "$cache_file"$$ &&
+	  mv -f "$cache_file"$$ "$cache_file" ;; #(
+        *)
+	  mv -f confcache "$cache_file" ;;
+	esac
+      fi
+    fi
+  else
+    AC_MSG_NOTICE([not updating unwritable cache $cache_file])
+  fi
+fi
+rm -f confcache[]dnl
+])# AC_CACHE_SAVE
+
+
+# AC_CACHE_VAL(CACHE-ID, COMMANDS-TO-SET-IT)
+# ------------------------------------------
+# The name of shell var CACHE-ID must contain `_cv_' in order to get saved.
+# Should be dnl'ed.  Try to catch common mistakes.
+m4_defun([AC_CACHE_VAL],
+[AS_LITERAL_WORD_IF([$1], [m4_if(m4_index(m4_quote($1), [_cv_]), [-1],
+			    [AC_DIAGNOSE([syntax],
+[$0($1, ...): suspicious cache-id, must contain _cv_ to be cached])])])dnl
+m4_if(m4_index([$2], [AC_DEFINE]), [-1], [],
+      [AC_DIAGNOSE([syntax],
+[$0($1, ...): suspicious presence of an AC_DEFINE in the second argument, ]dnl
+[where no actions should be taken])])dnl
+m4_if(m4_index([$2], [AC_SUBST]), [-1], [],
+      [AC_DIAGNOSE([syntax],
+[$0($1, ...): suspicious presence of an AC_SUBST in the second argument, ]dnl
+[where no actions should be taken])])dnl
+AS_VAR_SET_IF([$1],
+	      [_AS_ECHO_N([(cached) ])],
+	      [$2])
+])
+
+
+# AC_CACHE_CHECK(MESSAGE, CACHE-ID, COMMANDS)
+# -------------------------------------------
+# Do not call this macro with a dnl right behind.
+m4_defun([AC_CACHE_CHECK],
+[AC_MSG_CHECKING([$1])
+AC_CACHE_VAL([$2], [$3])dnl
+AS_LITERAL_WORD_IF([$2],
+	      [AC_MSG_RESULT([$$2])],
+	      [AS_VAR_COPY([ac_res], [$2])
+	       AC_MSG_RESULT([$ac_res])])dnl
+])
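+
+# A minimal usage sketch of AC_CACHE_CHECK (illustrative only; the cache
+# variable `my_cv_feature_works' and the feature being probed are
+# hypothetical):
+#
+#   AC_CACHE_CHECK([whether the feature works], [my_cv_feature_works],
+#     [AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <stdio.h>]],
+#                                         [[puts ("hello");]])],
+#        [my_cv_feature_works=yes], [my_cv_feature_works=no])])
+#   AS_IF([test "x$my_cv_feature_works" = xyes],
+#     [AC_DEFINE([HAVE_FEATURE], [1], [Define to 1 if the feature works.])])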
+
+# _AC_CACHE_CHECK_INT(MESSAGE, CACHE-ID, EXPRESSION,
+#                     [PROLOGUE = DEFAULT-INCLUDES], [IF-FAILS])
+# --------------------------------------------------------------
+AC_DEFUN([_AC_CACHE_CHECK_INT],
+[AC_CACHE_CHECK([$1], [$2],
+   [AC_COMPUTE_INT([$2], [$3], [$4], [$5])])
+])# _AC_CACHE_CHECK_INT
+
+
+
+## ---------------------- ##
+## Defining CPP symbols.  ##
+## ---------------------- ##
+
+
+# AC_DEFINE_TRACE_LITERAL(LITERAL-CPP-SYMBOL)
+# -------------------------------------------
+# Used by --trace to collect the list of AC_DEFINEd macros.
+m4_define([AC_DEFINE_TRACE_LITERAL],
+[m4_pattern_allow([^$1$])dnl
+AS_IDENTIFIER_IF([$1], [],
+  [m4_warn([syntax], [AC_DEFINE: not an identifier: $1])])dnl
+])# AC_DEFINE_TRACE_LITERAL
+
+
+# AC_DEFINE_TRACE(CPP-SYMBOL)
+# ---------------------------
+# This macro is a wrapper around AC_DEFINE_TRACE_LITERAL which filters
+# out non literal symbols.  CPP-SYMBOL must not include any parameters.
+m4_define([AC_DEFINE_TRACE],
+[AS_LITERAL_WORD_IF([$1], [AC_DEFINE_TRACE_LITERAL(_m4_expand([$1]))])])
+
+
+# AC_DEFINE(VARIABLE, [VALUE], [DESCRIPTION])
+# -------------------------------------------
+# Set VARIABLE to VALUE, verbatim, or to 1 if VALUE is omitted.  Remember
+# the value; if VARIABLE is later assigned the same VALUE, do nothing,
+# otherwise die.  The third argument is used by autoheader.
+m4_define([AC_DEFINE], [_AC_DEFINE_Q([_$0], $@)])
+
+# _AC_DEFINE(STRING)
+# ------------------
+# Append the pre-expanded STRING and a newline to confdefs.h, as if by
+# a quoted here-doc.
+m4_define([_AC_DEFINE],
+[AS_ECHO(["AS_ESCAPE([[$1]])"]) >>confdefs.h])
+
+
+# AC_DEFINE_UNQUOTED(VARIABLE, [VALUE], [DESCRIPTION])
+# ----------------------------------------------------
+# Similar, but perform shell substitutions $ ` \ once on VALUE, as
+# in an unquoted here-doc.
+m4_define([AC_DEFINE_UNQUOTED], [_AC_DEFINE_Q([_$0], $@)])
+
+# _AC_DEFINE_UNQUOTED(STRING)
+# ---------------------------
+# Append the pre-expanded STRING and a newline to confdefs.h, as if
+# with an unquoted here-doc, but avoiding a fork in the common case of
+# no backslash, no command substitution, no complex variable
+# substitution, and no quadrigraphs.
+m4_define([_AC_DEFINE_UNQUOTED],
+[m4_if(m4_bregexp([$1], [\\\|`\|\$(\|\${\|@]), [-1],
+       [AS_ECHO(["AS_ESCAPE([$1], [""])"]) >>confdefs.h],
+       [cat >>confdefs.h <<_ACEOF
+[$1]
+_ACEOF])])
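+
+# Illustrative sketch contrasting the quoted and unquoted forms (the
+# symbols shown are hypothetical):
+#
+#   AC_DEFINE([USE_FAST_PATH], [1], [Define to 1 to enable the fast path.])
+#   AC_DEFINE_UNQUOTED([BUILD_USER], ["$USER"],
+#                      [The user name seen at configure time.])
+#
+# The first writes its value verbatim; the second expands `$USER' in the
+# shell before appending the definition to confdefs.h.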
+
+
+# _AC_DEFINE_Q(MACRO, VARIABLE, [VALUE], [DESCRIPTION])
+# -----------------------------------------------------
+# Internal function that performs common elements of AC_DEFINE{,_UNQUOTED}.
+# MACRO must take one argument, which is the fully expanded string to
+# append to confdefs.h as if by a possibly-quoted here-doc.
+#
+# m4_index is roughly 5 to 8 times faster than m4_bpatsubst, so we use
+# m4_format rather than regex to grab prefix up to first ().  AC_name
+# is defined with over-quotation, so that we can avoid m4_defn; this
+# is only safe because the name should not contain $.
+#
+# Guarantee a match in m4_index, so as to avoid a bug with precision
+# -1 in m4_format in older m4.
+m4_define([_AC_DEFINE_Q],
+[m4_pushdef([AC_name], m4_format([[[%.*s]]], m4_index([$2(], [(]), [$2]))]dnl
+[AC_DEFINE_TRACE(AC_name)]dnl
+[m4_cond([m4_index([$3], [
+])], [-1], [],
+	[m4_bregexp([[$3]], [[^\\]
+], [-])], [], [],
+	[m4_warn([syntax], [AC_DEFINE]m4_if([$1], [_AC_DEFINE], [],
+  [[_UNQUOTED]])[: `$3' is not a valid preprocessor define value])])]dnl
+[m4_ifval([$4], [AH_TEMPLATE(AC_name, [$4])
+])_m4_popdef([AC_name])]dnl
+[$1(m4_expand([[@%:@define] $2 ]m4_if([$#], 2, 1,
+  [$3], [], [/**/], [[$3]])))
+])
+
+
+
+## -------------------------- ##
+## Setting output variables.  ##
+## -------------------------- ##
+
+
+# AC_SUBST_TRACE(VARIABLE)
+# ------------------------
+# This macro is used with --trace to collect the list of substituted variables.
+m4_define([AC_SUBST_TRACE])
+
+
+# AC_SUBST(VARIABLE, [VALUE])
+# ---------------------------
+# Create an output variable from a shell VARIABLE.  If VALUE is given
+# assign it to VARIABLE.  Use `""' if you want to set VARIABLE to an
+# empty value, not an empty second argument.
+#
+m4_define([AC_SUBST],
+[AS_IDENTIFIER_IF([$1], [],
+  [m4_fatal([$0: `$1' is not a valid shell variable name])])]dnl
+[AC_SUBST_TRACE([$1])]dnl
+[m4_pattern_allow([^$1$])]dnl
+[m4_ifvaln([$2], [$1=$2])[]]dnl
+[m4_set_add([_AC_SUBST_VARS], [$1])])# AC_SUBST
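+
+# Illustrative usage sketch (the variable names are hypothetical):
+#
+#   AC_SUBST([EXTRA_CFLAGS], ["-Wall -Wextra"])
+#   AC_SUBST([PKG_DATA_DIR])
+#
+# The first form also assigns the value; the second merely marks an
+# existing shell variable so that @PKG_DATA_DIR@ is substituted in the
+# output files.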
+
+
+# AC_SUBST_FILE(VARIABLE)
+# -----------------------
+# Read the comments of the preceding macro.
+m4_define([AC_SUBST_FILE],
+[m4_pattern_allow([^$1$])dnl
+m4_append_uniq([_AC_SUBST_FILES], [$1], [
+])])
+
+
+
+## --------------------------------------- ##
+## Printing messages at autoconf runtime.  ##
+## --------------------------------------- ##
+
+# In fact, I think we should promote the use of m4_warn and m4_fatal
+# directly.  This would also keep people from confusing AC_FATAL
+# with AC_MSG_ERROR.
+
+
+# AC_DIAGNOSE(CATEGORY, MESSAGE)
+# AC_FATAL(MESSAGE, [EXIT-STATUS])
+# --------------------------------
+m4_define([AC_DIAGNOSE], [m4_warn($@)])
+m4_define([AC_FATAL],    [m4_fatal($@)])
+
+
+# AC_WARNING(MESSAGE)
+# -------------------
+# Report a MESSAGE to the user of autoconf if `-W' or `-W all' was
+# specified.
+m4_define([AC_WARNING],
+[AC_DIAGNOSE([syntax], [$1])])
+
+
+
+
+## ---------------------------------------- ##
+## Printing messages at configure runtime.  ##
+## ---------------------------------------- ##
+
+
+# AC_MSG_CHECKING(FEATURE)
+# ------------------------
+m4_define([AC_MSG_CHECKING],
+[{ _AS_ECHO_LOG([checking $1])
+_AS_ECHO_N([checking $1... ]); }dnl
+])
+
+
+# AC_MSG_RESULT(RESULT)
+# ---------------------
+m4_define([AC_MSG_RESULT],
+[{ _AS_ECHO_LOG([result: $1])
+_AS_ECHO([$1]); }dnl
+])
+
+
+# AC_MSG_WARN(PROBLEM)
+# AC_MSG_NOTICE(STRING)
+# AC_MSG_ERROR(ERROR, [EXIT-STATUS = 1])
+# AC_MSG_FAILURE(ERROR, [EXIT-STATUS = 1])
+# ----------------------------------------
+m4_copy([AS_WARN],    [AC_MSG_WARN])
+m4_copy([AS_MESSAGE], [AC_MSG_NOTICE])
+m4_copy([AS_ERROR],   [AC_MSG_ERROR])
+m4_define([AC_MSG_FAILURE],
+[{ AS_MESSAGE([error: in `$ac_pwd':], 2)
+AC_MSG_ERROR([$1
+See `config.log' for more details], [$2]); }])
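+
+# Illustrative pairing of the message macros in a hand-written check
+# (the command and variable names are hypothetical):
+#
+#   AC_MSG_CHECKING([for a usable frobnicator])
+#   frob_found=no
+#   test -x /usr/bin/frobnicate && frob_found=yes
+#   AC_MSG_RESULT([$frob_found])
+#   AS_IF([test "x$frob_found" = xno],
+#     [AC_MSG_ERROR([a frobnicator is required to build this package])])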
+
+
+# _AC_MSG_LOG_CONFTEST
+# --------------------
+m4_define([_AC_MSG_LOG_CONFTEST],
+[AS_ECHO(["$as_me: failed program was:"]) >&AS_MESSAGE_LOG_FD
+sed 's/^/| /' conftest.$ac_ext >&AS_MESSAGE_LOG_FD
+])
+
+
+# AU::AC_CHECKING(FEATURE)
+# ------------------------
+AU_DEFUN([AC_CHECKING],
+[AS_MESSAGE([checking $1...])])
+
+
+# AU::AC_MSG_RESULT_UNQUOTED(RESULT)
+# ----------------------------------
+# No escaping is done, so backtick substitution was also performed.
+AU_DEFUN([AC_MSG_RESULT_UNQUOTED],
+[_AS_ECHO_UNQUOTED([$as_me:${as_lineno-$LINENO}: result: $1], AS_MESSAGE_LOG_FD)
+_AS_ECHO_UNQUOTED([$1])[]dnl
+])
+
+
+# AU::AC_VERBOSE(STRING)
+# ----------------------
+AU_ALIAS([AC_VERBOSE], [AC_MSG_RESULT])
+
+
+
+
+
+
+## ---------------------------- ##
+## Compiler-running mechanics.  ##
+## ---------------------------- ##
+
+
+# _AC_RUN_LOG(COMMAND, LOG-COMMANDS)
+# ----------------------------------
+# Eval COMMAND, save the exit status in ac_status, and log it.  The return
+# code is 0 if COMMAND succeeded, so that it can be used directly in AS_IF
+# constructs.
+AC_DEFUN([_AC_RUN_LOG],
+[{ { $2; } >&AS_MESSAGE_LOG_FD
+  ($1) 2>&AS_MESSAGE_LOG_FD
+  ac_status=$?
+  _AS_ECHO_LOG([\$? = $ac_status])
+  test $ac_status = 0; }])
+
+
+# _AC_RUN_LOG_STDERR(COMMAND, LOG-COMMANDS)
+# -----------------------------------------
+# Run COMMAND, save its stderr into conftest.err, save the exit status
+# in ac_status, and log it.  Don't forget to clean up conftest.err after
+# use.
+# Note that when tracing, most shells will leave the traces in stderr
+# starting with "+": that's what this macro tries to address.
+# The return code is 0 if COMMAND succeeded, so that it can be used directly
+# in AS_IF constructs.
+AC_DEFUN([_AC_RUN_LOG_STDERR],
+[{ { $2; } >&AS_MESSAGE_LOG_FD
+  ($1) 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&AS_MESSAGE_LOG_FD
+    mv -f conftest.er1 conftest.err
+  fi
+  _AS_ECHO_LOG([\$? = $ac_status])
+  test $ac_status = 0; }])
+
+
+# _AC_RUN_LOG_LIMIT(COMMAND, LOG-COMMANDS, [LINES])
+# -------------------------------------------------
+# Like _AC_RUN_LOG, but only log LINES lines from stderr,
+# defaulting to 10 lines.
+AC_DEFUN([_AC_RUN_LOG_LIMIT],
+[{ { $2; } >&AS_MESSAGE_LOG_FD
+  ($1) 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    sed 'm4_default([$3], [10])a\
+... rest of stderr output deleted ...
+         m4_default([$3], [10])q' conftest.err >conftest.er1
+    cat conftest.er1 >&AS_MESSAGE_LOG_FD
+  fi
+  rm -f conftest.er1 conftest.err
+  _AS_ECHO_LOG([\$? = $ac_status])
+  test $ac_status = 0; }])
+
+
+# _AC_DO_ECHO(COMMAND)
+# --------------------
+# Echo COMMAND.  This is designed to be used just before evaluating COMMAND.
+AC_DEFUN([_AC_DO_ECHO],
+[m4_if([$1], [$ac_try], [], [ac_try="$1"
+])]dnl
+dnl If the string contains '\"', '`', or '\\', then just echo it rather
+dnl than expanding it.  This is a hack, but it is safer, while also
+dnl typically expanding simple substrings like '$CC', which is what we want.
+dnl
+dnl Much of this macro body is quoted, to work around misuses like
+dnl `AC_CHECK_FUNC(sigblock, , AC_CHECK_LIB(bsd, sigblock))',
+dnl which underquotes the 3rd arg and would misbehave if we didn't quote here.
+dnl The "(($ac_try" instead of $ac_try avoids problems with even-worse
+dnl underquoting misuses, such as
+dnl `AC_CHECK_FUNC(foo, , AC_CHECK_LIB(a, foo, , AC_CHECK_LIB(b, foo)))'.
+dnl We normally wouldn't bother with this kind of workaround for invalid code
+dnl but this change was put in just before Autoconf 2.60 and we wanted to
+dnl minimize the integration hassle.
+[[case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""]
+AS_ECHO(["$ac_try_echo"])])
+
+# _AC_DO(COMMAND)
+# ---------------
+# Eval COMMAND, save the exit status in ac_status, and log it.
+# For internal use only.
+AC_DEFUN([_AC_DO],
+[_AC_RUN_LOG([eval "$1"],
+	     [_AC_DO_ECHO([$1])])])
+
+
+# _AC_DO_STDERR(COMMAND)
+# ----------------------
+# Like _AC_RUN_LOG_STDERR, but eval (instead of running) COMMAND.
+AC_DEFUN([_AC_DO_STDERR],
+[_AC_RUN_LOG_STDERR([eval "$1"],
+		    [_AC_DO_ECHO([$1])])])
+
+
+# _AC_DO_VAR(VARIABLE)
+# --------------------
+# Evaluate "$VARIABLE", which should be a valid shell command.
+# The purpose of this macro is to write "configure:123: command line"
+# into config.log for every test run.
+AC_DEFUN([_AC_DO_VAR],
+[_AC_DO([$$1])])
+
+
+# _AC_DO_TOKENS(COMMAND)
+# ----------------------
+# Like _AC_DO_VAR, but execute COMMAND instead, where COMMAND is a series of
+# tokens of the shell command language.
+AC_DEFUN([_AC_DO_TOKENS],
+[{ ac_try='$1'
+  _AC_DO([$ac_try]); }])
+
+
+# _AC_DO_LIMIT(COMMAND, [LINES])
+# ------------------------------
+# Like _AC_DO, but limit the amount of stderr lines logged to LINES.
+# For internal use only.
+AC_DEFUN([_AC_DO_LIMIT],
+[_AC_RUN_LOG_LIMIT([eval "$1"],
+		   [_AC_DO_ECHO([$1])], [$2])])
+
+
+# _AC_EVAL(COMMAND)
+# -----------------
+# Eval COMMAND, save the exit status in ac_status, and log it.
+# Unlike _AC_DO, this macro mishandles quoted arguments in some cases.
+# It is present only for backward compatibility with previous Autoconf versions.
+AC_DEFUN([_AC_EVAL],
+[_AC_RUN_LOG([eval $1],
+	     [eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$1\""])])
+
+
+# _AC_EVAL_STDERR(COMMAND)
+# ------------------------
+# Like _AC_RUN_LOG_STDERR, but eval (instead of running) COMMAND.
+# Unlike _AC_DO_STDERR, this macro mishandles quoted arguments in some cases.
+# It is present only for backward compatibility with previous Autoconf versions.
+AC_DEFUN([_AC_EVAL_STDERR],
+[_AC_RUN_LOG_STDERR([eval $1],
+		    [eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$1\""])])
+
+
+# AC_TRY_EVAL(VARIABLE)
+# ---------------------
+# Evaluate $VARIABLE, which should be a valid shell command.
+# The purpose of this macro is to write "configure:123: command line"
+# into config.log for every test run.
+#
+# The AC_TRY_EVAL and AC_TRY_COMMAND macros are dangerous and
+# undocumented, and should not be used.
+# They may be removed or their API changed in a future release.
+# Autoconf itself no longer uses these two macros; they are present
+# only for backward compatibility with previous versions of Autoconf.
+# Not every shell command will work due to problems with eval
+# and quoting, and the rules for exactly what does work are tricky.
+# Worse, due to double-expansion during evaluation, arbitrary unintended
+# shell commands could be executed in some situations.
+AC_DEFUN([AC_TRY_EVAL],
+[_AC_EVAL([$$1])])
+
+
+# AC_TRY_COMMAND(COMMAND)
+# -----------------------
+# Like AC_TRY_EVAL, but execute COMMAND instead, where COMMAND is a series of
+# tokens of the shell command language.
+# This macro should not be used; see the comments under AC_TRY_EVAL for why.
+AC_DEFUN([AC_TRY_COMMAND],
+[{ ac_try='$1'
+  _AC_EVAL([$ac_try]); }])
+
+
+# AC_RUN_LOG(COMMAND)
+# -------------------
+AC_DEFUN([AC_RUN_LOG],
+[_AC_RUN_LOG([$1],
+	     [AS_ECHO(["$as_me:${as_lineno-$LINENO}: AS_ESCAPE([$1])"])])])
+
+
+
+
+## ------------------------ ##
+## Examining declarations.  ##
+## ------------------------ ##
+
+
+# _AC_PREPROC_IFELSE_BODY
+# -----------------------
+# Shell function body for _AC_PREPROC_IFELSE.
+m4_define([_AC_PREPROC_IFELSE_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AS_IF([_AC_DO_STDERR([$ac_cpp conftest.$ac_ext]) > conftest.i && {
+	 test -z "$ac_[]_AC_LANG_ABBREV[]_preproc_warn_flag$ac_[]_AC_LANG_ABBREV[]_werror_flag" ||
+	 test ! -s conftest.err
+       }],
+    [ac_retval=0],
+    [_AC_MSG_LOG_CONFTEST
+    ac_retval=1])
+  AS_LINENO_POP
+  AS_SET_STATUS([$ac_retval])
+])# _AC_PREPROC_IFELSE_BODY
+
+
+# _AC_PREPROC_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ----------------------------------------------------------------
+# Try to preprocess PROGRAM.
+#
+# This macro can be used during the selection of a preprocessor.
+# eval is necessary to expand ac_cpp.
+AC_DEFUN([_AC_PREPROC_IFELSE],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_try_cpp],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_try_cpp], [LINENO],
+    [Try to preprocess conftest.$ac_ext, and return whether this succeeded.])],
+  [$0_BODY])]dnl
+[m4_ifvaln([$1], [AC_LANG_CONFTEST([$1])])]dnl
+[AS_IF([ac_fn_[]_AC_LANG_ABBREV[]_try_cpp "$LINENO"], [$2], [$3])
+rm -f conftest.err conftest.i[]m4_ifval([$1], [ conftest.$ac_ext])[]dnl
+])# _AC_PREPROC_IFELSE
+
+# AC_PREPROC_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ---------------------------------------------------------------
+# Try to preprocess PROGRAM.  Requires that the preprocessor for the
+# current language was checked for, hence do not use this macro in macros
+# looking for a preprocessor.
+AC_DEFUN([AC_PREPROC_IFELSE],
+[AC_LANG_PREPROC_REQUIRE()dnl
+_AC_PREPROC_IFELSE($@)])
+
+
+# AC_TRY_CPP(INCLUDES, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ---------------------------------------------------------
+# AC_TRY_CPP is used to check whether particular header files exist.
+# (But it actually tests whether INCLUDES produces no CPP errors.)
+#
+# INCLUDES are not defaulted and are double quoted.
+AU_DEFUN([AC_TRY_CPP],
+[AC_PREPROC_IFELSE([AC_LANG_SOURCE([[$1]])], [$2], [$3])])
+
+
+# AC_EGREP_CPP(PATTERN, PROGRAM,
+#              [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# ------------------------------------------------------
+# Because this macro is used by AC_PROG_GCC_TRADITIONAL, which must
+# come early, it is not included in AC_BEFORE checks.
+AC_DEFUN([AC_EGREP_CPP],
+[AC_LANG_PREPROC_REQUIRE()dnl
+AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_LANG_CONFTEST([AC_LANG_SOURCE([[$2]])])
+AS_IF([dnl eval is necessary to expand ac_cpp.
+dnl Ultrix and Pyramid sh refuse to redirect output of eval, so use subshell.
+(eval "$ac_cpp conftest.$ac_ext") 2>&AS_MESSAGE_LOG_FD |
+dnl Quote $1 to prevent m4 from eating character classes
+  $EGREP "[$1]" >/dev/null 2>&1],
+  [$3],
+  [$4])
+rm -f conftest*
+])# AC_EGREP_CPP
+
+
+# AC_EGREP_HEADER(PATTERN, HEADER-FILE,
+#                 [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# ---------------------------------------------------------
+AC_DEFUN([AC_EGREP_HEADER],
+[AC_EGREP_CPP([$1],
+[#include <$2>
+], [$3], [$4])])
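+
+# Illustrative usage sketch (the symbol/header pair is just an example):
+#
+#   AC_EGREP_HEADER([re_comp], [regex.h],
+#     [AC_DEFINE([HAVE_RE_COMP], [1],
+#        [Define to 1 if regex.h declares re_comp.])])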
+
+
+
+
+## ------------------ ##
+## Examining syntax.  ##
+## ------------------ ##
+
+# _AC_COMPILE_IFELSE_BODY
+# -----------------------
+# Shell function body for _AC_COMPILE_IFELSE.
+m4_define([_AC_COMPILE_IFELSE_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  rm -f conftest.$ac_objext
+  AS_IF([_AC_DO_STDERR($ac_compile) && {
+	 test -z "$ac_[]_AC_LANG_ABBREV[]_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest.$ac_objext],
+      [ac_retval=0],
+      [_AC_MSG_LOG_CONFTEST
+	ac_retval=1])
+  AS_LINENO_POP
+  AS_SET_STATUS([$ac_retval])
+])# _AC_COMPILE_IFELSE_BODY
+
+
+# _AC_COMPILE_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ----------------------------------------------------------------
+# Try to compile PROGRAM.
+# This macro can be used during the selection of a compiler.
+AC_DEFUN([_AC_COMPILE_IFELSE],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_try_compile],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_try_compile], [LINENO],
+    [Try to compile conftest.$ac_ext, and return whether this succeeded.])],
+  [$0_BODY])]dnl
+[m4_ifvaln([$1], [AC_LANG_CONFTEST([$1])])]dnl
+[AS_IF([ac_fn_[]_AC_LANG_ABBREV[]_try_compile "$LINENO"], [$2], [$3])
+rm -f core conftest.err conftest.$ac_objext[]m4_ifval([$1], [ conftest.$ac_ext])[]dnl
+])# _AC_COMPILE_IFELSE
+
+
+# AC_COMPILE_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ---------------------------------------------------------------
+# Try to compile PROGRAM.  Requires that the compiler for the current
+# language was checked for, hence do not use this macro in macros looking
+# for a compiler.
+AC_DEFUN([AC_COMPILE_IFELSE],
+[AC_LANG_COMPILER_REQUIRE()dnl
+_AC_COMPILE_IFELSE($@)])
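+
+# Illustrative usage sketch (the result variable is hypothetical):
+#
+#   AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <stdint.h>]],
+#                                      [[uint64_t x = 0; return (int) x;]])],
+#     [ac_have_stdint=yes], [ac_have_stdint=no])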
+
+
+# AC_TRY_COMPILE(INCLUDES, FUNCTION-BODY,
+#                [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ---------------------------------------------------
+AU_DEFUN([AC_TRY_COMPILE],
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[$1]], [[$2]])], [$3], [$4])])
+
+
+
+## --------------------- ##
+## Examining libraries.  ##
+## --------------------- ##
+
+
+# _AC_LINK_IFELSE_BODY
+# --------------------
+# Shell function body for _AC_LINK_IFELSE.
+m4_define([_AC_LINK_IFELSE_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  rm -f conftest.$ac_objext conftest$ac_exeext
+  AS_IF([_AC_DO_STDERR($ac_link) && {
+	 test -z "$ac_[]_AC_LANG_ABBREV[]_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest$ac_exeext && {
+	 test "$cross_compiling" = yes ||
+	 AS_TEST_X([conftest$ac_exeext])
+       }],
+      [ac_retval=0],
+      [_AC_MSG_LOG_CONFTEST
+	ac_retval=1])
+  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
+  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
+  # interfere with the next link command; also delete a directory that is
+  # left behind by Apple's compiler.  We do this before executing the actions.
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  AS_LINENO_POP
+  AS_SET_STATUS([$ac_retval])
+])# _AC_LINK_IFELSE_BODY
+
+
+# _AC_LINK_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# -------------------------------------------------------------
+# Try to link PROGRAM.
+# This macro can be used during the selection of a compiler.
+#
+# Test that the resulting file is executable; see the problem reported by mwoehlke
+# in <http://lists.gnu.org/archive/html/bug-coreutils/2006-10/msg00048.html>.
+# But skip the test when cross-compiling, to prevent problems like the one
+# reported by Chris Johns in
+# <http://lists.gnu.org/archive/html/autoconf/2007-03/msg00085.html>.
+#
+AC_DEFUN([_AC_LINK_IFELSE],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_try_link],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_try_link], [LINENO],
+    [Try to link conftest.$ac_ext, and return whether this succeeded.])],
+  [$0_BODY])]dnl
+[m4_ifvaln([$1], [AC_LANG_CONFTEST([$1])])]dnl
+[AS_IF([ac_fn_[]_AC_LANG_ABBREV[]_try_link "$LINENO"], [$2], [$3])
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext[]m4_ifval([$1], [ conftest.$ac_ext])[]dnl
+])# _AC_LINK_IFELSE
+
+
+# AC_LINK_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ------------------------------------------------------------
+# Try to link PROGRAM.  Requires that the compiler for the current
+# language was checked for, hence do not use this macro in macros looking
+# for a compiler.
+AC_DEFUN([AC_LINK_IFELSE],
+[AC_LANG_COMPILER_REQUIRE()dnl
+_AC_LINK_IFELSE($@)])
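+
+# Illustrative usage sketch: temporarily add a library, then check that a
+# call links (the `-lm' choice and variable names are hypothetical):
+#
+#   ac_save_LIBS=$LIBS
+#   LIBS="-lm $LIBS"
+#   AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <math.h>]],
+#                                   [[return (int) sqrt (2.0);]])],
+#     [ac_have_libm=yes], [ac_have_libm=no])
+#   LIBS=$ac_save_LIBS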
+
+
+# AC_TRY_LINK(INCLUDES, FUNCTION-BODY,
+#             [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ------------------------------------------------
+# Unlike AC_LINK_IFELSE, this macro double quotes its first two arguments.
+AU_DEFUN([AC_TRY_LINK],
+[AC_LINK_IFELSE([AC_LANG_PROGRAM([[$1]], [[$2]])], [$3], [$4])])
+
+
+# AC_COMPILE_CHECK(ECHO-TEXT, INCLUDES, FUNCTION-BODY,
+#                  ACTION-IF-TRUE, [ACTION-IF-FALSE])
+# ---------------------------------------------------
+AU_DEFUN([AC_COMPILE_CHECK],
+[m4_ifvaln([$1], [AC_MSG_CHECKING([for $1])])dnl
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[$2]], [[$3]])], [$4], [$5])])
+
+
+
+
+## ------------------------------- ##
+## Checking for runtime features.  ##
+## ------------------------------- ##
+
+
+# _AC_RUN_IFELSE_BODY
+# -------------------
+# Shell function body for _AC_RUN_IFELSE.
+m4_define([_AC_RUN_IFELSE_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AS_IF([_AC_DO_VAR(ac_link) && _AC_DO_TOKENS(./conftest$ac_exeext)],
+      [ac_retval=0],
+      [AS_ECHO(["$as_me: program exited with status $ac_status"]) >&AS_MESSAGE_LOG_FD
+       _AC_MSG_LOG_CONFTEST
+       ac_retval=$ac_status])
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  AS_LINENO_POP
+  AS_SET_STATUS([$ac_retval])
+])# _AC_RUN_IFELSE_BODY
+
+
+# _AC_RUN_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
+# ------------------------------------------------------------
+# Compile, link, and run.
+# This macro can be used during the selection of a compiler.
+# We also remove conftest.o because, if the compilation fails, some
+# compilers don't remove it.  We remove gmon.out and bb.out, which may be
+# created during the run if the program is built with profiling support.
+AC_DEFUN([_AC_RUN_IFELSE],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_try_run],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_try_run], [LINENO],
+    [Try to link conftest.$ac_ext, and return whether this succeeded.
+     Assumes that executables *can* be run.])],
+  [$0_BODY])]dnl
+[m4_ifvaln([$1], [AC_LANG_CONFTEST([$1])])]dnl
+[AS_IF([ac_fn_[]_AC_LANG_ABBREV[]_try_run "$LINENO"], [$2], [$3])
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+  conftest.$ac_objext conftest.beam[]m4_ifval([$1], [ conftest.$ac_ext])[]dnl
+])# _AC_RUN_IFELSE
+
+# AC_RUN_IFELSE(PROGRAM,
+#               [ACTION-IF-TRUE], [ACTION-IF-FALSE],
+#               [ACTION-IF-CROSS-COMPILING = RUNTIME-ERROR])
+# ----------------------------------------------------------
+# Compile, link, and run. Requires that the compiler for the current
+# language was checked for, hence do not use this macro in macros looking
+# for a compiler.
+AC_DEFUN([AC_RUN_IFELSE],
+[AC_LANG_COMPILER_REQUIRE()dnl
+m4_ifval([$4], [],
+	 [AC_DIAGNOSE([cross],
+		     [$0 called without default to allow cross compiling])])dnl
+AS_IF([test "$cross_compiling" = yes],
+  [m4_default([$4],
+	   [AC_MSG_FAILURE([cannot run test program while cross compiling])])],
+  [_AC_RUN_IFELSE($@)])
+])
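+
+# Illustrative usage sketch; the fourth argument supplies a conservative
+# guess so the test still works when cross compiling (variable names are
+# hypothetical):
+#
+#   AC_RUN_IFELSE([AC_LANG_PROGRAM([], [[return sizeof (long) == 8 ? 0 : 1;]])],
+#     [ac_long_is_64_bit=yes], [ac_long_is_64_bit=no],
+#     [ac_long_is_64_bit=unknown])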
+
+
+# AC_TRY_RUN(PROGRAM,
+#            [ACTION-IF-TRUE], [ACTION-IF-FALSE],
+#            [ACTION-IF-CROSS-COMPILING = RUNTIME-ERROR])
+# -------------------------------------------------------
+AU_DEFUN([AC_TRY_RUN],
+[AC_RUN_IFELSE([AC_LANG_SOURCE([[$1]])], [$2], [$3], [$4])])
+
+
+
+## ------------------------------------- ##
+## Checking for the existence of files.  ##
+## ------------------------------------- ##
+
+# AC_CHECK_FILE(FILE, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# -------------------------------------------------------------
+#
+# Check for the existence of FILE.
+AC_DEFUN([AC_CHECK_FILE],
+[AC_DIAGNOSE([cross],
+	     [cannot check for file existence when cross compiling])dnl
+AS_VAR_PUSHDEF([ac_File], [ac_cv_file_$1])dnl
+AC_CACHE_CHECK([for $1], [ac_File],
+[test "$cross_compiling" = yes &&
+  AC_MSG_ERROR([cannot check for file existence when cross compiling])
+if test -r "$1"; then
+  AS_VAR_SET([ac_File], [yes])
+else
+  AS_VAR_SET([ac_File], [no])
+fi])
+AS_VAR_IF([ac_File], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_File])dnl
+])# AC_CHECK_FILE
+
+
+# _AC_CHECK_FILES(FILE)
+# ---------------------
+# Helper to AC_CHECK_FILES, which generates two of the three arguments
+# to AC_CHECK_FILE based on FILE.
+m4_define([_AC_CHECK_FILES],
+[[$1], [AC_DEFINE_UNQUOTED(AS_TR_CPP([HAVE_$1]), [1],
+  [Define to 1 if you have the file `$1'.])]])
+
+
+# AC_CHECK_FILES(FILE..., [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# -----------------------------------------------------------------
+# For each word in the whitespace-separated FILE list, perform either
+# ACTION-IF-FOUND or ACTION-IF-NOT-FOUND.  For files that exist, also
+# provide the preprocessor variable HAVE_FILE.
+AC_DEFUN([AC_CHECK_FILES],
+[m4_map_args_w([$1], [AC_CHECK_FILE(_$0(], [)[$2], [$3])])])
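+
+# Illustrative usage sketch (the file list is just an example):
+#
+#   AC_CHECK_FILES([/dev/urandom /dev/random], [],
+#     [AC_MSG_WARN([no kernel random device found])])
+#
+# Each existing file yields a HAVE_* definition such as HAVE__DEV_URANDOM.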
+
+
+## ------------------------------- ##
+## Checking for declared symbols.  ##
+## ------------------------------- ##
+
+
+# _AC_CHECK_DECL_BODY
+# -------------------
+# Shell function body for AC_CHECK_DECL.
+m4_define([_AC_CHECK_DECL_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  [as_decl_name=`echo $][2|sed 's/ *(.*//'`]
+  [as_decl_use=`echo $][2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'`]
+  AC_CACHE_CHECK([whether $as_decl_name is declared], [$[]3],
+  [AC_COMPILE_IFELSE([AC_LANG_PROGRAM([$[]4],
+[@%:@ifndef $[]as_decl_name
+@%:@ifdef __cplusplus
+  (void) $[]as_decl_use;
+@%:@else
+  (void) $[]as_decl_name;
+@%:@endif
+@%:@endif
+])],
+		   [AS_VAR_SET([$[]3], [yes])],
+		   [AS_VAR_SET([$[]3], [no])])])
+  AS_LINENO_POP
+])# _AC_CHECK_DECL_BODY
+
+# AC_CHECK_DECL(SYMBOL,
+#               [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#               [INCLUDES = DEFAULT-INCLUDES])
+# -------------------------------------------------------
+# Check whether SYMBOL (a function, variable, or constant) is declared.
+AC_DEFUN([AC_CHECK_DECL],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_check_decl],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_check_decl],
+    [LINENO SYMBOL VAR INCLUDES],
+    [Tests whether SYMBOL is declared in INCLUDES, setting cache variable
+     VAR accordingly.])],
+  [_$0_BODY])]dnl
+[AS_VAR_PUSHDEF([ac_Symbol], [ac_cv_have_decl_$1])]dnl
+[ac_fn_[]_AC_LANG_ABBREV[]_check_decl ]dnl
+["$LINENO" "$1" "ac_Symbol" "AS_ESCAPE([AC_INCLUDES_DEFAULT([$4])], [""])"
+AS_VAR_IF([ac_Symbol], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_Symbol])dnl
+])# AC_CHECK_DECL
+
+
+# _AC_CHECK_DECLS(SYMBOL, ACTION-IF-FOUND, ACTION-IF-NOT-FOUND,
+#                 INCLUDES)
+# -------------------------------------------------------------
+# Helper to AC_CHECK_DECLS, which generates the check for a single
+# SYMBOL with INCLUDES, performs the AC_DEFINE, then expands
+# ACTION-IF-FOUND or ACTION-IF-NOT-FOUND.
+m4_define([_AC_CHECK_DECLS],
+[AC_CHECK_DECL([$1], [ac_have_decl=1], [ac_have_decl=0], [$4])]dnl
+[AC_DEFINE_UNQUOTED(AS_TR_CPP(m4_bpatsubst(HAVE_DECL_[$1],[ *(.*])),
+  [$ac_have_decl],
+  [Define to 1 if you have the declaration of `$1',
+   and to 0 if you don't.])]dnl
+[m4_ifvaln([$2$3], [AS_IF([test $ac_have_decl = 1], [$2], [$3])])])
+
+# AC_CHECK_DECLS(SYMBOLS,
+#                [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#                [INCLUDES = DEFAULT-INCLUDES])
+# --------------------------------------------------------
+# Defines HAVE_DECL_SYMBOL to 1 if declared, 0 otherwise.  See the
+# documentation for a detailed explanation of how this differs from the
+# other AC_CHECK_*S macros.  SYMBOLS is an m4 list.
+AC_DEFUN([AC_CHECK_DECLS],
+[m4_map_args_sep([_$0(], [, [$2], [$3], [$4])], [], $1)])
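+
+# Illustrative usage sketch (the symbols are just examples):
+#
+#   AC_CHECK_DECLS([strerror_r, strnlen], [], [], [[#include <string.h>]])
+#
+# This defines HAVE_DECL_STRERROR_R and HAVE_DECL_STRNLEN to 1 or 0.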
+
+
+# _AC_CHECK_DECL_ONCE(SYMBOL)
+# ---------------------------
+# Check for a single SYMBOL once.
+m4_define([_AC_CHECK_DECL_ONCE],
+[AC_DEFUN([_AC_Check_Decl_$1], [_AC_CHECK_DECLS([$1])])]dnl
+[AC_REQUIRE([_AC_Check_Decl_$1])])
+
+# AC_CHECK_DECLS_ONCE(SYMBOLS)
+# ----------------------------
+# Like AC_CHECK_DECLS(SYMBOLS), but do it at most once.
+AC_DEFUN([AC_CHECK_DECLS_ONCE],
+[m4_map_args_sep([_AC_CHECK_DECL_ONCE(], [)], [], $1)])
+
+
+
+## ---------------------------------- ##
+## Replacement of library functions.  ##
+## ---------------------------------- ##
+
+
+# AC_CONFIG_LIBOBJ_DIR(DIRNAME)
+# -----------------------------
+# Announce LIBOBJ replacement files are in $top_srcdir/DIRNAME.
+AC_DEFUN_ONCE([AC_CONFIG_LIBOBJ_DIR],
+[m4_divert_text([DEFAULTS], [ac_config_libobj_dir=$1])])
+
+
+# AC_LIBSOURCE(FILE-NAME)
+# -----------------------
+# Announce we might need the file `FILE-NAME'.
+m4_define([AC_LIBSOURCE], [])
+
+
+# AC_LIBSOURCES([FILE-NAME1, ...])
+# --------------------------------
+# Announce we might need these files.
+AC_DEFUN([AC_LIBSOURCES],
+[m4_map_args([AC_LIBSOURCE], $1)])
+
+
+# _AC_LIBOBJ(FILE-NAME-NOEXT, ACTION-IF-INDIR)
+# --------------------------------------------
+# We need `FILE-NAME-NOEXT.o', save this into `LIBOBJS'.
+m4_define([_AC_LIBOBJ],
+[case " $LIB@&t@OBJS " in
+  *" $1.$ac_objext "* ) ;;
+  *) AC_SUBST([LIB@&t@OBJS], ["$LIB@&t@OBJS $1.$ac_objext"]) ;;
+esac
+])
+
+
+# AC_LIBOBJ(FILE-NAME-NOEXT)
+# --------------------------
+# We need `FILE-NAME-NOEXT.o', save this into `LIBOBJS'.
+AC_DEFUN([AC_LIBOBJ],
+[_AC_LIBOBJ([$1])]dnl
+[AS_LITERAL_WORD_IF([$1], [AC_LIBSOURCE([$1.c])],
+  [AC_DIAGNOSE([syntax], [$0($1): you should use literals])])])
+
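+# Illustrative usage sketch: fall back to a bundled replacement when a
+# function is missing (the function name is just an example):
+#
+#   AC_CHECK_FUNC([strnlen], [], [AC_LIBOBJ([strnlen])])
+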
+
+# _AC_LIBOBJS_NORMALIZE
+# ---------------------
+# Clean up LIBOBJS and LTLIBOBJS so that they work with 1. ac_objext,
+# 2. Automake's ANSI2KNR, 3. Libtool, 4. combination of the three.
+# Used with AC_CONFIG_COMMANDS_PRE.
+AC_DEFUN([_AC_LIBOBJS_NORMALIZE],
+[ac_libobjs=
+ac_ltlibobjs=
+m4_ifndef([AM_C_PROTOTYPES], [U=
+])dnl
+for ac_i in : $LIB@&t@OBJS; do test "x$ac_i" = x: && continue
+  # 1. Remove the extension, and $U if already installed.
+  ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
+  ac_i=`AS_ECHO(["$ac_i"]) | sed "$ac_script"`
+  # 2. Prepend LIBOBJDIR.  When used with automake>=1.10 LIBOBJDIR
+  #    will be set to the directory where LIBOBJS objects are built.
+  AS_VAR_APPEND([ac_libobjs], [" \${LIBOBJDIR}$ac_i\$U.$ac_objext"])
+  AS_VAR_APPEND([ac_ltlibobjs], [" \${LIBOBJDIR}$ac_i"'$U.lo'])
+done
+AC_SUBST([LIB@&t@OBJS], [$ac_libobjs])
+AC_SUBST([LTLIBOBJS], [$ac_ltlibobjs])
+])
+
+
+## ----------------------------------- ##
+## Checking compiler characteristics.  ##
+## ----------------------------------- ##
+
+
+# _AC_COMPUTE_INT_COMPILE(EXPRESSION, VARIABLE, PROLOGUE, [IF-SUCCESS],
+#                         [IF-FAILURE])
+# ---------------------------------------------------------------------
+# Compute the integer EXPRESSION and store the result in the VARIABLE.
+# Works OK if cross compiling, but assumes twos-complement arithmetic.
+m4_define([_AC_COMPUTE_INT_COMPILE],
+[# Depending upon the size, compute the lo and hi bounds.
+_AC_COMPILE_IFELSE([AC_LANG_BOOL_COMPILE_TRY([$3], [($1) >= 0])],
+ [ac_lo=0 ac_mid=0
+  while :; do
+    _AC_COMPILE_IFELSE([AC_LANG_BOOL_COMPILE_TRY([$3], [($1) <= $ac_mid])],
+		       [ac_hi=$ac_mid; break],
+		       [AS_VAR_ARITH([ac_lo], [$ac_mid + 1])
+			if test $ac_lo -le $ac_mid; then
+			  ac_lo= ac_hi=
+			  break
+			fi
+			AS_VAR_ARITH([ac_mid], [2 '*' $ac_mid + 1])])
+  done],
+[AC_COMPILE_IFELSE([AC_LANG_BOOL_COMPILE_TRY([$3], [($1) < 0])],
+ [ac_hi=-1 ac_mid=-1
+  while :; do
+    _AC_COMPILE_IFELSE([AC_LANG_BOOL_COMPILE_TRY([$3], [($1) >= $ac_mid])],
+		       [ac_lo=$ac_mid; break],
+		       [AS_VAR_ARITH([ac_hi], ['(' $ac_mid ')' - 1])
+			if test $ac_mid -le $ac_hi; then
+			  ac_lo= ac_hi=
+			  break
+			fi
+			AS_VAR_ARITH([ac_mid], [2 '*' $ac_mid])])
+  done],
+ [ac_lo= ac_hi=])])
+# Binary search between lo and hi bounds.
+while test "x$ac_lo" != "x$ac_hi"; do
+  AS_VAR_ARITH([ac_mid], ['(' $ac_hi - $ac_lo ')' / 2 + $ac_lo])
+  _AC_COMPILE_IFELSE([AC_LANG_BOOL_COMPILE_TRY([$3], [($1) <= $ac_mid])],
+		     [ac_hi=$ac_mid],
+		     [AS_VAR_ARITH([ac_lo], ['(' $ac_mid ')' + 1])])
+done
+case $ac_lo in @%:@((
+?*) AS_VAR_SET([$2], [$ac_lo]); $4 ;;
+'') $5 ;;
+esac[]dnl
+])# _AC_COMPUTE_INT_COMPILE
+
+
+# _AC_COMPUTE_INT_RUN(EXPRESSION, VARIABLE, PROLOGUE, [IF-SUCCESS],
+#                     [IF-FAILURE])
+# -----------------------------------------------------------------
+# Store the evaluation of the integer EXPRESSION in VARIABLE.
+#
+# AC_LANG_INT_SAVE intentionally does not end the file in a newline, so
+# we must add one to make it a text file before passing it to read.
+m4_define([_AC_COMPUTE_INT_RUN],
+[_AC_RUN_IFELSE([AC_LANG_INT_SAVE([$3], [$1])],
+		[echo >>conftest.val; read $2 <conftest.val; $4], [$5])
+rm -f conftest.val
+])# _AC_COMPUTE_INT_RUN
+
+
+# _AC_COMPUTE_INT_BODY
+# --------------------
+# Shell function body for AC_COMPUTE_INT.
+m4_define([_AC_COMPUTE_INT_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  if test "$cross_compiling" = yes; then
+    _AC_COMPUTE_INT_COMPILE([$[]2], [$[]3], [$[]4],
+			    [ac_retval=0], [ac_retval=1])
+  else
+    _AC_COMPUTE_INT_RUN([$[]2], [$[]3], [$[]4],
+			[ac_retval=0], [ac_retval=1])
+  fi
+  AS_LINENO_POP
+  AS_SET_STATUS([$ac_retval])
+])# _AC_COMPUTE_INT_BODY
+
+# AC_COMPUTE_INT(VARIABLE, EXPRESSION, PROLOGUE, [IF-FAILS])
+# ----------------------------------------------------------
+# Store into the shell variable VARIABLE the value of the integer C expression
+# EXPRESSION.  The value should fit in an initializer in a C variable of type
+# `signed long'.  If no PROLOGUE is specified, the default includes are used.
+# IF-FAILS is evaluated if the value cannot be found (which includes the
+# case of cross-compilation, if EXPRESSION is not computable at compile time).
+AC_DEFUN([AC_COMPUTE_INT],
+[AC_LANG_COMPILER_REQUIRE()]dnl
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_compute_int],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_compute_int],
+    [LINENO EXPR VAR INCLUDES],
+    [Tries to find the compile-time value of EXPR in a program that includes
+     INCLUDES, setting VAR accordingly.  Returns whether the value could
+     be computed])],
+    [_$0_BODY])]dnl
+[AS_IF([ac_fn_[]_AC_LANG_ABBREV[]_compute_int "$LINENO" "$2" "$1" ]dnl
+       ["AS_ESCAPE([$3], [""])"],
+       [], [$4])
+])# AC_COMPUTE_INT
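+
+# Illustrative usage sketch (the variable name is hypothetical):
+#
+#   AC_COMPUTE_INT([ac_long_size], [(long int) (sizeof (long int))],
+#                  [AC_INCLUDES_DEFAULT], [ac_long_size=0])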
+
+# _AC_COMPUTE_INT(EXPRESSION, VARIABLE, PROLOGUE, [IF-FAILS])
+# -----------------------------------------------------------
+# FIXME: this private interface was used by several packages.
+# Give them time to transition to AC_COMPUTE_INT and then delete this one.
+AC_DEFUN([_AC_COMPUTE_INT],
+[AC_COMPUTE_INT([$2], [$1], [$3], [$4])
+AC_DIAGNOSE([obsolete],
+[The macro `_AC_COMPUTE_INT' is obsolete and will be deleted in a
+future version of Autoconf.  Hence, it is suggested that you use
+instead the public AC_COMPUTE_INT macro.  Note that the arguments are
+slightly different between the two.])dnl
+])# _AC_COMPUTE_INT
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/headers.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/headers.m4
new file mode 100644
index 0000000..1bc0feb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/headers.m4
@@ -0,0 +1,886 @@
+# This file is part of Autoconf.			-*- Autoconf -*-
+# Checking for headers.
+#
+# Copyright (C) 1988, 1999, 2000, 2001, 2002, 2003, 2004, 2006, 2008,
+# 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# Table of contents
+#
+# 1. Generic tests for headers
+# 2. Default includes
+# 3. Headers to check with AC_CHECK_HEADERS
+# 4. Tests for specific headers
+
+
+## ------------------------------ ##
+## 1. Generic tests for headers.  ##
+## ------------------------------ ##
+
+
+# AC_CHECK_HEADER(HEADER-FILE,
+#		  [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		  [INCLUDES])
+# ---------------------------------------------------------
+# We are slowly moving to checking headers with the compiler instead
+# of the preproc, so that we actually learn about the usability of a
+# header instead of its mere presence.  But since users are used to
+# the old semantics, they check for headers in random order and
+# without providing prerequisite headers.  This macro implements the
+# transition phase, and should be cleaned up later to use compilation
+# only.
+#
+# If INCLUDES is empty, then check both via the compiler and preproc.
+# If the results are different, issue a warning, but keep the preproc
+# result.
+#
+# If INCLUDES is `-', keep only the old semantics.
+#
+# If INCLUDES is specified and different from `-', then use the new
+# semantics only.
+#
+# The m4_indir allows for fewer expansions of $@.
+AC_DEFUN([AC_CHECK_HEADER],
+[m4_indir(m4_case([$4],
+		  [],  [[_AC_CHECK_HEADER_MONGREL]],
+		  [-], [[_AC_CHECK_HEADER_PREPROC]],
+		       [[_AC_CHECK_HEADER_COMPILE]]), $@)
+])# AC_CHECK_HEADER
+
+
+# _AC_CHECK_HEADER_MONGREL_BODY
+# -----------------------------
+# Shell function body for _AC_CHECK_HEADER_MONGREL
+m4_define([_AC_CHECK_HEADER_MONGREL_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AS_VAR_SET_IF([$[]3],
+		[AC_CACHE_CHECK([for $[]2], [$[]3], [])],
+		[# Is the header compilable?
+AC_MSG_CHECKING([$[]2 usability])
+AC_COMPILE_IFELSE([AC_LANG_SOURCE([$[]4
+@%:@include <$[]2>])],
+		  [ac_header_compiler=yes],
+		  [ac_header_compiler=no])
+AC_MSG_RESULT([$ac_header_compiler])
+
+# Is the header present?
+AC_MSG_CHECKING([$[]2 presence])
+AC_PREPROC_IFELSE([AC_LANG_SOURCE([@%:@include <$[]2>])],
+		  [ac_header_preproc=yes],
+		  [ac_header_preproc=no])
+AC_MSG_RESULT([$ac_header_preproc])
+
+# So?  What about this header?
+case $ac_header_compiler:$ac_header_preproc:$ac_[]_AC_LANG_ABBREV[]_preproc_warn_flag in #((
+  yes:no: )
+    AC_MSG_WARN([$[]2: accepted by the compiler, rejected by the preprocessor!])
+    AC_MSG_WARN([$[]2: proceeding with the compiler's result])
+    ;;
+  no:yes:* )
+    AC_MSG_WARN([$[]2: present but cannot be compiled])
+    AC_MSG_WARN([$[]2:     check for missing prerequisite headers?])
+    AC_MSG_WARN([$[]2: see the Autoconf documentation])
+    AC_MSG_WARN([$[]2:     section "Present But Cannot Be Compiled"])
+    AC_MSG_WARN([$[]2: proceeding with the compiler's result])
+m4_ifset([AC_PACKAGE_BUGREPORT],
+[m4_n([( AS_BOX([Report this to ]AC_PACKAGE_BUGREPORT)
+     ) | sed "s/^/$as_me: WARNING:     /" >&2])])dnl
+    ;;
+esac
+  AC_CACHE_CHECK([for $[]2], [$[]3],
+		 [AS_VAR_SET([$[]3], [$ac_header_compiler])])])
+  AS_LINENO_POP
+])#_AC_CHECK_HEADER_MONGREL_BODY
+
+# _AC_CHECK_HEADER_MONGREL(HEADER-FILE,
+#			   [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#			   [INCLUDES = DEFAULT-INCLUDES])
+# ------------------------------------------------------------------
+# Check using both the compiler and the preprocessor.  If they disagree,
+# warn, and the preproc wins.
+#
+# This is not based on _AC_CHECK_HEADER_COMPILE and _AC_CHECK_HEADER_PREPROC
+# because trying to factor everything would obfuscate the code, in
+# particular because of the cache variables and the `checking ...' messages.
+AC_DEFUN([_AC_CHECK_HEADER_MONGREL],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_check_header_mongrel],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_check_header_mongrel],
+    [LINENO HEADER VAR INCLUDES],
+    [Tests whether HEADER exists, giving a warning if it cannot be compiled
+     using the include files in INCLUDES and setting the cache variable VAR
+     accordingly.])],
+  [$0_BODY])]dnl
+[AS_VAR_PUSHDEF([ac_Header], [ac_cv_header_$1])]dnl
+[ac_fn_[]_AC_LANG_ABBREV[]_check_header_mongrel ]dnl
+["$LINENO" "$1" "ac_Header" "AS_ESCAPE([AC_INCLUDES_DEFAULT([$4])], [""])"
+AS_VAR_IF([ac_Header], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_Header])])# _AC_CHECK_HEADER_MONGREL
+
+
+# _AC_CHECK_HEADER_COMPILE_BODY
+# -----------------------------
+# Shell function body for _AC_CHECK_HEADER_COMPILE
+m4_define([_AC_CHECK_HEADER_COMPILE_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AC_CACHE_CHECK([for $[]2], [$[]3],
+		 [AC_COMPILE_IFELSE([AC_LANG_SOURCE([$[]4
+@%:@include <$[]2>])],
+				    [AS_VAR_SET([$[]3], [yes])],
+				    [AS_VAR_SET([$[]3], [no])])])
+  AS_LINENO_POP
+])# _AC_CHECK_HEADER_COMPILE_BODY
+
+# _AC_CHECK_HEADER_COMPILE(HEADER-FILE,
+#		       [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		       [INCLUDES = DEFAULT-INCLUDES])
+# --------------------------------------------------------------
+# Check the compiler accepts HEADER-FILE.  The INCLUDES are defaulted.
+AC_DEFUN([_AC_CHECK_HEADER_COMPILE],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_check_header_compile],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_check_header_compile],
+    [LINENO HEADER VAR INCLUDES],
+    [Tests whether HEADER exists and can be compiled using the include files
+     in INCLUDES, setting the cache variable VAR accordingly.])],
+  [$0_BODY])]dnl
+[AS_VAR_PUSHDEF([ac_Header], [ac_cv_header_$1])]dnl
+[ac_fn_[]_AC_LANG_ABBREV[]_check_header_compile ]dnl
+["$LINENO" "$1" "ac_Header" "AS_ESCAPE([AC_INCLUDES_DEFAULT([$4])], [""])"
+AS_VAR_IF([ac_Header], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_Header])])# _AC_CHECK_HEADER_COMPILE
+
+# _AC_CHECK_HEADER_PREPROC_BODY
+# -----------------------------
+# Shell function body for _AC_CHECK_HEADER_PREPROC.
+m4_define([_AC_CHECK_HEADER_PREPROC_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AC_CACHE_CHECK([for $[]2], [$[]3],
+  [AC_PREPROC_IFELSE([AC_LANG_SOURCE([@%:@include <$[]2>])],
+		     [AS_VAR_SET([$[]3], [yes])],
+		     [AS_VAR_SET([$[]3], [no])])])
+  AS_LINENO_POP
+])# _AC_CHECK_HEADER_PREPROC_BODY
+
+
+
+# _AC_CHECK_HEADER_PREPROC(HEADER-FILE,
+#		       [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# --------------------------------------------------------------
+# Check the preprocessor accepts HEADER-FILE.
+AC_DEFUN([_AC_CHECK_HEADER_PREPROC],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_check_header_preproc],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_check_header_preproc],
+    [LINENO HEADER VAR],
+    [Tests whether HEADER is present, setting the cache variable VAR accordingly.])],
+  [$0_BODY])]dnl
+[AS_VAR_PUSHDEF([ac_Header], [ac_cv_header_$1])]dnl
+[ac_fn_[]_AC_LANG_ABBREV[]_check_header_preproc "$LINENO" "$1" "ac_Header"
+AS_VAR_IF([ac_Header], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_Header])dnl
+])# _AC_CHECK_HEADER_PREPROC
+
+# _AC_CHECK_HEADER_OLD(HEADER-FILE, [ACTION-IF-FOUND],
+#                      [ACTION-IF-NOT-FOUND])
+# _AC_CHECK_HEADER_NEW(HEADER-FILE, [ACTION-IF-FOUND],
+#                      [ACTION-IF-NOT-FOUND])
+# ----------------------------------------------------
+# Some packages used these undocumented macros.  Even worse, gcc
+# redefined AC_CHECK_HEADER in terms of _AC_CHECK_HEADER_OLD, so we
+# can't do the simpler:
+#   AU_DEFUN([_AC_CHECK_HEADER_OLD],
+#     [AC_CHECK_HEADER([$1], [$2], [$3], [-])])
+AC_DEFUN([_AC_CHECK_HEADER_OLD],
+[AC_DIAGNOSE([obsolete], [The macro `$0' is obsolete.
+You should use AC_CHECK_HEADER with a fourth argument.])]dnl
+[_AC_CHECK_HEADER_PREPROC($@)])
+
+AC_DEFUN([_AC_CHECK_HEADER_NEW],
+[AC_DIAGNOSE([obsolete], [The macro `$0' is obsolete.
+You should use AC_CHECK_HEADER with a fourth argument.])]dnl
+[_AC_CHECK_HEADER_COMPILE($@)])
+
+
+# _AH_CHECK_HEADER(HEADER-FILE)
+# -----------------------------
+# Prepare the autoheader snippet for HEADER-FILE.
+m4_define([_AH_CHECK_HEADER],
+[AH_TEMPLATE(AS_TR_CPP([HAVE_$1]),
+  [Define to 1 if you have the <$1> header file.])])
+
+
+# AH_CHECK_HEADERS(HEADER-FILE...)
+# --------------------------------
+m4_define([AH_CHECK_HEADERS],
+[m4_foreach_w([AC_Header], [$1], [_AH_CHECK_HEADER(m4_defn([AC_Header]))])])
+
+
+# AC_CHECK_HEADERS(HEADER-FILE...,
+#		   [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		   [INCLUDES])
+# ----------------------------------------------------------
+# Check for each whitespace-separated HEADER-FILE (omitting the <> or
+# ""), and perform ACTION-IF-FOUND or ACTION-IF-NOT-FOUND for each
+# header.  INCLUDES is as for AC_CHECK_HEADER.  Additionally, make the
+# preprocessor definition HAVE_HEADER_FILE available for each found
+# header.  Either ACTION may include `break' to stop the search.
+AC_DEFUN([AC_CHECK_HEADERS],
+[m4_map_args_w([$1], [_AH_CHECK_HEADER(], [)])]dnl
+[AS_FOR([AC_header], [ac_header], [$1],
+[AC_CHECK_HEADER(AC_header,
+		 [AC_DEFINE_UNQUOTED(AS_TR_CPP([HAVE_]AC_header)) $2],
+		 [$3], [$4])dnl])
+])# AC_CHECK_HEADERS
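+
+# Illustrative usage sketch (the header list is just an example):
+#
+#   AC_CHECK_HEADERS([fcntl.h sys/time.h unistd.h], [],
+#     [AC_MSG_WARN([an optional header is missing])])
+#
+# Each found header yields a definition such as HAVE_FCNTL_H.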
+
+
+# _AC_CHECK_HEADER_ONCE(HEADER-FILE)
+# ----------------------------------
+# Check for a single HEADER-FILE once.
+m4_define([_AC_CHECK_HEADER_ONCE],
+[_AH_CHECK_HEADER([$1])AC_DEFUN([_AC_Header_]m4_translit([[$1]],
+    [./-], [___]),
+  [m4_divert_text([INIT_PREPARE], [AS_VAR_APPEND([ac_header_list], [" $1"])])
+_AC_HEADERS_EXPANSION])AC_REQUIRE([_AC_Header_]m4_translit([[$1]],
+    [./-], [___]))])
+
+
+# AC_CHECK_HEADERS_ONCE(HEADER-FILE...)
+# -------------------------------------
+# Add each whitespace-separated name in HEADER-FILE to the list of
+# headers to check once.
+AC_DEFUN([AC_CHECK_HEADERS_ONCE],
+[m4_map_args_w([$1], [_AC_CHECK_HEADER_ONCE(], [)])])
+
+m4_define([_AC_HEADERS_EXPANSION],
+[
+  m4_divert_text([DEFAULTS], [ac_header_list=])
+  AC_CHECK_HEADERS([$ac_header_list], [], [], [AC_INCLUDES_DEFAULT])
+  m4_define([_AC_HEADERS_EXPANSION], [])
+])
+
+
+
+
+## --------------------- ##
+## 2. Default includes.  ##
+## --------------------- ##
+
+# Always use the same set of default headers for all the generic
+# macros.  It is easier to document, to extend, and to understand than
+# having specific defaults for each macro.
+
+# _AC_INCLUDES_DEFAULT_REQUIREMENTS
+# ---------------------------------
+# Required when AC_INCLUDES_DEFAULT uses its default branch.
+AC_DEFUN([_AC_INCLUDES_DEFAULT_REQUIREMENTS],
+[m4_divert_text([DEFAULTS],
+[# Factoring default headers for most tests.
+dnl If ever you change this variable, please keep autoconf.texi in sync.
+ac_includes_default="\
+#include <stdio.h>
+#ifdef HAVE_SYS_TYPES_H
+# include <sys/types.h>
+#endif
+#ifdef HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+#ifdef STDC_HEADERS
+# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
+#endif
+#ifdef HAVE_STRING_H
+# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
+#  include <memory.h>
+# endif
+# include <string.h>
+#endif
+#ifdef HAVE_STRINGS_H
+# include <strings.h>
+#endif
+#ifdef HAVE_INTTYPES_H
+# include <inttypes.h>
+#endif
+#ifdef HAVE_STDINT_H
+# include <stdint.h>
+#endif
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif"
+])dnl
+AC_REQUIRE([AC_HEADER_STDC])dnl
+# On IRIX 5.3, sys/types and inttypes.h are conflicting.
+AC_CHECK_HEADERS([sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
+		  inttypes.h stdint.h unistd.h],
+		 [], [], $ac_includes_default)
+])# _AC_INCLUDES_DEFAULT_REQUIREMENTS
+
+
+# AC_INCLUDES_DEFAULT([INCLUDES])
+# -------------------------------
+# If INCLUDES is empty, expand in default includes, otherwise in
+# INCLUDES.
+# In most cases INCLUDES is not double quoted as it should be, and if,
+# for instance, INCLUDES = `#include <stdio.h>', then unless we force
+# a newline, the hash will swallow the closing paren and so on; the
+# usual failure.
+# Take no risk: always force the newline.
+AC_DEFUN([AC_INCLUDES_DEFAULT],
+[m4_ifval([$1], [$1
+],
+	  [AC_REQUIRE([_AC_INCLUDES_DEFAULT_REQUIREMENTS])dnl
+$ac_includes_default])])
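+
+# Illustrative usage sketch: embed the default includes as the prologue of
+# a compile test (the result variable is hypothetical):
+#
+#   AC_COMPILE_IFELSE(
+#     [AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+#                      [[struct stat st; return (int) sizeof st;]])],
+#     [ac_have_struct_stat=yes], [ac_have_struct_stat=no])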
+
+
+
+
+
+## ------------------------------------------- ##
+## 3. Headers to check with AC_CHECK_HEADERS.  ##
+## ------------------------------------------- ##
+
+# errno.h is portable.
+
+AN_HEADER([OS.h],               [AC_CHECK_HEADERS])
+AN_HEADER([argz.h],             [AC_CHECK_HEADERS])
+AN_HEADER([arpa/inet.h],        [AC_CHECK_HEADERS])
+AN_HEADER([fcntl.h],            [AC_CHECK_HEADERS])
+AN_HEADER([fenv.h],             [AC_CHECK_HEADERS])
+AN_HEADER([float.h],            [AC_CHECK_HEADERS])
+AN_HEADER([fs_info.h],          [AC_CHECK_HEADERS])
+AN_HEADER([inttypes.h],         [AC_CHECK_HEADERS])
+AN_HEADER([langinfo.h],         [AC_CHECK_HEADERS])
+AN_HEADER([libintl.h],          [AC_CHECK_HEADERS])
+AN_HEADER([limits.h],           [AC_CHECK_HEADERS])
+AN_HEADER([locale.h],           [AC_CHECK_HEADERS])
+AN_HEADER([mach/mach.h],        [AC_CHECK_HEADERS])
+AN_HEADER([malloc.h],           [AC_CHECK_HEADERS])
+AN_HEADER([memory.h],           [AC_CHECK_HEADERS])
+AN_HEADER([mntent.h],           [AC_CHECK_HEADERS])
+AN_HEADER([mnttab.h],           [AC_CHECK_HEADERS])
+AN_HEADER([netdb.h],            [AC_CHECK_HEADERS])
+AN_HEADER([netinet/in.h],       [AC_CHECK_HEADERS])
+AN_HEADER([nl_types.h],         [AC_CHECK_HEADERS])
+AN_HEADER([nlist.h],            [AC_CHECK_HEADERS])
+AN_HEADER([paths.h],            [AC_CHECK_HEADERS])
+AN_HEADER([sgtty.h],            [AC_CHECK_HEADERS])
+AN_HEADER([shadow.h],           [AC_CHECK_HEADERS])
+AN_HEADER([stddef.h],           [AC_CHECK_HEADERS])
+AN_HEADER([stdint.h],           [AC_CHECK_HEADERS])
+AN_HEADER([stdio_ext.h],        [AC_CHECK_HEADERS])
+AN_HEADER([stdlib.h],           [AC_CHECK_HEADERS])
+AN_HEADER([string.h],           [AC_CHECK_HEADERS])
+AN_HEADER([strings.h],          [AC_CHECK_HEADERS])
+AN_HEADER([sys/acl.h],          [AC_CHECK_HEADERS])
+AN_HEADER([sys/file.h],         [AC_CHECK_HEADERS])
+AN_HEADER([sys/filsys.h],       [AC_CHECK_HEADERS])
+AN_HEADER([sys/fs/s5param.h],   [AC_CHECK_HEADERS])
+AN_HEADER([sys/fs_types.h],     [AC_CHECK_HEADERS])
+AN_HEADER([sys/fstyp.h],        [AC_CHECK_HEADERS])
+AN_HEADER([sys/ioctl.h],        [AC_CHECK_HEADERS])
+AN_HEADER([sys/mntent.h],       [AC_CHECK_HEADERS])
+AN_HEADER([sys/mount.h],        [AC_CHECK_HEADERS])
+AN_HEADER([sys/param.h],        [AC_CHECK_HEADERS])
+AN_HEADER([sys/socket.h],       [AC_CHECK_HEADERS])
+AN_HEADER([sys/statfs.h],       [AC_CHECK_HEADERS])
+AN_HEADER([sys/statvfs.h],      [AC_CHECK_HEADERS])
+AN_HEADER([sys/systeminfo.h],   [AC_CHECK_HEADERS])
+AN_HEADER([sys/time.h],         [AC_CHECK_HEADERS])
+AN_HEADER([sys/timeb.h],        [AC_CHECK_HEADERS])
+AN_HEADER([sys/vfs.h],          [AC_CHECK_HEADERS])
+AN_HEADER([sys/window.h],       [AC_CHECK_HEADERS])
+AN_HEADER([syslog.h],           [AC_CHECK_HEADERS])
+AN_HEADER([termio.h],           [AC_CHECK_HEADERS])
+AN_HEADER([termios.h],          [AC_CHECK_HEADERS])
+AN_HEADER([unistd.h],           [AC_CHECK_HEADERS])
+AN_HEADER([utime.h],            [AC_CHECK_HEADERS])
+AN_HEADER([utmp.h],             [AC_CHECK_HEADERS])
+AN_HEADER([utmpx.h],            [AC_CHECK_HEADERS])
+AN_HEADER([values.h],           [AC_CHECK_HEADERS])
+AN_HEADER([wchar.h],            [AC_CHECK_HEADERS])
+AN_HEADER([wctype.h],           [AC_CHECK_HEADERS])
+
+
+
+## ------------------------------- ##
+## 4. Tests for specific headers.  ##
+## ------------------------------- ##
+
+# AC_HEADER_ASSERT
+# ----------------
+# Check whether to enable assertions.
+AC_DEFUN_ONCE([AC_HEADER_ASSERT],
+[
+  AC_MSG_CHECKING([whether to enable assertions])
+  AC_ARG_ENABLE([assert],
+    [AS_HELP_STRING([--disable-assert], [turn off assertions])],
+    [ac_enable_assert=$enableval
+     AS_IF(dnl
+      [test "x$enableval" = xno],
+	[AC_DEFINE([NDEBUG], [1],
+	  [Define to 1 if assertions should be disabled.])],
+      [test "x$enableval" != xyes],
+	[AC_MSG_WARN([invalid argument supplied to --enable-assert])
+	ac_enable_assert=yes])],
+    [ac_enable_assert=yes])
+  AC_MSG_RESULT([$ac_enable_assert])
+])
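+
+# A minimal configure.ac sketch (package and version names are placeholders)
+# showing how this macro is typically used; running
+# `./configure --disable-assert' then defines NDEBUG in config.h:
+#
+#   | AC_INIT([example], [1.0])
+#   | AC_CONFIG_HEADERS([config.h])
+#   | AC_PROG_CC
+#   | AC_HEADER_ASSERT
+#   | AC_OUTPUT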
+
+
+# _AC_CHECK_HEADER_DIRENT(HEADER-FILE,
+#			  [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
+# -----------------------------------------------------------------
+# Like AC_CHECK_HEADER, except also make sure that HEADER-FILE
+# defines the type `DIR'.  dirent.h on NextStep 3.2 doesn't.
+m4_define([_AC_CHECK_HEADER_DIRENT],
+[AS_VAR_PUSHDEF([ac_Header], [ac_cv_header_dirent_$1])dnl
+AC_CACHE_CHECK([for $1 that defines DIR], [ac_Header],
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM([#include <sys/types.h>
+#include <$1>
+],
+				    [if ((DIR *) 0)
+return 0;])],
+		   [AS_VAR_SET([ac_Header], [yes])],
+		   [AS_VAR_SET([ac_Header], [no])])])
+AS_VAR_IF([ac_Header], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_Header])dnl
+])# _AC_CHECK_HEADER_DIRENT
+
+
+# _AH_CHECK_HEADER_DIRENT(HEADERS)
+# --------------------------------
+# Like _AH_CHECK_HEADER, but tuned to a dirent provider.
+m4_define([_AH_CHECK_HEADER_DIRENT],
+[AH_TEMPLATE(AS_TR_CPP([HAVE_$1]),
+  [Define to 1 if you have the <$1> header file, and it defines `DIR'.])])
+
+
+# AC_HEADER_DIRENT
+# ----------------
+AC_DEFUN([AC_HEADER_DIRENT],
+[m4_map_args([_AH_CHECK_HEADER_DIRENT], [dirent.h], [sys/ndir.h],
+	     [sys/dir.h], [ndir.h])]dnl
+[ac_header_dirent=no
+for ac_hdr in dirent.h sys/ndir.h sys/dir.h ndir.h; do
+  _AC_CHECK_HEADER_DIRENT($ac_hdr,
+			  [AC_DEFINE_UNQUOTED(AS_TR_CPP(HAVE_$ac_hdr), 1)
+ac_header_dirent=$ac_hdr; break])
+done
+# Two versions of opendir et al. are in -ldir and -lx on SCO Xenix.
+if test $ac_header_dirent = dirent.h; then
+  AC_SEARCH_LIBS(opendir, dir)
+else
+  AC_SEARCH_LIBS(opendir, x)
+fi
+])# AC_HEADER_DIRENT
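+
+# An illustrative sketch (not taken from any particular package): the macro
+# is simply invoked from configure.ac; it leaves HAVE_DIRENT_H,
+# HAVE_SYS_NDIR_H, HAVE_SYS_DIR_H, or HAVE_NDIR_H in config.h and records
+# the chosen header in `ac_header_dirent':
+#
+#   | AC_HEADER_DIRENT
+#   | AS_IF([test "$ac_header_dirent" = dirent.h],
+#   |       [AC_MSG_NOTICE([using <dirent.h>])])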
+
+
+# AC_HEADER_MAJOR
+# ---------------
+AN_FUNCTION([major],     [AC_HEADER_MAJOR])
+AN_FUNCTION([makedev],   [AC_HEADER_MAJOR])
+AN_FUNCTION([minor],     [AC_HEADER_MAJOR])
+AN_HEADER([sys/mkdev.h], [AC_HEADER_MAJOR])
+AC_DEFUN([AC_HEADER_MAJOR],
+[AC_CACHE_CHECK(whether sys/types.h defines makedev,
+		ac_cv_header_sys_types_h_makedev,
+[AC_LINK_IFELSE([AC_LANG_PROGRAM([[@%:@include <sys/types.h>]],
+				 [[return makedev(0, 0);]])],
+		[ac_cv_header_sys_types_h_makedev=yes],
+		[ac_cv_header_sys_types_h_makedev=no])
+])
+
+if test $ac_cv_header_sys_types_h_makedev = no; then
+AC_CHECK_HEADER(sys/mkdev.h,
+		[AC_DEFINE(MAJOR_IN_MKDEV, 1,
+			   [Define to 1 if `major', `minor', and `makedev' are
+			    declared in <mkdev.h>.])])
+
+  if test $ac_cv_header_sys_mkdev_h = no; then
+    AC_CHECK_HEADER(sys/sysmacros.h,
+		    [AC_DEFINE(MAJOR_IN_SYSMACROS, 1,
+			       [Define to 1 if `major', `minor', and `makedev'
+				are declared in <sysmacros.h>.])])
+  fi
+fi
+])# AC_HEADER_MAJOR
+
+
+# AC_HEADER_RESOLV
+# ----------------
+# According to http://www.mcsr.olemiss.edu/cgi-bin/man-cgi?resolver+3
+# (or http://www.chemie.fu-berlin.de/cgi-bin/man/sgi_irix?resolver+3),
+# sys/types.h, netinet/in.h and arpa/nameser.h are required on IRIX.
+# netinet/in.h is needed on Cygwin, too.
+# With Solaris 9, netdb.h is required, to get symbols like HOST_NOT_FOUND.
+#
+AN_HEADER(resolv.h,	[AC_HEADER_RESOLV])
+AC_DEFUN([AC_HEADER_RESOLV],
+[AC_CHECK_HEADERS(sys/types.h netinet/in.h arpa/nameser.h netdb.h resolv.h,
+		  [], [],
+[[#ifdef HAVE_SYS_TYPES_H
+#  include <sys/types.h>
+#endif
+#ifdef HAVE_NETINET_IN_H
+#  include <netinet/in.h>   /* inet_ functions / structs */
+#endif
+#ifdef HAVE_ARPA_NAMESER_H
+#  include <arpa/nameser.h> /* DNS HEADER struct */
+#endif
+#ifdef HAVE_NETDB_H
+#  include <netdb.h>
+#endif]])
+])# AC_HEADER_RESOLV
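+
+# An illustrative configure.ac sketch: invoke the macro, then have C code
+# include the prerequisite headers before <resolv.h>, guarded by the HAVE_*
+# definitions it records in config.h (mirroring the include list above):
+#
+#   | AC_HEADER_RESOLV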
+
+
+# AC_HEADER_STAT
+# --------------
+# FIXME: Shouldn't this be named AC_HEADER_SYS_STAT?
+AC_DEFUN([AC_HEADER_STAT],
+[AC_CACHE_CHECK(whether stat file-mode macros are broken,
+  ac_cv_header_stat_broken,
+[AC_COMPILE_IFELSE([AC_LANG_SOURCE([[#include <sys/types.h>
+#include <sys/stat.h>
+
+#if defined S_ISBLK && defined S_IFDIR
+extern char c1[S_ISBLK (S_IFDIR) ? -1 : 1];
+#endif
+
+#if defined S_ISBLK && defined S_IFCHR
+extern char c2[S_ISBLK (S_IFCHR) ? -1 : 1];
+#endif
+
+#if defined S_ISLNK && defined S_IFREG
+extern char c3[S_ISLNK (S_IFREG) ? -1 : 1];
+#endif
+
+#if defined S_ISSOCK && defined S_IFREG
+extern char c4[S_ISSOCK (S_IFREG) ? -1 : 1];
+#endif
+]])], ac_cv_header_stat_broken=no, ac_cv_header_stat_broken=yes)])
+if test $ac_cv_header_stat_broken = yes; then
+  AC_DEFINE(STAT_MACROS_BROKEN, 1,
+	    [Define to 1 if the `S_IS*' macros in <sys/stat.h> do not
+	     work properly.])
+fi
+])# AC_HEADER_STAT
+
+
+# AC_HEADER_STDBOOL
+# -----------------
+# Check for stdbool.h that conforms to C99.
+AN_IDENTIFIER([bool], [AC_HEADER_STDBOOL])
+AN_IDENTIFIER([true], [AC_HEADER_STDBOOL])
+AN_IDENTIFIER([false],[AC_HEADER_STDBOOL])
+AC_DEFUN([AC_HEADER_STDBOOL],
+[AC_CACHE_CHECK([for stdbool.h that conforms to C99],
+   [ac_cv_header_stdbool_h],
+   [AC_COMPILE_IFELSE([AC_LANG_PROGRAM(
+      [[
+#include <stdbool.h>
+#ifndef bool
+ "error: bool is not defined"
+#endif
+#ifndef false
+ "error: false is not defined"
+#endif
+#if false
+ "error: false is not 0"
+#endif
+#ifndef true
+ "error: true is not defined"
+#endif
+#if true != 1
+ "error: true is not 1"
+#endif
+#ifndef __bool_true_false_are_defined
+ "error: __bool_true_false_are_defined is not defined"
+#endif
+
+	struct s { _Bool s: 1; _Bool t; } s;
+
+	char a[true == 1 ? 1 : -1];
+	char b[false == 0 ? 1 : -1];
+	char c[__bool_true_false_are_defined == 1 ? 1 : -1];
+	char d[(bool) 0.5 == true ? 1 : -1];
+	/* See body of main program for 'e'.  */
+	char f[(_Bool) 0.0 == false ? 1 : -1];
+	char g[true];
+	char h[sizeof (_Bool)];
+	char i[sizeof s.t];
+	enum { j = false, k = true, l = false * true, m = true * 256 };
+	/* The following fails for
+	   HP aC++/ANSI C B3910B A.05.55 [Dec 04 2003]. */
+	_Bool n[m];
+	char o[sizeof n == m * sizeof n[0] ? 1 : -1];
+	char p[-1 - (_Bool) 0 < 0 && -1 - (bool) 0 < 0 ? 1 : -1];
+	/* Catch a bug in an HP-UX C compiler.  See
+	   http://gcc.gnu.org/ml/gcc-patches/2003-12/msg02303.html
+	   http://lists.gnu.org/archive/html/bug-coreutils/2005-11/msg00161.html
+	 */
+	_Bool q = true;
+	_Bool *pq = &q;
+      ]],
+      [[
+	bool e = &s;
+	*pq |= q;
+	*pq |= ! q;
+	/* Refer to every declared value, to avoid compiler optimizations.  */
+	return (!a + !b + !c + !d + !e + !f + !g + !h + !i + !!j + !k + !!l
+		+ !m + !n + !o + !p + !q + !pq);
+      ]])],
+      [ac_cv_header_stdbool_h=yes],
+      [ac_cv_header_stdbool_h=no])])
+AC_CHECK_TYPES([_Bool])
+if test $ac_cv_header_stdbool_h = yes; then
+  AC_DEFINE(HAVE_STDBOOL_H, 1, [Define to 1 if stdbool.h conforms to C99.])
+fi
+])# AC_HEADER_STDBOOL
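+
+# An illustrative configure.ac sketch: call the macro, then in C include
+# <stdbool.h> when HAVE_STDBOOL_H is defined and fall back to `_Bool'
+# (HAVE__BOOL) or a plain int otherwise:
+#
+#   | AC_HEADER_STDBOOL
+#   | AS_IF([test "$ac_cv_header_stdbool_h" != yes],
+#   |       [AC_MSG_WARN([no C99 <stdbool.h>; falling back to _Bool or int])])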
+
+
+# AC_HEADER_STDC
+# --------------
+AC_DEFUN([AC_HEADER_STDC],
+[AC_CACHE_CHECK(for ANSI C header files, ac_cv_header_stdc,
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+]])],
+		    [ac_cv_header_stdc=yes],
+		    [ac_cv_header_stdc=no])
+
+if test $ac_cv_header_stdc = yes; then
+  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+  AC_EGREP_HEADER(memchr, string.h, , ac_cv_header_stdc=no)
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+  AC_EGREP_HEADER(free, stdlib.h, , ac_cv_header_stdc=no)
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+  AC_RUN_IFELSE([AC_LANG_SOURCE(
+[[#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+		   (('a' <= (c) && (c) <= 'i') \
+		     || ('j' <= (c) && (c) <= 'r') \
+		     || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
+#endif
+
+#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
+int
+main ()
+{
+  int i;
+  for (i = 0; i < 256; i++)
+    if (XOR (islower (i), ISLOWER (i))
+	|| toupper (i) != TOUPPER (i))
+      return 2;
+  return 0;
+}]])], , ac_cv_header_stdc=no, :)
+fi])
+if test $ac_cv_header_stdc = yes; then
+  AC_DEFINE(STDC_HEADERS, 1,
+	    [Define to 1 if you have the ANSI C header files.])
+fi
+])# AC_HEADER_STDC
+
+
+# AC_HEADER_SYS_WAIT
+# ------------------
+AC_DEFUN([AC_HEADER_SYS_WAIT],
+[AC_CACHE_CHECK([for sys/wait.h that is POSIX.1 compatible],
+  ac_cv_header_sys_wait_h,
+[AC_COMPILE_IFELSE(
+[AC_LANG_PROGRAM([#include <sys/types.h>
+#include <sys/wait.h>
+#ifndef WEXITSTATUS
+# define WEXITSTATUS(stat_val) ((unsigned int) (stat_val) >> 8)
+#endif
+#ifndef WIFEXITED
+# define WIFEXITED(stat_val) (((stat_val) & 255) == 0)
+#endif
+],
+[  int s;
+  wait (&s);
+  s = WIFEXITED (s) ? WEXITSTATUS (s) : 1;])],
+		 [ac_cv_header_sys_wait_h=yes],
+		 [ac_cv_header_sys_wait_h=no])])
+if test $ac_cv_header_sys_wait_h = yes; then
+  AC_DEFINE(HAVE_SYS_WAIT_H, 1,
+	    [Define to 1 if you have <sys/wait.h> that is POSIX.1 compatible.])
+fi
+])# AC_HEADER_SYS_WAIT
+
+
+# AC_HEADER_TIME
+# --------------
+AC_DEFUN([AC_HEADER_TIME],
+[AC_CACHE_CHECK([whether time.h and sys/time.h may both be included],
+  ac_cv_header_time,
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM([#include <sys/types.h>
+#include <sys/time.h>
+#include <time.h>
+],
+[if ((struct tm *) 0)
+return 0;])],
+		   [ac_cv_header_time=yes],
+		   [ac_cv_header_time=no])])
+if test $ac_cv_header_time = yes; then
+  AC_DEFINE(TIME_WITH_SYS_TIME, 1,
+	    [Define to 1 if you can safely include both <sys/time.h>
+	     and <time.h>.])
+fi
+])# AC_HEADER_TIME
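+
+# An illustrative configure.ac sketch: when the check succeeds,
+# TIME_WITH_SYS_TIME is defined and C code may include both headers:
+#
+#   | AC_HEADER_TIME
+#   | AS_IF([test "$ac_cv_header_time" = yes],
+#   |       [AC_MSG_NOTICE([<time.h> and <sys/time.h> can both be included])])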
+
+
+# _AC_HEADER_TIOCGWINSZ_IN_TERMIOS_H
+# ----------------------------------
+m4_define([_AC_HEADER_TIOCGWINSZ_IN_TERMIOS_H],
+[AC_CACHE_CHECK([whether termios.h defines TIOCGWINSZ],
+		ac_cv_sys_tiocgwinsz_in_termios_h,
+[AC_EGREP_CPP([yes],
+	      [#include <sys/types.h>
+#include <termios.h>
+#ifdef TIOCGWINSZ
+  yes
+#endif
+],
+		ac_cv_sys_tiocgwinsz_in_termios_h=yes,
+		ac_cv_sys_tiocgwinsz_in_termios_h=no)])
+])# _AC_HEADER_TIOCGWINSZ_IN_TERMIOS_H
+
+
+# _AC_HEADER_TIOCGWINSZ_IN_SYS_IOCTL
+# ----------------------------------
+m4_define([_AC_HEADER_TIOCGWINSZ_IN_SYS_IOCTL],
+[AC_CACHE_CHECK([whether sys/ioctl.h defines TIOCGWINSZ],
+		ac_cv_sys_tiocgwinsz_in_sys_ioctl_h,
+[AC_EGREP_CPP([yes],
+	      [#include <sys/types.h>
+#include <sys/ioctl.h>
+#ifdef TIOCGWINSZ
+  yes
+#endif
+],
+		ac_cv_sys_tiocgwinsz_in_sys_ioctl_h=yes,
+		ac_cv_sys_tiocgwinsz_in_sys_ioctl_h=no)])
+])# _AC_HEADER_TIOCGWINSZ_IN_SYS_IOCTL
+
+
+# AC_HEADER_TIOCGWINSZ
+# --------------------
+# Look for a header that defines TIOCGWINSZ.
+# FIXME: Is this the proper name?  Is this the proper implementation?
+# I need more help.
+AC_DEFUN([AC_HEADER_TIOCGWINSZ],
+[_AC_HEADER_TIOCGWINSZ_IN_TERMIOS_H
+if test $ac_cv_sys_tiocgwinsz_in_termios_h != yes; then
+  _AC_HEADER_TIOCGWINSZ_IN_SYS_IOCTL
+  if test $ac_cv_sys_tiocgwinsz_in_sys_ioctl_h = yes; then
+    AC_DEFINE(GWINSZ_IN_SYS_IOCTL,1,
+	      [Define to 1 if `TIOCGWINSZ' requires <sys/ioctl.h>.])
+  fi
+fi
+])# AC_HEADER_TIOCGWINSZ
+
+
+# AU::AC_UNISTD_H
+# ---------------
+AU_DEFUN([AC_UNISTD_H],
+[AC_CHECK_HEADERS(unistd.h)])
+
+
+# AU::AC_USG
+# ----------
+# Define `USG' if string functions are in strings.h.
+AU_DEFUN([AC_USG],
+[AC_MSG_CHECKING([for BSD string and memory functions])
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[@%:@include <strings.h>]],
+				[[rindex(0, 0); bzero(0, 0);]])],
+	       [AC_MSG_RESULT(yes)],
+	       [AC_MSG_RESULT(no)
+		AC_DEFINE(USG, 1,
+			  [Define to 1 if you do not have <strings.h>, index,
+			   bzero, etc... This symbol is obsolete, you should
+			   not depend upon it.])])
+AC_CHECK_HEADERS(string.h)],
+[Remove `AC_MSG_CHECKING', `AC_LINK_IFELSE' and this warning
+when you adjust your code to use HAVE_STRING_H.])
+
+
+# AU::AC_MEMORY_H
+# ---------------
+# To be precise this macro used to be:
+#
+#   | AC_MSG_CHECKING(whether string.h declares mem functions)
+#   | AC_EGREP_HEADER(memchr, string.h, ac_found=yes, ac_found=no)
+#   | AC_MSG_RESULT($ac_found)
+#   | if test $ac_found = no; then
+#   |	AC_CHECK_HEADER(memory.h, [AC_DEFINE(NEED_MEMORY_H)])
+#   | fi
+#
+# But it is better to check for both headers, and alias NEED_MEMORY_H to
+# HAVE_MEMORY_H.
+AU_DEFUN([AC_MEMORY_H],
+[AC_CHECK_HEADER(memory.h,
+		[AC_DEFINE([NEED_MEMORY_H], 1,
+			   [Same as `HAVE_MEMORY_H', don't depend on me.])])
+AC_CHECK_HEADERS(string.h memory.h)],
+[Remove this warning and
+`AC_CHECK_HEADER(memory.h, AC_DEFINE(...))' when you adjust your code to
+use HAVE_STRING_H and HAVE_MEMORY_H, not NEED_MEMORY_H.])
+
+
+# AU::AC_DIR_HEADER
+# -----------------
+# Like calling `AC_HEADER_DIRENT' and `AC_FUNC_CLOSEDIR_VOID', but
+# defines a different set of C preprocessor macros to indicate which
+# header file is found.
+AU_DEFUN([AC_DIR_HEADER],
+[AC_HEADER_DIRENT
+AC_FUNC_CLOSEDIR_VOID
+test "$ac_cv_header_dirent_dirent_h" = yes &&
+  AC_DEFINE([DIRENT], 1, [Same as `HAVE_DIRENT_H', don't depend on me.])
+test "$ac_cv_header_dirent_sys_ndir_h" = yes &&
+  AC_DEFINE([SYSNDIR], 1, [Same as `HAVE_SYS_NDIR_H', don't depend on me.])
+test "$ac_cv_header_dirent_sys_dir_h" = yes &&
+  AC_DEFINE([SYSDIR], 1, [Same as `HAVE_SYS_DIR_H', don't depend on me.])
+test "$ac_cv_header_dirent_ndir_h" = yes &&
+  AC_DEFINE([NDIR], 1, [Same as `HAVE_NDIR_H', don't depend on me.])],
+[Remove this warning and the four `AC_DEFINE' when you
+adjust your code to use `AC_HEADER_DIRENT'.])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/lang.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/lang.m4
new file mode 100644
index 0000000..d2e98f9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/lang.m4
@@ -0,0 +1,722 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Programming languages support.
+# Copyright (C) 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008, 2009,
+# 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# Table of Contents:
+#
+# 1. Language selection
+#    and routines to produce programs in a given language.
+#
+# 2. Producing programs in a given language.
+#
+# 3. Looking for a compiler
+#    And possibly the associated preprocessor.
+#
+#    3a. Computing EXEEXT and OBJEXT.
+#
+# 4. Compilers' characteristics.
+
+
+
+## ----------------------- ##
+## 1. Language selection.  ##
+## ----------------------- ##
+
+
+# AC_LANG_CASE(LANG1, IF-LANG1, LANG2, IF-LANG2, ..., DEFAULT)
+# ------------------------------------------------------------
+# Expand into IF-LANG1 if the current language is LANG1 etc. else
+# into default.
+m4_define([AC_LANG_CASE],
+[m4_case(_AC_LANG, $@)])
+
+
+# _AC_LANG_DISPATCH(MACRO, LANG, ARGS)
+# ------------------------------------
+# Call the specialization of MACRO for LANG with ARGS.  Complain if
+# unavailable.
+m4_define([_AC_LANG_DISPATCH],
+[m4_ifdef([$1($2)],
+       [m4_indir([$1($2)], m4_shift2($@))],
+       [m4_fatal([$1: unknown language: $2])])])
+
+
+# _AC_LANG_SET(OLD, NEW)
+# ----------------------
+# Output the shell code needed to switch from OLD language to NEW language.
+# Do not try to optimize like this:
+#
+# m4_defun([_AC_LANG_SET],
+# [m4_if([$1], [$2], [],
+#        [_AC_LANG_DISPATCH([AC_LANG], [$2])])])
+#
+# as it can introduce differences between the current language at the shell
+# level and the current language at the m4 level when m4_require is used.
+# Something more subtle
+# might be possible, but at least for the time being, play it safe.
+m4_defun([_AC_LANG_SET],
+[_AC_LANG_DISPATCH([AC_LANG], [$2])])
+
+
+# AC_LANG(LANG)
+# -------------
+# Set the current language to LANG.
+m4_defun([AC_LANG],
+[_AC_LANG_SET(m4_ifdef([_AC_LANG], [m4_defn([_AC_LANG])]),
+	      [$1])dnl
+m4_define([_AC_LANG], [$1])])
+
+
+# AC_LANG_PUSH(LANG)
+# ------------------
+# Save the current language, and use LANG.
+m4_defun([AC_LANG_PUSH],
+[_AC_LANG_SET(m4_ifdef([_AC_LANG], [m4_defn([_AC_LANG])]),
+	      [$1])dnl
+m4_pushdef([_AC_LANG], [$1])])
+
+
+# AC_LANG_POP([LANG])
+# -------------------
+# If given, check that the current language is LANG, and restore the
+# previous language.
+m4_defun([AC_LANG_POP],
+[m4_ifval([$1],
+ [m4_if([$1], m4_defn([_AC_LANG]), [],
+  [m4_fatal([$0($1): unexpected current language: ]m4_defn([_AC_LANG]))])])dnl
+m4_pushdef([$0 OLD], m4_defn([_AC_LANG]))dnl
+m4_popdef([_AC_LANG])dnl
+_AC_LANG_SET(m4_defn([$0 OLD]), m4_defn([_AC_LANG]))dnl
+m4_popdef([$0 OLD])dnl
+])
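+
+# A hedged sketch of the push/pop idiom (the test body and result variable
+# are illustrative only): temporarily switch to C++ for one compile test,
+# then restore the previous language:
+#
+#   | AC_PROG_CXX
+#   | AC_LANG_PUSH([C++])
+#   | AC_COMPILE_IFELSE(
+#   |   [AC_LANG_PROGRAM([[#include <vector>]],
+#   |                    [[std::vector<int> v; (void) v;]])],
+#   |   [demo_cxx_vector=yes], [demo_cxx_vector=no])
+#   | AC_LANG_POP([C++])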
+
+
+# AC_LANG_SAVE
+# ------------
+# Save the current language, but don't change language.
+AU_DEFUN([AC_LANG_SAVE],
+[[AC_LANG_SAVE]],
+[Instead of using `AC_LANG', `AC_LANG_SAVE', and `AC_LANG_RESTORE',
+you should use `AC_LANG_PUSH' and `AC_LANG_POP'.])
+AC_DEFUN([AC_LANG_SAVE],
+[m4_pushdef([_AC_LANG], _AC_LANG)dnl
+AC_DIAGNOSE([obsolete], [The macro `AC_LANG_SAVE' is obsolete.
+You should run autoupdate.])])
+
+
+# AC_LANG_RESTORE
+# ---------------
+# Restore the current language from the stack.
+AU_DEFUN([AC_LANG_RESTORE], [AC_LANG_POP($@)])
+
+
+# _AC_LANG_ABBREV
+# ---------------
+# Return a short signature of _AC_LANG which can be used in shell
+# variable names, or in M4 macro names.
+m4_defun([_AC_LANG_ABBREV],
+[_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# _AC_LANG_PREFIX
+# ---------------
+# Return a short (upper case) signature of _AC_LANG that is used to
+# prefix environment variables like FLAGS.
+m4_defun([_AC_LANG_PREFIX],
+[_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# AC_LANG_ASSERT(LANG)
+# --------------------
+# Current language must be LANG.
+m4_defun([AC_LANG_ASSERT],
+[m4_if(_AC_LANG, $1, [],
+       [m4_fatal([$0: current language is not $1: ] _AC_LANG)])])
+
+
+
+# AC_LANG_DEFINE(NAME, ABBREV, PREFIX, COMPILER-VAR, COPY-FROM, SHELL-VARS)
+# -------------------------------------------------------------------------
+# Define a language referenced by AC_LANG(NAME), with cache variable prefix
+# ABBREV, Makefile variable prefix PREFIX and compiler variable COMPILER-VAR.
+# AC_LANG(NAME) is defined to SHELL-VARS, other macros are copied from language
+# COPY-FROM.  Even if COPY-FROM is empty, a default definition is provided for
+# language-specific macros AC_LANG_SOURCE(NAME) and AC_LANG_CONFTEST(NAME).
+m4_define([AC_LANG_DEFINE],
+[m4_define([AC_LANG($1)], [$6])]
+[m4_define([_AC_LANG_ABBREV($1)], [$2])]
+[m4_define([_AC_LANG_PREFIX($1)], [$3])]
+[m4_define([_AC_CC($1)], [$4])]
+[m4_copy([AC_LANG_CONFTEST($5)], [AC_LANG_CONFTEST($1)])]
+[m4_copy([AC_LANG_SOURCE($5)], [AC_LANG_SOURCE($1)])]
+[m4_copy([_AC_LANG_NULL_PROGRAM($5)], [_AC_LANG_NULL_PROGRAM($1)])]
+[m4_ifval([$5],
+[m4_copy([AC_LANG_PROGRAM($5)], [AC_LANG_PROGRAM($1)])]
+[m4_copy([AC_LANG_CALL($5)], [AC_LANG_CALL($1)])]
+[m4_copy([AC_LANG_FUNC_LINK_TRY($5)], [AC_LANG_FUNC_LINK_TRY($1)])]
+[m4_copy([AC_LANG_BOOL_COMPILE_TRY($5)], [AC_LANG_BOOL_COMPILE_TRY($1)])]
+[m4_copy([AC_LANG_INT_SAVE($5)], [AC_LANG_INT_SAVE($1)])]
+[m4_copy([_AC_LANG_IO_PROGRAM($5)], [_AC_LANG_IO_PROGRAM($1)])])])
+
+## ----------------------- ##
+## 2. Producing programs.  ##
+## ----------------------- ##
+
+
+# AC_LANG_CONFTEST(BODY)
+# ----------------------
+# Save the BODY in `conftest.$ac_ext'.  Add a trailing new line.
+AC_DEFUN([AC_LANG_CONFTEST],
+[m4_pushdef([_AC_LANG_DEFINES_PROVIDED],
+  [m4_warn([syntax], [$0: no AC_LANG_SOURCE call detected in body])])]dnl
+[_AC_LANG_DISPATCH([$0], _AC_LANG, $@)]dnl
+[[]_AC_LANG_DEFINES_PROVIDED[]m4_popdef([_AC_LANG_DEFINES_PROVIDED])])
+
+
+# AC_LANG_CONFTEST()(BODY)
+# ------------------------
+# Default implementation of AC_LANG_CONFTEST.
+# This version assumes that you can't inline confdefs.h into your
+# language, and as such, it is safe to blindly call
+# AC_LANG_DEFINES_PROVIDED.  Language-specific overrides should
+# remove this call if AC_LANG_SOURCE does inline confdefs.h.
+m4_define([AC_LANG_CONFTEST()],
+[cat > conftest.$ac_ext <<_ACEOF
+AC_LANG_DEFINES_PROVIDED[]$1
+_ACEOF])
+
+# AC_LANG_DEFINES_PROVIDED
+# ------------------------
+# Witness macro that all prior AC_DEFINE results have been output
+# into the current expansion, to silence warning from AC_LANG_CONFTEST.
+m4_define([AC_LANG_DEFINES_PROVIDED],
+[m4_define([_$0])])
+
+
+# AC_LANG_SOURCE(BODY)
+# --------------------
+# Produce a valid source for the current language, which includes the
+# BODY, and as much as possible `confdefs.h'.
+AC_DEFUN([AC_LANG_SOURCE],
+[AC_LANG_DEFINES_PROVIDED[]_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# AC_LANG_SOURCE()(BODY)
+# ----------------------
+# Default implementation of AC_LANG_SOURCE.
+m4_define([AC_LANG_SOURCE()],
+[$1])
+
+
+# AC_LANG_PROGRAM([PROLOGUE], [BODY])
+# -----------------------------------
+# Produce a valid source for the current language.  Prepend the
+# PROLOGUE (typically CPP directives and/or declarations) to an
+# execution the BODY (typically glued inside the `main' function, or
+# equivalent).
+AC_DEFUN([AC_LANG_PROGRAM],
+[AC_LANG_SOURCE([_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])])
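+
+# A minimal sketch (the prologue, body, and result variable are arbitrary
+# examples) of how AC_LANG_PROGRAM is normally consumed through
+# AC_COMPILE_IFELSE:
+#
+#   | AC_COMPILE_IFELSE(
+#   |   [AC_LANG_PROGRAM([[#include <stdio.h>]],
+#   |                    [[printf ("hello\n");]])],
+#   |   [demo_can_compile=yes],
+#   |   [demo_can_compile=no])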
+
+
+# _AC_LANG_NULL_PROGRAM()()
+# -------------------------
+# Default implementation of AC_LANG_NULL_PROGRAM
+m4_define([_AC_LANG_NULL_PROGRAM()],
+[AC_LANG_PROGRAM([], [])])
+
+
+# _AC_LANG_NULL_PROGRAM
+# ---------------------
+# Produce valid source for the current language that does
+# nothing.
+AC_DEFUN([_AC_LANG_NULL_PROGRAM],
+[AC_LANG_SOURCE([_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])])
+
+
+# _AC_LANG_IO_PROGRAM
+# -------------------
+# Produce valid source for the current language that creates
+# a file.  (This is used when detecting whether executables
+# work, e.g. to detect cross-compiling.)
+AC_DEFUN([_AC_LANG_IO_PROGRAM],
+[AC_LANG_SOURCE([_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])])
+
+
+# AC_LANG_CALL(PROLOGUE, FUNCTION)
+# --------------------------------
+# Call the FUNCTION.
+AC_DEFUN([AC_LANG_CALL],
+[m4_ifval([$2], [], [m4_warn([syntax], [$0: no function given])])dnl
+_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# AC_LANG_FUNC_LINK_TRY(FUNCTION)
+# -------------------------------
+# Produce a source which links correctly iff the FUNCTION exists.
+AC_DEFUN([AC_LANG_FUNC_LINK_TRY],
+[m4_ifval([$1], [], [m4_warn([syntax], [$0: no function given])])dnl
+_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# AC_LANG_BOOL_COMPILE_TRY(PROLOGUE, EXPRESSION)
+# ----------------------------------------------
+# Produce a program that compiles with success iff the boolean EXPRESSION
+# evaluates to true at compile time.
+AC_DEFUN([AC_LANG_BOOL_COMPILE_TRY],
+[_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# AC_LANG_INT_SAVE(PROLOGUE, EXPRESSION)
+# --------------------------------------
+# Produce a program that saves the runtime evaluation of the integer
+# EXPRESSION into `conftest.val'.
+AC_DEFUN([AC_LANG_INT_SAVE],
+[_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# _AC_CC
+# ------
+# The variable name of the compiler.
+m4_define([_AC_CC],
+[_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+## -------------------------------------------- ##
+## 3. Looking for Compilers and Preprocessors.  ##
+## -------------------------------------------- ##
+
+
+# AC_LANG_COMPILER
+# ----------------
+# Find a compiler for the current LANG.  Be sure to be run before
+# AC_LANG_PREPROC.
+#
+# Note that because we might AC_REQUIRE `AC_LANG_COMPILER(C)' for
+# instance, the latter must be AC_DEFUN'd, not just define'd.
+m4_define([AC_LANG_COMPILER],
+[AC_BEFORE([AC_LANG_COMPILER(]_AC_LANG[)],
+	   [AC_LANG_PREPROC(]_AC_LANG[)])dnl
+_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# AC_LANG_COMPILER_REQUIRE
+# ------------------------
+# Ensure we have a compiler for the current LANG.
+AC_DEFUN([AC_LANG_COMPILER_REQUIRE],
+[m4_require([AC_LANG_COMPILER(]_AC_LANG[)],
+	    [AC_LANG_COMPILER])])
+
+
+
+# _AC_LANG_COMPILER_GNU
+# ---------------------
+# Check whether the compiler for the current language is GNU.
+#
+# It doesn't seem necessary right now to have a different source
+# according to the current language, since this works fine.  Some day
+# it might be needed.  Nevertheless, note that the position of `choke me'
+# in the seventh column is intentional: otherwise some Fortran compilers
+# (e.g., SGI) might treat it as a continuation line and warn instead of
+# reporting an error.
+m4_define([_AC_LANG_COMPILER_GNU],
+[AC_CACHE_CHECK([whether we are using the GNU _AC_LANG compiler],
+		[ac_cv_[]_AC_LANG_ABBREV[]_compiler_gnu],
+[_AC_COMPILE_IFELSE([AC_LANG_PROGRAM([], [[#ifndef __GNUC__
+       choke me
+#endif
+]])],
+		   [ac_compiler_gnu=yes],
+		   [ac_compiler_gnu=no])
+ac_cv_[]_AC_LANG_ABBREV[]_compiler_gnu=$ac_compiler_gnu
+])])# _AC_LANG_COMPILER_GNU
+
+
+# AC_LANG_PREPROC
+# ---------------
+# Find a preprocessor for the current language.  Note that because we
+# might AC_REQUIRE `AC_LANG_PREPROC(C)' for instance, the latter must
+# be AC_DEFUN'd, not just define'd.  Since the preprocessor depends
+# upon the compiler, look for the compiler.
+m4_define([AC_LANG_PREPROC],
+[AC_LANG_COMPILER_REQUIRE()dnl
+_AC_LANG_DISPATCH([$0], _AC_LANG, $@)])
+
+
+# AC_LANG_PREPROC_REQUIRE
+# -----------------------
+# Ensure we have a preprocessor for the current language.
+AC_DEFUN([AC_LANG_PREPROC_REQUIRE],
+[m4_require([AC_LANG_PREPROC(]_AC_LANG[)],
+	    [AC_LANG_PREPROC])])
+
+
+# AC_REQUIRE_CPP
+# --------------
+# Require the preprocessor for the current language.
+# FIXME: AU_ALIAS once AC_LANG is officially documented (2.51?).
+AC_DEFUN([AC_REQUIRE_CPP],
+[AC_LANG_PREPROC_REQUIRE])
+
+
+
+# AC_NO_EXECUTABLES
+# -----------------
+# FIXME: The GCC team has specific needs which the current Autoconf
+# framework cannot solve elegantly.  This macro implements a dirty
+# hack until Autoconf is able to provide the services its users
+# need.
+#
+# Several of the support libraries that are often built with GCC can't
+# assume the tool-chain is already capable of linking a program: the
+# compiler often expects to be able to link with some of such
+# libraries.
+#
+# In several of these libraries, workarounds have been introduced to
+# avoid the AC_PROG_CC_WORKS test, that would just abort their
+# configuration.  The introduction of AC_EXEEXT, enabled either by
+# libtool or by CVS autoconf, has just made matters worse.
+#
+# Unlike an earlier version of this macro, using AC_NO_EXECUTABLES does
+# not disable link tests at autoconf time, but at configure time.
+# This allows AC_NO_EXECUTABLES to be invoked conditionally.
+AC_DEFUN_ONCE([AC_NO_EXECUTABLES],
+[m4_divert_push([KILL])
+m4_divert_text([DEFAULTS], [ac_no_link=no])
+
+AC_BEFORE([$0], [_AC_COMPILER_EXEEXT])
+AC_BEFORE([$0], [AC_LINK_IFELSE])
+
+m4_define([_AC_COMPILER_EXEEXT],
+[AC_LANG_CONFTEST([_AC_LANG_NULL_PROGRAM])
+if _AC_DO_VAR(ac_link); then
+  ac_no_link=no
+  ]m4_defn([_AC_COMPILER_EXEEXT])[
+else
+  rm -f -r a.out a.exe b.out conftest.$ac_ext conftest.o conftest.obj conftest.dSYM
+  ac_no_link=yes
+  # Setting cross_compile will disable run tests; it will
+  # also disable AC_CHECK_FILE but that's generally
+  # correct if we can't link.
+  cross_compiling=yes
+  EXEEXT=
+  _AC_COMPILER_EXEEXT_CROSS
+fi
+])
+
+m4_define([AC_LINK_IFELSE],
+[if test x$ac_no_link = xyes; then
+  AC_MSG_ERROR([link tests are not allowed after AC@&t@_NO_EXECUTABLES])
+fi
+]m4_defn([AC_LINK_IFELSE]))
+
+m4_divert_pop()dnl
+])# AC_NO_EXECUTABLES
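+
+# A hedged sketch of the intended use (typical of runtime-library packages
+# whose compiler cannot yet link; the package name is hypothetical): the
+# macro must appear before the compiler checks that compute EXEEXT:
+#
+#   | AC_INIT([libdemo], [1.0])
+#   | AC_NO_EXECUTABLES
+#   | AC_PROG_CC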
+
+
+
+# --------------------------------- #
+# 3a. Computing EXEEXT and OBJEXT.  #
+# --------------------------------- #
+
+
+# Files to ignore
+# ---------------
+# Ignore .d files produced by CFLAGS=-MD.
+#
+# On UWIN (which uses a cc wrapper for MSVC), the compiler also generates
+# a .pdb file.
+#
+# When the w32 free Borland C++ command line compiler links a program
+# (conftest.exe), it also produces a file named `conftest.tds' in
+# addition to `conftest.obj'.
+#
+# - *.bb, *.bbg
+#   Created per object by GCC when given -ftest-coverage.
+#
+# - *.xSYM
+#   Created on BeOS.  Seems to be per executable.
+#
+# - *.map, *.inf
+#   Created by the Green Hills compiler.
+#
+# - *.dSYM
+#   Directory created on Mac OS X Leopard.
+
+
+# _AC_COMPILER_OBJEXT_REJECT
+# --------------------------
+# Case/esac pattern matching the files to be ignored when looking for
+# compiled object files.
+m4_define([_AC_COMPILER_OBJEXT_REJECT],
+[*.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM])
+
+
+# _AC_COMPILER_EXEEXT_REJECT
+# --------------------------
+# Case/esac pattern matching the files to be ignored when looking for
+# compiled executables.
+m4_define([_AC_COMPILER_EXEEXT_REJECT],
+[_AC_COMPILER_OBJEXT_REJECT | *.o | *.obj])
+
+
+# We must not AU define them, because autoupdate would then remove
+# them, which is right, but Automake 1.4 would remove the support for
+# $(EXEEXT) etc.
+# FIXME: Remove this once Automake fixed.
+AC_DEFUN([AC_EXEEXT],   [])
+AC_DEFUN([AC_OBJEXT],   [])
+
+
+# _AC_COMPILER_EXEEXT_DEFAULT
+# ---------------------------
+# Check for the extension used for the default name for executables.
+#
+# We do this in order to find out what is the extension we must add for
+# creating executables (see _AC_COMPILER_EXEEXT's comments).
+#
+# On OpenVMS 7.1 system, the DEC C 5.5 compiler when called through a
+# GNV (gnv.sourceforge.net) cc wrapper, produces the output file named
+# `a_out.exe'.
+# b.out is created by i960 compilers.
+#
+# Start with the most likely output file names, but:
+# 1) Beware the clever `test -f' on Cygwin, try the DOS-like .exe names
+# before the counterparts without the extension.
+# 2) The algorithm is not robust to junk in `.', hence go to wildcards
+# (conftest.*) only as a last resort.
+# Beware of `expr' that may return `0' or `'.  Since this macro is
+# the first one to interact with the compiler, it should also check that
+# it compiles properly.
+#
+# The IRIX 6 linker writes into existing files which may not be
+# executable, retaining their permissions.  Remove them first so a
+# subsequent execution test works.
+#
+m4_define([_AC_COMPILER_EXEEXT_DEFAULT],
+[# Try to create an executable without -o first, disregard a.out.
+# It will help us diagnose broken compilers, and give us an idea of the
+# executable extension to expect.
+AC_MSG_CHECKING([whether the _AC_LANG compiler works])
+ac_link_default=`AS_ECHO(["$ac_link"]) | sed ['s/ -o *conftest[^ ]*//']`
+
+# The possible output files:
+ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
+
+ac_rmfiles=
+for ac_file in $ac_files
+do
+  case $ac_file in
+    _AC_COMPILER_EXEEXT_REJECT ) ;;
+    * ) ac_rmfiles="$ac_rmfiles $ac_file";;
+  esac
+done
+rm -f $ac_rmfiles
+
+AS_IF([_AC_DO_VAR(ac_link_default)],
+[# Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
+# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
+# in a Makefile.  We should not override ac_cv_exeext if it was cached,
+# so that the user can short-circuit this test for compilers unknown to
+# Autoconf.
+for ac_file in $ac_files ''
+do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    _AC_COMPILER_EXEEXT_REJECT )
+	;;
+    [[ab]].out )
+	# We found the default executable, but exeext='' is most
+	# certainly right.
+	break;;
+    *.* )
+	if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
+	then :; else
+	   ac_cv_exeext=`expr "$ac_file" : ['[^.]*\(\..*\)']`
+	fi
+	# We set ac_cv_exeext here because the later test for it is not
+	# safe: cross compilers may not add the suffix if given an `-o'
+	# argument, so we may need to know it at that point already.
+	# Even if this section looks crufty: it has the advantage of
+	# actually working.
+	break;;
+    * )
+	break;;
+  esac
+done
+test "$ac_cv_exeext" = no && ac_cv_exeext=
+],
+      [ac_file=''])
+AS_IF([test -z "$ac_file"],
+[AC_MSG_RESULT([no])
+_AC_MSG_LOG_CONFTEST
+AC_MSG_FAILURE([_AC_LANG compiler cannot create executables], 77)],
+[AC_MSG_RESULT([yes])])
+AC_MSG_CHECKING([for _AC_LANG compiler default output file name])
+AC_MSG_RESULT([$ac_file])
+ac_exeext=$ac_cv_exeext
+])# _AC_COMPILER_EXEEXT_DEFAULT
+
+
+# _AC_COMPILER_EXEEXT_CROSS
+# -------------------------
+# FIXME: These cross compiler hacks should be removed for Autoconf 3.0
+#
+# It is not sufficient to run a no-op program -- this succeeds and gives
+# a false negative when cross-compiling for the compute nodes on the
+# IBM Blue Gene/L.  Instead, _AC_COMPILER_EXEEXT calls _AC_LANG_IO_PROGRAM
+# to create a program that writes to a file, which is sufficient to
+# detect cross-compiling on Blue Gene.  Note also that AC_COMPUTE_INT
+# requires programs that create files when not cross-compiling, so it
+# is safe and not a bad idea to check for this capability in general.
+m4_define([_AC_COMPILER_EXEEXT_CROSS],
+[# Check that the compiler produces executables we can run.  If not, either
+# the compiler is broken, or we cross compile.
+AC_MSG_CHECKING([whether we are cross compiling])
+if test "$cross_compiling" != yes; then
+  _AC_DO_VAR(ac_link)
+  if _AC_DO_TOKENS([./conftest$ac_cv_exeext]); then
+    cross_compiling=no
+  else
+    if test "$cross_compiling" = maybe; then
+	cross_compiling=yes
+    else
+	AC_MSG_FAILURE([cannot run _AC_LANG compiled programs.
+If you meant to cross compile, use `--host'.])
+    fi
+  fi
+fi
+AC_MSG_RESULT([$cross_compiling])
+])# _AC_COMPILER_EXEEXT_CROSS
+
+
+# _AC_COMPILER_EXEEXT_O
+# ---------------------
+# Check for the extension used when `-o foo'.  Try to see if ac_cv_exeext,
+# as computed by _AC_COMPILER_EXEEXT_DEFAULT is OK.
+m4_define([_AC_COMPILER_EXEEXT_O],
+[AC_MSG_CHECKING([for suffix of executables])
+AS_IF([_AC_DO_VAR(ac_link)],
+[# If both `conftest.exe' and `conftest' are `present' (well, observable)
+# catch `conftest.exe'.  For instance with Cygwin, `ls conftest' will
+# work properly (i.e., refer to `conftest.exe'), while it won't with
+# `rm'.
+for ac_file in conftest.exe conftest conftest.*; do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    _AC_COMPILER_EXEEXT_REJECT ) ;;
+    *.* ) ac_cv_exeext=`expr "$ac_file" : ['[^.]*\(\..*\)']`
+	  break;;
+    * ) break;;
+  esac
+done],
+	      [AC_MSG_FAILURE([cannot compute suffix of executables: cannot compile and link])])
+rm -f conftest conftest$ac_cv_exeext
+AC_MSG_RESULT([$ac_cv_exeext])
+])# _AC_COMPILER_EXEEXT_O
+
+
+# _AC_COMPILER_EXEEXT
+# -------------------
+# Check for the extension used for executables.  It compiles a test
+# executable.  If this is called, the executable extensions will be
+# automatically used by link commands run by the configure script.
+#
+# Note that some compilers (cross or not) strictly obey `-o foo', while
+# the host requires `foo.exe', so we should not depend upon `-o' to
+# test EXEEXT.  But then, be sure not to destroy user files.
+#
+# Must be run before _AC_COMPILER_OBJEXT because _AC_COMPILER_EXEEXT_DEFAULT
+# checks whether the compiler works.
+#
+# Do not rename this macro; Automake decides whether EXEEXT is used
+# by checking whether `_AC_COMPILER_EXEEXT' has been expanded.
+#
+# See _AC_COMPILER_EXEEXT_CROSS for why we need _AC_LANG_IO_PROGRAM.
+m4_define([_AC_COMPILER_EXEEXT],
+[AC_LANG_CONFTEST([_AC_LANG_NULL_PROGRAM])
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
+_AC_COMPILER_EXEEXT_DEFAULT
+rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
+ac_clean_files=$ac_clean_files_save
+_AC_COMPILER_EXEEXT_O
+rm -f conftest.$ac_ext
+AC_SUBST([EXEEXT], [$ac_cv_exeext])dnl
+ac_exeext=$EXEEXT
+AC_LANG_CONFTEST([_AC_LANG_IO_PROGRAM])
+ac_clean_files="$ac_clean_files conftest.out"
+_AC_COMPILER_EXEEXT_CROSS
+rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
+ac_clean_files=$ac_clean_files_save
+])# _AC_COMPILER_EXEEXT
+
+
+# _AC_COMPILER_OBJEXT
+# -------------------
+# Check the object extension used by the compiler: typically `.o' or
+# `.obj'.  If this is called, some other behavior will change,
+# determined by ac_objext.
+#
+# This macro is called by AC_LANG_COMPILER, the latter being required
+# by the AC_COMPILE_IFELSE macros, so use _AC_COMPILE_IFELSE.  And in fact,
+# don't, since _AC_COMPILE_IFELSE needs to know ac_objext for the `test -s'
+# it includes.  So do it by hand.
+m4_define([_AC_COMPILER_OBJEXT],
+[AC_CACHE_CHECK([for suffix of object files], ac_cv_objext,
+[AC_LANG_CONFTEST([_AC_LANG_NULL_PROGRAM])
+rm -f conftest.o conftest.obj
+AS_IF([_AC_DO_VAR(ac_compile)],
+[for ac_file in conftest.o conftest.obj conftest.*; do
+  test -f "$ac_file" || continue;
+  case $ac_file in
+    _AC_COMPILER_OBJEXT_REJECT ) ;;
+    *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
+       break;;
+  esac
+done],
+      [_AC_MSG_LOG_CONFTEST
+AC_MSG_FAILURE([cannot compute suffix of object files: cannot compile])])
+rm -f conftest.$ac_cv_objext conftest.$ac_ext])
+AC_SUBST([OBJEXT], [$ac_cv_objext])dnl
+ac_objext=$OBJEXT
+])# _AC_COMPILER_OBJEXT
+
+
+
+
+## ------------------------------- ##
+## 4. Compilers' characteristics.  ##
+## ------------------------------- ##
+
+# AC_LANG_WERROR
+# --------------
+# Treat warnings from the current language's preprocessor, compiler, and
+# linker as fatal errors.
+AC_DEFUN([AC_LANG_WERROR],
+[m4_divert_text([DEFAULTS], [ac_[]_AC_LANG_ABBREV[]_werror_flag=])
+ac_[]_AC_LANG_ABBREV[]_werror_flag=yes])# AC_LANG_WERROR
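+
+# An illustrative sketch: once invoked, subsequent tests in the current
+# language treat compiler and linker warnings as failures, so the macro is
+# usually placed right after the compiler check:
+#
+#   | AC_PROG_CC
+#   | AC_LANG_WERROR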
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/libs.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/libs.m4
new file mode 100644
index 0000000..ec961aa
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/libs.m4
@@ -0,0 +1,479 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Checking for libraries.
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2006, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+# Table of contents
+#
+# 1. Generic tests for libraries
+# 2. Tests for specific libraries
+
+
+## --------------------------------- ##
+## 1. Generic tests for libraries.  ##
+## --------------------------------- ##
+
+
+
+# AC_SEARCH_LIBS(FUNCTION, SEARCH-LIBS,
+#                [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#                [OTHER-LIBRARIES])
+# --------------------------------------------------------
+# Search for a library defining FUNC, if it's not already available.
+AC_DEFUN([AC_SEARCH_LIBS],
+[AS_VAR_PUSHDEF([ac_Search], [ac_cv_search_$1])dnl
+AC_CACHE_CHECK([for library containing $1], [ac_Search],
+[ac_func_search_save_LIBS=$LIBS
+AC_LANG_CONFTEST([AC_LANG_CALL([], [$1])])
+for ac_lib in '' $2; do
+  if test -z "$ac_lib"; then
+    ac_res="none required"
+  else
+    ac_res=-l$ac_lib
+    LIBS="-l$ac_lib $5 $ac_func_search_save_LIBS"
+  fi
+  AC_LINK_IFELSE([], [AS_VAR_SET([ac_Search], [$ac_res])])
+  AS_VAR_SET_IF([ac_Search], [break])
+done
+AS_VAR_SET_IF([ac_Search], , [AS_VAR_SET([ac_Search], [no])])
+rm conftest.$ac_ext
+LIBS=$ac_func_search_save_LIBS])
+AS_VAR_COPY([ac_res], [ac_Search])
+AS_IF([test "$ac_res" != no],
+  [test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
+  $3],
+      [$4])
+AS_VAR_POPDEF([ac_Search])dnl
+])
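+
+# A typical invocation, sketched with an arbitrary function and library list
+# (clock_gettime lives in -lrt on some older systems and in libc elsewhere):
+#
+#   | AC_SEARCH_LIBS([clock_gettime], [rt posix4],
+#   |   [],
+#   |   [AC_MSG_ERROR([could not find a library providing clock_gettime])])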
+
+
+
+# AC_CHECK_LIB(LIBRARY, FUNCTION,
+#              [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#              [OTHER-LIBRARIES])
+# ------------------------------------------------------
+#
+# Use a cache variable name containing both the library and function name,
+# because the test really is for library $1 defining function $2, not
+# just for library $1.  Separate tests with the same $1 and different $2s
+# may have different results.
+#
+# Note that using directly AS_VAR_PUSHDEF([ac_Lib], [ac_cv_lib_$1_$2])
+# is asking for troubles, since AC_CHECK_LIB($lib, fun) would give
+# ac_cv_lib_$lib_fun, which is definitely not what was meant.  Hence
+# the AS_LITERAL_IF indirection.
+#
+# FIXME: This macro is extremely suspicious.  It DEFINEs unconditionally,
+# whatever the FUNCTION, in addition to not being a *S macro.  Note
+# that the cache does depend upon the function we are looking for.
+#
+# It is on purpose that we used `ac_check_lib_save_LIBS' and not just
+# `ac_save_LIBS': there are many macros which don't want to see `LIBS'
+# changed but still want to use AC_CHECK_LIB, so they save `LIBS'.
+# And `ac_save_LIBS' is too tempting a name, so let's leave them some
+# freedom.
+AC_DEFUN([AC_CHECK_LIB],
+[m4_ifval([$3], , [AH_CHECK_LIB([$1])])dnl
+AS_LITERAL_WORD_IF([$1],
+	      [AS_VAR_PUSHDEF([ac_Lib], [ac_cv_lib_$1_$2])],
+	      [AS_VAR_PUSHDEF([ac_Lib], [ac_cv_lib_$1''_$2])])dnl
+AC_CACHE_CHECK([for $2 in -l$1], [ac_Lib],
+[ac_check_lib_save_LIBS=$LIBS
+LIBS="-l$1 $5 $LIBS"
+AC_LINK_IFELSE([AC_LANG_CALL([], [$2])],
+	       [AS_VAR_SET([ac_Lib], [yes])],
+	       [AS_VAR_SET([ac_Lib], [no])])
+LIBS=$ac_check_lib_save_LIBS])
+AS_VAR_IF([ac_Lib], [yes],
+      [m4_default([$3], [AC_DEFINE_UNQUOTED(AS_TR_CPP(HAVE_LIB$1))
+  LIBS="-l$1 $LIBS"
+])],
+      [$4])
+AS_VAR_POPDEF([ac_Lib])dnl
+])# AC_CHECK_LIB
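+
+# Two sketched invocations (library and function names are only examples).
+# With no ACTION-IF-FOUND the macro defines HAVE_LIBM and prepends -lm to
+# LIBS; with explicit actions it leaves LIBS alone:
+#
+#   | AC_CHECK_LIB([m], [cos])
+#   | AC_CHECK_LIB([z], [inflate],
+#   |   [ZLIB_LIBS=-lz],
+#   |   [AC_MSG_WARN([zlib not found; compression support disabled])])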
+
+
+# AH_CHECK_LIB(LIBNAME)
+# ---------------------
+m4_define([AH_CHECK_LIB],
+[AH_TEMPLATE(AS_TR_CPP([HAVE_LIB$1]),
+	     [Define to 1 if you have the `$1' library (-l$1).])])
+
+
+# AC_HAVE_LIBRARY(LIBRARY,
+#                 [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#                 [OTHER-LIBRARIES])
+# ---------------------------------------------------------
+#
+# This macro is equivalent to calling `AC_CHECK_LIB' with a FUNCTION
+# argument of `main'.  In addition, LIBRARY can be written as any of
+# `foo', `-lfoo', or `libfoo.a'.  In all of those cases, the compiler
+# is passed `-lfoo'.  However, LIBRARY cannot be a shell variable;
+# it must be a literal name.
+AU_DEFUN([AC_HAVE_LIBRARY],
+[m4_pushdef([AC_Lib_Name],
+	    m4_bpatsubst(m4_bpatsubst([[$1]],
+				    [lib\([^\.]*\)\.a], [\1]),
+			[-l], []))dnl
+AC_CHECK_LIB(AC_Lib_Name, main, [$2], [$3], [$4])dnl
+ac_cv_lib_[]AC_Lib_Name()=$ac_cv_lib_[]AC_Lib_Name()_main
+m4_popdef([AC_Lib_Name])dnl
+])
+
+
+
+
+## --------------------------------- ##
+## 2. Tests for specific libraries.  ##
+## --------------------------------- ##
+
+
+
+# --------------------- #
+# Checks for X window.  #
+# --------------------- #
+
+
+# _AC_PATH_X_XMKMF
+# ----------------
+# Internal subroutine of _AC_PATH_X.
+# Set ac_x_includes and/or ac_x_libraries.
+m4_define([_AC_PATH_X_XMKMF],
+[AC_ARG_VAR(XMKMF, [Path to xmkmf, Makefile generator for X Window System])dnl
+rm -f -r conftest.dir
+if mkdir conftest.dir; then
+  cd conftest.dir
+  cat >Imakefile <<'_ACEOF'
+incroot:
+	@echo incroot='${INCROOT}'
+usrlibdir:
+	@echo usrlibdir='${USRLIBDIR}'
+libdir:
+	@echo libdir='${LIBDIR}'
+_ACEOF
+  if (export CC; ${XMKMF-xmkmf}) >/dev/null 2>/dev/null && test -f Makefile; then
+    # GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
+    for ac_var in incroot usrlibdir libdir; do
+      eval "ac_im_$ac_var=\`\${MAKE-make} $ac_var 2>/dev/null | sed -n 's/^$ac_var=//p'\`"
+    done
+    # Open Windows xmkmf reportedly sets LIBDIR instead of USRLIBDIR.
+    for ac_extension in a so sl dylib la dll; do
+      if test ! -f "$ac_im_usrlibdir/libX11.$ac_extension" &&
+	 test -f "$ac_im_libdir/libX11.$ac_extension"; then
+	ac_im_usrlibdir=$ac_im_libdir; break
+      fi
+    done
+    # Screen out bogus values from the imake configuration.  They are
+    # bogus both because they are the default anyway, and because
+    # using them would break gcc on systems where it needs fixed includes.
+    case $ac_im_incroot in
+	/usr/include) ac_x_includes= ;;
+	*) test -f "$ac_im_incroot/X11/Xos.h" && ac_x_includes=$ac_im_incroot;;
+    esac
+    case $ac_im_usrlibdir in
+	/usr/lib | /usr/lib64 | /lib | /lib64) ;;
+	*) test -d "$ac_im_usrlibdir" && ac_x_libraries=$ac_im_usrlibdir ;;
+    esac
+  fi
+  cd ..
+  rm -f -r conftest.dir
+fi
+])# _AC_PATH_X_XMKMF
+
+
+# _AC_PATH_X_DIRECT
+# -----------------
+# Internal subroutine of _AC_PATH_X.
+# Set ac_x_includes and/or ac_x_libraries.
+m4_define([_AC_PATH_X_DIRECT],
+[# Standard set of common directories for X headers.
+# Check X11 before X11Rn because it is often a symlink to the current release.
+ac_x_header_dirs='
+/usr/X11/include
+/usr/X11R7/include
+/usr/X11R6/include
+/usr/X11R5/include
+/usr/X11R4/include
+
+/usr/include/X11
+/usr/include/X11R7
+/usr/include/X11R6
+/usr/include/X11R5
+/usr/include/X11R4
+
+/usr/local/X11/include
+/usr/local/X11R7/include
+/usr/local/X11R6/include
+/usr/local/X11R5/include
+/usr/local/X11R4/include
+
+/usr/local/include/X11
+/usr/local/include/X11R7
+/usr/local/include/X11R6
+/usr/local/include/X11R5
+/usr/local/include/X11R4
+
+/usr/X386/include
+/usr/x386/include
+/usr/XFree86/include/X11
+
+/usr/include
+/usr/local/include
+/usr/unsupported/include
+/usr/athena/include
+/usr/local/x11r5/include
+/usr/lpp/Xamples/include
+
+/usr/openwin/include
+/usr/openwin/share/include'
+
+if test "$ac_x_includes" = no; then
+  # Guess where to find include files, by looking for Xlib.h.
+  # First, try using that file with no special directory specified.
+  AC_PREPROC_IFELSE([AC_LANG_SOURCE([@%:@include <X11/Xlib.h>])],
+[# We can compile using X headers with no special include directory.
+ac_x_includes=],
+[for ac_dir in $ac_x_header_dirs; do
+  if test -r "$ac_dir/X11/Xlib.h"; then
+    ac_x_includes=$ac_dir
+    break
+  fi
+done])
+fi # $ac_x_includes = no
+
+if test "$ac_x_libraries" = no; then
+  # Check for the libraries.
+  # See if we find them without any special options.
+  # Don't add to $LIBS permanently.
+  ac_save_LIBS=$LIBS
+  LIBS="-lX11 $LIBS"
+  AC_LINK_IFELSE([AC_LANG_PROGRAM([@%:@include <X11/Xlib.h>],
+				  [XrmInitialize ()])],
+		 [LIBS=$ac_save_LIBS
+# We can link X programs with no special library path.
+ac_x_libraries=],
+		 [LIBS=$ac_save_LIBS
+for ac_dir in `AS_ECHO(["$ac_x_includes $ac_x_header_dirs"]) | sed s/include/lib/g`
+do
+  # Don't even attempt the hair of trying to link an X program!
+  for ac_extension in a so sl dylib la dll; do
+    if test -r "$ac_dir/libX11.$ac_extension"; then
+      ac_x_libraries=$ac_dir
+      break 2
+    fi
+  done
+done])
+fi # $ac_x_libraries = no
+])# _AC_PATH_X_DIRECT
+
+
+# _AC_PATH_X
+# ----------
+# Compute ac_cv_have_x.
+AC_DEFUN([_AC_PATH_X],
+[AC_CACHE_VAL(ac_cv_have_x,
+[# One or both of the vars are not set, and there is no cached value.
+ac_x_includes=no ac_x_libraries=no
+_AC_PATH_X_XMKMF
+_AC_PATH_X_DIRECT
+case $ac_x_includes,$ac_x_libraries in #(
+  no,* | *,no | *\'*)
+    # Didn't find X, or a directory has "'" in its name.
+    ac_cv_have_x="have_x=no";; #(
+  *)
+    # Record where we found X for the cache.
+    ac_cv_have_x="have_x=yes\
+	ac_x_includes='$ac_x_includes'\
+	ac_x_libraries='$ac_x_libraries'"
+esac])dnl
+])
+
+
+# AC_PATH_X
+# ---------
+# If we find X, set shell vars x_includes and x_libraries to the
+# paths, otherwise set no_x=yes.
+# Uses ac_ vars as temps to allow command line to override cache and checks.
+# --without-x overrides everything else, but does not touch the cache.
+AN_HEADER([X11/Xlib.h],  [AC_PATH_X])
+AC_DEFUN([AC_PATH_X],
+[dnl Document the X abnormal options inherited from history.
+m4_divert_once([HELP_BEGIN], [
+X features:
+  --x-includes=DIR    X include files are in DIR
+  --x-libraries=DIR   X library files are in DIR])dnl
+AC_MSG_CHECKING([for X])
+
+AC_ARG_WITH(x, [  --with-x                use the X Window System])
+# $have_x is `yes', `no', `disabled', or empty when we do not yet know.
+if test "x$with_x" = xno; then
+  # The user explicitly disabled X.
+  have_x=disabled
+else
+  case $x_includes,$x_libraries in #(
+    *\'*) AC_MSG_ERROR([cannot use X directory names containing ']);; #(
+    *,NONE | NONE,*) _AC_PATH_X;; #(
+    *) have_x=yes;;
+  esac
+  eval "$ac_cv_have_x"
+fi # $with_x != no
+
+if test "$have_x" != yes; then
+  AC_MSG_RESULT([$have_x])
+  no_x=yes
+else
+  # If each of the values was on the command line, it overrides each guess.
+  test "x$x_includes" = xNONE && x_includes=$ac_x_includes
+  test "x$x_libraries" = xNONE && x_libraries=$ac_x_libraries
+  # Update the cache value to reflect the command line values.
+  ac_cv_have_x="have_x=yes\
+	ac_x_includes='$x_includes'\
+	ac_x_libraries='$x_libraries'"
+  AC_MSG_RESULT([libraries $x_libraries, headers $x_includes])
+fi
+])# AC_PATH_X
+
+
+
+# AC_PATH_XTRA
+# ------------
+# Find additional X libraries, magic flags, etc.
+AC_DEFUN([AC_PATH_XTRA],
+[AC_REQUIRE([AC_PATH_X])dnl
+if test "$no_x" = yes; then
+  # Not all programs may use this symbol, but it does not hurt to define it.
+  AC_DEFINE([X_DISPLAY_MISSING], 1,
+	    [Define to 1 if the X Window System is missing or not being used.])
+  X_CFLAGS= X_PRE_LIBS= X_LIBS= X_EXTRA_LIBS=
+else
+  if test -n "$x_includes"; then
+    X_CFLAGS="$X_CFLAGS -I$x_includes"
+  fi
+
+  # It would also be nice to do this for all -L options, not just this one.
+  if test -n "$x_libraries"; then
+    X_LIBS="$X_LIBS -L$x_libraries"
+    # For Solaris; some versions of Sun CC require a space after -R and
+    # others require no space.  Words are not sufficient . . . .
+    AC_MSG_CHECKING([whether -R must be followed by a space])
+    ac_xsave_LIBS=$LIBS; LIBS="$LIBS -R$x_libraries"
+    ac_xsave_[]_AC_LANG_ABBREV[]_werror_flag=$ac_[]_AC_LANG_ABBREV[]_werror_flag
+    ac_[]_AC_LANG_ABBREV[]_werror_flag=yes
+    AC_LINK_IFELSE([AC_LANG_PROGRAM()],
+      [AC_MSG_RESULT([no])
+       X_LIBS="$X_LIBS -R$x_libraries"],
+      [LIBS="$ac_xsave_LIBS -R $x_libraries"
+       AC_LINK_IFELSE([AC_LANG_PROGRAM()],
+	 [AC_MSG_RESULT([yes])
+	  X_LIBS="$X_LIBS -R $x_libraries"],
+	 [AC_MSG_RESULT([neither works])])])
+    ac_[]_AC_LANG_ABBREV[]_werror_flag=$ac_xsave_[]_AC_LANG_ABBREV[]_werror_flag
+    LIBS=$ac_xsave_LIBS
+  fi
+
+  # Check for system-dependent libraries X programs must link with.
+  # Do this before checking for the system-independent R6 libraries
+  # (-lICE), since we may need -lsocket or whatever for X linking.
+
+  if test "$ISC" = yes; then
+    X_EXTRA_LIBS="$X_EXTRA_LIBS -lnsl_s -linet"
+  else
+    # Martyn Johnson says this is needed for Ultrix, if the X
+    # libraries were built with DECnet support.  And Karl Berry says
+    # the Alpha needs dnet_stub (dnet does not exist).
+    ac_xsave_LIBS="$LIBS"; LIBS="$LIBS $X_LIBS -lX11"
+    AC_LINK_IFELSE([AC_LANG_CALL([], [XOpenDisplay])],
+		   [],
+    [AC_CHECK_LIB(dnet, dnet_ntoa, [X_EXTRA_LIBS="$X_EXTRA_LIBS -ldnet"])
+    if test $ac_cv_lib_dnet_dnet_ntoa = no; then
+      AC_CHECK_LIB(dnet_stub, dnet_ntoa,
+	[X_EXTRA_LIBS="$X_EXTRA_LIBS -ldnet_stub"])
+    fi])
+    LIBS="$ac_xsave_LIBS"
+
+    # msh@cis.ufl.edu says -lnsl (and -lsocket) are needed for his 386/AT,
+    # to get the SysV transport functions.
+    # Chad R. Larson says the Pyramis MIS-ES running DC/OSx (SVR4)
+    # needs -lnsl.
+    # The nsl library prevents programs from opening the X display
+    # on Irix 5.2, according to T.E. Dickey.
+    # The functions gethostbyname, getservbyname, and inet_addr are
+    # in -lbsd on LynxOS 3.0.1/i386, according to Lars Hecking.
+    AC_CHECK_FUNC(gethostbyname)
+    if test $ac_cv_func_gethostbyname = no; then
+      AC_CHECK_LIB(nsl, gethostbyname, X_EXTRA_LIBS="$X_EXTRA_LIBS -lnsl")
+      if test $ac_cv_lib_nsl_gethostbyname = no; then
+	AC_CHECK_LIB(bsd, gethostbyname, X_EXTRA_LIBS="$X_EXTRA_LIBS -lbsd")
+      fi
+    fi
+
+    # lieder@skyler.mavd.honeywell.com says without -lsocket,
+    # socket/setsockopt and other routines are undefined under SCO ODT
+    # 2.0.  But -lsocket is broken on IRIX 5.2 (and is not necessary
+    # on later versions), says Simon Leinen: it contains gethostby*
+    # variants that don't use the name server (or something).  -lsocket
+    # must be given before -lnsl if both are needed.  We assume that
+    # if connect needs -lnsl, so does gethostbyname.
+    AC_CHECK_FUNC(connect)
+    if test $ac_cv_func_connect = no; then
+      AC_CHECK_LIB(socket, connect, X_EXTRA_LIBS="-lsocket $X_EXTRA_LIBS", ,
+	$X_EXTRA_LIBS)
+    fi
+
+    # Guillermo Gomez says -lposix is necessary on A/UX.
+    AC_CHECK_FUNC(remove)
+    if test $ac_cv_func_remove = no; then
+      AC_CHECK_LIB(posix, remove, X_EXTRA_LIBS="$X_EXTRA_LIBS -lposix")
+    fi
+
+    # BSDI BSD/OS 2.1 needs -lipc for XOpenDisplay.
+    AC_CHECK_FUNC(shmat)
+    if test $ac_cv_func_shmat = no; then
+      AC_CHECK_LIB(ipc, shmat, X_EXTRA_LIBS="$X_EXTRA_LIBS -lipc")
+    fi
+  fi
+
+  # Check for libraries that X11R6 Xt/Xaw programs need.
+  ac_save_LDFLAGS=$LDFLAGS
+  test -n "$x_libraries" && LDFLAGS="$LDFLAGS -L$x_libraries"
+  # SM needs ICE to (dynamically) link under SunOS 4.x (so we have to
+  # check for ICE first), but we must link in the order -lSM -lICE or
+  # we get undefined symbols.  So assume we have SM if we have ICE.
+  # These have to be linked with before -lX11, unlike the other
+  # libraries we check for below, so use a different variable.
+  # John Interrante, Karl Berry
+  AC_CHECK_LIB(ICE, IceConnectionNumber,
+    [X_PRE_LIBS="$X_PRE_LIBS -lSM -lICE"], , $X_EXTRA_LIBS)
+  LDFLAGS=$ac_save_LDFLAGS
+
+fi
+AC_SUBST(X_CFLAGS)dnl
+AC_SUBST(X_PRE_LIBS)dnl
+AC_SUBST(X_LIBS)dnl
+AC_SUBST(X_EXTRA_LIBS)dnl
+])# AC_PATH_XTRA
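+
+# A sketch of typical consumption (the variable handling shown is
+# illustrative, not prescriptive): call the macro, then feed the substituted
+# flags to the compile and link of X clients, skipping them when no_x is set:
+#
+#   | AC_PATH_XTRA
+#   | AS_IF([test "$no_x" != yes],
+#   |   [CPPFLAGS="$CPPFLAGS $X_CFLAGS"
+#   |    LIBS="$X_LIBS $X_PRE_LIBS -lX11 $X_EXTRA_LIBS $LIBS"])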
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/oldnames.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/oldnames.m4
new file mode 100644
index 0000000..1228b15
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/oldnames.m4
@@ -0,0 +1,92 @@
+# This file is part of Autoconf.                           -*- Autoconf -*-
+# Support old macros, and provide automated updates.
+# Copyright (C) 1994, 1999, 2000, 2001, 2003, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David J. MacKenzie.
+
+
+## ---------------------------- ##
+## General macros of Autoconf.  ##
+## ---------------------------- ##
+
+AU_ALIAS([AC_WARN],		[AC_MSG_WARN])
+AU_ALIAS([AC_ERROR],		[AC_MSG_ERROR])
+AU_ALIAS([AC_HAVE_HEADERS],	[AC_CHECK_HEADERS])
+AU_ALIAS([AC_HEADER_CHECK],	[AC_CHECK_HEADER])
+AU_ALIAS([AC_HEADER_EGREP],	[AC_EGREP_HEADER])
+AU_ALIAS([AC_PREFIX],		[AC_PREFIX_PROGRAM])
+AU_ALIAS([AC_PROGRAMS_CHECK],	[AC_CHECK_PROGS])
+AU_ALIAS([AC_PROGRAMS_PATH],	[AC_PATH_PROGS])
+AU_ALIAS([AC_PROGRAM_CHECK],	[AC_CHECK_PROG])
+AU_ALIAS([AC_PROGRAM_EGREP],	[AC_EGREP_CPP])
+AU_ALIAS([AC_PROGRAM_PATH],	[AC_PATH_PROG])
+AU_ALIAS([AC_SIZEOF_TYPE],	[AC_CHECK_SIZEOF])
+AU_ALIAS([AC_TEST_CPP],		[AC_TRY_CPP])
+AU_ALIAS([AC_TEST_PROGRAM],	[AC_TRY_RUN])
+
+
+
+## ----------------------------- ##
+## Specific macros of Autoconf.  ##
+## ----------------------------- ##
+
+AU_ALIAS([AC_CHAR_UNSIGNED],	[AC_C_CHAR_UNSIGNED])
+AU_ALIAS([AC_CONST],		[AC_C_CONST])
+AU_ALIAS([AC_CROSS_CHECK],	[AC_C_CROSS])
+AU_ALIAS([AC_FIND_X],		[AC_PATH_X])
+AU_ALIAS([AC_FIND_XTRA],	[AC_PATH_XTRA])
+AU_ALIAS([AC_GCC_TRADITIONAL],	[AC_PROG_GCC_TRADITIONAL])
+AU_ALIAS([AC_GETGROUPS_T],	[AC_TYPE_GETGROUPS])
+AU_ALIAS([AC_INLINE],		[AC_C_INLINE])
+AU_ALIAS([AC_LN_S],		[AC_PROG_LN_S])
+AU_ALIAS([AC_LONG_DOUBLE],	[AC_C_LONG_DOUBLE])
+AU_ALIAS([AC_LONG_FILE_NAMES],	[AC_SYS_LONG_FILE_NAMES])
+AU_ALIAS([AC_MAJOR_HEADER],	[AC_HEADER_MAJOR])
+AU_ALIAS([AC_MINUS_C_MINUS_O],	[AC_PROG_CC_C_O])
+AU_ALIAS([AC_MODE_T],		[AC_TYPE_MODE_T])
+AU_ALIAS([AC_OFF_T],		[AC_TYPE_OFF_T])
+AU_ALIAS([AC_PID_T],		[AC_TYPE_PID_T])
+AU_ALIAS([AC_RESTARTABLE_SYSCALLS],		[AC_SYS_RESTARTABLE_SYSCALLS])
+AU_ALIAS([AC_RETSIGTYPE],	[AC_TYPE_SIGNAL])
+AU_ALIAS([AC_SET_MAKE],		[AC_PROG_MAKE_SET])
+AU_ALIAS([AC_SIZE_T],		[AC_TYPE_SIZE_T])
+AU_ALIAS([AC_STAT_MACROS_BROKEN],		[AC_HEADER_STAT])
+AU_ALIAS([AC_STDC_HEADERS],	[AC_HEADER_STDC])
+AU_ALIAS([AC_ST_BLKSIZE],	[AC_STRUCT_ST_BLKSIZE])
+AU_ALIAS([AC_ST_BLOCKS],	[AC_STRUCT_ST_BLOCKS])
+AU_ALIAS([AC_ST_RDEV],		[AC_STRUCT_ST_RDEV])
+AU_ALIAS([AC_SYS_SIGLIST_DECLARED],		[AC_DECL_SYS_SIGLIST])
+AU_ALIAS([AC_TIMEZONE],		[AC_STRUCT_TIMEZONE])
+AU_ALIAS([AC_TIME_WITH_SYS_TIME],		[AC_HEADER_TIME])
+AU_ALIAS([AC_UID_T],		[AC_TYPE_UID_T])
+AU_ALIAS([AC_WORDS_BIGENDIAN],	[AC_C_BIGENDIAN])
+AU_ALIAS([AC_YYTEXT_POINTER],	[AC_DECL_YYTEXT])
+AU_ALIAS([AM_CYGWIN32],		[AC_CYGWIN32])
+AU_ALIAS([AC_CYGWIN32],         [AC_CYGWIN])
+AU_ALIAS([AM_EXEEXT],		[AC_EXEEXT])
+# We cannot do this, because libtool.m4 itself still provides
+# this update.  Some solution is needed.
+# AU_ALIAS([AM_PROG_LIBTOOL],		[AC_PROG_LIBTOOL])
+AU_ALIAS([AM_MINGW32],		[AC_MINGW32])
+AU_ALIAS([AM_PROG_INSTALL],	[AC_PROG_INSTALL])
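These AU_ALIAS entries let `autoupdate' mechanically modernize a configure.ac that still uses the pre-2.x spellings; a hypothetical before/after sketch based on the table above:

    dnl Old spellings ...
    AC_STDC_HEADERS
    AC_HAVE_HEADERS(unistd.h)
    AC_WORDS_BIGENDIAN
    dnl ... are rewritten by `autoupdate' to roughly:
    AC_HEADER_STDC
    AC_CHECK_HEADERS([unistd.h])
    AC_C_BIGENDIAN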
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/programs.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/programs.m4
new file mode 100644
index 0000000..c90d9b2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/programs.m4
@@ -0,0 +1,903 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Checking for programs.
+
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+## ----------------------------- ##
+## Generic checks for programs.  ##
+## ----------------------------- ##
+
+
+# _AC_CHECK_PROG(VARIABLE, PROG-TO-CHECK-FOR,
+#               [VALUE-IF-FOUND], [VALUE-IF-NOT-FOUND],
+#               [PATH], [REJECT])
+# -----------------------------------------------------
+AC_DEFUN([_AC_CHECK_PROG],
+[# Extract the first word of "$2", so it can be a program name with args.
+set dummy $2; ac_word=$[2]
+AC_MSG_CHECKING([for $ac_word])
+AC_CACHE_VAL(ac_cv_prog_$1,
+[if test -n "$$1"; then
+  ac_cv_prog_$1="$$1" # Let the user override the test.
+else
+m4_ifvaln([$6],
+[  ac_prog_rejected=no])dnl
+_AS_PATH_WALK([$5],
+[for ac_exec_ext in '' $ac_executable_extensions; do
+  if AS_EXECUTABLE_P(["$as_dir/$ac_word$ac_exec_ext"]); then
+m4_ifvaln([$6],
+[    if test "$as_dir/$ac_word$ac_exec_ext" = "$6"; then
+       ac_prog_rejected=yes
+       continue
+     fi])dnl
+    ac_cv_prog_$1="$3"
+    _AS_ECHO_LOG([found $as_dir/$ac_word$ac_exec_ext])
+    break 2
+  fi
+done])
+m4_ifvaln([$6],
+[if test $ac_prog_rejected = yes; then
+  # We found a bogon in the path, so make sure we never use it.
+  set dummy $ac_cv_prog_$1
+  shift
+  if test $[@%:@] != 0; then
+    # We chose a different compiler from the bogus one.
+    # However, it has the same basename, so the bogon will be chosen
+    # first if we set $1 to just the basename; use the full file name.
+    shift
+    ac_cv_prog_$1="$as_dir/$ac_word${1+' '}$[@]"
+m4_if([$2], [$4],
+[  else
+    # Default is a loser.
+    AC_MSG_ERROR([$1=$6 unacceptable, but no other $4 found in dnl
+m4_default([$5], [\$PATH])])
+])dnl
+  fi
+fi])dnl
+dnl If no 4th arg is given, leave the cache variable unset,
+dnl so AC_CHECK_PROGS will keep looking.
+m4_ifvaln([$4],
+[  test -z "$ac_cv_prog_$1" && ac_cv_prog_$1="$4"])dnl
+fi])dnl
+$1=$ac_cv_prog_$1
+if test -n "$$1"; then
+  AC_MSG_RESULT([$$1])
+else
+  AC_MSG_RESULT([no])
+fi
+])# _AC_CHECK_PROG
+
+
+# AC_CHECK_PROG(VARIABLE, PROG-TO-CHECK-FOR,
+#               [VALUE-IF-FOUND], [VALUE-IF-NOT-FOUND],
+#               [PATH], [REJECT])
+# -----------------------------------------------------
+AC_DEFUN([AC_CHECK_PROG],
+[_AC_CHECK_PROG($@)
+AC_SUBST([$1])dnl
+])
+
+
+# AC_CHECK_PROGS(VARIABLE, PROGS-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND],
+#                [PATH])
+# ------------------------------------------------------------------
+AC_DEFUN([AC_CHECK_PROGS],
+[for ac_prog in $2
+do
+  AC_CHECK_PROG([$1], [$ac_prog], [$ac_prog], , [$4])
+  test -n "$$1" && break
+done
+m4_ifvaln([$3], [test -n "$$1" || $1="$3"])])
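Typical configure.ac usage (program names are only an example): AC_CHECK_PROG tests a single candidate, while AC_CHECK_PROGS walks a list and can fall back to a default value.

    AC_CHECK_PROG([HAVE_DOT], [dot], [yes], [no])
    AC_CHECK_PROGS([TAR], [gnutar gtar tar], [:])
    if test "x$TAR" = "x:"; then
      AC_MSG_ERROR([no tar program found in \$PATH])
    fi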
+
+
+# _AC_PATH_PROG(VARIABLE, PROG-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND], [PATH])
+# ------------------------------------------------------------------------
+AC_DEFUN([_AC_PATH_PROG],
+[# Extract the first word of "$2", so it can be a program name with args.
+set dummy $2; ac_word=$[2]
+AC_MSG_CHECKING([for $ac_word])
+AC_CACHE_VAL([ac_cv_path_$1],
+[case $$1 in
+  [[\\/]]* | ?:[[\\/]]*)
+  ac_cv_path_$1="$$1" # Let the user override the test with a path.
+  ;;
+  *)
+  _AS_PATH_WALK([$4],
+[for ac_exec_ext in '' $ac_executable_extensions; do
+  if AS_EXECUTABLE_P(["$as_dir/$ac_word$ac_exec_ext"]); then
+    ac_cv_path_$1="$as_dir/$ac_word$ac_exec_ext"
+    _AS_ECHO_LOG([found $as_dir/$ac_word$ac_exec_ext])
+    break 2
+  fi
+done])
+dnl If no 3rd arg is given, leave the cache variable unset,
+dnl so AC_PATH_PROGS will keep looking.
+m4_ifvaln([$3],
+[  test -z "$ac_cv_path_$1" && ac_cv_path_$1="$3"])dnl
+  ;;
+esac])dnl
+$1=$ac_cv_path_$1
+if test -n "$$1"; then
+  AC_MSG_RESULT([$$1])
+else
+  AC_MSG_RESULT([no])
+fi
+])# _AC_PATH_PROG
+
+
+# AC_PATH_PROG(VARIABLE, PROG-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND], [PATH])
+# -----------------------------------------------------------------------
+AC_DEFUN([AC_PATH_PROG],
+[_AC_PATH_PROG($@)
+AC_SUBST([$1])dnl
+])
+
+
+# AC_PATH_PROGS(VARIABLE, PROGS-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND],
+#               [PATH])
+# -----------------------------------------------------------------
+AC_DEFUN([AC_PATH_PROGS],
+[for ac_prog in $2
+do
+  AC_PATH_PROG([$1], [$ac_prog], , [$4])
+  test -n "$$1" && break
+done
+m4_ifvaln([$3], [test -n "$$1" || $1="$3"])dnl
+])
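AC_PATH_PROG and AC_PATH_PROGS mirror the macros above but cache and substitute the program's full path rather than its bare name; a small, hypothetical example:

    AC_PATH_PROG([PERL], [perl])
    test -z "$PERL" && AC_MSG_WARN([perl not found; some helper scripts disabled])
    AC_PATH_PROGS([GNUPLOT], [gnuplot], [no])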
+
+
+
+
+## -------------------------- ##
+## Generic checks for tools.  ##
+## -------------------------- ##
+
+
+# AC_CHECK_TOOL_PREFIX
+# --------------------
+AU_DEFUN([AC_CHECK_TOOL_PREFIX])
+
+
+# _AC_TOOL_WARN
+# -------------
+AC_DEFUN([_AC_TOOL_WARN],
+[case $cross_compiling:$ac_tool_warned in
+yes:)
+AC_MSG_WARN([using cross tools not prefixed with host triplet])
+ac_tool_warned=yes ;;
+esac])
+
+# AC_PATH_TOOL(VARIABLE, PROG-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND], [PATH])
+# -----------------------------------------------------------------------
+# (Use different variables $1 and ac_pt_$1 so that cache vars don't conflict.)
+AC_DEFUN([AC_PATH_TOOL],
+[if test -n "$ac_tool_prefix"; then
+  AC_PATH_PROG([$1], [${ac_tool_prefix}$2], , [$4])
+fi
+if test -z "$ac_cv_path_$1"; then
+  ac_pt_$1=$$1
+  _AC_PATH_PROG([ac_pt_$1], [$2], [], [$4])
+  if test "x$ac_pt_$1" = x; then
+    $1="$3"
+  else
+    _AC_TOOL_WARN
+    $1=$ac_pt_$1
+  fi
+else
+  $1="$ac_cv_path_$1"
+fi
+])# AC_PATH_TOOL
+
+
+# AC_CHECK_TOOL(VARIABLE, PROG-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND], [PATH])
+# ------------------------------------------------------------------------
+# (Use different variables $1 and ac_ct_$1 so that cache vars don't conflict.)
+AC_DEFUN([AC_CHECK_TOOL],
+[if test -n "$ac_tool_prefix"; then
+  AC_CHECK_PROG([$1], [${ac_tool_prefix}$2], [${ac_tool_prefix}$2], , [$4])
+fi
+if test -z "$ac_cv_prog_$1"; then
+  ac_ct_$1=$$1
+  _AC_CHECK_PROG([ac_ct_$1], [$2], [$2], [], [$4])
+  if test "x$ac_ct_$1" = x; then
+    $1="$3"
+  else
+    _AC_TOOL_WARN
+    $1=$ac_ct_$1
+  fi
+else
+  $1="$ac_cv_prog_$1"
+fi
+])# AC_CHECK_TOOL
+
+
+# AC_CHECK_TOOLS(VARIABLE, PROGS-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND],
+#                [PATH])
+# ------------------------------------------------------------------
+# Check for each tool in PROGS-TO-CHECK-FOR with the cross prefix. If
+# none can be found with a cross prefix, then use the first one that
+# was found without the cross prefix.
+AC_DEFUN([AC_CHECK_TOOLS],
+[if test -n "$ac_tool_prefix"; then
+  for ac_prog in $2
+  do
+    AC_CHECK_PROG([$1],
+		  [$ac_tool_prefix$ac_prog], [$ac_tool_prefix$ac_prog],,
+		  [$4])
+    test -n "$$1" && break
+  done
+fi
+if test -z "$$1"; then
+  ac_ct_$1=$$1
+  AC_CHECK_PROGS([ac_ct_$1], [$2], [], [$4])
+  if test "x$ac_ct_$1" = x; then
+    $1="$3"
+  else
+    _AC_TOOL_WARN
+    $1=$ac_ct_$1
+  fi
+fi
+])# AC_CHECK_TOOLS
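The *_TOOL variants first look for `$ac_tool_prefix$PROG' (e.g. arm-linux-gnueabi-strip when cross compiling) and only then fall back to the plain name, emitting the _AC_TOOL_WARN warning. A sketch:

    AC_CANONICAL_HOST
    AC_CHECK_TOOL([STRIP], [strip], [:])
    AC_CHECK_TOOLS([OBJDUMP], [objdump gobjdump], [false])
    AC_PATH_TOOL([PKG_CONFIG], [pkg-config])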
+
+
+# AC_PATH_TARGET_TOOL(VARIABLE, PROG-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND], [PATH])
+# ------------------------------------------------------------------------------
+# (Use different variables $1 and ac_pt_$1 so that cache vars don't conflict.)
+AC_DEFUN([AC_PATH_TARGET_TOOL],
+[AC_REQUIRE([AC_CANONICAL_TARGET])dnl
+AC_PATH_PROG([$1], [$target_alias-$2], , [$4])
+if test -z "$ac_cv_path_$1"; then
+  if test "$build" = "$target"; then
+    ac_pt_$1=$$1
+    _AC_PATH_PROG([ac_pt_$1], [$2], [$3], [$4])
+    $1=$ac_pt_$1
+  else
+    $1="$3"
+  fi
+else
+  $1="$ac_cv_path_$1"
+fi
+])# AC_PATH_TARGET_TOOL
+
+
+# AC_CHECK_TARGET_TOOL(VARIABLE, PROG-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND], [PATH])
+# -------------------------------------------------------------------------------
+# (Use different variables $1 and ac_ct_$1 so that cache vars don't conflict.)
+AC_DEFUN([AC_CHECK_TARGET_TOOL],
+[AC_REQUIRE([AC_CANONICAL_TARGET])dnl
+AC_CHECK_PROG([$1], [$target_alias-$2], [$target_alias-$2], , [$4])
+if test -z "$ac_cv_prog_$1"; then
+  if test "$build" = "$target"; then
+    ac_ct_$1=$$1
+    _AC_CHECK_PROG([ac_ct_$1], [$2], [$2], [$3], [$4])
+    $1=$ac_ct_$1
+  else
+    $1="$3"
+  fi
+else
+  $1="$ac_cv_prog_$1"
+fi
+])# AC_CHECK_TARGET_TOOL
+
+
+# AC_CHECK_TARGET_TOOLS(VARIABLE, PROGS-TO-CHECK-FOR, [VALUE-IF-NOT-FOUND],
+#	                [PATH])
+# -------------------------------------------------------------------------
+# Check for each tool in PROGS-TO-CHECK-FOR with the cross prefix. If
+# none can be found with a cross prefix, then use the first one that
+# was found without the cross prefix.
+AC_DEFUN([AC_CHECK_TARGET_TOOLS],
+[AC_REQUIRE([AC_CANONICAL_TARGET])dnl
+for ac_prog in $2
+do
+  AC_CHECK_PROG([$1],
+		[$target_alias-$ac_prog], [$target_alias-$ac_prog],,
+		[$4])
+  test -n "$$1" && break
+done
+if test -z "$$1"; then
+  if test "$build" = "$target"; then
+    ac_ct_$1=$$1
+    AC_CHECK_PROGS([ac_ct_$1], [$2], [$3], [$4])
+    $1=$ac_ct_$1
+  else
+    $1="$3"
+  fi
+fi
+])# AC_CHECK_TARGET_TOOLS
+
+
+
+## ---------------- ##
+## Specific tests.  ##
+## ---------------- ##
+
+# Please, keep this section sorted.
+# (But of course when keeping related things together).
+
+# Check for gawk first since it's generally better.
+AN_MAKEVAR([AWK],  [AC_PROG_AWK])
+AN_PROGRAM([awk],  [AC_PROG_AWK])
+AN_PROGRAM([gawk], [AC_PROG_AWK])
+AN_PROGRAM([mawk], [AC_PROG_AWK])
+AN_PROGRAM([nawk], [AC_PROG_AWK])
+AC_DEFUN([AC_PROG_AWK],
+[AC_CHECK_PROGS(AWK, gawk mawk nawk awk, )])
+
+
+# AC_PROG_EGREP
+# -------------
+AC_DEFUN([AC_PROG_EGREP],
+[AC_REQUIRE([AC_PROG_GREP])dnl
+AC_CACHE_CHECK([for egrep], ac_cv_path_EGREP,
+   [if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
+   then ac_cv_path_EGREP="$GREP -E"
+   else
+     _AC_PROG_GREP(EGREP, egrep, ['EGREP$'])
+   fi])
+ EGREP="$ac_cv_path_EGREP"
+ AC_SUBST([EGREP])
+])# AC_PROG_EGREP
+
+
+# AC_PROG_FGREP
+# -------------
+AC_DEFUN([AC_PROG_FGREP],
+[AC_REQUIRE([AC_PROG_GREP])dnl
+AC_CACHE_CHECK([for fgrep], ac_cv_path_FGREP,
+   [if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
+   then ac_cv_path_FGREP="$GREP -F"
+   else
+     _AC_PROG_GREP(FGREP, fgrep, [FGREP])
+   fi])
+ FGREP="$ac_cv_path_FGREP"
+ AC_SUBST([FGREP])
+])# AC_PROG_FGREP
+
+
+# AC_PROG_GREP
+# ------------
+# Check for a fully functional grep program that handles
+# the longest lines possible and which respects multiple -e options.
+# Prefer GNU grep if found.
+AC_DEFUN([AC_PROG_GREP],
+[AC_CACHE_CHECK([for grep that handles long lines and -e], ac_cv_path_GREP,
+   [_$0(GREP, [grep ggrep], [-e 'GREP$' -e '-(cannot match)-'])])
+ GREP="$ac_cv_path_GREP"
+ AC_SUBST([GREP])
+])
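After these checks, later configure-time shell code should use the cached, feature-tested programs instead of bare `awk'/`grep'; for instance (the test itself is hypothetical):

    AC_PROG_AWK
    AC_PROG_GREP
    AC_PROG_EGREP
    dnl $host_os assumes AC_CANONICAL_HOST ran earlier.
    if echo "$host_os" | $EGREP 'linux|gnu' >/dev/null 2>&1; then
      have_gnu_userland=maybe
    fi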
+
+
+# _AC_PROG_GREP(VARIABLE, PROGNAME-LIST, PROG-ARGUMENTS)
+# ------------------------------------------------------
+# Solaris 9 /usr/xpg4/bin/*grep is suitable, but /usr/bin/*grep lacks -e.
+# AIX silently truncates long lines before matching.
+# NeXT understands only one -e and truncates long lines.
+m4_define([_AC_PROG_GREP],
+[_AC_PATH_PROGS_FEATURE_CHECK([$1], [$2],
+	[_AC_FEATURE_CHECK_LENGTH([ac_path_$1], [ac_cv_path_$1],
+		["$ac_path_$1" $3], [$1])], [],
+	[$PATH$PATH_SEPARATOR/usr/xpg4/bin])dnl
+])
+
+
+# _AC_PATH_PROGS_FEATURE_CHECK(VARIABLE, PROGNAME-LIST, FEATURE-TEST,
+#                              [ACTION-IF-NOT-FOUND], [PATH=$PATH])
+# -------------------------------------------------------------------
+# FEATURE-TEST is called repeatedly with $ac_path_VARIABLE set to the
+# name of a program in PROGNAME-LIST found in PATH.  FEATURE-TEST must set
+# $ac_cv_path_VARIABLE to the path of an acceptable program, or else
+# ACTION-IF-NOT-FOUND is executed; the default action (for internal use
+# only) issues a fatal error message.  If a suitable $ac_path_VARIABLE is
+# found in the FEATURE-TEST macro, it can set $ac_path_VARIABLE_found=':'
+# to accept that value without any further checks.
+m4_define([_AC_PATH_PROGS_FEATURE_CHECK],
+[if test -z "$$1"; then
+  ac_path_$1_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  _AS_PATH_WALK([$5],
+  [for ac_prog in $2; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_$1="$as_dir/$ac_prog$ac_exec_ext"
+      AS_EXECUTABLE_P(["$ac_path_$1"]) || continue
+$3
+      $ac_path_$1_found && break 3
+    done
+  done])dnl
+  if test -z "$ac_cv_path_$1"; then
+    m4_default([$4],
+      [AC_MSG_ERROR([no acceptable m4_bpatsubst([$2], [ .*]) could be dnl
+found in m4_default([$5], [\$PATH])])])
+  fi
+else
+  ac_cv_path_$1=$$1
+fi
+])
+
+
+# AC_PATH_PROGS_FEATURE_CHECK(VARIABLE, PROGNAME-LIST,
+#                             FEATURE-TEST, [ACTION-IF-NOT-FOUND=:],
+#                             [PATH=$PATH])
+# ------------------------------------------------------------------
+# Designed to be used inside AC_CACHE_VAL.  It is recommended,
+# but not required, that the user also use AC_ARG_VAR([VARIABLE]).
+# If VARIABLE is not empty, set the cache variable
+# $ac_cv_path_VARIABLE to VARIABLE without any further tests.
+# Otherwise, call FEATURE_TEST repeatedly with $ac_path_VARIABLE
+# set to the name of a program in PROGNAME-LIST found in PATH.  If
+# no invocation of FEATURE-TEST sets $ac_cv_path_VARIABLE to the
+# path of an acceptable program, ACTION-IF-NOT-FOUND is executed.
+# FEATURE-TEST is invoked even when $ac_cv_path_VARIABLE is set,
+# in case a better candidate occurs later in PATH; to accept the
+# current setting and bypass further checks, FEATURE-TEST can set
+# $ac_path_VARIABLE_found=':'.  Note that, unlike AC_CHECK_PROGS,
+# this macro does not have any side effect on the current value
+# of VARIABLE.
+m4_define([AC_PATH_PROGS_FEATURE_CHECK],
+[_$0([$1], [$2], [$3], m4_default([$4], [:]), [$5])dnl
+])
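A sketch of the documented calling convention, wrapped in a cache check as recommended above; the GNU-m4 requirement is purely illustrative. The FEATURE-TEST sets ac_cv_path_M4 for an acceptable candidate and ac_path_M4_found=: to stop the search early.

    AC_CACHE_CHECK([for GNU M4], [ac_cv_path_M4],
      [AC_PATH_PROGS_FEATURE_CHECK([M4], [gm4 gnum4 m4],
         [case `"$ac_path_M4" --version 2>/dev/null` in #(
            *GNU*) ac_cv_path_M4=$ac_path_M4 ac_path_M4_found=:;;
          esac],
         [AC_MSG_ERROR([could not find GNU M4])])])
    AC_SUBST([M4], [$ac_cv_path_M4])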
+
+
+# _AC_FEATURE_CHECK_LENGTH(PROGPATH, CACHE-VAR, CHECK-CMD, [MATCH-STRING])
+# ------------------------------------------------------------------------
+# For use as the FEATURE-TEST argument to _AC_PATH_PROGS_FEATURE_CHECK.
+# On each iteration run CHECK-CMD on an input file, storing the value
+# of PROGPATH in CACHE-VAR if the CHECK-CMD succeeds.  The input file
+# is always one line, starting with only 10 characters, and doubling
+# in length at each iteration until approx 10000 characters or the
+# feature check succeeds.  The feature check is called at each
+# iteration by appending (optionally, MATCH-STRING and) a newline
+# to the file, and using the result as input to CHECK-CMD.
+m4_define([_AC_FEATURE_CHECK_LENGTH],
+[# Check for GNU $1 and select it if it is found.
+  _AC_PATH_PROG_FLAVOR_GNU([$$1],
+    [$2="$$1" $1_found=:],
+  [ac_count=0
+  AS_ECHO_N([0123456789]) >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    AS_ECHO(['$4']) >> "conftest.nl"
+    $3 < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    AS_VAR_ARITH([ac_count], [$ac_count + 1])
+    if test $ac_count -gt ${$1_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      $2="$$1"
+dnl   # Using $1_max so that each tool feature checked gets its
+dnl   # own variable.  Don't reset it otherwise the implied search
+dnl   # for best performing tool in a list breaks down.
+      $1_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out])dnl
+])
+
+
+# _AC_PATH_PROG_FLAVOR_GNU(PROGRAM-PATH, IF-SUCCESS, [IF-FAILURE])
+# ----------------------------------------------------------------
+m4_define([_AC_PATH_PROG_FLAVOR_GNU],
+[# Check for GNU $1
+case `"$1" --version 2>&1` in
+*GNU*)
+  $2;;
+m4_ifval([$3],
+[*)
+  $3;;
+])esac
+])# _AC_PATH_PROG_FLAVOR_GNU
+
+
+# AC_PROG_INSTALL
+# ---------------
+AN_MAKEVAR([INSTALL], [AC_PROG_INSTALL])
+AN_PROGRAM([install], [AC_PROG_INSTALL])
+AC_DEFUN_ONCE([AC_PROG_INSTALL],
+[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_REQUIRE_AUX_FILE([install-sh])dnl
+# Find a good install program.  We prefer a C program (faster),
+# so one script is as good as another.  But avoid the broken or
+# incompatible versions:
+# SysV /etc/install, /usr/sbin/install
+# SunOS /usr/etc/install
+# IRIX /sbin/install
+# AIX /bin/install
+# AmigaOS /C/install, which installs bootblocks on floppy discs
+# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
+# AFS /usr/afsws/bin/install, which mishandles nonexistent args
+# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
+# OS/2's system install, which has completely different semantics
+# ./install, which can be erroneously created by make from ./install.sh.
+# Reject install programs that cannot install multiple files.
+AC_MSG_CHECKING([for a BSD-compatible install])
+if test -z "$INSTALL"; then
+AC_CACHE_VAL(ac_cv_path_install,
+[_AS_PATH_WALK([$PATH],
+[[# Account for people who put trailing slashes in PATH elements.
+case $as_dir/ in @%:@((
+  ./ | .// | /[cC]/* | \
+  /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
+  ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
+  /usr/ucb/* ) ;;
+  *)]
+    # OSF1 and SCO ODT 3.0 have their own names for install.
+    # Don't use installbsd from OSF since it installs stuff as root
+    # by default.
+    for ac_prog in ginstall scoinst install; do
+      for ac_exec_ext in '' $ac_executable_extensions; do
+	if AS_EXECUTABLE_P(["$as_dir/$ac_prog$ac_exec_ext"]); then
+	  if test $ac_prog = install &&
+	    grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # AIX install.  It has an incompatible calling convention.
+	    :
+	  elif test $ac_prog = install &&
+	    grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # program-specific install script used by HP pwplus--don't use.
+	    :
+	  else
+	    rm -rf conftest.one conftest.two conftest.dir
+	    echo one > conftest.one
+	    echo two > conftest.two
+	    mkdir conftest.dir
+	    if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
+	      test -s conftest.one && test -s conftest.two &&
+	      test -s conftest.dir/conftest.one &&
+	      test -s conftest.dir/conftest.two
+	    then
+	      ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
+	      break 3
+	    fi
+	  fi
+	fi
+      done
+    done
+    ;;
+esac
+])
+rm -rf conftest.one conftest.two conftest.dir
+])dnl
+  if test "${ac_cv_path_install+set}" = set; then
+    INSTALL=$ac_cv_path_install
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for INSTALL within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    INSTALL=$ac_install_sh
+  fi
+fi
+dnl Do special magic for INSTALL instead of AC_SUBST, to get
+dnl relative names right.
+AC_MSG_RESULT([$INSTALL])
+
+# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
+# It thinks the first close brace ends the variable substitution.
+test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
+AC_SUBST(INSTALL_PROGRAM)dnl
+
+test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
+AC_SUBST(INSTALL_SCRIPT)dnl
+
+test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
+AC_SUBST(INSTALL_DATA)dnl
+])# AC_PROG_INSTALL
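Using it is a one-line call in configure.ac; the three substituted variables are what Makefile.in consumes (paths below are only illustrative):

    AC_PROG_INSTALL
    dnl Makefile.in then installs with the substituted commands, e.g.:
    dnl   @INSTALL_PROGRAM@ mytool $(DESTDIR)$(bindir)/mytool
    dnl   @INSTALL_DATA@ mytool.1 $(DESTDIR)$(mandir)/man1/mytool.1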
+
+
+# AC_PROG_MKDIR_P
+# ---------------
+# Check whether `mkdir -p' is known to be thread-safe, and fall back to
+# install-sh -d otherwise.
+#
+# Automake 1.8 used `mkdir -m 0755 -p --' to ensure that directories
+# created by `make install' are always world readable, even if the
+# installer happens to have an overly restrictive umask (e.g. 077).
+# This was a mistake.  There are at least two reasons why we must not
+# use `-m 0755':
+#   - it causes special bits like SGID to be ignored,
+#   - it may be too restrictive (some setups expect 775 directories).
+#
+# Do not use -m 0755 and let people choose whatever they expect by
+# setting umask.
+#
+# We cannot accept just any implementation of `mkdir' that recognizes `-p'.
+# Some implementations (such as Solaris 8's) are vulnerable to race conditions:
+# if a parallel make tries to run `mkdir -p a/b' and `mkdir -p a/c'
+# concurrently, both versions can detect that a/ is missing, but only
+# one can create it and the other will error out.  Consequently we
+# restrict ourselves to known race-free implementations.
+#
+# Automake used to define mkdir_p as `mkdir -p .', in order to
+# allow $(mkdir_p) to be used without argument.  As in
+#   $(mkdir_p) $(somedir)
+# where $(somedir) is conditionally defined.  However we don't do
+# that for MKDIR_P.
+#  1. before we restricted the check to GNU mkdir, `mkdir -p .' was
+#     reported to fail in read-only directories.  The system where this
+#     happened has been forgotten.
+#  2. in practice we call $(MKDIR_P) on directories such as
+#       $(MKDIR_P) "$(DESTDIR)$(somedir)"
+#     and we don't want to create $(DESTDIR) if $(somedir) is empty.
+#     To support the latter case, we have to write
+#       test -z "$(somedir)" || $(MKDIR_P) "$(DESTDIR)$(somedir)"
+#     so $(MKDIR_P) always has an argument.
+#     We will have better chances of detecting a missing test if
+#     $(MKDIR_P) complains about missing arguments.
+#  3. $(MKDIR_P) is named after `mkdir -p' and we don't expect this
+#     to accept no argument.
+#  4. having something like `mkdir .' in the output is unsightly.
+#
+# On NextStep and OpenStep, the `mkdir' command does not
+# recognize any option.  It will interpret all options as
+# directories to create.
+AN_MAKEVAR([MKDIR_P], [AC_PROG_MKDIR_P])
+AC_DEFUN_ONCE([AC_PROG_MKDIR_P],
+[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_REQUIRE_AUX_FILE([install-sh])dnl
+AC_MSG_CHECKING([for a thread-safe mkdir -p])
+if test -z "$MKDIR_P"; then
+  AC_CACHE_VAL([ac_cv_path_mkdir],
+    [_AS_PATH_WALK([$PATH$PATH_SEPARATOR/opt/sfw/bin],
+      [for ac_prog in mkdir gmkdir; do
+	 for ac_exec_ext in '' $ac_executable_extensions; do
+	   AS_EXECUTABLE_P(["$as_dir/$ac_prog$ac_exec_ext"]) || continue
+	   case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
+	     'mkdir (GNU coreutils) '* | \
+	     'mkdir (coreutils) '* | \
+	     'mkdir (fileutils) '4.1*)
+	       ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
+	       break 3;;
+	   esac
+	 done
+       done])])
+  test -d ./--version && rmdir ./--version
+  if test "${ac_cv_path_mkdir+set}" = set; then
+    MKDIR_P="$ac_cv_path_mkdir -p"
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for MKDIR_P within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    MKDIR_P="$ac_install_sh -d"
+  fi
+fi
+dnl status.m4 does special magic for MKDIR_P instead of AC_SUBST,
+dnl to get relative names right.  However, also AC_SUBST here so
+dnl that Automake versions before 1.10 will pick it up (they do not
+dnl trace AC_SUBST_TRACE).
+dnl FIXME: Remove this once we drop support for Automake < 1.10.
+AC_SUBST([MKDIR_P])dnl
+AC_MSG_RESULT([$MKDIR_P])
+])# AC_PROG_MKDIR_P
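Matching the commentary above, callers keep $(MKDIR_P) supplied with an explicit, emptiness-guarded argument; a sketch:

    AC_PROG_MKDIR_P
    dnl and in Makefile.in, following the pattern described above:
    dnl   test -z "$(somedir)" || $(MKDIR_P) "$(DESTDIR)$(somedir)"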
+
+
+# AC_PROG_LEX
+# -----------
+# Look for flex or lex.  Set its associated library to LEXLIB.
+# Check if lex declares yytext as a char * by default, not a char[].
+AN_MAKEVAR([LEX],  [AC_PROG_LEX])
+AN_PROGRAM([lex],  [AC_PROG_LEX])
+AN_PROGRAM([flex], [AC_PROG_LEX])
+AC_DEFUN_ONCE([AC_PROG_LEX],
+[AC_CHECK_PROGS(LEX, flex lex, :)
+if test "x$LEX" != "x:"; then
+  _AC_PROG_LEX_YYTEXT_DECL
+fi])
+
+
+# _AC_PROG_LEX_YYTEXT_DECL
+# ------------------------
+# Check for the Lex output root, the Lex library, and whether Lex
+# declares yytext as a char * by default.
+m4_define([_AC_PROG_LEX_YYTEXT_DECL],
+[cat >conftest.l <<_ACEOF[
+%%
+a { ECHO; }
+b { REJECT; }
+c { yymore (); }
+d { yyless (1); }
+e { yyless (input () != 0); }
+f { unput (yytext[0]); }
+. { BEGIN INITIAL; }
+%%
+#ifdef YYTEXT_POINTER
+extern char *yytext;
+#endif
+int
+main (void)
+{
+  return ! yylex () + ! yywrap ();
+}
+]_ACEOF
+_AC_DO_VAR(LEX conftest.l)
+AC_CACHE_CHECK([lex output file root], [ac_cv_prog_lex_root], [
+if test -f lex.yy.c; then
+  ac_cv_prog_lex_root=lex.yy
+elif test -f lexyy.c; then
+  ac_cv_prog_lex_root=lexyy
+else
+  AC_MSG_ERROR([cannot find output from $LEX; giving up])
+fi])
+AC_SUBST([LEX_OUTPUT_ROOT], [$ac_cv_prog_lex_root])dnl
+
+if test -z "${LEXLIB+set}"; then
+  AC_CACHE_CHECK([lex library], [ac_cv_lib_lex], [
+    ac_save_LIBS=$LIBS
+    ac_cv_lib_lex='none needed'
+    for ac_lib in '' -lfl -ll; do
+      LIBS="$ac_lib $ac_save_LIBS"
+      AC_LINK_IFELSE([AC_LANG_DEFINES_PROVIDED[`cat $LEX_OUTPUT_ROOT.c`]],
+	[ac_cv_lib_lex=$ac_lib])
+      test "$ac_cv_lib_lex" != 'none needed' && break
+    done
+    LIBS=$ac_save_LIBS
+  ])
+  test "$ac_cv_lib_lex" != 'none needed' && LEXLIB=$ac_cv_lib_lex
+fi
+AC_SUBST(LEXLIB)
+
+AC_CACHE_CHECK(whether yytext is a pointer, ac_cv_prog_lex_yytext_pointer,
+[# POSIX says lex can declare yytext either as a pointer or an array; the
+# default is implementation-dependent.  Figure out which it is, since
+# not all implementations provide the %pointer and %array declarations.
+ac_cv_prog_lex_yytext_pointer=no
+ac_save_LIBS=$LIBS
+LIBS="$LEXLIB $ac_save_LIBS"
+AC_LINK_IFELSE([AC_LANG_DEFINES_PROVIDED
+  [#define YYTEXT_POINTER 1
+`cat $LEX_OUTPUT_ROOT.c`]],
+  [ac_cv_prog_lex_yytext_pointer=yes])
+LIBS=$ac_save_LIBS
+])
+dnl
+if test $ac_cv_prog_lex_yytext_pointer = yes; then
+  AC_DEFINE(YYTEXT_POINTER, 1,
+	    [Define to 1 if `lex' declares `yytext' as a `char *' by default,
+	     not a `char[]'.])
+fi
+rm -f conftest.l $LEX_OUTPUT_ROOT.c
+])# _AC_PROG_LEX_YYTEXT_DECL
+
+
+# Require AC_PROG_LEX in case some people were just calling this macro.
+AU_DEFUN([AC_DECL_YYTEXT],  [AC_PROG_LEX])
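A sketch of how a package consumes the results (the warning text is hypothetical):

    AC_PROG_LEX
    if test "x$LEX" = "x:"; then
      AC_MSG_WARN([flex/lex not found; the shipped scanner.c will be used])
    fi
    dnl LEXLIB and LEX_OUTPUT_ROOT are substituted, and YYTEXT_POINTER is
    dnl defined in config.h exactly as probed by the conftest.l program above.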
+
+
+# AC_PROG_LN_S
+# ------------
+AN_MAKEVAR([LN], [AC_PROG_LN_S])
+AN_PROGRAM([ln], [AC_PROG_LN_S])
+AC_DEFUN([AC_PROG_LN_S],
+[AC_MSG_CHECKING([whether ln -s works])
+AC_SUBST([LN_S], [$as_ln_s])dnl
+if test "$LN_S" = "ln -s"; then
+  AC_MSG_RESULT([yes])
+else
+  AC_MSG_RESULT([no, using $LN_S])
+fi
+])# AC_PROG_LN_S
+
+
+# AC_PROG_MAKE_SET
+# ----------------
+# Define SET_MAKE to set ${MAKE} if Make does not do so automatically.  If Make
+# does not run the test Makefile, we assume that the Make program the user will
+# invoke does set $(MAKE).  This is typical, and emitting `MAKE=foomake' is
+# always wrong if `foomake' is not available or does not work.
+AN_MAKEVAR([MAKE], [AC_PROG_MAKE_SET])
+AN_PROGRAM([make], [AC_PROG_MAKE_SET])
+AC_DEFUN([AC_PROG_MAKE_SET],
+[AC_MSG_CHECKING([whether ${MAKE-make} sets \$(MAKE)])
+set x ${MAKE-make}
+ac_make=`AS_ECHO(["$[2]"]) | sed 's/+/p/g; s/[[^a-zA-Z0-9_]]/_/g'`
+AC_CACHE_VAL(ac_cv_prog_make_${ac_make}_set,
+[cat >conftest.make <<\_ACEOF
+SHELL = /bin/sh
+all:
+	@echo '@@@%%%=$(MAKE)=@@@%%%'
+_ACEOF
+# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
+case `${MAKE-make} -f conftest.make 2>/dev/null` in
+  *@@@%%%=?*=@@@%%%*)
+    eval ac_cv_prog_make_${ac_make}_set=yes;;
+  *)
+    eval ac_cv_prog_make_${ac_make}_set=no;;
+esac
+rm -f conftest.make])dnl
+if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
+  AC_MSG_RESULT([yes])
+  SET_MAKE=
+else
+  AC_MSG_RESULT([no])
+  SET_MAKE="MAKE=${MAKE-make}"
+fi
+AC_SUBST([SET_MAKE])dnl
+])# AC_PROG_MAKE_SET
+
+
+# AC_PROG_RANLIB
+# --------------
+AN_MAKEVAR([RANLIB], [AC_PROG_RANLIB])
+AN_PROGRAM([ranlib], [AC_PROG_RANLIB])
+AC_DEFUN([AC_PROG_RANLIB],
+[AC_CHECK_TOOL(RANLIB, ranlib, :)])
+
+
+# AC_RSH
+# ------
+# I don't know what it used to do, but it no longer does.
+AU_DEFUN([AC_RSH], [],
+[$0 is no longer supported.  Remove this warning when you
+adjust the code.])
+
+
+# AC_PROG_SED
+# -----------
+# Check for a fully functional sed program that truncates
+# as few characters as possible.  Prefer GNU sed if found.
+AC_DEFUN([AC_PROG_SED],
+[AC_CACHE_CHECK([for a sed that does not truncate output], ac_cv_path_SED,
+    [dnl ac_script should not contain more than 99 commands (for HP-UX sed),
+     dnl but should be more than about 7000 bytes, to catch a limit in Solaris 8 /usr/ucb/sed.
+     ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
+     for ac_i in 1 2 3 4 5 6 7; do
+       ac_script="$ac_script$as_nl$ac_script"
+     done
+     echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed
+     AS_UNSET([ac_script])
+     _AC_PATH_PROGS_FEATURE_CHECK(SED, [sed gsed],
+	[_AC_FEATURE_CHECK_LENGTH([ac_path_SED], [ac_cv_path_SED],
+		["$ac_path_SED" -f conftest.sed])])])
+ SED="$ac_cv_path_SED"
+ AC_SUBST([SED])dnl
+ rm -f conftest.sed
+])# AC_PROG_SED
+
+
+# AC_PROG_YACC
+# ------------
+AN_MAKEVAR([BISON],  [AC_PROG_YACC])
+AN_MAKEVAR([YACC],  [AC_PROG_YACC])
+AN_MAKEVAR([YFLAGS],  [AC_PROG_YACC])
+AN_PROGRAM([yacc],  [AC_PROG_YACC])
+AN_PROGRAM([byacc], [AC_PROG_YACC])
+AN_PROGRAM([bison], [AC_PROG_YACC])
+AC_DEFUN([AC_PROG_YACC],
+[AC_CHECK_PROGS(YACC, 'bison -y' byacc, yacc)dnl
+AC_ARG_VAR(YACC,
+[The `Yet Another Compiler Compiler' implementation to use.  Defaults to
+the first program found out of: `bison -y', `byacc', `yacc'.])dnl
+AC_ARG_VAR(YFLAGS,
+[The list of arguments that will be passed by default to $YACC.  This script
+will default YFLAGS to the empty string to avoid a default value of `-d' given
+by some make applications.])])
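AC_PROG_SED and AC_PROG_YACC are likewise single calls; YACC and YFLAGS are precious variables (AC_ARG_VAR), so they can be overridden on the configure command line:

    AC_PROG_SED
    AC_PROG_YACC
    dnl e.g.:  ./configure YACC='bison -y' YFLAGS='-d'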
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/specific.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/specific.m4
new file mode 100644
index 0000000..1e20e01
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/specific.m4
@@ -0,0 +1,477 @@
+# This file is part of Autoconf.			-*- Autoconf -*-
+# Macros that test for specific, unclassified, features.
+#
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+## ------------------------- ##
+## Checks for declarations.  ##
+## ------------------------- ##
+
+
+# AC_DECL_SYS_SIGLIST
+# -------------------
+AN_IDENTIFIER([sys_siglist],     [AC_CHECK_DECLS([sys_siglist])])
+AU_DEFUN([AC_DECL_SYS_SIGLIST],
+[AC_CHECK_DECLS([sys_siglist],,,
+[#include <signal.h>
+/* NetBSD declares sys_siglist in unistd.h.  */
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif
+])
+])# AC_DECL_SYS_SIGLIST
+
+
+
+
+## -------------------------------------- ##
+## Checks for operating system services.  ##
+## -------------------------------------- ##
+
+
+# AC_SYS_INTERPRETER
+# ------------------
+AC_DEFUN([AC_SYS_INTERPRETER],
+[AC_CACHE_CHECK(whether @%:@! works in shell scripts, ac_cv_sys_interpreter,
+[echo '#! /bin/cat
+exit 69
+' >conftest
+chmod u+x conftest
+(SHELL=/bin/sh; export SHELL; ./conftest >/dev/null 2>&1)
+if test $? -ne 69; then
+   ac_cv_sys_interpreter=yes
+else
+   ac_cv_sys_interpreter=no
+fi
+rm -f conftest])
+interpval=$ac_cv_sys_interpreter
+])
+
+
+AU_DEFUN([AC_HAVE_POUNDBANG],
+[AC_SYS_INTERPRETER],
+[Remove this warning when you adjust your code to use
+`AC_SYS_INTERPRETER'.])
+
+
+AU_DEFUN([AC_ARG_ARRAY], [],
+[$0 is no longer implemented: don't do unportable things
+with arguments. Remove this warning when you adjust your code.])
+
+
+# _AC_SYS_LARGEFILE_TEST_INCLUDES
+# -------------------------------
+m4_define([_AC_SYS_LARGEFILE_TEST_INCLUDES],
+[@%:@include <sys/types.h>
+ /* Check that off_t can represent 2**63 - 1 correctly.
+    We can't simply define LARGE_OFF_T to be 9223372036854775807,
+    since some C++ compilers masquerading as C compilers
+    incorrectly reject 9223372036854775807.  */
+@%:@define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62))
+  int off_t_is_large[[(LARGE_OFF_T % 2147483629 == 721
+		       && LARGE_OFF_T % 2147483647 == 1)
+		      ? 1 : -1]];[]dnl
+])
+
+
+# _AC_SYS_LARGEFILE_MACRO_VALUE(C-MACRO, VALUE,
+#				CACHE-VAR,
+#				DESCRIPTION,
+#				PROLOGUE, [FUNCTION-BODY])
+# --------------------------------------------------------
+m4_define([_AC_SYS_LARGEFILE_MACRO_VALUE],
+[AC_CACHE_CHECK([for $1 value needed for large files], [$3],
+[while :; do
+  m4_ifval([$6], [AC_LINK_IFELSE], [AC_COMPILE_IFELSE])(
+    [AC_LANG_PROGRAM([$5], [$6])],
+    [$3=no; break])
+  m4_ifval([$6], [AC_LINK_IFELSE], [AC_COMPILE_IFELSE])(
+    [AC_LANG_PROGRAM([@%:@define $1 $2
+$5], [$6])],
+    [$3=$2; break])
+  $3=unknown
+  break
+done])
+case $$3 in #(
+  no | unknown) ;;
+  *) AC_DEFINE_UNQUOTED([$1], [$$3], [$4]);;
+esac
+rm -rf conftest*[]dnl
+])# _AC_SYS_LARGEFILE_MACRO_VALUE
+
+
+# AC_SYS_LARGEFILE
+# ----------------
+# By default, many hosts won't let programs access large files;
+# one must use special compiler options to get large-file access to work.
+# For more details about this brain damage please see:
+# http://www.unix-systems.org/version2/whatsnew/lfs20mar.html
+AC_DEFUN([AC_SYS_LARGEFILE],
+[AC_ARG_ENABLE(largefile,
+	       [  --disable-largefile     omit support for large files])
+if test "$enable_largefile" != no; then
+
+  AC_CACHE_CHECK([for special C compiler options needed for large files],
+    ac_cv_sys_largefile_CC,
+    [ac_cv_sys_largefile_CC=no
+     if test "$GCC" != yes; then
+       ac_save_CC=$CC
+       while :; do
+	 # IRIX 6.2 and later do not support large files by default,
+	 # so use the C compiler's -n32 option if that helps.
+	 AC_LANG_CONFTEST([AC_LANG_PROGRAM([_AC_SYS_LARGEFILE_TEST_INCLUDES])])
+	 AC_COMPILE_IFELSE([], [break])
+	 CC="$CC -n32"
+	 AC_COMPILE_IFELSE([], [ac_cv_sys_largefile_CC=' -n32'; break])
+	 break
+       done
+       CC=$ac_save_CC
+       rm -f conftest.$ac_ext
+    fi])
+  if test "$ac_cv_sys_largefile_CC" != no; then
+    CC=$CC$ac_cv_sys_largefile_CC
+  fi
+
+  _AC_SYS_LARGEFILE_MACRO_VALUE(_FILE_OFFSET_BITS, 64,
+    ac_cv_sys_file_offset_bits,
+    [Number of bits in a file offset, on hosts where this is settable.],
+    [_AC_SYS_LARGEFILE_TEST_INCLUDES])
+  if test $ac_cv_sys_file_offset_bits = unknown; then
+    _AC_SYS_LARGEFILE_MACRO_VALUE(_LARGE_FILES, 1,
+      ac_cv_sys_large_files,
+      [Define for large files, on AIX-style hosts.],
+      [_AC_SYS_LARGEFILE_TEST_INCLUDES])
+  fi
+fi
+])# AC_SYS_LARGEFILE
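Calling the macro is all a package needs; depending on the host, config.h then gets one of the two defines probed above (shown here as examples):

    AC_SYS_LARGEFILE
    dnl config.h ends up with, e.g.:
    dnl   #define _FILE_OFFSET_BITS 64    (hosts where the off_t width is settable)
    dnl or
    dnl   #define _LARGE_FILES 1          (AIX-style hosts)
    dnl and `./configure --disable-largefile' skips the whole check.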
+
+
+# AC_SYS_LONG_FILE_NAMES
+# ----------------------
+# Security: use a temporary directory as the most portable way of
+# creating files in /tmp securely.  Removing them leaves a race
+# condition, set -C is not portably guaranteed to use O_EXCL, so still
+# leaves a race, and not all systems have the `mktemp' utility.  We
+# still test for existence first in case of broken systems where the
+# mkdir succeeds even when the directory exists.  Broken systems may
+# retain a race, but they probably have other security problems
+# anyway; this should be secure on well-behaved systems.  In any case,
+# use of `mktemp' is probably inappropriate here since it would fail in
+# attempting to create file names that differ only after the 14th
+# character on file systems without long file names.
+AC_DEFUN([AC_SYS_LONG_FILE_NAMES],
+[AC_CACHE_CHECK(for long file names, ac_cv_sys_long_file_names,
+[ac_cv_sys_long_file_names=yes
+# Test for long file names in all the places we know might matter:
+#      .		the current directory, where building will happen
+#      $prefix/lib	where we will be installing things
+#      $exec_prefix/lib	likewise
+#      $TMPDIR		if set, where it might want to write temporary files
+#      /tmp		where it might want to write temporary files
+#      /var/tmp		likewise
+#      /usr/tmp		likewise
+for ac_dir in . "$TMPDIR" /tmp /var/tmp /usr/tmp "$prefix/lib" "$exec_prefix/lib"; do
+  # Skip $TMPDIR if it is empty or bogus, and skip $exec_prefix/lib
+  # in the usual case where exec_prefix is '${prefix}'.
+  case $ac_dir in #(
+    . | /* | ?:[[\\/]]*) ;; #(
+    *) continue;;
+  esac
+  test -w "$ac_dir/." || continue # It is less confusing to not echo anything here.
+  ac_xdir=$ac_dir/cf$$
+  (umask 077 && mkdir "$ac_xdir" 2>/dev/null) || continue
+  ac_tf1=$ac_xdir/conftest9012345
+  ac_tf2=$ac_xdir/conftest9012346
+  touch "$ac_tf1" 2>/dev/null && test -f "$ac_tf1" && test ! -f "$ac_tf2" ||
+    ac_cv_sys_long_file_names=no
+  rm -f -r "$ac_xdir" 2>/dev/null
+  test $ac_cv_sys_long_file_names = no && break
+done])
+if test $ac_cv_sys_long_file_names = yes; then
+  AC_DEFINE(HAVE_LONG_FILE_NAMES, 1,
+	    [Define to 1 if you support file names longer than 14 characters.])
+fi
+])
+
+
+# AC_SYS_RESTARTABLE_SYSCALLS
+# ---------------------------
+# If the system automatically restarts a system call that is
+# interrupted by a signal, define `HAVE_RESTARTABLE_SYSCALLS'.
+AC_DEFUN([AC_SYS_RESTARTABLE_SYSCALLS],
+[AC_DIAGNOSE([obsolete],
+[$0: AC_SYS_RESTARTABLE_SYSCALLS is useful only when supporting very
+old systems that lack `sigaction' and `SA_RESTART'.  Don't bother with
+this macro unless you need to support very old systems like 4.2BSD and
+SVR3.])dnl
+AC_REQUIRE([AC_HEADER_SYS_WAIT])dnl
+AC_CACHE_CHECK(for restartable system calls, ac_cv_sys_restartable_syscalls,
+[AC_RUN_IFELSE([AC_LANG_SOURCE(
+[/* Exit 0 (true) if wait returns something other than -1,
+   i.e. the pid of the child, which means that wait was restarted
+   after getting the signal.  */
+
+AC_INCLUDES_DEFAULT
+#include <signal.h>
+#ifdef HAVE_SYS_WAIT_H
+# include <sys/wait.h>
+#endif
+
+/* Some platforms explicitly require an extern "C" signal handler
+   when using C++. */
+#ifdef __cplusplus
+extern "C" void ucatch (int dummy) { }
+#else
+void ucatch (dummy) int dummy; { }
+#endif
+
+int
+main ()
+{
+  int i = fork (), status;
+
+  if (i == 0)
+    {
+      sleep (3);
+      kill (getppid (), SIGINT);
+      sleep (3);
+      return 0;
+    }
+
+  signal (SIGINT, ucatch);
+
+  status = wait (&i);
+  if (status == -1)
+    wait (&i);
+
+  return status == -1;
+}])],
+	       [ac_cv_sys_restartable_syscalls=yes],
+	       [ac_cv_sys_restartable_syscalls=no])])
+if test $ac_cv_sys_restartable_syscalls = yes; then
+  AC_DEFINE(HAVE_RESTARTABLE_SYSCALLS, 1,
+	    [Define to 1 if system calls automatically restart after
+	     interruption by a signal.])
+fi
+])# AC_SYS_RESTARTABLE_SYSCALLS
+
+
+# AC_SYS_POSIX_TERMIOS
+# --------------------
+AC_DEFUN([AC_SYS_POSIX_TERMIOS],
+[AC_CACHE_CHECK([POSIX termios], ac_cv_sys_posix_termios,
+[AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <sys/types.h>
+#include <unistd.h>
+#include <termios.h>
+]],
+	     [/* SunOS 4.0.3 has termios.h but not the library calls.  */
+   tcgetattr(0, 0);])],
+	     ac_cv_sys_posix_termios=yes,
+	     ac_cv_sys_posix_termios=no)])
+])# AC_SYS_POSIX_TERMIOS
+
+
+
+
+## ------------------------------------ ##
+## Checks for not-quite-Unix variants.  ##
+## ------------------------------------ ##
+
+
+# AC_GNU_SOURCE
+# -------------
+AU_DEFUN([AC_GNU_SOURCE], [AC_USE_SYSTEM_EXTENSIONS])
+
+
+# AC_CYGWIN
+# ---------
+# Check for Cygwin.  This is a way to set the right value for
+# EXEEXT.
+AU_DEFUN([AC_CYGWIN],
+[AC_CANONICAL_HOST
+case $host_os in
+  *cygwin* ) CYGWIN=yes;;
+	 * ) CYGWIN=no;;
+esac
+], [$0 is obsolete: use AC_CANONICAL_HOST and check if $host_os
+matches *cygwin*])# AC_CYGWIN
+
+
+# AC_EMXOS2
+# ---------
+# Check for EMX on OS/2.  This is another way to set the right value
+# for EXEEXT.
+AU_DEFUN([AC_EMXOS2],
+[AC_CANONICAL_HOST
+case $host_os in
+  *emx* ) EMXOS2=yes;;
+      * ) EMXOS2=no;;
+esac
+], [$0 is obsolete: use AC_CANONICAL_HOST and check if $host_os
+matches *emx*])# AC_EMXOS2
+
+
+# AC_MINGW32
+# ----------
+# Check for mingw32.  This is another way to set the right value for
+# EXEEXT.
+AU_DEFUN([AC_MINGW32],
+[AC_CANONICAL_HOST
+case $host_os in
+  *mingw32* ) MINGW32=yes;;
+	  * ) MINGW32=no;;
+esac
+], [$0 is obsolete: use AC_CANONICAL_HOST and check if $host_os
+matches *mingw32*])# AC_MINGW32
+
+
+# AC_USE_SYSTEM_EXTENSIONS
+# ------------------------
+# Enable extensions on systems that normally disable them,
+# typically due to standards-conformance issues.
+#
+# Remember that #undef in AH_VERBATIM gets replaced with #define by
+# AC_DEFINE.  The goal here is to define all known feature-enabling
+# macros, then, if reports of conflicts are made, disable macros that
+# cause problems on some platforms (such as __EXTENSIONS__).
+AC_DEFUN_ONCE([AC_USE_SYSTEM_EXTENSIONS],
+[AC_BEFORE([$0], [AC_COMPILE_IFELSE])dnl
+AC_BEFORE([$0], [AC_RUN_IFELSE])dnl
+
+  AC_CHECK_HEADER([minix/config.h], [MINIX=yes], [MINIX=])
+  if test "$MINIX" = yes; then
+    AC_DEFINE([_POSIX_SOURCE], [1],
+      [Define to 1 if you need to in order for `stat' and other
+       things to work.])
+    AC_DEFINE([_POSIX_1_SOURCE], [2],
+      [Define to 2 if the system does not provide POSIX.1 features
+       except with this defined.])
+    AC_DEFINE([_MINIX], [1],
+      [Define to 1 if on MINIX.])
+  fi
+
+dnl Use a different key than __EXTENSIONS__, as that name broke existing
+dnl configure.ac when using autoheader 2.62.
+  AH_VERBATIM([USE_SYSTEM_EXTENSIONS],
+[/* Enable extensions on AIX 3, Interix.  */
+#ifndef _ALL_SOURCE
+# undef _ALL_SOURCE
+#endif
+/* Enable GNU extensions on systems that have them.  */
+#ifndef _GNU_SOURCE
+# undef _GNU_SOURCE
+#endif
+/* Enable threading extensions on Solaris.  */
+#ifndef _POSIX_PTHREAD_SEMANTICS
+# undef _POSIX_PTHREAD_SEMANTICS
+#endif
+/* Enable extensions on HP NonStop.  */
+#ifndef _TANDEM_SOURCE
+# undef _TANDEM_SOURCE
+#endif
+/* Enable general extensions on Solaris.  */
+#ifndef __EXTENSIONS__
+# undef __EXTENSIONS__
+#endif
+])
+  AC_CACHE_CHECK([whether it is safe to define __EXTENSIONS__],
+    [ac_cv_safe_to_define___extensions__],
+    [AC_COMPILE_IFELSE(
+       [AC_LANG_PROGRAM([
+#	  define __EXTENSIONS__ 1
+	  AC_INCLUDES_DEFAULT])],
+       [ac_cv_safe_to_define___extensions__=yes],
+       [ac_cv_safe_to_define___extensions__=no])])
+  test $ac_cv_safe_to_define___extensions__ = yes &&
+    AC_DEFINE([__EXTENSIONS__])
+  AC_DEFINE([_ALL_SOURCE])
+  AC_DEFINE([_GNU_SOURCE])
+  AC_DEFINE([_POSIX_PTHREAD_SEMANTICS])
+  AC_DEFINE([_TANDEM_SOURCE])
+])# AC_USE_SYSTEM_EXTENSIONS
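Because of the AC_BEFORE guards at the top of the macro, it should appear early in configure.ac, before the package's own compile or run tests; a hypothetical opening:

    AC_INIT([mypkg], [1.0])
    AC_CONFIG_HEADERS([config.h])
    AC_USE_SYSTEM_EXTENSIONS
    dnl ... the package's own AC_COMPILE_IFELSE/AC_RUN_IFELSE tests follow ...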
+
+
+
+## -------------------------- ##
+## Checks for UNIX variants.  ##
+## -------------------------- ##
+
+
+# These are kludges which should be replaced by a single POSIX check.
+# They aren't cached, to discourage their use.
+
+# AC_AIX
+# ------
+AU_DEFUN([AC_AIX], [AC_USE_SYSTEM_EXTENSIONS])
+
+
+# AC_MINIX
+# --------
+AU_DEFUN([AC_MINIX], [AC_USE_SYSTEM_EXTENSIONS])
+
+
+# AC_ISC_POSIX
+# ------------
+AU_DEFUN([AC_ISC_POSIX], [AC_SEARCH_LIBS([strerror], [cposix])])
+
+
+# AC_XENIX_DIR
+# ------------
+AU_DEFUN([AC_XENIX_DIR],
+[AC_MSG_CHECKING([for Xenix])
+AC_EGREP_CPP([yes],
+[#if defined M_XENIX && ! defined M_UNIX
+  yes
+@%:@endif],
+	     [AC_MSG_RESULT([yes]); XENIX=yes],
+	     [AC_MSG_RESULT([no]); XENIX=])
+
+AC_HEADER_DIRENT[]dnl
+],
+[You shouldn't need to depend upon XENIX.  Remove the
+`AC_MSG_CHECKING', `AC_EGREP_CPP', and this warning if this part
+of the test is useless.])
+
+
+# AC_DYNIX_SEQ
+# ------------
+AU_DEFUN([AC_DYNIX_SEQ], [AC_FUNC_GETMNTENT])
+
+
+# AC_IRIX_SUN
+# -----------
+AU_DEFUN([AC_IRIX_SUN],
+[AC_FUNC_GETMNTENT
+AC_CHECK_LIB([sun], [getpwnam])])
+
+
+# AC_SCO_INTL
+# -----------
+AU_DEFUN([AC_SCO_INTL], [AC_FUNC_STRFTIME])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/status.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/status.m4
new file mode 100644
index 0000000..7937388
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/status.m4
@@ -0,0 +1,1784 @@
+# This file is part of Autoconf.                       -*- Autoconf -*-
+# Parameterizing and creating config.status.
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+# This file handles about all the preparation aspects for
+# `config.status': registering the configuration files, the headers,
+# the links, and the commands `config.status' will run.  There is a
+# little mixture though of things actually handled by `configure',
+# such as running the `configure' in the sub directories.  Minor
+# detail.
+#
+# There are two kinds of commands:
+#
+# COMMANDS:
+#
+#   They are output into `config.status' via a quoted here doc.  These
+#   commands are always associated with a tag that the user can use to
+#   tell `config.status' which commands she wants to run.
+#
+# INIT-CMDS:
+#
+#   They are output via an *unquoted* here-doc.  As a consequence $var
+#   will be output as the value of VAR.  This is typically used by
+#   `configure' to give `config.status' some variables it needs to run
+#   the COMMANDS.  Unlike the COMMANDS, the INIT-CMDS are
+#   always run.
+#
+#
+# Honorable members of this family are AC_CONFIG_FILES,
+# AC_CONFIG_HEADERS, AC_CONFIG_LINKS and AC_CONFIG_COMMANDS.  Bad boys
+# are AC_LINK_FILES, AC_OUTPUT_COMMANDS and AC_OUTPUT when used with
+# arguments.  False members are AC_CONFIG_SRCDIR, AC_CONFIG_SUBDIRS
+# and AC_CONFIG_AUX_DIR.  Cousins are AC_CONFIG_COMMANDS_PRE and
+# AC_CONFIG_COMMANDS_POST.
+
+
+## ------------------ ##
+## Auxiliary macros.  ##
+## ------------------ ##
+
+# _AC_SRCDIRS(BUILD-DIR-NAME)
+# ---------------------------
+# Inputs:
+#   - BUILD-DIR-NAME is `top-build -> build' and `top-src -> src'
+#   - `$srcdir' is `top-build -> top-src'
+#
+# Outputs:
+# - `ac_builddir' is `.', for symmetry only.
+# - `ac_top_builddir_sub' is `build -> top_build'.
+#      This is used for @top_builddir@.
+# - `ac_top_build_prefix' is `build -> top_build'.
+#      If not empty, has a trailing slash.
+# - `ac_srcdir' is `build -> src'.
+# - `ac_top_srcdir' is `build -> top-src'.
+# and `ac_abs_builddir' etc., the absolute directory names.
+m4_define([_AC_SRCDIRS],
+[ac_builddir=.
+
+case $1 in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+  ac_dir_suffix=/`AS_ECHO([$1]) | sed 's|^\.[[\\/]]||'`
+  # A ".." for each directory in $ac_dir_suffix.
+  ac_top_builddir_sub=`AS_ECHO(["$ac_dir_suffix"]) | sed 's|/[[^\\/]]*|/..|g;s|/||'`
+  case $ac_top_builddir_sub in
+  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+  esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+  .)  # We are building in place.
+    ac_srcdir=.
+    ac_top_srcdir=$ac_top_builddir_sub
+    ac_abs_top_srcdir=$ac_pwd ;;
+  [[\\/]]* | ?:[[\\/]]* )  # Absolute name.
+    ac_srcdir=$srcdir$ac_dir_suffix;
+    ac_top_srcdir=$srcdir
+    ac_abs_top_srcdir=$srcdir ;;
+  *) # Relative name.
+    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+    ac_top_srcdir=$ac_top_build_prefix$srcdir
+    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+])# _AC_SRCDIRS
+
+
+# _AC_HAVE_TOP_BUILD_PREFIX
+# -------------------------
+# Announce to the world (to Libtool) that we substitute @top_build_prefix@.
+AC_DEFUN([_AC_HAVE_TOP_BUILD_PREFIX])
+
+
+## ---------------------- ##
+## Registering the tags.  ##
+## ---------------------- ##
+
+
+# _AC_CONFIG_COMMANDS_INIT([INIT-COMMANDS])
+# -----------------------------------------
+#
+# Register INIT-COMMANDS as commands pasted *unquoted* into
+# `config.status'.  This is typically used to pass variables from
+# `configure' to `config.status'.  Note that $[1] is not over quoted as
+# was the case in AC_OUTPUT_COMMANDS.
+m4_define([_AC_CONFIG_COMMANDS_INIT],
+[m4_ifval([$1],
+	  [m4_append([_AC_OUTPUT_COMMANDS_INIT],
+		     [$1
+])])])
+
+
+# AC_FILE_DEPENDENCY_TRACE(DEST, SOURCE1, [SOURCE2...])
+# -----------------------------------------------------
+# This macro does nothing; it is just a hook to be read with `autoconf --trace'.
+#
+# It announces DEST depends upon the SOURCE1 etc.
+m4_define([AC_FILE_DEPENDENCY_TRACE], [])
+
+
+# _AC_FILE_DEPENDENCY_TRACE_COLON(DEST:SOURCE1[:SOURCE2...])
+# ----------------------------------------------------------
+# Declare that DEST depends upon SOURCE1 etc.
+#
+m4_define([_AC_FILE_DEPENDENCY_TRACE_COLON],
+[AC_FILE_DEPENDENCY_TRACE(m4_translit([$1], [:], [,]))])
+
+
+# _AC_CONFIG_DEPENDENCY(MODE, DEST[:SOURCE1...])
+# ----------------------------------------------
+# MODE is `FILES', `HEADERS', or `LINKS'.
+#
+# Be sure that a missing dependency is expressed as a dependency upon
+# `DEST.in' (except with config links).
+#
+m4_define([_AC_CONFIG_DEPENDENCY],
+[_AC_FILE_DEPENDENCY_TRACE_COLON([$2]_AC_CONFIG_DEPENDENCY_DEFAULT($@))dnl
+])
+
+
+# _AC_CONFIG_DEPENDENCY_DEFAULT(MODE, DEST[:SOURCE1...])
+# ------------------------------------------------------
+# Expand to `:DEST.in' if appropriate, or to empty string otherwise.
+#
+# More detailed description:
+# If the tag contains `:', expand to nothing.
+# Otherwise, for a config file or header, add `:DEST.in'.
+# For a config link, DEST.in is not appropriate:
+#  - if the tag is literal, complain.
+#  - otherwise, just expand to nothing and proceed with fingers crossed.
+#    (We get to this case from the obsolete AC_LINK_FILES, for example.)
+#
+m4_define([_AC_CONFIG_DEPENDENCY_DEFAULT],
+[m4_if(m4_index([$2], [:]), [-1],
+	   [m4_if([$1], [LINKS],
+		  [AS_LITERAL_IF([$2],
+		    [m4_fatal([Invalid AC_CONFIG_LINKS tag: `$2'])])],
+		  [:$2.in])])])
+
+
+# _AC_CONFIG_UNIQUE(MODE, DEST)
+# -----------------------------
+# MODE is `FILES', `HEADERS', `LINKS', `COMMANDS', or `SUBDIRS'.
+#
+# Verify that there is no double definition of an output file.
+#
+m4_define([_AC_CONFIG_UNIQUE],
+[m4_ifdef([_AC_SEEN_TAG($2)],
+   [m4_fatal([`$2' is already registered with AC_CONFIG_]m4_defn(
+     [_AC_SEEN_TAG($2)]).)],
+   [m4_define([_AC_SEEN_TAG($2)], [$1])])dnl
+])
+
+
+# _AC_CONFIG_FOOS(MODE, TAGS..., [COMMANDS], [INIT-CMDS])
+# -------------------------------------------------------
+# MODE is `FILES', `HEADERS', `LINKS', or `COMMANDS'.
+#
+# Associate the COMMANDS with each TAG, i.e., when config.status creates TAG,
+# run COMMANDS afterwards.  (This is done in _AC_CONFIG_REGISTER_DEST.)
+#
+# For COMMANDS, do not m4_normalize TAGS before adding it to ac_config_commands.
+# This historical difference allows macro calls in TAGS.
+#
+m4_define([_AC_CONFIG_FOOS],
+[m4_map_args_w([$2], [_AC_CONFIG_REGISTER([$1],], [, [$3])])]dnl
+[m4_define([_AC_SEEN_CONFIG(ANY)])]dnl
+[m4_define([_AC_SEEN_CONFIG($1)])]dnl
+[_AC_CONFIG_COMMANDS_INIT([$4])]dnl
+[ac_config_[]m4_tolower([$1])="$ac_config_[]m4_tolower([$1]) ]dnl
+[m4_if([$1], [COMMANDS], [$2], [m4_normalize([$2])])"
+])
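+
+# For instance (illustrative tag), AC_CONFIG_FILES([Makefile]) reaches this
+# macro as
+#
+#   _AC_CONFIG_FOOS([FILES], [Makefile])
+#
+# which registers the `Makefile' tag for config.status and appends it to
+# the shell variable ac_config_files.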
+
+# _AC_CONFIG_COMPUTE_DEST(STRING)
+# -------------------------------
+# Compute the DEST from STRING by stripping any : and following
+# characters.  Guarantee a match in m4_index, so as to avoid a bug
+# with precision -1 in m4_format in older m4.
+m4_define([_AC_CONFIG_COMPUTE_DEST],
+[m4_format([[%.*s]], m4_index([$1:], [:]), [$1])])
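+
+# A couple of worked cases (illustrative tags):
+#
+#   _AC_CONFIG_COMPUTE_DEST([Makefile:Makefile.in])  => Makefile
+#   _AC_CONFIG_COMPUTE_DEST([config.h])              => config.h
+#
+# The `:' appended in `$1:' guarantees that m4_index finds a match, so
+# m4_format never sees the problematic precision -1.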
+
+# _AC_CONFIG_REGISTER(MODE, TAG, [COMMANDS])
+# ------------------------------------------
+# MODE is `FILES', `HEADERS', `LINKS', or `COMMANDS'.
+#
+m4_define([_AC_CONFIG_REGISTER],
+[m4_if([$1], [COMMANDS],
+       [],
+       [_AC_CONFIG_DEPENDENCY([$1], [$2])])]dnl
+[_AC_CONFIG_REGISTER_DEST([$1], [$2],
+  _AC_CONFIG_COMPUTE_DEST([$2]), [$3])])
+
+
+# _AC_CONFIG_REGISTER_DEST(MODE, TAG, DEST, [COMMANDS])
+# -----------------------------------------------------
+# MODE is `FILES', `HEADERS', `LINKS', or `COMMANDS'.
+# TAG is in the form DEST[:SOURCE...].
+# Thus parameter $3 is the first part of $2.
+#
+# With CONFIG_LINKS, reject DEST=., because it makes it hard for
+# ./config.status to guess the links to establish (`./config.status .').
+#
+# Save the name of the first config header to AH_HEADER.
+#
+m4_define([_AC_CONFIG_REGISTER_DEST],
+[_AC_CONFIG_UNIQUE([$1], [$3])]dnl
+[m4_if([$1 $3], [LINKS .],
+       [m4_fatal([invalid destination of a config link: `.'])],
+       [$1], [HEADERS],
+       [m4_define_default([AH_HEADER], [$3])])]dnl
+dnl
+dnl Recognize TAG as an argument to config.status:
+dnl
+[m4_append([_AC_LIST_TAGS],
+[    "$3") CONFIG_$1="$CONFIG_$1 $2" ;;
+])]dnl
+dnl
+dnl Register the associated commands, if any:
+dnl
+[m4_ifval([$4],
+[m4_append([_AC_LIST_TAG_COMMANDS],
+[    "$3":]m4_format([[%.1s]], [$1])[) $4 ;;
+])])])# _AC_CONFIG_REGISTER_DEST
+
+
+
+
+## --------------------- ##
+## Configuration files.  ##
+## --------------------- ##
+
+
+# AC_CONFIG_FILES(FILE..., [COMMANDS], [INIT-CMDS])
+# -------------------------------------------------
+# Specify output files, i.e., files that are configured with AC_SUBST.
+#
+AC_DEFUN([AC_CONFIG_FILES], [_AC_CONFIG_FOOS([FILES], $@)])
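+
+# A minimal configure.ac sketch (hypothetical project and file names):
+#
+#   AC_INIT([demo], [1.0])
+#   AC_CONFIG_FILES([Makefile src/Makefile],
+#                   [echo "$ac_file regenerated"])
+#   AC_OUTPUT
+#
+# Each FILE is instantiated from FILE.in by config.status, and the
+# optional COMMANDS run after each of these tags is created.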
+
+
+# _AC_SED_CMD_LIMIT
+# -----------------
+# Evaluate to an m4 number equal to the maximum number of commands to put
+# in any single sed program, not counting ":" commands.
+#
+# Some seds have small command number limits, like on Digital OSF/1 and HP-UX.
+m4_define([_AC_SED_CMD_LIMIT],
+dnl One cannot portably go further than 99 commands because of HP-UX.
+[99])
+
+
+# _AC_AWK_LITERAL_LIMIT
+# ---------------------
+# Evaluate to the maximum number of characters to put in an awk
+# string literal, not counting escape characters.
+#
+# Some awks, such as those on Solaris and AIX, have small limits.
+m4_define([_AC_AWK_LITERAL_LIMIT],
+[148])
+
+
+# _AC_OUTPUT_FILES_PREPARE
+# ------------------------
+# Create the awk scripts needed for CONFIG_FILES.
+# Support multiline substitutions and make sure that the substitutions are
+# not evaluated recursively.
+# The intention is to have readable config.status and configure, even
+# though this m4 code might be scary.
+#
+# This code was written by Dan Manthey and rewritten by Ralf Wildenhues.
+#
+# This macro is expanded inside a here document.  If the here document is
+# closed, it has to be reopened with
+# "cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1".
+#
+m4_define([_AC_OUTPUT_FILES_PREPARE],
+[# Set up the scripts for CONFIG_FILES section.
+# No need to generate them if there are no CONFIG_FILES.
+# This happens for instance with `./config.status config.h'.
+if test -n "$CONFIG_FILES"; then
+
+dnl For AC_SUBST_FILE, check for usable getline support in awk,
+dnl at config.status execution time.
+dnl Otherwise, do the interpolation in sh, which is slower.
+dnl Without any AC_SUBST_FILE, omit all related code.
+dnl Note the expansion is double-quoted for readability.
+m4_ifdef([_AC_SUBST_FILES],
+[[if $AWK 'BEGIN { getline <"/dev/null" }' </dev/null 2>/dev/null; then
+  ac_cs_awk_getline=:
+  ac_cs_awk_pipe_init=
+  ac_cs_awk_read_file='
+      while ((getline aline < (F[key])) > 0)
+	print(aline)
+      close(F[key])'
+  ac_cs_awk_pipe_fini=
+else
+  ac_cs_awk_getline=false
+  ac_cs_awk_pipe_init="print \"cat <<'|#_!!_#|' &&\""
+  ac_cs_awk_read_file='
+      print "|#_!!_#|"
+      print "cat " F[key] " &&"
+      '$ac_cs_awk_pipe_init
+  # The final `:' finishes the AND list.
+  ac_cs_awk_pipe_fini='END { print "|#_!!_#|"; print ":" }'
+fi]])
+ac_cr=`echo X | tr X '\015'`
+# On cygwin, bash can eat \r inside `` if the user requested igncr.
+# But we know of no other shell where ac_cr would be empty at this
+# point, so we can use a bashism as a fallback.
+if test "x$ac_cr" = x; then
+  eval ac_cr=\$\'\\r\'
+fi
+ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
+if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
+  ac_cs_awk_cr='\\r'
+else
+  ac_cs_awk_cr=$ac_cr
+fi
+dnl
+dnl Define the pipe that does the substitution.
+m4_ifdef([_AC_SUBST_FILES],
+[m4_define([_AC_SUBST_CMDS], [|
+if $ac_cs_awk_getline; then
+  $AWK -f "$ac_tmp/subs.awk"
+else
+  $AWK -f "$ac_tmp/subs.awk" | $SHELL
+fi])],
+[m4_define([_AC_SUBST_CMDS],
+[| $AWK -f "$ac_tmp/subs.awk"])])dnl
+
+echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
+_ACEOF
+
+m4_ifdef([_AC_SUBST_FILES],
+[# Create commands to substitute file output variables.
+{
+  echo "cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1" &&
+  echo 'cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&' &&
+  echo "$ac_subst_files" | sed 's/.*/F@<:@"&"@:>@="$&"/' &&
+  echo "_ACAWK" &&
+  echo "_ACEOF"
+} >conf$$files.sh &&
+. ./conf$$files.sh ||
+  AC_MSG_ERROR([could not make $CONFIG_STATUS])
+rm -f conf$$files.sh
+])dnl
+
+{
+  echo "cat >conf$$subs.awk <<_ACEOF" &&
+  echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
+  echo "_ACEOF"
+} >conf$$subs.sh ||
+  AC_MSG_ERROR([could not make $CONFIG_STATUS])
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
+ac_delim='%!_!# '
+for ac_last_try in false false false false false :; do
+  . ./conf$$subs.sh ||
+    AC_MSG_ERROR([could not make $CONFIG_STATUS])
+
+dnl Do not use grep on conf$$subs.awk, since AIX grep has a line length limit.
+  ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
+  if test $ac_delim_n = $ac_delim_num; then
+    break
+  elif $ac_last_try; then
+    AC_MSG_ERROR([could not make $CONFIG_STATUS])
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+rm -f conf$$subs.sh
+
+dnl Initialize an awk array of substitutions, keyed by variable name.
+dnl
+dnl The initial line contains the variable name VAR, then a `!'.
+dnl Construct `S["VAR"]=' from it.
+dnl The rest of the line, and potentially further lines, contain the
+dnl substituted value; the last of those ends with $ac_delim.  We split
+dnl the output both along those substituted newlines and at intervals of
+dnl length _AC_AWK_LITERAL_LIMIT.  The latter is done to comply with awk
+dnl string literal limitations, the former for simplicity in doing so.
+dnl
+dnl We deal with one input line at a time to avoid sed pattern space
+dnl limitations.  We kill the delimiter $ac_delim before splitting the
+dnl string (otherwise we risk splitting the delimiter).  And we do the
+dnl splitting before the quoting of awk special characters (otherwise we
+dnl risk splitting an escape sequence).
+dnl
+dnl Output as separate string literals, joined with backslash-newline.
+dnl Eliminate the newline after `=' in a second script, for readability.
+dnl
+dnl Notes to the main part of the awk script:
+dnl - the unusual FS value helps prevent running into the limit of 99 fields,
+dnl - we avoid sub/gsub because of the \& quoting issues, see
+dnl   http://www.gnu.org/software/gawk/manual/html_node/Gory-Details.html
+dnl - Writing `$ 0' prevents expansion by both the shell and m4 here.
+dnl
+dnl m4-double-quote most of the scripting for readability.
+[cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
+_ACEOF
+sed -n '
+h
+s/^/S["/; s/!.*/"]=/
+p
+g
+s/^[^!]*!//
+:repl
+t repl
+s/'"$ac_delim"'$//
+t delim
+:nl
+h
+s/\(.\{]_AC_AWK_LITERAL_LIMIT[\}\)..*/\1/
+t more1
+s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
+p
+n
+b repl
+:more1
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{]_AC_AWK_LITERAL_LIMIT[\}//
+t nl
+:delim
+h
+s/\(.\{]_AC_AWK_LITERAL_LIMIT[\}\)..*/\1/
+t more2
+s/["\\]/\\&/g; s/^/"/; s/$/"/
+p
+b
+:more2
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{]_AC_AWK_LITERAL_LIMIT[\}//
+t delim
+' <conf$$subs.awk | sed '
+/^[^""]/{
+  N
+  s/\n//
+}
+' >>$CONFIG_STATUS || ac_write_fail=1
+rm -f conf$$subs.awk
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+_ACAWK
+cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
+  for (key in S) S_is_set[key] = 1
+  FS = ""
+]m4_ifdef([_AC_SUBST_FILES],
+[  \$ac_cs_awk_pipe_init])[
+}
+{
+  line = $ 0
+  nfields = split(line, field, "@")
+  substed = 0
+  len = length(field[1])
+  for (i = 2; i < nfields; i++) {
+    key = field[i]
+    keylen = length(key)
+    if (S_is_set[key]) {
+      value = S[key]
+      line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
+      len += length(value) + length(field[++i])
+      substed = 1
+    } else
+      len += 1 + keylen
+  }
+]m4_ifdef([_AC_SUBST_FILES],
+[[  if (nfields == 3 && !substed) {
+    key = field[2]
+    if (F[key] != "" && line ~ /^[	 ]*@.*@[	 ]*$/) {
+      \$ac_cs_awk_read_file
+      next
+    }
+  }]])[
+  print line
+}
+]dnl end of double-quoted part
+m4_ifdef([_AC_SUBST_FILES],
+[\$ac_cs_awk_pipe_fini])
+_ACAWK
+_ACEOF
+dnl See if CR is the EOL marker.  If not, remove any EOL-related
+dnl ^M bytes and escape any remaining ones.  If so, just use mv.
+dnl In case you're wondering how ^M bytes can make it into subs1.awk,
+dnl [from Ralf Wildenhues] one way is if you have e.g.,
+dnl AC_SUBST([variable_that_contains_cr], ["
+dnl "])
+dnl The original aim was that users should be able to substitute any
+dnl characters they like (except for \0).  And the above is not so
+dnl unlikely if the configure script itself happens to be converted
+dnl to w32 text mode.
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
+  sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
+else
+  cat
+fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
+  || AC_MSG_ERROR([could not setup config files machinery])
+_ACEOF
+
+# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
+# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
+# trailing colons and then remove the whole line if VPATH becomes empty
+# (actually we leave an empty line to preserve line numbers).
+if test "x$srcdir" = x.; then
+  ac_vpsub=['/^[	 ]*VPATH[	 ]*=[	 ]*/{
+h
+s///
+s/^/:/
+s/[	 ]*$/:/
+s/:\$(srcdir):/:/g
+s/:\${srcdir}:/:/g
+s/:@srcdir@:/:/g
+s/^:*//
+s/:*$//
+x
+s/\(=[	 ]*\).*/\1/
+G
+s/\n//
+s/^[^=]*=[	 ]*$//
+}']
+fi
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+fi # test -n "$CONFIG_FILES"
+
+])# _AC_OUTPUT_FILES_PREPARE
+
+# _AC_OUTPUT_FILE_ADJUST_DIR(VAR)
+# -------------------------------
+# Generate the sed snippet needed to output VAR relative to the
+# top-level directory.
+m4_define([_AC_OUTPUT_FILE_ADJUST_DIR],
+[s&@$1@&$ac_$1&;t t[]AC_SUBST_TRACE([$1])])
+
+
+# _AC_OUTPUT_FILE
+# ---------------
+# Do the variable substitutions to create the Makefiles or whatever.
+#
+# This macro is expanded inside a here document.  If the here document is
+# closed, it has to be reopened with
+# "cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1".
+#
+m4_define([_AC_OUTPUT_FILE],
+[
+  #
+  # CONFIG_FILE
+  #
+
+AC_PROVIDE_IFELSE([AC_PROG_INSTALL],
+[  case $INSTALL in
+  [[\\/$]]* | ?:[[\\/]]* ) ac_INSTALL=$INSTALL ;;
+  *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
+  esac
+])dnl
+AC_PROVIDE_IFELSE([AC_PROG_MKDIR_P],
+[  ac_MKDIR_P=$MKDIR_P
+  case $MKDIR_P in
+  [[\\/$]]* | ?:[[\\/]]* ) ;;
+  */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
+  esac
+])dnl
+_ACEOF
+
+m4_ifndef([AC_DATAROOTDIR_CHECKED],
+[cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# If the template does not know about datarootdir, expand it.
+# FIXME: This hack should be removed a few years after 2.60.
+ac_datarootdir_hack=; ac_datarootdir_seen=
+m4_define([_AC_datarootdir_vars],
+	  [datadir, docdir, infodir, localedir, mandir])]dnl
+[m4_define([_AC_datarootdir_subst], [  s&@$][1@&$$][1&g])]dnl
+[ac_sed_dataroot='
+/datarootdir/ {
+  p
+  q
+}
+m4_map_args_sep([/@], [@/p], [
+], _AC_datarootdir_vars)'
+case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
+*datarootdir*) ac_datarootdir_seen=yes;;
+*@[]m4_join([@*|*@], _AC_datarootdir_vars)@*)
+  AC_MSG_WARN([$ac_file_inputs seems to ignore the --datarootdir setting])
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  ac_datarootdir_hack='
+m4_map_args_sep([_AC_datarootdir_subst(], [)], [
+], _AC_datarootdir_vars)
+  s&\\\${datarootdir}&$datarootdir&g' ;;
+esac
+_ACEOF
+])dnl
+
+# Neutralize VPATH when `$srcdir' = `.'.
+# Shell code in configure.ac might set extrasub.
+# FIXME: do we really want to maintain this feature?
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_sed_extra="$ac_vpsub
+$extrasub
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+:t
+[/@[a-zA-Z_][a-zA-Z_0-9]*@/!b]
+dnl configure_input is somewhat special, so we don't call AC_SUBST_TRACE.
+dnl Note if you change the s||| delimiter here, don't forget to adjust
+dnl ac_sed_conf_input accordingly.  Using & is a bad idea if & appears in
+dnl the replacement string.
+s|@configure_input@|$ac_sed_conf_input|;t t
+dnl During the transition period, this is a special case:
+s&@top_builddir@&$ac_top_builddir_sub&;t t[]AC_SUBST_TRACE([top_builddir])
+dnl For this substitution see the witness macro _AC_HAVE_TOP_BUILD_PREFIX above.
+s&@top_build_prefix@&$ac_top_build_prefix&;t t[]AC_SUBST_TRACE([top_build_prefix])
+m4_map_args_sep([$0_ADJUST_DIR(], [)], [
+], [srcdir], [abs_srcdir], [top_srcdir], [abs_top_srcdir],
+   [builddir], [abs_builddir],
+   [abs_top_builddir]AC_PROVIDE_IFELSE([AC_PROG_INSTALL],
+     [, [INSTALL]])AC_PROVIDE_IFELSE([AC_PROG_MKDIR_P], [, [MKDIR_P]]))
+m4_ifndef([AC_DATAROOTDIR_CHECKED], [$ac_datarootdir_hack
+])dnl
+"
+eval sed \"\$ac_sed_extra\" "$ac_file_inputs" m4_defn([_AC_SUBST_CMDS]) \
+  >$ac_tmp/out || AC_MSG_ERROR([could not create $ac_file])
+
+m4_ifndef([AC_DATAROOTDIR_CHECKED],
+[test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
+  { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
+  { ac_out=`sed -n '/^[[	 ]]*datarootdir[[	 ]]*:*=/p' \
+      "$ac_tmp/out"`; test -z "$ac_out"; } &&
+  AC_MSG_WARN([$ac_file contains a reference to the variable `datarootdir'
+which seems to be undefined.  Please make sure it is defined])
+])dnl
+
+  rm -f "$ac_tmp/stdin"
+  case $ac_file in
+  -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
+  *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
+  esac \
+  || AC_MSG_ERROR([could not create $ac_file])
+dnl This would break Makefile dependencies:
+dnl  if diff "$ac_file" "$ac_tmp/out" >/dev/null 2>&1; then
+dnl    echo "$ac_file is unchanged"
+dnl  else
+dnl     rm -f "$ac_file"; mv "$ac_tmp/out" "$ac_file"
+dnl  fi
+])# _AC_OUTPUT_FILE
+
+
+
+
+## ----------------------- ##
+## Configuration headers.  ##
+## ----------------------- ##
+
+
+# AC_CONFIG_HEADERS(HEADERS..., [COMMANDS], [INIT-CMDS])
+# ------------------------------------------------------
+# Specify that the HEADERS are to be created by instantiation of the
+# AC_DEFINEs.
+#
+AC_DEFUN([AC_CONFIG_HEADERS], [_AC_CONFIG_FOOS([HEADERS], $@)])
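+
+# Typical use in configure.ac (illustrative):
+#
+#   AC_CONFIG_HEADERS([config.h])
+#
+# config.status then instantiates config.h from config.h.in, turning each
+# template `#undef MACRO' line into the corresponding `#define' when the
+# macro was AC_DEFINEd, and commenting it out otherwise.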
+
+
+# AC_CONFIG_HEADER(HEADER-TO-CREATE ...)
+# --------------------------------------
+# FIXME: Make it obsolete?
+AC_DEFUN([AC_CONFIG_HEADER],
+[AC_CONFIG_HEADERS([$1])])
+
+
+# _AC_OUTPUT_HEADERS_PREPARE
+# --------------------------
+# Create the awk scripts needed for CONFIG_HEADERS.
+# Support multiline #defines.
+#
+# This macro is expanded inside a here document.  If the here document is
+# closed, it has to be reopened with
+# "cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1".
+#
+m4_define([_AC_OUTPUT_HEADERS_PREPARE],
+[# Set up the scripts for CONFIG_HEADERS section.
+# No need to generate them if there are no CONFIG_HEADERS.
+# This happens for instance with `./config.status Makefile'.
+if test -n "$CONFIG_HEADERS"; then
+dnl This `||' list is finished at the end of _AC_OUTPUT_HEADERS_PREPARE.
+cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
+BEGIN {
+_ACEOF
+
+# Transform confdefs.h into an awk script `defines.awk', embedded as a
+# here-document in config.status, that substitutes the proper values into
+# config.h.in to produce config.h.
+
+# Create a delimiter string that does not exist in confdefs.h, to ease
+# handling of long lines.
+ac_delim='%!_!# '
+for ac_last_try in false false :; do
+  ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
+  if test -z "$ac_tt"; then
+    break
+  elif $ac_last_try; then
+    AC_MSG_ERROR([could not make $CONFIG_HEADERS])
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+
+# For the awk script, D is an array of macro values keyed by name,
+# likewise P contains macro parameters if any.  Preserve backslash
+# newline sequences.
+dnl
+dnl Structure of the sed script that reads confdefs.h:
+dnl rset:  main loop, searches for `#define' lines
+dnl def:   deal with a `#define' line
+dnl bsnl:  deal with a `#define' line that ends with backslash-newline
+dnl cont:  handle a continuation line
+dnl bsnlc: handle a continuation line that ends with backslash-newline
+dnl
+dnl Each sub part escapes the awk special characters and outputs a statement
+dnl inserting the macro value into the array D, keyed by name.  If the macro
+dnl uses parameters, they are added in the array P, keyed by name.
+dnl
+dnl Long values are split into several string literals with help of ac_delim.
+dnl Assume nobody uses macro names of nearly 150 bytes length.
+dnl
+dnl The initial replace for `#define' lines inserts a leading space
+dnl in order to ease later matching; otherwise, output lines may be
+dnl repeatedly matched.
+dnl
+dnl m4-double-quote most of this for [, ], define, and substr:
+[
+ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
+sed -n '
+s/.\{]_AC_AWK_LITERAL_LIMIT[\}/&'"$ac_delim"'/g
+t rset
+:rset
+s/^[	 ]*#[	 ]*define[	 ][	 ]*/ /
+t def
+d
+:def
+s/\\$//
+t bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3"/p
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2"/p
+d
+:bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3\\\\\\n"\\/p
+t cont
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
+t cont
+d
+:cont
+n
+s/.\{]_AC_AWK_LITERAL_LIMIT[\}/&'"$ac_delim"'/g
+t clear
+:clear
+s/\\$//
+t bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/"/p
+d
+:bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
+b cont
+' <confdefs.h | sed '
+s/'"$ac_delim"'/"\\\
+"/g' >>$CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  for (key in D) D_is_set[key] = 1
+  FS = ""
+}
+/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
+  line = \$ 0
+  split(line, arg, " ")
+  if (arg[1] == "#") {
+    defundef = arg[2]
+    mac1 = arg[3]
+  } else {
+    defundef = substr(arg[1], 2)
+    mac1 = arg[2]
+  }
+  split(mac1, mac2, "(") #)
+  macro = mac2[1]
+  prefix = substr(line, 1, index(line, defundef) - 1)
+  if (D_is_set[macro]) {
+    # Preserve the white space surrounding the "#".
+    print prefix "define", macro P[macro] D[macro]
+    next
+  } else {
+    # Replace #undef with comments.  This is necessary, for example,
+    # in the case of _POSIX_SOURCE, which is predefined and required
+    # on some systems where configure will not decide to define it.
+    if (defundef == "undef") {
+      print "/*", prefix defundef, macro, "*/"
+      next
+    }
+  }
+}
+{ print }
+]dnl End of double-quoted section
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+dnl finish `||' list indicating write error:
+  AC_MSG_ERROR([could not setup config headers machinery])
+fi # test -n "$CONFIG_HEADERS"
+
+])# _AC_OUTPUT_HEADERS_PREPARE
+
+
+# _AC_OUTPUT_HEADER
+# -----------------
+#
+# Output the code which instantiates the `config.h' files from their
+# `config.h.in'.
+#
+# This macro is expanded inside a here document.  If the here document is
+# closed, it has to be reopened with
+# "cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1".
+#
+m4_define([_AC_OUTPUT_HEADER],
+[
+  #
+  # CONFIG_HEADER
+  #
+  if test x"$ac_file" != x-; then
+    {
+      AS_ECHO(["/* $configure_input  */"]) \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
+    } >"$ac_tmp/config.h" \
+      || AC_MSG_ERROR([could not create $ac_file])
+    if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
+      AC_MSG_NOTICE([$ac_file is unchanged])
+    else
+      rm -f "$ac_file"
+      mv "$ac_tmp/config.h" "$ac_file" \
+	|| AC_MSG_ERROR([could not create $ac_file])
+    fi
+  else
+    AS_ECHO(["/* $configure_input  */"]) \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
+      || AC_MSG_ERROR([could not create -])
+  fi
+dnl If running for Automake, be ready to perform additional
+dnl commands to set up the timestamp files.
+m4_ifdef([_AC_AM_CONFIG_HEADER_HOOK],
+	 [_AC_AM_CONFIG_HEADER_HOOK(["$ac_file"])
+])dnl
+])# _AC_OUTPUT_HEADER
+
+
+
+## --------------------- ##
+## Configuration links.  ##
+## --------------------- ##
+
+
+# AC_CONFIG_LINKS(DEST:SOURCE..., [COMMANDS], [INIT-CMDS])
+# --------------------------------------------------------
+# Specify that config.status should establish DEST in the build tree
+# as a (symbolic if possible) link to TOP_SRCDIR/SOURCE.
+# Reject DEST=., because it makes it hard for ./config.status
+# to guess the links to establish (`./config.status .').
+#
+AC_DEFUN([AC_CONFIG_LINKS], [_AC_CONFIG_FOOS([LINKS], $@)])
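+
+# For example (hypothetical names):
+#
+#   AC_CONFIG_LINKS([host.h:config/generic.h])
+#
+# makes config.status create `host.h' in the build tree as a symlink to
+# (or, failing that, a hard link to or a copy of) config/generic.h from
+# the source tree.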
+
+
+# AC_LINK_FILES(SOURCE..., DEST...)
+# ---------------------------------
+# Link each of the existing files SOURCE... to the corresponding
+# link name in DEST...
+#
+# Unfortunately we can't provide a very good autoupdate service here,
+# since in `AC_LINK_FILES($from, $to)' it is possible that `$from'
+# and `$to' are actually lists.  It would then be completely wrong to
+# replace it with `AC_CONFIG_LINKS($to:$from)'.  It would be possible in
+# the case of literal values, but because I don't think there is any
+# interest in creating config links with literal values, no special
+# mechanism is implemented to handle them.
+#
+# _AC_LINK_FILES_CNT is used to be robust to multiple calls.
+AU_DEFUN([AC_LINK_FILES],
+[m4_if($#, 2, ,
+       [m4_fatal([$0: incorrect number of arguments])])dnl
+m4_define_default([_AC_LINK_FILES_CNT], 0)dnl
+m4_define([_AC_LINK_FILES_CNT], m4_incr(_AC_LINK_FILES_CNT))dnl
+ac_sources="$1"
+ac_dests="$2"
+while test -n "$ac_sources"; do
+  set $ac_dests; ac_dest=$[1]; shift; ac_dests=$[*]
+  set $ac_sources; ac_source=$[1]; shift; ac_sources=$[*]
+  [ac_config_links_]_AC_LINK_FILES_CNT="$[ac_config_links_]_AC_LINK_FILES_CNT $ac_dest:$ac_source"
+done
+AC_CONFIG_LINKS($[ac_config_links_]_AC_LINK_FILES_CNT)dnl
+],
+[It is technically impossible to `autoupdate' cleanly from AC_LINK_FILES
+to AC_CONFIG_LINKS.  `autoupdate' provides a functional but inelegant
+update; you should probably tune the result yourself.])# AC_LINK_FILES
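+
+# In practice (illustrative names): autoupdate rewrites
+#
+#   AC_LINK_FILES([config/generic.h], [host.h])
+#
+# into the shell loop above, whose net effect is the same as
+#
+#   AC_CONFIG_LINKS([host.h:config/generic.h])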
+
+
+# _AC_OUTPUT_LINK
+# ---------------
+# This macro is expanded inside a here document.  If the here document is
+# closed, it has to be reopened with
+# "cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1".
+m4_define([_AC_OUTPUT_LINK],
+[
+  #
+  # CONFIG_LINK
+  #
+
+  if test "$ac_source" = "$ac_file" && test "$srcdir" = '.'; then
+    :
+  else
+    # Prefer the file from the source tree if names are identical.
+    if test "$ac_source" = "$ac_file" || test ! -r "$ac_source"; then
+      ac_source=$srcdir/$ac_source
+    fi
+
+    AC_MSG_NOTICE([linking $ac_source to $ac_file])
+
+    if test ! -r "$ac_source"; then
+      AC_MSG_ERROR([$ac_source: file not found])
+    fi
+    rm -f "$ac_file"
+
+    # Try a relative symlink, then a hard link, then a copy.
+    case $ac_source in
+    [[\\/$]]* | ?:[[\\/]]* ) ac_rel_source=$ac_source ;;
+	*) ac_rel_source=$ac_top_build_prefix$ac_source ;;
+    esac
+    ln -s "$ac_rel_source" "$ac_file" 2>/dev/null ||
+      ln "$ac_source" "$ac_file" 2>/dev/null ||
+      cp -p "$ac_source" "$ac_file" ||
+      AC_MSG_ERROR([cannot link or copy $ac_source to $ac_file])
+  fi
+])# _AC_OUTPUT_LINK
+
+
+
+## ------------------------ ##
+## Configuration commands.  ##
+## ------------------------ ##
+
+
+# AC_CONFIG_COMMANDS(NAME...,[COMMANDS], [INIT-CMDS])
+# ---------------------------------------------------
+#
+# Specify additional commands to be run by config.status.  These
+# commands must be associated with a NAME, which should be thought of
+# as the name of a file the COMMANDS create.
+#
+AC_DEFUN([AC_CONFIG_COMMANDS], [_AC_CONFIG_FOOS([COMMANDS], $@)])
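+
+# A typical use (hypothetical tag and commands):
+#
+#   AC_CONFIG_COMMANDS([stamp-h],
+#                      [echo timestamp > stamp-h])
+#
+# config.status runs the commands when it handles the `stamp-h' tag, and
+# `./config.status stamp-h' reruns just that tag.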
+
+
+# AC_OUTPUT_COMMANDS(EXTRA-CMDS, INIT-CMDS)
+# -----------------------------------------
+#
+# Add additional commands for AC_OUTPUT to put into config.status.
+#
+# This macro is an obsolete version of AC_CONFIG_COMMANDS.  The only
+# difficulty in mapping AC_OUTPUT_COMMANDS to AC_CONFIG_COMMANDS is
+# to give a unique key.  The scheme we have chosen is `default-1',
+# `default-2' etc. for each call.
+#
+# Unfortunately this scheme is fragile: bad things might happen
+# if you update an included file and configure.ac: you might have
+# clashes :(  On the other hand, I'd like to avoid weird keys (e.g.,
+# depending upon __file__ or the pid).
+AU_DEFUN([AC_OUTPUT_COMMANDS],
+[m4_define_default([_AC_OUTPUT_COMMANDS_CNT], 0)dnl
+m4_define([_AC_OUTPUT_COMMANDS_CNT], m4_incr(_AC_OUTPUT_COMMANDS_CNT))dnl
+dnl Double quoted since that was the case in the original macro.
+AC_CONFIG_COMMANDS([default-]_AC_OUTPUT_COMMANDS_CNT, [[$1]], [[$2]])dnl
+])
+
+
+# _AC_OUTPUT_COMMAND
+# ------------------
+# This macro is expanded inside a here document.  If the here document is
+# closed, it has to be reopened with
+# "cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1".
+m4_define([_AC_OUTPUT_COMMAND],
+[  AC_MSG_NOTICE([executing $ac_file commands])
+])
+
+
+
+## -------------------------------------- ##
+## Pre- and post-config.status commands.  ##
+## -------------------------------------- ##
+
+
+# AC_CONFIG_COMMANDS_PRE(CMDS)
+# ----------------------------
+# Commands to run right before config.status is created. Accumulates.
+AC_DEFUN([AC_CONFIG_COMMANDS_PRE],
+[m4_append([AC_OUTPUT_COMMANDS_PRE], [$1
+])])
+
+
+# AC_OUTPUT_COMMANDS_PRE
+# ----------------------
+# A *variable* in which we append all the actions that must be
+# performed before *creating* config.status.  For a start, clean
+# up all the LIBOBJ mess.
+m4_define([AC_OUTPUT_COMMANDS_PRE],
+[_AC_LIBOBJS_NORMALIZE
+])
+
+
+# AC_CONFIG_COMMANDS_POST(CMDS)
+# -----------------------------
+# Commands to run after config.status was created.  Accumulates.
+AC_DEFUN([AC_CONFIG_COMMANDS_POST],
+[m4_append([AC_OUTPUT_COMMANDS_POST], [$1
+])])
+
+# Initialize.
+m4_define([AC_OUTPUT_COMMANDS_POST])
+
+
+
+## ----------------------- ##
+## Configuration subdirs.  ##
+## ----------------------- ##
+
+
+# AC_CONFIG_SUBDIRS(DIR ...)
+# --------------------------
+# We define two variables:
+# - _AC_LIST_SUBDIRS
+#   A statically built list, should contain *all* the arguments of
+#   AC_CONFIG_SUBDIRS.  The final value is assigned to ac_subdirs_all in
+#   the `default' section, and used for --help=recursive.
+#   It makes no sense for arguments which are sh variables.
+# - subdirs
+#   Shell variable built at runtime, so some of these dirs might not be
+#   included, if for instance the user refused a part of the tree.
+#   This is used in _AC_OUTPUT_SUBDIRS.
+AC_DEFUN([AC_CONFIG_SUBDIRS],
+[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])]dnl
+[AC_REQUIRE([AC_DISABLE_OPTION_CHECKING])]dnl
+[m4_map_args_w([$1], [_AC_CONFIG_UNIQUE([SUBDIRS],
+  _AC_CONFIG_COMPUTE_DEST(], [))])]dnl
+[m4_append([_AC_LIST_SUBDIRS], [$1], [
+])]dnl
+[AS_LITERAL_IF([$1], [],
+	       [AC_DIAGNOSE([syntax], [$0: you should use literals])])]dnl
+[AC_SUBST([subdirs], ["$subdirs m4_normalize([$1])"])])
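+
+# Typical use (illustrative directory name):
+#
+#   AC_CONFIG_SUBDIRS([libltdl])
+#
+# After config.status has run, _AC_OUTPUT_SUBDIRS below re-invokes
+# configure in each such directory, forwarding most of the original
+# command-line arguments.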
+
+
+# _AC_OUTPUT_SUBDIRS
+# ------------------
+# This is a subroutine of AC_OUTPUT, but it does not go into
+# config.status; rather, it is called after running config.status.
+m4_define([_AC_OUTPUT_SUBDIRS],
+[
+#
+# CONFIG_SUBDIRS section.
+#
+if test "$no_recursion" != yes; then
+
+  # Remove --cache-file, --srcdir, and --disable-option-checking arguments
+  # so they do not pile up.
+  ac_sub_configure_args=
+  ac_prev=
+  eval "set x $ac_configure_args"
+  shift
+  for ac_arg
+  do
+    if test -n "$ac_prev"; then
+      ac_prev=
+      continue
+    fi
+    case $ac_arg in
+    -cache-file | --cache-file | --cache-fil | --cache-fi \
+    | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
+      ac_prev=cache_file ;;
+    -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
+    | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* \
+    | --c=*)
+      ;;
+    --config-cache | -C)
+      ;;
+    -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
+      ac_prev=srcdir ;;
+    -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
+      ;;
+    -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
+      ac_prev=prefix ;;
+    -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
+      ;;
+    --disable-option-checking)
+      ;;
+    *)
+      case $ac_arg in
+      *\'*) ac_arg=`AS_ECHO(["$ac_arg"]) | sed "s/'/'\\\\\\\\''/g"` ;;
+      esac
+      AS_VAR_APPEND([ac_sub_configure_args], [" '$ac_arg'"]) ;;
+    esac
+  done
+
+  # Always prepend --prefix to ensure using the same prefix
+  # in subdir configurations.
+  ac_arg="--prefix=$prefix"
+  case $ac_arg in
+  *\'*) ac_arg=`AS_ECHO(["$ac_arg"]) | sed "s/'/'\\\\\\\\''/g"` ;;
+  esac
+  ac_sub_configure_args="'$ac_arg' $ac_sub_configure_args"
+
+  # Pass --silent
+  if test "$silent" = yes; then
+    ac_sub_configure_args="--silent $ac_sub_configure_args"
+  fi
+
+  # Always prepend --disable-option-checking to silence warnings, since
+  # different subdirs can have different --enable and --with options.
+  ac_sub_configure_args="--disable-option-checking $ac_sub_configure_args"
+
+  ac_popdir=`pwd`
+  for ac_dir in : $subdirs; do test "x$ac_dir" = x: && continue
+
+    # Do not complain, so a configure script can configure whichever
+    # parts of a large source tree are present.
+    test -d "$srcdir/$ac_dir" || continue
+
+    ac_msg="=== configuring in $ac_dir (`pwd`/$ac_dir)"
+    _AS_ECHO_LOG([$ac_msg])
+    _AS_ECHO([$ac_msg])
+    AS_MKDIR_P(["$ac_dir"])
+    _AC_SRCDIRS(["$ac_dir"])
+
+    cd "$ac_dir"
+
+    # Prefer configure.gnu, then configure, then a Cygnus-style configure.
+    if test -f "$ac_srcdir/configure.gnu"; then
+      ac_sub_configure=$ac_srcdir/configure.gnu
+    elif test -f "$ac_srcdir/configure"; then
+      ac_sub_configure=$ac_srcdir/configure
+    elif test -f "$ac_srcdir/configure.in"; then
+      # This should be Cygnus configure.
+      ac_sub_configure=$ac_aux_dir/configure
+    else
+      AC_MSG_WARN([no configuration information is in $ac_dir])
+      ac_sub_configure=
+    fi
+
+    # The recursion is here.
+    if test -n "$ac_sub_configure"; then
+      # Make the cache file name correct relative to the subdirectory.
+      case $cache_file in
+      [[\\/]]* | ?:[[\\/]]* ) ac_sub_cache_file=$cache_file ;;
+      *) # Relative name.
+	ac_sub_cache_file=$ac_top_build_prefix$cache_file ;;
+      esac
+
+      AC_MSG_NOTICE([running $SHELL $ac_sub_configure $ac_sub_configure_args --cache-file=$ac_sub_cache_file --srcdir=$ac_srcdir])
+      # The eval makes quoting arguments work.
+      eval "\$SHELL \"\$ac_sub_configure\" $ac_sub_configure_args \
+	   --cache-file=\"\$ac_sub_cache_file\" --srcdir=\"\$ac_srcdir\"" ||
+	AC_MSG_ERROR([$ac_sub_configure failed for $ac_dir])
+    fi
+
+    cd "$ac_popdir"
+  done
+fi
+])# _AC_OUTPUT_SUBDIRS
+
+
+
+
+## -------------------------- ##
+## Outputting config.status.  ##
+## -------------------------- ##
+
+
+# AU::AC_OUTPUT([CONFIG_FILES...], [EXTRA-CMDS], [INIT-CMDS])
+# -----------------------------------------------------------
+#
+# If there are arguments given to AC_OUTPUT, dispatch them to the
+# proper modern macros.
+AU_DEFUN([AC_OUTPUT],
+[m4_ifvaln([$1],
+	   [AC_CONFIG_FILES([$1])])dnl
+m4_ifvaln([$2$3],
+	  [AC_CONFIG_COMMANDS(default, [$2], [$3])])dnl
+[AC_OUTPUT]])
+
+
+# AC_OUTPUT([CONFIG_FILES...], [EXTRA-CMDS], [INIT-CMDS])
+# -------------------------------------------------------
+# The big finish.
+# Produce config.status, config.h, and links; and configure subdirs.
+#
+m4_define([AC_OUTPUT],
+[dnl Dispatch the extra arguments to their native macros.
+m4_ifvaln([$1],
+	  [AC_CONFIG_FILES([$1])])dnl
+m4_ifvaln([$2$3],
+	  [AC_CONFIG_COMMANDS(default, [$2], [$3])])dnl
+m4_ifval([$1$2$3],
+	 [AC_DIAGNOSE([obsolete],
+		      [$0 should be used without arguments.
+You should run autoupdate.])])dnl
+AC_CACHE_SAVE
+
+test "x$prefix" = xNONE && prefix=$ac_default_prefix
+# Let make expand exec_prefix.
+test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
+
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)], [DEFS=-DHAVE_CONFIG_H], [AC_OUTPUT_MAKE_DEFS()])
+
+dnl Commands to run before creating config.status.
+AC_OUTPUT_COMMANDS_PRE()dnl
+
+: "${CONFIG_STATUS=./config.status}"
+ac_write_fail=0
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files $CONFIG_STATUS"
+_AC_OUTPUT_CONFIG_STATUS()dnl
+ac_clean_files=$ac_clean_files_save
+
+test $ac_write_fail = 0 ||
+  AC_MSG_ERROR([write failure creating $CONFIG_STATUS])
+
+dnl Commands to run after config.status was created
+AC_OUTPUT_COMMANDS_POST()dnl
+
+# configure is writing to config.log, and then calls config.status.
+# config.status does its own redirection, appending to config.log.
+# Unfortunately, on DOS this fails, as config.log is still kept open
+# by configure, so config.status won't be able to write to it; its
+# output is simply discarded.  So we exec the FD to /dev/null,
+# effectively closing config.log, so it can be properly (re)opened and
+# appended to by config.status.  When coming back to configure, we
+# need to make the FD available again.
+if test "$no_create" != yes; then
+  ac_cs_success=:
+  ac_config_status_args=
+  test "$silent" = yes &&
+    ac_config_status_args="$ac_config_status_args --quiet"
+  exec AS_MESSAGE_LOG_FD>/dev/null
+  $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
+  exec AS_MESSAGE_LOG_FD>>config.log
+  # Use ||, not &&, to avoid exiting from the if with $? = 1, which
+  # would make configure fail if this is the last instruction.
+  $ac_cs_success || AS_EXIT([1])
+fi
+dnl config.status should not do recursion.
+AC_PROVIDE_IFELSE([AC_CONFIG_SUBDIRS], [_AC_OUTPUT_SUBDIRS()])dnl
+if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
+  AC_MSG_WARN([unrecognized options: $ac_unrecognized_opts])
+fi
+])# AC_OUTPUT
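+
+# The modern convention (sketch, hypothetical file names) is to call it
+# with no arguments at the end of configure.ac:
+#
+#   AC_CONFIG_FILES([Makefile])
+#   AC_CONFIG_HEADERS([config.h])
+#   AC_OUTPUT
+#
+# Passing arguments still works, but triggers the `obsolete' diagnostic
+# above.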
+
+
+# _AC_OUTPUT_CONFIG_STATUS
+# ------------------------
+# Produce config.status.  Called by AC_OUTPUT.
+# Pay special attention not to make the here-documents too long: some old
+# shells die.  Unfortunately the limit is not known precisely...
+m4_define([_AC_OUTPUT_CONFIG_STATUS],
+[AC_MSG_NOTICE([creating $CONFIG_STATUS])
+dnl AS_MESSAGE_LOG_FD is not available yet:
+m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[AS_INIT_GENERATED([$CONFIG_STATUS],
+[# Run this file to recreate the current configuration.
+# Compiler output produced by configure, useful for debugging
+# configure, is in config.log if it exists.
+
+debug=false
+ac_cs_recheck=false
+ac_cs_silent=false
+]) || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+[#] Save the log message, to keep $[0] and so on meaningful, and to
+# report actual input values of CONFIG_FILES etc. instead of their
+# values after options handling.
+ac_log="
+This file was extended by m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])dnl
+$as_me[]m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]), which was
+generated by m4_PACKAGE_STRING.  Invocation command line was
+
+  CONFIG_FILES    = $CONFIG_FILES
+  CONFIG_HEADERS  = $CONFIG_HEADERS
+  CONFIG_LINKS    = $CONFIG_LINKS
+  CONFIG_COMMANDS = $CONFIG_COMMANDS
+  $ $[0] $[@]
+
+on `(hostname || uname -n) 2>/dev/null | sed 1q`
+"
+
+_ACEOF
+
+dnl remove any newlines from these variables.
+m4_ifdef([_AC_SEEN_CONFIG(FILES)],
+[case $ac_config_files in *"
+"*) set x $ac_config_files; shift; ac_config_files=$[*];;
+esac
+])
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)],
+[case $ac_config_headers in *"
+"*) set x $ac_config_headers; shift; ac_config_headers=$[*];;
+esac
+])
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+# Files that config.status was made for.
+m4_ifdef([_AC_SEEN_CONFIG(FILES)],
+[config_files="$ac_config_files"
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)],
+[config_headers="$ac_config_headers"
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(LINKS)],
+[config_links="$ac_config_links"
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(COMMANDS)],
+[config_commands="$ac_config_commands"
+])dnl
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+ac_cs_usage="\
+\`$as_me' instantiates files and other configuration actions
+from templates according to the current configuration.  Unless the files
+and actions are specified as TAGs, all are instantiated by default.
+
+Usage: $[0] [[OPTION]]... [[TAG]]...
+
+  -h, --help       print this help, then exit
+  -V, --version    print version number and configuration settings, then exit
+      --config     print configuration, then exit
+  -q, --quiet, --silent
+[]                   do not print progress messages
+  -d, --debug      don't remove temporary files
+      --recheck    update $as_me by reconfiguring in the same conditions
+m4_ifdef([_AC_SEEN_CONFIG(FILES)],
+  [AS_HELP_STRING([[    --file=FILE[:TEMPLATE]]],
+    [instantiate the configuration file FILE], [                   ])
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)],
+  [AS_HELP_STRING([[    --header=FILE[:TEMPLATE]]],
+    [instantiate the configuration header FILE], [                   ])
+])dnl
+
+m4_ifdef([_AC_SEEN_CONFIG(FILES)],
+[Configuration files:
+$config_files
+
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)],
+[Configuration headers:
+$config_headers
+
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(LINKS)],
+[Configuration links:
+$config_links
+
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(COMMANDS)],
+[Configuration commands:
+$config_commands
+
+])dnl
+Report bugs to m4_ifset([AC_PACKAGE_BUGREPORT], [<AC_PACKAGE_BUGREPORT>],
+  [the package provider]).dnl
+m4_ifdef([AC_PACKAGE_NAME], [m4_ifset([AC_PACKAGE_URL], [
+AC_PACKAGE_NAME home page: <AC_PACKAGE_URL>.])dnl
+m4_if(m4_index(m4_defn([AC_PACKAGE_NAME]), [GNU ]), [0], [
+General help using GNU software: <http://www.gnu.org/gethelp/>.])])"
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_cs_config="`AS_ECHO(["$ac_configure_args"]) | sed 's/^ //; s/[[\\""\`\$]]/\\\\&/g'`"
+ac_cs_version="\\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.status[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING,
+  with options \\"\$ac_cs_config\\"
+
+Copyright (C) m4_PACKAGE_YEAR Free Software Foundation, Inc.
+This config.status script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+ac_pwd='$ac_pwd'
+srcdir='$srcdir'
+AC_PROVIDE_IFELSE([AC_PROG_INSTALL],
+[INSTALL='$INSTALL'
+])dnl
+AC_PROVIDE_IFELSE([AC_PROG_MKDIR_P],
+[MKDIR_P='$MKDIR_P'
+])dnl
+AC_PROVIDE_IFELSE([AC_PROG_AWK],
+[AWK='$AWK'
+])dnl
+test -n "\$AWK" || AWK=awk
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# The default lists apply if the user does not specify any file.
+ac_need_defaults=:
+while test $[#] != 0
+do
+  case $[1] in
+  --*=?*)
+    ac_option=`expr "X$[1]" : 'X\([[^=]]*\)='`
+    ac_optarg=`expr "X$[1]" : 'X[[^=]]*=\(.*\)'`
+    ac_shift=:
+    ;;
+  --*=)
+    ac_option=`expr "X$[1]" : 'X\([[^=]]*\)='`
+    ac_optarg=
+    ac_shift=:
+    ;;
+  *)
+    ac_option=$[1]
+    ac_optarg=$[2]
+    ac_shift=shift
+    ;;
+  esac
+
+  case $ac_option in
+  # Handling of the options.
+  -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
+    ac_cs_recheck=: ;;
+  --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
+    AS_ECHO(["$ac_cs_version"]); exit ;;
+  --config | --confi | --conf | --con | --co | --c )
+    AS_ECHO(["$ac_cs_config"]); exit ;;
+  --debug | --debu | --deb | --de | --d | -d )
+    debug=: ;;
+m4_ifdef([_AC_SEEN_CONFIG(FILES)], [dnl
+  --file | --fil | --fi | --f )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`AS_ECHO(["$ac_optarg"]) | sed "s/'/'\\\\\\\\''/g"` ;;
+    '') AC_MSG_ERROR([missing file argument]) ;;
+    esac
+    AS_VAR_APPEND([CONFIG_FILES], [" '$ac_optarg'"])
+    ac_need_defaults=false;;
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)], [dnl
+  --header | --heade | --head | --hea )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`AS_ECHO(["$ac_optarg"]) | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    AS_VAR_APPEND([CONFIG_HEADERS], [" '$ac_optarg'"])
+    ac_need_defaults=false;;
+  --he | --h)
+    # Conflict between --help and --header
+    AC_MSG_ERROR([ambiguous option: `$[1]'
+Try `$[0] --help' for more information.]);;
+], [  --he | --h |])dnl
+  --help | --hel | -h )
+    AS_ECHO(["$ac_cs_usage"]); exit ;;
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil | --si | --s)
+    ac_cs_silent=: ;;
+
+  # This is an error.
+  -*) AC_MSG_ERROR([unrecognized option: `$[1]'
+Try `$[0] --help' for more information.]) ;;
+
+  *) AS_VAR_APPEND([ac_config_targets], [" $[1]"])
+     ac_need_defaults=false ;;
+
+  esac
+  shift
+done
+
+ac_configure_extra_args=
+
+if $ac_cs_silent; then
+  exec AS_MESSAGE_FD>/dev/null
+  ac_configure_extra_args="$ac_configure_extra_args --silent"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+dnl Check this before opening the log, to avoid a bug on MinGW,
+dnl which prohibits the recursive instance from truncating an open log.
+if \$ac_cs_recheck; then
+  set X '$SHELL' '$[0]' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
+  shift
+  \AS_ECHO(["running CONFIG_SHELL=$SHELL \$[*]"]) >&AS_MESSAGE_FD
+  CONFIG_SHELL='$SHELL'
+  export CONFIG_SHELL
+  exec "\$[@]"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+dnl Open the log:
+m4_popdef([AS_MESSAGE_LOG_FD])dnl
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+  echo
+  AS_BOX([Running $as_me.])
+  AS_ECHO(["$ac_log"])
+} >&AS_MESSAGE_LOG_FD
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+m4_ifdef([_AC_OUTPUT_COMMANDS_INIT],
+[#
+# INIT-COMMANDS
+#
+_AC_OUTPUT_COMMANDS_INIT
+])dnl
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+
+# Handling of arguments.
+for ac_config_target in $ac_config_targets
+do
+  case $ac_config_target in
+m4_ifdef([_AC_LIST_TAGS], [_AC_LIST_TAGS])
+  *) AC_MSG_ERROR([invalid argument: `$ac_config_target']);;
+  esac
+done
+
+m4_ifdef([_AC_SEEN_CONFIG(ANY)], [_AC_OUTPUT_MAIN_LOOP])[]dnl
+
+AS_EXIT(0)
+_ACEOF
+])# _AC_OUTPUT_CONFIG_STATUS
+
+# _AC_OUTPUT_MAIN_LOOP
+# --------------------
+# The main loop in $CONFIG_STATUS.
+#
+# This macro is expanded inside a here document.  If the here document is
+# closed, it has to be reopened with
+# "cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1".
+#
+AC_DEFUN([_AC_OUTPUT_MAIN_LOOP],
+[
+# If the user did not use the arguments to specify the items to instantiate,
+# then the envvar interface is used.  Set only those that are not.
+# We use the long form for the default assignment because of an extremely
+# bizarre bug on SunOS 4.1.3.
+if $ac_need_defaults; then
+m4_ifdef([_AC_SEEN_CONFIG(FILES)],
+[  test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)],
+[  test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(LINKS)],
+[  test "${CONFIG_LINKS+set}" = set || CONFIG_LINKS=$config_links
+])dnl
+m4_ifdef([_AC_SEEN_CONFIG(COMMANDS)],
+[  test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
+])dnl
+fi
+
+# Have a temporary directory for convenience.  Make it in the build tree
+# simply because there is no reason against having it here, and in addition,
+# creating and moving files from /tmp can sometimes cause problems.
+# Hook for its removal unless debugging.
+# Note that there is a small window in which the directory will not be cleaned:
+# after its creation but before its name has been assigned to `$tmp'.
+dnl For historical reasons, AS_TMPDIR must continue to place the results
+dnl in $tmp; but we swap to the namespace-clean $ac_tmp to avoid issues
+dnl with any CONFIG_COMMANDS playing with the common variable name $tmp.
+$debug ||
+{
+  tmp= ac_tmp=
+  trap 'exit_status=$?
+  : "${ac_tmp:=$tmp}"
+  { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
+' 0
+  trap 'AS_EXIT([1])' 1 2 13 15
+}
+dnl The comment above AS_TMPDIR says at most 4 chars are allowed.
+AS_TMPDIR([conf], [.])
+ac_tmp=$tmp
+
+m4_ifdef([_AC_SEEN_CONFIG(FILES)], [_AC_OUTPUT_FILES_PREPARE])[]dnl
+m4_ifdef([_AC_SEEN_CONFIG(HEADERS)], [_AC_OUTPUT_HEADERS_PREPARE])[]dnl
+
+eval set X "dnl
+  m4_ifdef([_AC_SEEN_CONFIG(FILES)],    [:F $CONFIG_FILES])[]dnl
+  m4_ifdef([_AC_SEEN_CONFIG(HEADERS)],  [:H $CONFIG_HEADERS])[]dnl
+  m4_ifdef([_AC_SEEN_CONFIG(LINKS)],    [:L $CONFIG_LINKS])[]dnl
+  m4_ifdef([_AC_SEEN_CONFIG(COMMANDS)], [:C $CONFIG_COMMANDS])[]dnl
+"
+shift
+for ac_tag
+do
+  case $ac_tag in
+  :[[FHLC]]) ac_mode=$ac_tag; continue;;
+  esac
+  case $ac_mode$ac_tag in
+  :[[FHL]]*:*);;
+  :L* | :C*:*) AC_MSG_ERROR([invalid tag `$ac_tag']);;
+  :[[FH]]-) ac_tag=-:-;;
+  :[[FH]]*) ac_tag=$ac_tag:$ac_tag.in;;
+  esac
+  ac_save_IFS=$IFS
+  IFS=:
+  set x $ac_tag
+  IFS=$ac_save_IFS
+  shift
+  ac_file=$[1]
+  shift
+
+  case $ac_mode in
+  :L) ac_source=$[1];;
+  :[[FH]])
+    ac_file_inputs=
+    for ac_f
+    do
+      case $ac_f in
+      -) ac_f="$ac_tmp/stdin";;
+      *) # Look for the file first in the build tree, then in the source tree
+	 # (if the path is not absolute).  The absolute path cannot be DOS-style,
+	 # because $ac_f cannot contain `:'.
+	 test -f "$ac_f" ||
+	   case $ac_f in
+	   [[\\/$]]*) false;;
+	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
+	   esac ||
+	   AC_MSG_ERROR([cannot find input file: `$ac_f'], [1]);;
+      esac
+      case $ac_f in *\'*) ac_f=`AS_ECHO(["$ac_f"]) | sed "s/'/'\\\\\\\\''/g"`;; esac
+      AS_VAR_APPEND([ac_file_inputs], [" '$ac_f'"])
+    done
+
+    # Let's still pretend it is `configure' which instantiates (i.e., don't
+    # use $as_me); people would be surprised to read:
+    #    /* config.h.  Generated by config.status.  */
+    configure_input='Generated from '`
+	  AS_ECHO(["$[*]"]) | sed ['s|^[^:]*/||;s|:[^:]*/|, |g']
+	`' by configure.'
+    if test x"$ac_file" != x-; then
+      configure_input="$ac_file.  $configure_input"
+      AC_MSG_NOTICE([creating $ac_file])
+    fi
+    # Neutralize special characters interpreted by sed in replacement strings.
+    case $configure_input in #(
+    *\&* | *\|* | *\\* )
+       ac_sed_conf_input=`AS_ECHO(["$configure_input"]) |
+       sed 's/[[\\\\&|]]/\\\\&/g'`;; #(
+    *) ac_sed_conf_input=$configure_input;;
+    esac
+
+    case $ac_tag in
+    *:-:* | *:-) cat >"$ac_tmp/stdin" \
+      || AC_MSG_ERROR([could not create $ac_file]) ;;
+    esac
+    ;;
+  esac
+
+  ac_dir=`AS_DIRNAME(["$ac_file"])`
+  AS_MKDIR_P(["$ac_dir"])
+  _AC_SRCDIRS(["$ac_dir"])
+
+  case $ac_mode in
+  m4_ifdef([_AC_SEEN_CONFIG(FILES)],    [:F)_AC_OUTPUT_FILE ;;])
+  m4_ifdef([_AC_SEEN_CONFIG(HEADERS)],  [:H)_AC_OUTPUT_HEADER ;;])
+  m4_ifdef([_AC_SEEN_CONFIG(LINKS)],    [:L)_AC_OUTPUT_LINK ;;])
+  m4_ifdef([_AC_SEEN_CONFIG(COMMANDS)], [:C)_AC_OUTPUT_COMMAND ;;])
+  esac
+
+dnl Some shells don't like empty case/esac
+m4_ifdef([_AC_LIST_TAG_COMMANDS], [
+  case $ac_file$ac_mode in
+_AC_LIST_TAG_COMMANDS
+  esac
+])dnl
+done # for ac_tag
+
+])# _AC_OUTPUT_MAIN_LOOP
+
+
+# AC_OUTPUT_MAKE_DEFS
+# -------------------
+# Set the DEFS variable to the -D options determined earlier.
+# This is a subroutine of AC_OUTPUT.
+# It is called inside configure, outside of config.status.
+m4_define([AC_OUTPUT_MAKE_DEFS],
+[[# Transform confdefs.h into DEFS.
+# Protect against shell expansion while executing Makefile rules.
+# Protect against Makefile macro expansion.
+#
+# If the first sed substitution is executed (which looks for macros that
+# take arguments), then branch to the quote section.  Otherwise,
+# look for a macro that doesn't take arguments.
+ac_script='
+:mline
+/\\$/{
+ N
+ s,\\\n,,
+ b mline
+}
+t clear
+:clear
+s/^[	 ]*#[	 ]*define[	 ][	 ]*\([^	 (][^	 (]*([^)]*)\)[	 ]*\(.*\)/-D\1=\2/g
+t quote
+s/^[	 ]*#[	 ]*define[	 ][	 ]*\([^	 ][^	 ]*\)[	 ]*\(.*\)/-D\1=\2/g
+t quote
+b any
+:quote
+s/[	 `~#$^&*(){}\\|;'\''"<>?]/\\&/g
+s/\[/\\&/g
+s/\]/\\&/g
+s/\$/$$/g
+H
+:any
+${
+	g
+	s/^\n//
+	s/\n/ /g
+	p
+}
+'
+DEFS=`sed -n "$ac_script" confdefs.h`
+]])# AC_OUTPUT_MAKE_DEFS
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/types.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/types.m4
new file mode 100644
index 0000000..9f482da
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoconf/types.m4
@@ -0,0 +1,1069 @@
+# This file is part of Autoconf.			-*- Autoconf -*-
+# Type related macros: existence, sizeof, and structure members.
+#
+# Copyright (C) 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008, 2009,
+# 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie, with help from
+# Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor,
+# Roland McGrath, Noah Friedman, david d zuhn, and many others.
+
+
+## ---------------- ##
+## Type existence.  ##
+## ---------------- ##
+
+# ---------------- #
+# General checks.  #
+# ---------------- #
+
+# Up to and including 2.13, Autoconf provided the macro
+#
+#    AC_CHECK_TYPE(TYPE, DEFAULT)
+#
+# Since then, it has provided another version that fits better with the other
+# AC_CHECK_ families:
+#
+#    AC_CHECK_TYPE(TYPE,
+#		   [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		   [INCLUDES = DEFAULT-INCLUDES])
+#
+# In order to provide backward compatibility, the new scheme is
+# implemented as _AC_CHECK_TYPE_NEW, the old scheme as _AC_CHECK_TYPE_OLD,
+# and AC_CHECK_TYPE branches to one or the other, depending upon its
+# arguments.
+
+
+# _AC_CHECK_TYPE_NEW_BODY
+# -----------------------
+# Shell function body for _AC_CHECK_TYPE_NEW.  This macro implements the
+# former task of AC_CHECK_TYPE, with one big difference though: AC_CHECK_TYPE
+# used to grep in the headers, which led to many problems before the
+# extended regular expression was correct and free of false positives.
+# It also turned out that there are portability issues with egrep...
+#
+# The most obvious way to check for a TYPE is just to compile a variable
+# definition:
+#
+#	  TYPE my_var;
+#
+# (TYPE being the second parameter to the shell function, hence $[]2 in m4).
+# Unfortunately this does not work for const qualified types in C++, where
+# you need an initializer.  So you think of
+#
+#	  TYPE my_var = (TYPE) 0;
+#
+# Unfortunately, again, this is not valid for some C++ classes.
+#
+# Then you look for another scheme.  For instance you think of declaring
+# a function which uses a parameter of type TYPE:
+#
+#	  int foo (TYPE param);
+#
+# but of course you soon realize this does not work with K&R
+# compilers.  And there is no way you want to write
+#
+#	  int foo (param)
+#	    TYPE param
+#	  { ; }
+#
+# since this time it is C++ that is not happy.
+#
+# Don't even think of the return type of a function, since K&R cries
+# there too.  So you start thinking of declaring a *pointer* to this TYPE:
+#
+#	  TYPE *p;
+#
+# but you know fairly well that this is legal in C for aggregates which
+# are unknown (TYPE = struct does-not-exist).
+#
+# Then you think of using sizeof to make sure the TYPE is really
+# defined:
+#
+#	  sizeof (TYPE);
+#
+# But this succeeds if TYPE is a variable: you get the size of the
+# variable's type!!!
+#
+# So, to filter out the last possibility, you try this too:
+#
+#	  sizeof ((TYPE));
+#
+# This fails if TYPE is a type, but succeeds if TYPE is actually a variable.
+#
+# Also note that we use
+#
+#	  if (sizeof (TYPE))
+#
+# to `read' sizeof (to avoid warnings), while not depending on its type
+# (not necessarily size_t etc.).
+#
+# C++ disallows defining types inside `sizeof ()', but that's OK,
+# since we don't want to consider unnamed structs to be types for C++,
+# precisely because they don't work in cases like that.
+m4_define([_AC_CHECK_TYPE_NEW_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AC_CACHE_CHECK([for $[]2], [$[]3],
+  [AS_VAR_SET([$[]3], [no])
+  AC_COMPILE_IFELSE(
+    [AC_LANG_PROGRAM([$[]4],
+       [if (sizeof ($[]2))
+	 return 0;])],
+    [AC_COMPILE_IFELSE(
+       [AC_LANG_PROGRAM([$[]4],
+	  [if (sizeof (($[]2)))
+	    return 0;])],
+       [],
+       [AS_VAR_SET([$[]3], [yes])])])])
+  AS_LINENO_POP
+])dnl
+
+# _AC_CHECK_TYPE_NEW(TYPE,
+#		     [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		     [INCLUDES = DEFAULT-INCLUDES])
+# ------------------------------------------------------------
+# Check whether the type TYPE is supported by the system, maybe via the
+# provided includes.
+AC_DEFUN([_AC_CHECK_TYPE_NEW],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_check_type],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_check_type],
+    [LINENO TYPE VAR INCLUDES],
+    [Tests whether TYPE exists after having included INCLUDES, setting
+     cache variable VAR accordingly.])],
+    [$0_BODY])]dnl
+[AS_VAR_PUSHDEF([ac_Type], [ac_cv_type_$1])]dnl
+[ac_fn_[]_AC_LANG_ABBREV[]_check_type "$LINENO" "$1" "ac_Type" ]dnl
+["AS_ESCAPE([AC_INCLUDES_DEFAULT([$4])], [""])"
+AS_VAR_IF([ac_Type], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_Type])dnl
+])# _AC_CHECK_TYPE_NEW
+
+
+# _AC_CHECK_TYPES(TYPE)
+# ---------------------
+# Helper to AC_CHECK_TYPES, which generates two of the four arguments
+# to _AC_CHECK_TYPE_NEW that are based on TYPE.
+m4_define([_AC_CHECK_TYPES],
+[[$1], [AC_DEFINE_UNQUOTED(AS_TR_CPP([HAVE_$1]), [1],
+  [Define to 1 if the system has the type `$1'.])]])
+
+
+# AC_CHECK_TYPES(TYPES,
+#		 [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		 [INCLUDES = DEFAULT-INCLUDES])
+# --------------------------------------------------------
+# TYPES is an m4 list.  There is no ambiguity here: we mean the newer
+# AC_CHECK_TYPE.
+AC_DEFUN([AC_CHECK_TYPES],
+[m4_map_args_sep([_AC_CHECK_TYPE_NEW(_$0(], [)[
+$2], [$3], [$4])], [], $1)])
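+
+# A typical configure.ac fragment using this macro might look like the
+# following (the type names below are arbitrary examples):
+#
+#      AC_CHECK_TYPES([ptrdiff_t, long long int])
+#      AC_CHECK_TYPES([struct timeval], [], [],
+#                     [[#include <sys/time.h>]])
+#
+# The first call defines HAVE_PTRDIFF_T and HAVE_LONG_LONG_INT in the
+# configuration header when the types are found; the second supplies the
+# includes needed to find the type and defines HAVE_STRUCT_TIMEVAL.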
+
+
+# _AC_CHECK_TYPE_OLD(TYPE, DEFAULT)
+# ---------------------------------
+# FIXME: This is an extremely badly chosen name, since this
+# macro actually performs an AC_REPLACE_TYPE.  Some day we will
+# have to clean this up.
+m4_define([_AC_CHECK_TYPE_OLD],
+[_AC_CHECK_TYPE_NEW([$1],,
+   [AC_DEFINE_UNQUOTED([$1], [$2],
+		       [Define to `$2' if <sys/types.h> does not define.])])dnl
+])# _AC_CHECK_TYPE_OLD
+
+
+# _AC_CHECK_TYPE_REPLACEMENT_TYPE_P(STRING)
+# -----------------------------------------
+# Return `1' if STRING seems to be a builtin C/C++ type, i.e., if it
+# starts with `_Bool', `bool', `char', `double', `float', `int',
+# `long', `short', `signed', or `unsigned', followed by characters
+# that may appear in a type definition.
+# Because many people have used `off_t' and `size_t' too, they are added
+# for better backward compatibility with common usage.
+m4_define([_AC_CHECK_TYPE_REPLACEMENT_TYPE_P],
+[m4_bmatch([$1],
+	  [^\(_Bool\|bool\|char\|double\|float\|int\|long\|short\|\(un\)?signed\|[_a-zA-Z][_a-zA-Z0-9]*_t\)[][_a-zA-Z0-9() *]*$],
+	  1, 0)dnl
+])# _AC_CHECK_TYPE_REPLACEMENT_TYPE_P
+
+
+# _AC_CHECK_TYPE_MAYBE_TYPE_P(STRING)
+# -----------------------------------
+# Return `1' if STRING looks like a C/C++ type.
+m4_define([_AC_CHECK_TYPE_MAYBE_TYPE_P],
+[m4_bmatch([$1], [^[_a-zA-Z0-9 ]+\([_a-zA-Z0-9() *]\|\[\|\]\)*$],
+	  1, 0)dnl
+])# _AC_CHECK_TYPE_MAYBE_TYPE_P
+
+
+# AC_CHECK_TYPE(TYPE, DEFAULT)
+#  or
+# AC_CHECK_TYPE(TYPE,
+#		[ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		[INCLUDES = DEFAULT-INCLUDES])
+# -------------------------------------------------------
+#
+# Dispatch respectively to _AC_CHECK_TYPE_OLD or _AC_CHECK_TYPE_NEW.
+# 1. More than two arguments	     => NEW
+# 2. $2 seems to be replacement type => OLD
+#    See _AC_CHECK_TYPE_REPLACEMENT_TYPE_P for `replacement type'.
+# 3. $2 seems to be a type	     => NEW plus a warning
+# 4. default			     => NEW
+AC_DEFUN([AC_CHECK_TYPE],
+[m4_cond([$#], [3],
+  [_AC_CHECK_TYPE_NEW],
+	 [$#], [4],
+  [_AC_CHECK_TYPE_NEW],
+	 [_AC_CHECK_TYPE_REPLACEMENT_TYPE_P([$2])], [1],
+  [_AC_CHECK_TYPE_OLD],
+	 [_AC_CHECK_TYPE_MAYBE_TYPE_P([$2])], [1],
+  [AC_DIAGNOSE([syntax],
+	       [$0: assuming `$2' is not a type])_AC_CHECK_TYPE_NEW],
+  [_AC_CHECK_TYPE_NEW])($@)])# AC_CHECK_TYPE
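+
+# For illustration, the two calling conventions dispatched above might be
+# used like this in configure.ac (the types shown are arbitrary examples):
+#
+#      dnl Old style: $2 looks like a replacement type, so off_t is
+#      dnl defined to `long int' when <sys/types.h> does not provide it.
+#      AC_CHECK_TYPE([off_t], [long int])
+#
+#      dnl New style: more than two arguments, so the actions run
+#      dnl depending on whether the type is found.
+#      AC_CHECK_TYPE([sighandler_t],
+#        [AC_DEFINE([HAVE_SIGHANDLER_T], [1],
+#           [Define to 1 if the system has the type `sighandler_t'.])],
+#        [], [[#include <signal.h>]])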
+
+
+
+# ---------------------------- #
+# Types that must be checked.  #
+# ---------------------------- #
+
+AN_IDENTIFIER([ptrdiff_t], [AC_CHECK_TYPES])
+
+
+# ----------------- #
+# Specific checks.  #
+# ----------------- #
+
+# AC_TYPE_GETGROUPS
+# -----------------
+AC_DEFUN([AC_TYPE_GETGROUPS],
+[AC_REQUIRE([AC_TYPE_UID_T])dnl
+AC_CACHE_CHECK(type of array argument to getgroups, ac_cv_type_getgroups,
+[AC_RUN_IFELSE([AC_LANG_SOURCE(
+[[/* Thanks to Mike Rendell for this test.  */
+]AC_INCLUDES_DEFAULT[
+#define NGID 256
+#undef MAX
+#define MAX(x, y) ((x) > (y) ? (x) : (y))
+
+int
+main ()
+{
+  gid_t gidset[NGID];
+  int i, n;
+  union { gid_t gval; long int lval; }  val;
+
+  val.lval = -1;
+  for (i = 0; i < NGID; i++)
+    gidset[i] = val.gval;
+  n = getgroups (sizeof (gidset) / MAX (sizeof (int), sizeof (gid_t)) - 1,
+		 gidset);
+  /* Exit non-zero if getgroups seems to require an array of ints.  This
+     happens when gid_t is short int but getgroups modifies an array
+     of ints.  */
+  return n > 0 && gidset[n] != val.gval;
+}]])],
+	       [ac_cv_type_getgroups=gid_t],
+	       [ac_cv_type_getgroups=int],
+	       [ac_cv_type_getgroups=cross])
+if test $ac_cv_type_getgroups = cross; then
+  dnl When we can't run the test program (we are cross compiling), presume
+  dnl that <unistd.h> has either an accurate prototype for getgroups or none.
+  dnl Old systems without prototypes probably use int.
+  AC_EGREP_HEADER([getgroups.*int.*gid_t], unistd.h,
+		  ac_cv_type_getgroups=gid_t, ac_cv_type_getgroups=int)
+fi])
+AC_DEFINE_UNQUOTED(GETGROUPS_T, $ac_cv_type_getgroups,
+		   [Define to the type of elements in the array set by
+		    `getgroups'. Usually this is either `int' or `gid_t'.])
+])# AC_TYPE_GETGROUPS
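+
+# Illustrative use: a configure.ac simply invokes
+#
+#      AC_TYPE_GETGROUPS
+#
+# (AC_TYPE_UID_T is required automatically), and C code can then declare
+# the buffer it passes to getgroups() as an array of GETGROUPS_T, which
+# will be either `gid_t' or `int'.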
+
+
+# AU::AM_TYPE_PTRDIFF_T
+# ---------------------
+AU_DEFUN([AM_TYPE_PTRDIFF_T],
+[AC_CHECK_TYPES(ptrdiff_t)])
+
+
+# AC_TYPE_INTMAX_T
+# ----------------
+AC_DEFUN([AC_TYPE_INTMAX_T],
+[
+  AC_REQUIRE([AC_TYPE_LONG_LONG_INT])
+  AC_CHECK_TYPE([intmax_t],
+    [AC_DEFINE([HAVE_INTMAX_T], 1,
+       [Define to 1 if the system has the type `intmax_t'.])],
+    [test $ac_cv_type_long_long_int = yes \
+       && ac_type='long long int' \
+       || ac_type='long int'
+     AC_DEFINE_UNQUOTED([intmax_t], [$ac_type],
+       [Define to the widest signed integer type
+	if <stdint.h> and <inttypes.h> do not define.])])
+])
+
+
+# AC_TYPE_UINTMAX_T
+# -----------------
+AC_DEFUN([AC_TYPE_UINTMAX_T],
+[
+  AC_REQUIRE([AC_TYPE_UNSIGNED_LONG_LONG_INT])
+  AC_CHECK_TYPE([uintmax_t],
+    [AC_DEFINE([HAVE_UINTMAX_T], 1,
+       [Define to 1 if the system has the type `uintmax_t'.])],
+    [test $ac_cv_type_unsigned_long_long_int = yes \
+       && ac_type='unsigned long long int' \
+       || ac_type='unsigned long int'
+     AC_DEFINE_UNQUOTED([uintmax_t], [$ac_type],
+       [Define to the widest unsigned integer type
+	if <stdint.h> and <inttypes.h> do not define.])])
+])
+
+
+# AC_TYPE_INTPTR_T
+# ----------------
+AC_DEFUN([AC_TYPE_INTPTR_T],
+[
+  AC_CHECK_TYPE([intptr_t],
+    [AC_DEFINE([HAVE_INTPTR_T], 1,
+       [Define to 1 if the system has the type `intptr_t'.])],
+    [for ac_type in 'int' 'long int' 'long long int'; do
+       AC_COMPILE_IFELSE(
+	 [AC_LANG_BOOL_COMPILE_TRY(
+	    [AC_INCLUDES_DEFAULT],
+	    [[sizeof (void *) <= sizeof ($ac_type)]])],
+	 [AC_DEFINE_UNQUOTED([intptr_t], [$ac_type],
+	    [Define to the type of a signed integer type wide enough to
+	     hold a pointer, if such a type exists, and if the system
+	     does not define it.])
+	  ac_type=])
+       test -z "$ac_type" && break
+     done])
+])
+
+
+# AC_TYPE_UINTPTR_T
+# -----------------
+AC_DEFUN([AC_TYPE_UINTPTR_T],
+[
+  AC_CHECK_TYPE([uintptr_t],
+    [AC_DEFINE([HAVE_UINTPTR_T], 1,
+       [Define to 1 if the system has the type `uintptr_t'.])],
+    [for ac_type in 'unsigned int' 'unsigned long int' \
+	'unsigned long long int'; do
+       AC_COMPILE_IFELSE(
+	 [AC_LANG_BOOL_COMPILE_TRY(
+	    [AC_INCLUDES_DEFAULT],
+	    [[sizeof (void *) <= sizeof ($ac_type)]])],
+	 [AC_DEFINE_UNQUOTED([uintptr_t], [$ac_type],
+	    [Define to the type of an unsigned integer type wide enough to
+	     hold a pointer, if such a type exists, and if the system
+	     does not define it.])
+	  ac_type=])
+       test -z "$ac_type" && break
+     done])
+])
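+
+# Illustrative use of this family of checks in configure.ac:
+#
+#      AC_TYPE_INTMAX_T
+#      AC_TYPE_UINTMAX_T
+#      AC_TYPE_INTPTR_T
+#      AC_TYPE_UINTPTR_T
+#
+# Afterwards, code that includes the configuration header may use
+# intmax_t, uintmax_t, intptr_t and uintptr_t unconditionally: when the
+# system headers lack one of them, config.h defines it to a suitable
+# fallback type, and HAVE_INTMAX_T etc. record whether the system
+# provided the type itself.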
+
+
+# AC_TYPE_LONG_DOUBLE
+# -------------------
+AC_DEFUN([AC_TYPE_LONG_DOUBLE],
+[
+  AC_CACHE_CHECK([for long double], [ac_cv_type_long_double],
+    [if test "$GCC" = yes; then
+       ac_cv_type_long_double=yes
+     else
+       AC_COMPILE_IFELSE(
+	 [AC_LANG_BOOL_COMPILE_TRY(
+	    [[/* The Stardent Vistra knows sizeof (long double), but does
+		 not support it.  */
+	      long double foo = 0.0L;]],
+	    [[/* On Ultrix 4.3 cc, long double is 4 and double is 8.  */
+	      sizeof (double) <= sizeof (long double)]])],
+	 [ac_cv_type_long_double=yes],
+	 [ac_cv_type_long_double=no])
+     fi])
+  if test $ac_cv_type_long_double = yes; then
+    AC_DEFINE([HAVE_LONG_DOUBLE], 1,
+      [Define to 1 if the system has the type `long double'.])
+  fi
+])
+
+
+# AC_TYPE_LONG_DOUBLE_WIDER
+# -------------------------
+AC_DEFUN([AC_TYPE_LONG_DOUBLE_WIDER],
+[
+  AC_CACHE_CHECK(
+    [for long double with more range or precision than double],
+    [ac_cv_type_long_double_wider],
+    [AC_COMPILE_IFELSE(
+       [AC_LANG_BOOL_COMPILE_TRY(
+	  [[#include <float.h>
+	    long double const a[] =
+	      {
+		 0.0L, DBL_MIN, DBL_MAX, DBL_EPSILON,
+		 LDBL_MIN, LDBL_MAX, LDBL_EPSILON
+	      };
+	    long double
+	    f (long double x)
+	    {
+	       return ((x + (unsigned long int) 10) * (-1 / x) + a[0]
+			+ (x ? f (x) : 'c'));
+	    }
+	  ]],
+	  [[(0 < ((DBL_MAX_EXP < LDBL_MAX_EXP)
+		   + (DBL_MANT_DIG < LDBL_MANT_DIG)
+		   - (LDBL_MAX_EXP < DBL_MAX_EXP)
+		   - (LDBL_MANT_DIG < DBL_MANT_DIG)))
+	    && (int) LDBL_EPSILON == 0
+	  ]])],
+       ac_cv_type_long_double_wider=yes,
+       ac_cv_type_long_double_wider=no)])
+  if test $ac_cv_type_long_double_wider = yes; then
+    AC_DEFINE([HAVE_LONG_DOUBLE_WIDER], 1,
+      [Define to 1 if the type `long double' works and has more range or
+       precision than `double'.])
+  fi
+])# AC_TYPE_LONG_DOUBLE_WIDER
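+
+# Illustrative use in configure.ac:
+#
+#      AC_TYPE_LONG_DOUBLE_WIDER
+#
+# which defines HAVE_LONG_DOUBLE_WIDER when `long double' works and has
+# more range or precision than `double'.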
+
+
+# AC_C_LONG_DOUBLE
+# ----------------
+AU_DEFUN([AC_C_LONG_DOUBLE],
+  [
+    AC_TYPE_LONG_DOUBLE_WIDER
+    ac_cv_c_long_double=$ac_cv_type_long_double_wider
+    if test $ac_cv_c_long_double = yes; then
+      AC_DEFINE([HAVE_LONG_DOUBLE], 1,
+	[Define to 1 if the type `long double' works and has more range or
+	 precision than `double'.])
+    fi
+  ],
+  [The macro `AC_C_LONG_DOUBLE' is obsolete.
+You should use `AC_TYPE_LONG_DOUBLE' or `AC_TYPE_LONG_DOUBLE_WIDER' instead.]
+)
+
+
+# _AC_TYPE_LONG_LONG_SNIPPET
+# --------------------------
+# Expands to a C program that can be used to test for simultaneous support
+# of 'long long' and 'unsigned long long'. We don't want to say that
+# 'long long' is available if 'unsigned long long' is not, or vice versa,
+# because too many programs rely on the symmetry between signed and unsigned
+# integer types (excluding 'bool').
+AC_DEFUN([_AC_TYPE_LONG_LONG_SNIPPET],
+[
+  AC_LANG_PROGRAM(
+    [[/* For now, do not test the preprocessor; as of 2007 there are too many
+	 implementations with broken preprocessors.  Perhaps this can
+	 be revisited in 2012.  In the meantime, code should not expect
+	 #if to work with literals wider than 32 bits.  */
+      /* Test literals.  */
+      long long int ll = 9223372036854775807ll;
+      long long int nll = -9223372036854775807LL;
+      unsigned long long int ull = 18446744073709551615ULL;
+      /* Test constant expressions.   */
+      typedef int a[((-9223372036854775807LL < 0 && 0 < 9223372036854775807ll)
+		     ? 1 : -1)];
+      typedef int b[(18446744073709551615ULL <= (unsigned long long int) -1
+		     ? 1 : -1)];
+      int i = 63;]],
+    [[/* Test availability of runtime routines for shift and division.  */
+      long long int llmax = 9223372036854775807ll;
+      unsigned long long int ullmax = 18446744073709551615ull;
+      return ((ll << 63) | (ll >> 63) | (ll < i) | (ll > i)
+	      | (llmax / ll) | (llmax % ll)
+	      | (ull << 63) | (ull >> 63) | (ull << i) | (ull >> i)
+	      | (ullmax / ull) | (ullmax % ull));]])
+])
+
+
+# AC_TYPE_LONG_LONG_INT
+# ---------------------
+AC_DEFUN([AC_TYPE_LONG_LONG_INT],
+[
+  AC_CACHE_CHECK([for long long int], [ac_cv_type_long_long_int],
+    [AC_LINK_IFELSE(
+       [_AC_TYPE_LONG_LONG_SNIPPET],
+       [dnl This catches a bug in Tandem NonStop Kernel (OSS) cc -O circa 2004.
+	dnl If cross compiling, assume the bug isn't important, since
+	dnl nobody cross compiles for this platform as far as we know.
+	AC_RUN_IFELSE(
+	  [AC_LANG_PROGRAM(
+	     [[@%:@include <limits.h>
+	       @%:@ifndef LLONG_MAX
+	       @%:@ define HALF \
+			(1LL << (sizeof (long long int) * CHAR_BIT - 2))
+	       @%:@ define LLONG_MAX (HALF - 1 + HALF)
+	       @%:@endif]],
+	     [[long long int n = 1;
+	       int i;
+	       for (i = 0; ; i++)
+		 {
+		   long long int m = n << i;
+		   if (m >> i != n)
+		     return 1;
+		   if (LLONG_MAX / 2 < m)
+		     break;
+		 }
+	       return 0;]])],
+	  [ac_cv_type_long_long_int=yes],
+	  [ac_cv_type_long_long_int=no],
+	  [ac_cv_type_long_long_int=yes])],
+       [ac_cv_type_long_long_int=no])])
+  if test $ac_cv_type_long_long_int = yes; then
+    AC_DEFINE([HAVE_LONG_LONG_INT], 1,
+      [Define to 1 if the system has the type `long long int'.])
+  fi
+])
+
+
+# AC_TYPE_UNSIGNED_LONG_LONG_INT
+# ------------------------------
+AC_DEFUN([AC_TYPE_UNSIGNED_LONG_LONG_INT],
+[
+  AC_CACHE_CHECK([for unsigned long long int],
+    [ac_cv_type_unsigned_long_long_int],
+    [AC_LINK_IFELSE(
+       [_AC_TYPE_LONG_LONG_SNIPPET],
+       [ac_cv_type_unsigned_long_long_int=yes],
+       [ac_cv_type_unsigned_long_long_int=no])])
+  if test $ac_cv_type_unsigned_long_long_int = yes; then
+    AC_DEFINE([HAVE_UNSIGNED_LONG_LONG_INT], 1,
+      [Define to 1 if the system has the type `unsigned long long int'.])
+  fi
+])
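+
+# Illustrative use in configure.ac:
+#
+#      AC_TYPE_LONG_LONG_INT
+#      AC_TYPE_UNSIGNED_LONG_LONG_INT
+#
+# Code can then test HAVE_LONG_LONG_INT and HAVE_UNSIGNED_LONG_LONG_INT
+# in the configuration header before using the types.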
+
+
+# AC_TYPE_MBSTATE_T
+# -----------------
+AC_DEFUN([AC_TYPE_MBSTATE_T],
+  [AC_CACHE_CHECK([for mbstate_t], ac_cv_type_mbstate_t,
+     [AC_COMPILE_IFELSE(
+	[AC_LANG_PROGRAM(
+	   [AC_INCLUDES_DEFAULT
+#	    include <wchar.h>],
+	   [mbstate_t x; return sizeof x;])],
+	[ac_cv_type_mbstate_t=yes],
+	[ac_cv_type_mbstate_t=no])])
+   if test $ac_cv_type_mbstate_t = yes; then
+     AC_DEFINE([HAVE_MBSTATE_T], 1,
+	       [Define to 1 if <wchar.h> declares mbstate_t.])
+   else
+     AC_DEFINE([mbstate_t], int,
+	       [Define to a type if <wchar.h> does not define.])
+   fi])
+
+
+# AC_TYPE_UID_T
+# -------------
+# FIXME: Rewrite using AC_CHECK_TYPE.
+AN_IDENTIFIER([gid_t], [AC_TYPE_UID_T])
+AN_IDENTIFIER([uid_t], [AC_TYPE_UID_T])
+AC_DEFUN([AC_TYPE_UID_T],
+[AC_CACHE_CHECK(for uid_t in sys/types.h, ac_cv_type_uid_t,
+[AC_EGREP_HEADER(uid_t, sys/types.h,
+  ac_cv_type_uid_t=yes, ac_cv_type_uid_t=no)])
+if test $ac_cv_type_uid_t = no; then
+  AC_DEFINE(uid_t, int, [Define to `int' if <sys/types.h> doesn't define.])
+  AC_DEFINE(gid_t, int, [Define to `int' if <sys/types.h> doesn't define.])
+fi
+])
+
+
+AN_IDENTIFIER([size_t], [AC_TYPE_SIZE_T])
+AC_DEFUN([AC_TYPE_SIZE_T], [AC_CHECK_TYPE(size_t, unsigned int)])
+
+AN_IDENTIFIER([ssize_t], [AC_TYPE_SSIZE_T])
+AC_DEFUN([AC_TYPE_SSIZE_T], [AC_CHECK_TYPE(ssize_t, int)])
+
+AN_IDENTIFIER([pid_t], [AC_TYPE_PID_T])
+AC_DEFUN([AC_TYPE_PID_T],  [AC_CHECK_TYPE(pid_t,  int)])
+
+AN_IDENTIFIER([off_t], [AC_TYPE_OFF_T])
+AC_DEFUN([AC_TYPE_OFF_T],  [AC_CHECK_TYPE(off_t,  long int)])
+
+AN_IDENTIFIER([mode_t], [AC_TYPE_MODE_T])
+AC_DEFUN([AC_TYPE_MODE_T], [AC_CHECK_TYPE(mode_t, int)])
+
+AN_IDENTIFIER([int8_t], [AC_TYPE_INT8_T])
+AN_IDENTIFIER([int16_t], [AC_TYPE_INT16_T])
+AN_IDENTIFIER([int32_t], [AC_TYPE_INT32_T])
+AN_IDENTIFIER([int64_t], [AC_TYPE_INT64_T])
+AN_IDENTIFIER([uint8_t], [AC_TYPE_UINT8_T])
+AN_IDENTIFIER([uint16_t], [AC_TYPE_UINT16_T])
+AN_IDENTIFIER([uint32_t], [AC_TYPE_UINT32_T])
+AN_IDENTIFIER([uint64_t], [AC_TYPE_UINT64_T])
+AC_DEFUN([AC_TYPE_INT8_T], [_AC_TYPE_INT(8)])
+AC_DEFUN([AC_TYPE_INT16_T], [_AC_TYPE_INT(16)])
+AC_DEFUN([AC_TYPE_INT32_T], [_AC_TYPE_INT(32)])
+AC_DEFUN([AC_TYPE_INT64_T], [_AC_TYPE_INT(64)])
+AC_DEFUN([AC_TYPE_UINT8_T], [_AC_TYPE_UNSIGNED_INT(8)])
+AC_DEFUN([AC_TYPE_UINT16_T], [_AC_TYPE_UNSIGNED_INT(16)])
+AC_DEFUN([AC_TYPE_UINT32_T], [_AC_TYPE_UNSIGNED_INT(32)])
+AC_DEFUN([AC_TYPE_UINT64_T], [_AC_TYPE_UNSIGNED_INT(64)])
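+
+# Illustrative use of the exact-width checks in configure.ac:
+#
+#      AC_TYPE_INT8_T
+#      AC_TYPE_UINT8_T
+#      AC_TYPE_UINT32_T
+#      AC_TYPE_UINT64_T
+#
+# After these run, code may use int8_t, uint8_t, uint32_t and uint64_t
+# directly: when the standard headers do not define a type, config.h
+# defines it to a replacement integer type of exactly that width,
+# provided such a type exists on the platform.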
+
+# _AC_TYPE_INT_BODY
+# -----------------
+# Shell function body for _AC_TYPE_INT.
+m4_define([_AC_TYPE_INT_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AC_CACHE_CHECK([for int$[]2_t], [$[]3],
+    [AS_VAR_SET([$[]3], [no])
+     # Order is important - never check a type that is potentially smaller
+     # than half of the expected target width.
+     for ac_type in int$[]2_t 'int' 'long int' \
+	 'long long int' 'short int' 'signed char'; do
+       AC_COMPILE_IFELSE(
+	 [AC_LANG_BOOL_COMPILE_TRY(
+	    [AC_INCLUDES_DEFAULT
+	     enum { N = $[]2 / 2 - 1 };],
+	    [0 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1)])],
+	 [AC_COMPILE_IFELSE(
+	    [AC_LANG_BOOL_COMPILE_TRY(
+	       [AC_INCLUDES_DEFAULT
+	        enum { N = $[]2 / 2 - 1 };],
+	       [($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1)
+		 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 2)])],
+	    [],
+	    [AS_CASE([$ac_type], [int$[]2_t],
+	       [AS_VAR_SET([$[]3], [yes])],
+	       [AS_VAR_SET([$[]3], [$ac_type])])])])
+       AS_VAR_IF([$[]3], [no], [], [break])
+     done])
+  AS_LINENO_POP
+])# _AC_TYPE_INT_BODY
+
+# _AC_TYPE_INT(NBITS)
+# -------------------
+# Set a variable ac_cv_c_intNBITS_t to `yes' if intNBITS_t is available,
+# `no' if it is not and no replacement types could be found, and a C type
+# if it is not available but a replacement signed integer type of width
+# exactly NBITS bits was found.  In the third case, intNBITS_t is AC_DEFINEd
+# to that type as well.
+AC_DEFUN([_AC_TYPE_INT],
+[AC_REQUIRE_SHELL_FN([ac_fn_c_find_intX_t],
+  [AS_FUNCTION_DESCRIBE([ac_fn_c_find_intX_t], [LINENO BITS VAR],
+    [Finds a signed integer type with width BITS, setting cache variable VAR
+     accordingly.])],
+    [$0_BODY])]dnl
+[ac_fn_c_find_intX_t "$LINENO" "$1" "ac_cv_c_int$1_t"
+case $ac_cv_c_int$1_t in #(
+  no|yes) ;; #(
+  *)
+    AC_DEFINE_UNQUOTED([int$1_t], [$ac_cv_c_int$1_t],
+      [Define to the type of a signed integer type of width exactly $1 bits
+       if such a type exists and the standard includes do not define it.]);;
+esac
+])# _AC_TYPE_INT
+
+# _AC_TYPE_UNSIGNED_INT_BODY
+# --------------------------
+# Shell function body for _AC_TYPE_UNSIGNED_INT.
+m4_define([_AC_TYPE_UNSIGNED_INT_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AC_CACHE_CHECK([for uint$[]2_t], $[]3,
+    [AS_VAR_SET([$[]3], [no])
+     # Order is important - never check a type that is potentially smaller
+     # than half of the expected target width.
+     for ac_type in uint$[]2_t 'unsigned int' 'unsigned long int' \
+	 'unsigned long long int' 'unsigned short int' 'unsigned char'; do
+       AC_COMPILE_IFELSE(
+	 [AC_LANG_BOOL_COMPILE_TRY(
+	    [AC_INCLUDES_DEFAULT],
+	    [(($ac_type) -1 >> ($[]2 / 2 - 1)) >> ($[]2 / 2 - 1) == 3])],
+	 [AS_CASE([$ac_type], [uint$[]2_t],
+	    [AS_VAR_SET([$[]3], [yes])],
+	    [AS_VAR_SET([$[]3], [$ac_type])])])
+       AS_VAR_IF([$[]3], [no], [], [break])
+     done])
+  AS_LINENO_POP
+])# _AC_TYPE_UNSIGNED_INT_BODY
+
+
+# _AC_TYPE_UNSIGNED_INT(NBITS)
+# ----------------------------
+# Set a variable ac_cv_c_uintNBITS_t to `yes' if uintNBITS_t is available,
+# `no' if it is not and no replacement types could be found, and a C type
+# if it is not available but a replacement unsigned integer type of width
+# exactly NBITS bits was found.  In the third case, uintNBITS_t is AC_DEFINEd
+# to that type as well.
+AC_DEFUN([_AC_TYPE_UNSIGNED_INT],
+[AC_REQUIRE_SHELL_FN([ac_fn_c_find_uintX_t],
+  [AS_FUNCTION_DESCRIBE([ac_fn_c_find_uintX_t], [LINENO BITS VAR],
+    [Finds an unsigned integer type with width BITS, setting cache variable VAR
+     accordingly.])],
+  [$0_BODY])]dnl
+[ac_fn_c_find_uintX_t "$LINENO" "$1" "ac_cv_c_uint$1_t"
+case $ac_cv_c_uint$1_t in #(
+  no|yes) ;; #(
+  *)
+    m4_bmatch([$1], [^\(8\|32\|64\)$],
+      [AC_DEFINE([_UINT$1_T], 1,
+	 [Define for Solaris 2.5.1 so the uint$1_t typedef from
+	  <sys/synch.h>, <pthread.h>, or <semaphore.h> is not used.
+	  If the typedef were allowed, the #define below would cause a
+	  syntax error.])])
+    AC_DEFINE_UNQUOTED([uint$1_t], [$ac_cv_c_uint$1_t],
+      [Define to the type of an unsigned integer type of width exactly $1 bits
+       if such a type exists and the standard includes do not define it.]);;
+  esac
+])# _AC_TYPE_UNSIGNED_INT
+
+# AC_TYPE_SIGNAL
+# --------------
+# Note that identifiers starting with SIG are reserved by ANSI C.
+# C89 requires signal handlers to return void; only K&R returned int;
+# modern code does not need to worry about using this macro (not to
+# mention that sigaction is better than signal).
+AU_DEFUN([AC_TYPE_SIGNAL],
+[AC_CACHE_CHECK([return type of signal handlers], ac_cv_type_signal,
+[AC_COMPILE_IFELSE(
+[AC_LANG_PROGRAM([#include <sys/types.h>
+#include <signal.h>
+],
+		 [return *(signal (0, 0)) (0) == 1;])],
+		   [ac_cv_type_signal=int],
+		   [ac_cv_type_signal=void])])
+AC_DEFINE_UNQUOTED(RETSIGTYPE, $ac_cv_type_signal,
+		   [Define as the return type of signal handlers
+		    (`int' or `void').])
+], [your code may safely assume C89 semantics that RETSIGTYPE is void.
+Remove this warning and the `AC_CACHE_CHECK' when you adjust the code.])
+
+
+## ------------------------ ##
+## Checking size of types.  ##
+## ------------------------ ##
+
+# ---------------- #
+# Generic checks.  #
+# ---------------- #
+
+
+# AC_CHECK_SIZEOF(TYPE, [IGNORED], [INCLUDES = DEFAULT-INCLUDES])
+# ---------------------------------------------------------------
+AC_DEFUN([AC_CHECK_SIZEOF],
+[AS_LITERAL_IF(m4_translit([[$1]], [*], [p]), [],
+	       [m4_fatal([$0: requires literal arguments])])]dnl
+[# The cast to long int works around a bug in the HP C Compiler
+# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects
+# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'.
+# This bug is HP SR number 8606223364.
+_AC_CACHE_CHECK_INT([size of $1], [AS_TR_SH([ac_cv_sizeof_$1])],
+  [(long int) (sizeof ($1))],
+  [AC_INCLUDES_DEFAULT([$3])],
+  [if test "$AS_TR_SH([ac_cv_type_$1])" = yes; then
+     AC_MSG_FAILURE([cannot compute sizeof ($1)], 77)
+   else
+     AS_TR_SH([ac_cv_sizeof_$1])=0
+   fi])
+
+AC_DEFINE_UNQUOTED(AS_TR_CPP(sizeof_$1), $AS_TR_SH([ac_cv_sizeof_$1]),
+		   [The size of `$1', as computed by sizeof.])
+])# AC_CHECK_SIZEOF
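+
+# A typical configure.ac use of this macro might be:
+#
+#      AC_CHECK_SIZEOF([int])
+#      AC_CHECK_SIZEOF([long int])
+#      AC_CHECK_SIZEOF([time_t], [], [[#include <time.h>]])
+#
+# which defines SIZEOF_INT, SIZEOF_LONG_INT and SIZEOF_TIME_T in the
+# configuration header; the value is 0 when the type does not exist.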
+
+
+# AC_CHECK_ALIGNOF(TYPE, [INCLUDES = DEFAULT-INCLUDES])
+# -----------------------------------------------------
+# TYPE can include braces and semicolon, which AS_TR_CPP and AS_TR_SH
+# (correctly) recognize as potential shell metacharacters.  So we
+# have to flatten problematic characters ourselves to guarantee that
+# AC_DEFINE_UNQUOTED will see a literal.
+AC_DEFUN([AC_CHECK_ALIGNOF],
+[m4_if(m4_index(m4_translit([[$1]], [`\"], [$]), [$]), [-1], [],
+       [m4_fatal([$0: requires literal arguments])])]dnl
+[_$0([$1], [$2], m4_translit([[$1]], [{;}], [___]))])
+
+m4_define([_AC_CHECK_ALIGNOF],
+[# The cast to long int works around a bug in the HP C Compiler,
+# see AC_CHECK_SIZEOF for more information.
+_AC_CACHE_CHECK_INT([alignment of $1], [AS_TR_SH([ac_cv_alignof_$3])],
+  [(long int) offsetof (ac__type_alignof_, y)],
+  [AC_INCLUDES_DEFAULT([$2])
+#ifndef offsetof
+# define offsetof(type, member) ((char *) &((type *) 0)->member - (char *) 0)
+#endif
+typedef struct { char x; $1 y; } ac__type_alignof_;],
+  [if test "$AS_TR_SH([ac_cv_type_$3])" = yes; then
+     AC_MSG_FAILURE([cannot compute alignment of $1], 77)
+   else
+     AS_TR_SH([ac_cv_alignof_$3])=0
+   fi])
+
+AC_DEFINE_UNQUOTED(AS_TR_CPP(alignof_$3), $AS_TR_SH([ac_cv_alignof_$3]),
+		   [The normal alignment of `$1', in bytes.])
+])# AC_CHECK_ALIGNOF
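+
+# A typical configure.ac use of this macro might be:
+#
+#      AC_CHECK_ALIGNOF([double])
+#      AC_CHECK_ALIGNOF([int64_t])
+#
+# which defines ALIGNOF_DOUBLE and ALIGNOF_INT64_T in the configuration
+# header (0 when the type cannot be found).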
+
+
+# AU::AC_INT_16_BITS
+# ------------------
+# What a great name :)
+AU_DEFUN([AC_INT_16_BITS],
+[AC_CHECK_SIZEOF([int])
+test $ac_cv_sizeof_int = 2 &&
+  AC_DEFINE(INT_16_BITS, 1,
+	    [Define to 1 if `sizeof (int)' = 2.  Obsolete, use `SIZEOF_INT'.])
+], [your code should no longer depend upon `INT_16_BITS', but upon
+`SIZEOF_INT == 2'.  Remove this warning and the `AC_DEFINE' when you
+adjust the code.])
+
+
+# AU::AC_LONG_64_BITS
+# -------------------
+AU_DEFUN([AC_LONG_64_BITS],
+[AC_CHECK_SIZEOF([long int])
+test $ac_cv_sizeof_long_int = 8 &&
+  AC_DEFINE(LONG_64_BITS, 1,
+	    [Define to 1 if `sizeof (long int)' = 8.  Obsolete, use
+	     `SIZEOF_LONG_INT'.])
+], [your code should no longer depend upon `LONG_64_BITS', but upon
+`SIZEOF_LONG_INT == 8'.  Remove this warning and the `AC_DEFINE' when
+you adjust the code.])
+
+
+
+## -------------------------- ##
+## Generic structure checks.  ##
+## -------------------------- ##
+
+
+# ---------------- #
+# Generic checks.  #
+# ---------------- #
+
+# _AC_CHECK_MEMBER_BODY
+# ---------------------
+# Shell function body for AC_CHECK_MEMBER.
+m4_define([_AC_CHECK_MEMBER_BODY],
+[  AS_LINENO_PUSH([$[]1])
+  AC_CACHE_CHECK([for $[]2.$[]3], [$[]4],
+  [AC_COMPILE_IFELSE([AC_LANG_PROGRAM([$[]5],
+[static $[]2 ac_aggr;
+if (ac_aggr.$[]3)
+return 0;])],
+		[AS_VAR_SET([$[]4], [yes])],
+  [AC_COMPILE_IFELSE([AC_LANG_PROGRAM([$[]5],
+[static $[]2 ac_aggr;
+if (sizeof ac_aggr.$[]3)
+return 0;])],
+		[AS_VAR_SET([$[]4], [yes])],
+		[AS_VAR_SET([$[]4], [no])])])])
+  AS_LINENO_POP
+])dnl
+
+# AC_CHECK_MEMBER(AGGREGATE.MEMBER,
+#		  [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		  [INCLUDES = DEFAULT-INCLUDES])
+# ---------------------------------------------------------
+# AGGREGATE.MEMBER is for instance `struct passwd.pw_gecos'; shell
+# variables are not a valid argument.
+AC_DEFUN([AC_CHECK_MEMBER],
+[AC_REQUIRE_SHELL_FN([ac_fn_]_AC_LANG_ABBREV[_check_member],
+  [AS_FUNCTION_DESCRIBE([ac_fn_]_AC_LANG_ABBREV[_check_member],
+    [LINENO AGGR MEMBER VAR INCLUDES],
+    [Tries to find if the field MEMBER exists in type AGGR, after including
+     INCLUDES, setting cache variable VAR accordingly.])],
+    [_$0_BODY])]dnl
+[AS_LITERAL_IF([$1], [], [m4_fatal([$0: requires literal arguments])])]dnl
+[m4_if(m4_index([$1], [.]), [-1],
+  [m4_fatal([$0: Did not see any dot in `$1'])])]dnl
+[AS_VAR_PUSHDEF([ac_Member], [ac_cv_member_$1])]dnl
+[ac_fn_[]_AC_LANG_ABBREV[]_check_member "$LINENO" ]dnl
+[m4_bpatsubst([$1], [^\([^.]*\)\.\(.*\)], ["\1" "\2"]) "ac_Member" ]dnl
+["AS_ESCAPE([AC_INCLUDES_DEFAULT([$4])], [""])"
+AS_VAR_IF([ac_Member], [yes], [$2], [$3])
+AS_VAR_POPDEF([ac_Member])dnl
+])# AC_CHECK_MEMBER
+
+
+# _AC_CHECK_MEMBERS(AGGREGATE.MEMBER)
+# -----------------------------------
+# Helper to AC_CHECK_MEMBERS, which generates two of the four
+# arguments to AC_CHECK_MEMBER that are based on AGGREGATE and MEMBER.
+m4_define([_AC_CHECK_MEMBERS],
+[[$1], [AC_DEFINE_UNQUOTED(AS_TR_CPP([HAVE_$1]), [1],
+  [Define to 1 if `]m4_bpatsubst([$1],
+    [^\([^.]*\)\.\(.*\)], [[\2' is a member of `\1]])['.])]])
+
+# AC_CHECK_MEMBERS([AGGREGATE.MEMBER, ...],
+#		   [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND],
+#		   [INCLUDES = DEFAULT-INCLUDES])
+# ----------------------------------------------------------
+# The first argument is an m4 list.
+AC_DEFUN([AC_CHECK_MEMBERS],
+[m4_map_args_sep([AC_CHECK_MEMBER(_$0(], [)[
+$2], [$3], [$4])], [], $1)])
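+
+# A typical configure.ac use of these member checks might be (the
+# members shown are arbitrary examples):
+#
+#      AC_CHECK_MEMBERS([struct stat.st_rdev, struct stat.st_blksize])
+#      AC_CHECK_MEMBERS([struct tm.tm_gmtoff], [], [],
+#                       [[#include <time.h>]])
+#
+# which defines HAVE_STRUCT_STAT_ST_RDEV, HAVE_STRUCT_STAT_ST_BLKSIZE and
+# HAVE_STRUCT_TM_TM_GMTOFF when the corresponding members are present.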
+
+
+
+# ------------------------------------------------------- #
+# Members that ought to be tested with AC_CHECK_MEMBERS.  #
+# ------------------------------------------------------- #
+
+AN_IDENTIFIER([st_blksize], [AC_CHECK_MEMBERS([struct stat.st_blksize])])
+AN_IDENTIFIER([st_rdev],    [AC_CHECK_MEMBERS([struct stat.st_rdev])])
+
+
+# Alphabetic order, please.
+
+# _AC_STRUCT_DIRENT(MEMBER)
+# -------------------------
+AC_DEFUN([_AC_STRUCT_DIRENT],
+[
+  AC_REQUIRE([AC_HEADER_DIRENT])
+  AC_CHECK_MEMBERS([struct dirent.$1], [], [],
+    [[
+#include <sys/types.h>
+#ifdef HAVE_DIRENT_H
+# include <dirent.h>
+#else
+# define dirent direct
+# ifdef HAVE_SYS_NDIR_H
+#  include <sys/ndir.h>
+# endif
+# ifdef HAVE_SYS_DIR_H
+#  include <sys/dir.h>
+# endif
+# ifdef HAVE_NDIR_H
+#  include <ndir.h>
+# endif
+#endif
+    ]])
+])
+
+# AC_STRUCT_DIRENT_D_INO
+# ----------------------
+AC_DEFUN([AC_STRUCT_DIRENT_D_INO], [_AC_STRUCT_DIRENT([d_ino])])
+
+# AC_STRUCT_DIRENT_D_TYPE
+# -----------------------
+AC_DEFUN([AC_STRUCT_DIRENT_D_TYPE], [_AC_STRUCT_DIRENT([d_type])])
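+
+# Illustrative use in configure.ac:
+#
+#      AC_STRUCT_DIRENT_D_TYPE
+#
+# which defines HAVE_STRUCT_DIRENT_D_TYPE when `struct dirent' has a
+# `d_type' member (AC_HEADER_DIRENT is required automatically).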
+
+
+# AC_STRUCT_ST_BLKSIZE
+# --------------------
+AU_DEFUN([AC_STRUCT_ST_BLKSIZE],
+[AC_CHECK_MEMBERS([struct stat.st_blksize],
+		 [AC_DEFINE(HAVE_ST_BLKSIZE, 1,
+			    [Define to 1 if your `struct stat' has
+			     `st_blksize'.  Deprecated, use
+			     `HAVE_STRUCT_STAT_ST_BLKSIZE' instead.])])
+], [your code should no longer depend upon `HAVE_ST_BLKSIZE', but
+`HAVE_STRUCT_STAT_ST_BLKSIZE'.  Remove this warning and
+the `AC_DEFINE' when you adjust the code.])# AC_STRUCT_ST_BLKSIZE
+
+
+# AC_STRUCT_ST_BLOCKS
+# -------------------
+# If `struct stat' contains an `st_blocks' member, define
+# HAVE_STRUCT_STAT_ST_BLOCKS.  Otherwise, add `fileblocks.o' to the
+# output variable LIBOBJS.  We still define HAVE_ST_BLOCKS for backward
+# compatibility.  In the future, we will activate specializations for
+# this macro, so don't obsolete it right now.
+#
+# AC_OBSOLETE([$0], [; replace it with
+#   AC_CHECK_MEMBERS([struct stat.st_blocks],
+#		      [AC_LIBOBJ([fileblocks])])
+# Please note that it will define `HAVE_STRUCT_STAT_ST_BLOCKS',
+# and not `HAVE_ST_BLOCKS'.])dnl
+#
+AN_IDENTIFIER([st_blocks],  [AC_STRUCT_ST_BLOCKS])
+AC_DEFUN([AC_STRUCT_ST_BLOCKS],
+[AC_CHECK_MEMBERS([struct stat.st_blocks],
+		  [AC_DEFINE(HAVE_ST_BLOCKS, 1,
+			     [Define to 1 if your `struct stat' has
+			      `st_blocks'.  Deprecated, use
+			      `HAVE_STRUCT_STAT_ST_BLOCKS' instead.])],
+		  [AC_LIBOBJ([fileblocks])])
+])# AC_STRUCT_ST_BLOCKS
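+
+# Illustrative use in configure.ac:
+#
+#      AC_STRUCT_ST_BLOCKS
+#
+# which defines HAVE_STRUCT_STAT_ST_BLOCKS (and the deprecated
+# HAVE_ST_BLOCKS) when the member exists; otherwise `fileblocks' is added
+# to LIBOBJS, so the package is expected to ship a fileblocks.c
+# replacement.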
+
+
+# AC_STRUCT_ST_RDEV
+# -----------------
+AU_DEFUN([AC_STRUCT_ST_RDEV],
+[AC_CHECK_MEMBERS([struct stat.st_rdev],
+		 [AC_DEFINE(HAVE_ST_RDEV, 1,
+			    [Define to 1 if your `struct stat' has `st_rdev'.
+			     Deprecated, use `HAVE_STRUCT_STAT_ST_RDEV'
+			     instead.])])
+], [your code should no longer depend upon `HAVE_ST_RDEV', but
+`HAVE_STRUCT_STAT_ST_RDEV'.  Remove this warning and
+the `AC_DEFINE' when you adjust the code.])# AC_STRUCT_ST_RDEV
+
+
+# AC_STRUCT_TM
+# ------------
+# FIXME: This macro is badly named; it should be AC_CHECK_TYPE_STRUCT_TM.
+# Or something else, but what? AC_CHECK_TYPE_STRUCT_TM_IN_SYS_TIME?
+AC_DEFUN([AC_STRUCT_TM],
+[AC_CACHE_CHECK([whether struct tm is in sys/time.h or time.h],
+  ac_cv_struct_tm,
+[AC_COMPILE_IFELSE([AC_LANG_PROGRAM([#include <sys/types.h>
+#include <time.h>
+],
+				    [struct tm tm;
+				     int *p = &tm.tm_sec;
+				     return !p;])],
+		   [ac_cv_struct_tm=time.h],
+		   [ac_cv_struct_tm=sys/time.h])])
+if test $ac_cv_struct_tm = sys/time.h; then
+  AC_DEFINE(TM_IN_SYS_TIME, 1,
+	    [Define to 1 if your <sys/time.h> declares `struct tm'.])
+fi
+])# AC_STRUCT_TM
+
+
+# AC_STRUCT_TIMEZONE
+# ------------------
+# Figure out how to get the current timezone.  If `struct tm' has a
+# `tm_zone' member, define `HAVE_TM_ZONE'.  Otherwise, if the
+# external array `tzname' is found, define `HAVE_TZNAME'.
+AN_IDENTIFIER([tm_zone], [AC_STRUCT_TIMEZONE])
+AC_DEFUN([AC_STRUCT_TIMEZONE],
+[AC_REQUIRE([AC_STRUCT_TM])dnl
+AC_CHECK_MEMBERS([struct tm.tm_zone],,,[#include <sys/types.h>
+#include <$ac_cv_struct_tm>
+])
+if test "$ac_cv_member_struct_tm_tm_zone" = yes; then
+  AC_DEFINE(HAVE_TM_ZONE, 1,
+	    [Define to 1 if your `struct tm' has `tm_zone'. Deprecated, use
+	     `HAVE_STRUCT_TM_TM_ZONE' instead.])
+else
+  AC_CHECK_DECLS([tzname], , , [#include <time.h>])
+  AC_CACHE_CHECK(for tzname, ac_cv_var_tzname,
+[AC_LINK_IFELSE([AC_LANG_PROGRAM(
+[[#include <time.h>
+#if !HAVE_DECL_TZNAME
+extern char *tzname[];
+#endif
+]],
+[[return tzname[0][0];]])],
+		[ac_cv_var_tzname=yes],
+		[ac_cv_var_tzname=no])])
+  if test $ac_cv_var_tzname = yes; then
+    AC_DEFINE(HAVE_TZNAME, 1,
+	      [Define to 1 if you don't have `tm_zone' but do have the external
+	       array `tzname'.])
+  fi
+fi
+])# AC_STRUCT_TIMEZONE
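+
+# Illustrative use of the struct tm checks in configure.ac:
+#
+#      AC_STRUCT_TM
+#      AC_STRUCT_TIMEZONE
+#
+# The first defines TM_IN_SYS_TIME when `struct tm' lives in
+# <sys/time.h> rather than <time.h>; the second defines
+# HAVE_STRUCT_TM_TM_ZONE (and the deprecated HAVE_TM_ZONE) or, failing
+# that, HAVE_TZNAME when the external `tzname' array is available.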
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autom4te.cfg b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autom4te.cfg
new file mode 100644
index 0000000..7c01920
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autom4te.cfg
@@ -0,0 +1,164 @@
+# Definition of Autom4te option sets.                     -*- Makefile -*-
+#
+# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+# 2010 Free Software Foundation, Inc.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## -------------------------- ##
+## Autoheader preselections.  ##
+## -------------------------- ##
+
+begin-language: "Autoheader-preselections"
+args: --preselect AC_CONFIG_HEADERS
+args: --preselect AH_OUTPUT
+args: --preselect AC_DEFINE_TRACE_LITERAL
+end-language: "Autoheader-preselections"
+
+
+## ------------------------ ##
+## Automake-preselections.  ##
+## ------------------------ ##
+
+begin-language: "Automake-preselections"
+args: --preselect AC_CANONICAL_BUILD
+args: --preselect AC_CANONICAL_HOST
+# AC_CANONICAL_SYSTEM was traced from Automake 1.7 to Automake 1.9.
+# Later versions trace AC_CANONICAL_TARGET instead.
+args: --preselect AC_CANONICAL_SYSTEM
+args: --preselect AC_CANONICAL_TARGET
+args: --preselect AC_CONFIG_AUX_DIR
+args: --preselect AC_CONFIG_FILES
+args: --preselect AC_CONFIG_HEADERS
+args: --preselect AC_CONFIG_LIBOBJ_DIR
+args: --preselect AC_CONFIG_LINKS
+args: --preselect AC_FC_FREEFORM
+args: --preselect AC_FC_SRCEXT
+args: --preselect AC_INIT
+args: --preselect AC_LIBSOURCE
+args: --preselect AC_REQUIRE_AUX_FILE
+# Automake < 1.10 traces AC_SUBST.  Automake >= 1.10 traces AC_SUBST_TRACE.
+args: --preselect AC_SUBST
+args: --preselect AC_SUBST_TRACE
+args: --preselect AM_AUTOMAKE_VERSION
+args: --preselect AM_CONDITIONAL
+args: --preselect AM_ENABLE_MULTILIB
+args: --preselect AM_GNU_GETTEXT
+args: --preselect AM_GNU_GETTEXT_INTL_SUBDIR
+args: --preselect AM_INIT_AUTOMAKE
+args: --preselect AM_MAKEFILE_INCLUDE
+args: --preselect AM_MAINTAINER_MODE
+args: --preselect AM_NLS
+args: --preselect AM_POT_TOOLS
+args: --preselect AM_PATH_GUILE
+args: --preselect AM_PROG_AR
+args: --preselect AM_PROG_CC_C_O
+args: --preselect AM_PROG_CXX_C_O
+args: --preselect AM_PROG_F77_C_O
+args: --preselect AM_PROG_FC_C_O
+args: --preselect AM_PROG_MOC
+args: --preselect AM_SILENT_RULES
+args: --preselect AM_XGETTEXT_OPTION
+args: --preselect _AM_MAKEFILE_INCLUDE
+args: --preselect _AM_SUBST_NOTMAKE
+args: --preselect _AM_COND_IF
+args: --preselect _AM_COND_ELSE
+args: --preselect _AM_COND_ENDIF
+args: --preselect LT_SUPPORTED_TAG
+args: --preselect _LT_AC_TAGCONFIG
+args: --preselect m4_include
+args: --preselect m4_sinclude
+end-language: "Automake-preselections"
+
+
+## -------------------------- ##
+## Autoreconf-preselections.  ##
+## -------------------------- ##
+
+begin-language: "Autoreconf-preselections"
+args: --preselect AC_CONFIG_AUX_DIR
+args: --preselect AC_CONFIG_HEADERS
+args: --preselect AC_CONFIG_SUBDIRS
+args: --preselect AC_INIT
+args: --preselect AC_PROG_LIBTOOL
+args: --preselect LT_INIT
+args: --preselect LT_CONFIG_LTDL_DIR
+args: --preselect AM_GNU_GETTEXT
+end-language: "Autoreconf-preselections"
+
+
+## ----------------------------- ##
+## Autoconf without aclocal.m4.  ##
+## ----------------------------- ##
+
+# This intermediate language is used by aclocal to build aclocal.m4.
+
+begin-language: "Autoconf-without-aclocal-m4"
+args: --prepend-include '//share/autoconf'
+args: --cache=autom4te.cache
+args: autoconf/autoconf.m4f
+args: acsite.m4?
+args: --mode 777
+args: --language M4sh
+end-language: "Autoconf-without-aclocal-m4"
+
+
+## ---------- ##
+## Autoconf.  ##
+## ---------- ##
+
+begin-language: "Autoconf"
+args: --language Autoheader-preselections
+args: --language Automake-preselections
+args: --language Autoreconf-preselections
+args: --language Autoconf-without-aclocal-m4
+args: aclocal.m4?
+end-language: "Autoconf"
+
+
+## -------- ##
+## Autotest ##
+## -------- ##
+
+begin-language: "Autotest"
+args: --prepend-include '//share/autoconf'
+args: autotest/autotest.m4f
+args: package.m4?
+args: local.at?
+args: --mode 777
+args: --language M4sh
+end-language: "Autotest"
+
+
+## ---- ##
+## M4sh ##
+## ---- ##
+
+begin-language: "M4sh"
+args: --prepend-include '//share/autoconf'
+args: m4sugar/m4sh.m4f
+args: --mode 777
+args: --language M4sugar
+end-language: "M4sh"
+
+
+## ------- ##
+## M4sugar ##
+## ------- ##
+
+begin-language: "M4sugar"
+args: --prepend-include '//share/autoconf'
+args: m4sugar/m4sugar.m4f
+args: --warnings syntax
+end-language: "M4sugar"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoscan/autoscan.list b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoscan/autoscan.list
new file mode 100644
index 0000000..b23b977
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autoscan/autoscan.list
@@ -0,0 +1,268 @@
+# Automatically Generated: do not edit this file
+# autoscan's mapping to Autoconf macros      -*- Makefile -*-
+# Copyright (C) 1992, 1993, 1994, 1996, 1999, 2000, 2001, 2002, 2009,
+# 2010 Free Software Foundation, Inc.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+function: __argz_count		AC_CHECK_FUNCS
+function: __argz_next		AC_CHECK_FUNCS
+function: __argz_stringify		AC_CHECK_FUNCS
+function: __fpending		AC_CHECK_FUNCS
+function: acl		AC_CHECK_FUNCS
+function: alarm		AC_CHECK_FUNCS
+function: alloca		AC_FUNC_ALLOCA
+function: atexit		AC_CHECK_FUNCS
+function: btowc		AC_CHECK_FUNCS
+function: bzero		AC_CHECK_FUNCS
+function: chown		AC_FUNC_CHOWN
+function: clock_gettime		AC_CHECK_FUNCS
+function: dcgettext		AM_GNU_GETTEXT
+function: doprnt		AC_CHECK_FUNCS
+function: dup2		AC_CHECK_FUNCS
+function: endgrent		AC_CHECK_FUNCS
+function: endpwent		AC_CHECK_FUNCS
+function: error		AC_FUNC_ERROR_AT_LINE
+function: error_at_line		AC_FUNC_ERROR_AT_LINE
+function: euidaccess		AC_CHECK_FUNCS
+function: fchdir		AC_CHECK_FUNCS
+function: fdatasync		AC_CHECK_FUNCS
+function: fesetround		AC_CHECK_FUNCS
+function: floor		AC_CHECK_FUNCS
+function: fork		AC_FUNC_FORK
+function: fs_stat_dev		AC_CHECK_FUNCS
+function: fseeko		AC_FUNC_FSEEKO
+function: ftello		AC_FUNC_FSEEKO
+function: ftime		AC_CHECK_FUNCS
+function: ftruncate		AC_CHECK_FUNCS
+function: getcwd		AC_CHECK_FUNCS
+function: getdelim		AC_CHECK_FUNCS
+function: getgroups		AC_FUNC_GETGROUPS
+function: gethostbyaddr		AC_CHECK_FUNCS
+function: gethostbyname		AC_CHECK_FUNCS
+function: gethostname		AC_CHECK_FUNCS
+function: gethrtime		AC_CHECK_FUNCS
+function: getmntent		AC_CHECK_FUNCS
+function: getmntent		AC_FUNC_GETMNTENT
+function: getmntinfo		AC_CHECK_FUNCS
+function: getpagesize		AC_CHECK_FUNCS
+function: getpass		AC_CHECK_FUNCS
+function: getspnam		AC_CHECK_FUNCS
+function: gettimeofday		AC_CHECK_FUNCS
+function: getusershell		AC_CHECK_FUNCS
+function: getwd		warn: getwd is deprecated, use getcwd instead
+function: hasmntopt		AC_CHECK_FUNCS
+function: inet_ntoa		AC_CHECK_FUNCS
+function: isascii		AC_CHECK_FUNCS
+function: iswprint		AC_CHECK_FUNCS
+function: lchown		AC_CHECK_FUNCS
+function: listmntent		AC_CHECK_FUNCS
+function: localeconv		AC_CHECK_FUNCS
+function: localtime_r		AC_CHECK_FUNCS
+function: lstat		AC_FUNC_LSTAT_FOLLOWS_SLASHED_SYMLINK
+function: major		AC_HEADER_MAJOR
+function: makedev		AC_HEADER_MAJOR
+function: malloc		AC_FUNC_MALLOC
+function: mblen		AC_CHECK_FUNCS
+function: mbrlen		AC_CHECK_FUNCS
+function: mbrtowc		AC_FUNC_MBRTOWC
+function: memchr		AC_CHECK_FUNCS
+function: memmove		AC_CHECK_FUNCS
+function: mempcpy		AC_CHECK_FUNCS
+function: memset		AC_CHECK_FUNCS
+function: minor		AC_HEADER_MAJOR
+function: mkdir		AC_CHECK_FUNCS
+function: mkfifo		AC_CHECK_FUNCS
+function: mktime		AC_FUNC_MKTIME
+function: mmap		AC_FUNC_MMAP
+function: modf		AC_CHECK_FUNCS
+function: munmap		AC_CHECK_FUNCS
+function: next_dev		AC_CHECK_FUNCS
+function: nl_langinfo		AC_CHECK_FUNCS
+function: obstack_init		AC_FUNC_OBSTACK
+function: pathconf		AC_CHECK_FUNCS
+function: pow		AC_CHECK_FUNCS
+function: pstat_getdynamic		AC_CHECK_FUNCS
+function: putenv		AC_CHECK_FUNCS
+function: re_comp		AC_CHECK_FUNCS
+function: realloc		AC_FUNC_REALLOC
+function: realpath		AC_CHECK_FUNCS
+function: regcmp		AC_CHECK_FUNCS
+function: regcomp		AC_CHECK_FUNCS
+function: resolvepath		AC_CHECK_FUNCS
+function: rint		AC_CHECK_FUNCS
+function: rmdir		AC_CHECK_FUNCS
+function: rpmatch		AC_CHECK_FUNCS
+function: select		AC_CHECK_FUNCS
+function: setenv		AC_CHECK_FUNCS
+function: sethostname		AC_CHECK_FUNCS
+function: setlocale		AC_CHECK_FUNCS
+function: socket		AC_CHECK_FUNCS
+function: sqrt		AC_CHECK_FUNCS
+function: stime		AC_CHECK_FUNCS
+function: stpcpy		AC_CHECK_FUNCS
+function: strcasecmp		AC_CHECK_FUNCS
+function: strchr		AC_CHECK_FUNCS
+function: strcoll		AC_FUNC_STRCOLL
+function: strcspn		AC_CHECK_FUNCS
+function: strdup		AC_CHECK_FUNCS
+function: strerror		AC_CHECK_FUNCS
+function: strerror_r		AC_FUNC_STRERROR_R
+function: strncasecmp		AC_CHECK_FUNCS
+function: strndup		AC_CHECK_FUNCS
+function: strnlen		AC_FUNC_STRNLEN
+function: strpbrk		AC_CHECK_FUNCS
+function: strrchr		AC_CHECK_FUNCS
+function: strspn		AC_CHECK_FUNCS
+function: strstr		AC_CHECK_FUNCS
+function: strtod		AC_FUNC_STRTOD
+function: strtol		AC_CHECK_FUNCS
+function: strtoul		AC_CHECK_FUNCS
+function: strtoull		AC_CHECK_FUNCS
+function: strtoumax		AC_CHECK_FUNCS
+function: strverscmp		AC_CHECK_FUNCS
+function: sysinfo		AC_CHECK_FUNCS
+function: tzset		AC_CHECK_FUNCS
+function: uname		AC_CHECK_FUNCS
+function: utime		AC_CHECK_FUNCS
+function: utmpname		AC_CHECK_FUNCS
+function: utmpxname		AC_CHECK_FUNCS
+function: vfork		AC_FUNC_FORK
+function: wait3		AC_FUNC_WAIT3
+function: wcwidth		AC_CHECK_FUNCS
+header: OS.h		AC_CHECK_HEADERS
+header: X11/Xlib.h		AC_PATH_X
+header: alloca.h		AC_FUNC_ALLOCA
+header: argz.h		AC_CHECK_HEADERS
+header: arpa/inet.h		AC_CHECK_HEADERS
+header: fcntl.h		AC_CHECK_HEADERS
+header: fenv.h		AC_CHECK_HEADERS
+header: float.h		AC_CHECK_HEADERS
+header: fs_info.h		AC_CHECK_HEADERS
+header: inttypes.h		AC_CHECK_HEADERS
+header: langinfo.h		AC_CHECK_HEADERS
+header: libintl.h		AC_CHECK_HEADERS
+header: limits.h		AC_CHECK_HEADERS
+header: locale.h		AC_CHECK_HEADERS
+header: mach/mach.h		AC_CHECK_HEADERS
+header: malloc.h		AC_CHECK_HEADERS
+header: memory.h		AC_CHECK_HEADERS
+header: mntent.h		AC_CHECK_HEADERS
+header: mnttab.h		AC_CHECK_HEADERS
+header: netdb.h		AC_CHECK_HEADERS
+header: netinet/in.h		AC_CHECK_HEADERS
+header: nl_types.h		AC_CHECK_HEADERS
+header: nlist.h		AC_CHECK_HEADERS
+header: paths.h		AC_CHECK_HEADERS
+header: resolv.h		AC_HEADER_RESOLV
+header: sgtty.h		AC_CHECK_HEADERS
+header: shadow.h		AC_CHECK_HEADERS
+header: stddef.h		AC_CHECK_HEADERS
+header: stdint.h		AC_CHECK_HEADERS
+header: stdio_ext.h		AC_CHECK_HEADERS
+header: stdlib.h		AC_CHECK_HEADERS
+header: string.h		AC_CHECK_HEADERS
+header: strings.h		AC_CHECK_HEADERS
+header: sys/acl.h		AC_CHECK_HEADERS
+header: sys/file.h		AC_CHECK_HEADERS
+header: sys/filsys.h		AC_CHECK_HEADERS
+header: sys/fs/s5param.h		AC_CHECK_HEADERS
+header: sys/fs_types.h		AC_CHECK_HEADERS
+header: sys/fstyp.h		AC_CHECK_HEADERS
+header: sys/ioctl.h		AC_CHECK_HEADERS
+header: sys/mkdev.h		AC_HEADER_MAJOR
+header: sys/mntent.h		AC_CHECK_HEADERS
+header: sys/mount.h		AC_CHECK_HEADERS
+header: sys/param.h		AC_CHECK_HEADERS
+header: sys/socket.h		AC_CHECK_HEADERS
+header: sys/statfs.h		AC_CHECK_HEADERS
+header: sys/statvfs.h		AC_CHECK_HEADERS
+header: sys/systeminfo.h		AC_CHECK_HEADERS
+header: sys/time.h		AC_CHECK_HEADERS
+header: sys/timeb.h		AC_CHECK_HEADERS
+header: sys/vfs.h		AC_CHECK_HEADERS
+header: sys/window.h		AC_CHECK_HEADERS
+header: syslog.h		AC_CHECK_HEADERS
+header: termio.h		AC_CHECK_HEADERS
+header: termios.h		AC_CHECK_HEADERS
+header: unistd.h		AC_CHECK_HEADERS
+header: utime.h		AC_CHECK_HEADERS
+header: utmp.h		AC_CHECK_HEADERS
+header: utmpx.h		AC_CHECK_HEADERS
+header: values.h		AC_CHECK_HEADERS
+header: wchar.h		AC_CHECK_HEADERS
+header: wctype.h		AC_CHECK_HEADERS
+identifier: bool		AC_HEADER_STDBOOL
+identifier: false		AC_HEADER_STDBOOL
+identifier: gid_t		AC_TYPE_UID_T
+identifier: inline		AC_C_INLINE
+identifier: int16_t		AC_TYPE_INT16_T
+identifier: int32_t		AC_TYPE_INT32_T
+identifier: int64_t		AC_TYPE_INT64_T
+identifier: int8_t		AC_TYPE_INT8_T
+identifier: mode_t		AC_TYPE_MODE_T
+identifier: obstack		AC_FUNC_OBSTACK
+identifier: off_t		AC_TYPE_OFF_T
+identifier: pid_t		AC_TYPE_PID_T
+identifier: ptrdiff_t		AC_CHECK_TYPES
+identifier: restrict		AC_C_RESTRICT
+identifier: size_t		AC_TYPE_SIZE_T
+identifier: ssize_t		AC_TYPE_SSIZE_T
+identifier: st_blksize		AC_CHECK_MEMBERS([struct stat.st_blksize])
+identifier: st_blocks		AC_STRUCT_ST_BLOCKS
+identifier: st_rdev		AC_CHECK_MEMBERS([struct stat.st_rdev])
+identifier: sys_siglist		AC_CHECK_DECLS([sys_siglist])
+identifier: tm_zone		AC_STRUCT_TIMEZONE
+identifier: true		AC_HEADER_STDBOOL
+identifier: uid_t		AC_TYPE_UID_T
+identifier: uint16_t		AC_TYPE_UINT16_T
+identifier: uint32_t		AC_TYPE_UINT32_T
+identifier: uint64_t		AC_TYPE_UINT64_T
+identifier: uint8_t		AC_TYPE_UINT8_T
+makevar: AWK		AC_PROG_AWK
+makevar: BISON		AC_PROG_YACC
+makevar: CC		AC_PROG_CC
+makevar: CPP		AC_PROG_CPP
+makevar: CXX		AC_PROG_CXX
+makevar: INSTALL		AC_PROG_INSTALL
+makevar: LEX		AC_PROG_LEX
+makevar: LN		AC_PROG_LN_S
+makevar: MAKE		AC_PROG_MAKE_SET
+makevar: MKDIR_P		AC_PROG_MKDIR_P
+makevar: OBJC		AC_PROG_OBJC
+makevar: OBJCXX		AC_PROG_OBJCXX
+makevar: RANLIB		AC_PROG_RANLIB
+makevar: YACC		AC_PROG_YACC
+makevar: YFLAGS		AC_PROG_YACC
+program: CC		AC_PROG_CXX
+program: awk		AC_PROG_AWK
+program: bison		AC_PROG_YACC
+program: byacc		AC_PROG_YACC
+program: c++		AC_PROG_CXX
+program: cc		AC_PROG_CC
+program: cpp		AC_PROG_CPP
+program: flex		AC_PROG_LEX
+program: g++		AC_PROG_CXX
+program: gawk		AC_PROG_AWK
+program: gcc		AC_PROG_CC
+program: install		AC_PROG_INSTALL
+program: lex		AC_PROG_LEX
+program: ln		AC_PROG_LN_S
+program: make		AC_PROG_MAKE_SET
+program: mawk		AC_PROG_AWK
+program: nawk		AC_PROG_AWK
+program: objc		AC_PROG_OBJC
+program: objcc		AC_PROG_OBJC
+program: objcxx		AC_PROG_OBJCXX
+program: ranlib		AC_PROG_RANLIB
+program: yacc		AC_PROG_YACC
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/autotest.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/autotest.m4
new file mode 100644
index 0000000..5081ccf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/autotest.m4
@@ -0,0 +1,27 @@
+# This file is part of Autoconf.			-*- Autoconf -*-
+# M4 macros used in building test suites.
+# Copyright (C) 2000, 2001, 2002, 2009, 2010 Free Software Foundation,
+# Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+m4_include([autotest/general.m4])
+m4_include([autotest/specific.m4])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/autotest.m4f b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/autotest.m4f
new file mode 100644
index 0000000..9fdeb04
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/autotest.m4f
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/general.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/general.m4
new file mode 100644
index 0000000..78f1cc6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/general.m4
@@ -0,0 +1,2216 @@
+# This file is part of Autoconf.                          -*- Autoconf -*-
+# M4 macros used in building test suites.
+m4_define([_AT_COPYRIGHT_YEARS], [
+Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,
+2009, 2010 Free Software Foundation, Inc.
+])
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+
+# _m4_divert(DIVERSION-NAME)
+# --------------------------
+# Convert a diversion name into its number.  Otherwise, return
+# DIVERSION-NAME which is supposed to be an actual diversion number.
+# Of course it would be nicer to use m4_case here, instead of zillions
+# of little macros, but it then takes twice as long to run `autoconf'!
+#
+# From M4sugar:
+#    -1. KILL
+# 10000. GROW
+#
+# From M4sh:
+#    0. BINSH
+#    1. HEADER-REVISION
+#    2. HEADER-COMMENT
+#    3. HEADER-COPYRIGHT
+#    4. M4SH-SANITIZE
+#    5. M4SH-INIT
+# 1000. BODY
+#
+# Defined below:
+#  - DEFAULTS
+#    Overall initialization, value of $at_groups_all.
+#  - PARSE_ARGS_BEGIN
+#    Setup defaults required for option processing.
+#  - PARSE_ARGS
+#    Option processing.  After AT_INIT, user options can be entered here as
+#    cases of a case statement.
+#  - PARSE_ARGS_END
+#    Finish up the option processing.
+#
+#  - HELP
+#    Start printing the help message.
+#  - HELP_MODES
+#    Modes help text.  Additional modes can be appended as self-contained
+#    cat'd here-docs as generated by AS_HELP_STRING.
+#  - HELP_TUNING_BEGIN
+#    Tuning help text.  This is for Autotest-provided text.
+#  - HELP_TUNING
+#    Additional tuning options' help text can be appended here as
+#    self-contained cat'd here-docs as generated by AS_HELP_STRING.
+#  - HELP_OTHER
+#    User help can be appended to this as self-contained cat'd here-docs.
+#  - HELP_END
+#    Finish up the help texts.
+#
+#  - VERSION
+#    Head of the handling of --version.
+#  - VERSION_NOTICES
+#    Copyright notices for --version.
+#  - VERSION_END
+#    Tail of the handling of --version.
+#
+#  - BANNERS
+#    Output shell initialization for the associative array of banner text.
+#  - TESTS_BEGIN
+#    Like DEFAULTS but run after argument processing for purposes of
+#    optimization.  Do anything else that needs to be done to prepare for
+#    tests.  Sets up verbose and log file descriptors.  Sets and logs PATH.
+#  - PREPARE_TESTS
+#    Declares functions shared among the tests.  Perform any user
+#    initialization to be shared among all tests.
+#  - TESTS
+#    The core of the test suite.
+#
+#  - TEST_SCRIPT
+#    The collector for code for each test, the ``normal'' diversion, but
+#    undiverted into other locations before final output.
+#
+#  - TEST_GROUPS
+#    Contents of each test group.  The tests deliberately occur after the
+#    end of the shell script, so that the shell need not spend time parsing
+#    commands it will not execute.
+
+m4_define([_m4_divert(DEFAULTS)],           100)
+m4_define([_m4_divert(PARSE_ARGS_BEGIN)],   200)
+m4_define([_m4_divert(PARSE_ARGS)],         201)
+m4_define([_m4_divert(PARSE_ARGS_END)],     202)
+m4_define([_m4_divert(HELP)],               300)
+m4_define([_m4_divert(HELP_MODES)],         301)
+m4_define([_m4_divert(HELP_TUNING_BEGIN)],  302)
+m4_define([_m4_divert(HELP_TUNING)],        303)
+m4_define([_m4_divert(HELP_OTHER)],         304)
+m4_define([_m4_divert(HELP_END)],           305)
+m4_define([_m4_divert(VERSION)],            350)
+m4_define([_m4_divert(VERSION_NOTICES)],    351)
+m4_define([_m4_divert(VERSION_END)],        352)
+m4_define([_m4_divert(BANNERS)],            400)
+m4_define([_m4_divert(TESTS_BEGIN)],        401)
+m4_define([_m4_divert(PREPARE_TESTS)],      402)
+m4_define([_m4_divert(TESTS)],              403)
+m4_define([_m4_divert(TEST_SCRIPT)],        450)
+m4_define([_m4_divert(TEST_GROUPS)],        500)
+
+
+# AT_LINE
+# -------
+# Return the current file sans directory, a colon, and the current
+# line.  Be sure to return a _quoted_ file name, so if, for instance,
+# the user is lunatic enough to have a file named `dnl' (and I, for
+# one, love to be brainless and stubborn sometimes), then we return a
+# quoted name.
+#
+# Gee, we can't use simply
+#
+#  m4_bpatsubst(__file__, [^.*/\(.*\)], [[\1]])
+#
+# since then, because `dnl' doesn't match the pattern, it is returned
+# with one quotation level less, so you lose!  And since GNU M4
+# is one of the biggest pieces of junk in the whole universe with respect
+# to regexps, don't
+# even think about using `?' or `\?'.  Bah, `*' will do.
+# Pleeeeeeeease, Gary, provide us with dirname and ERE!
+#
+# M4 recompiles the regular expression for every m4_bpatsubst, but __file__
+# rarely changes.  Be fast - only compute the dirname when necessary; for
+# autoconf alone, this shaves off several seconds when building the testsuite.
+m4_define([_AT_LINE_file])
+m4_define([_AT_LINE_base])
+m4_define([AT_LINE],
+[m4_if(m4_defn([_AT_LINE_file]), __file__, [],
+       [m4_do([m4_define([_AT_LINE_file], __file__)],
+	      [m4_define([_AT_LINE_base],
+			 m4_bregexp(/__file__, [/\([^/]*\)$], [[\1]]))])])dnl
+m4_defn([_AT_LINE_base]):__line__])
+
+# _AT_LINE_ESCAPED
+# ----------------
+# Same as AT_LINE, but already escaped for the shell.
+m4_define([_AT_LINE_ESCAPED], ["AS_ESCAPE(m4_dquote(AT_LINE))"])
+
+
+# _AT_NORMALIZE_TEST_GROUP_NUMBER(SHELL-VAR)
+# ------------------------------------------
+# Normalize SHELL-VAR so that its value has the same number of digits as
+# all the other test group numbers.
+m4_define([_AT_NORMALIZE_TEST_GROUP_NUMBER],
+[
+  eval 'while :; do
+    case $$1 in #(
+    '"$at_format"'*) break;;
+    esac
+    $1=0$$1
+  done'
+])
+
+# _AT_DEFINE_INIT(NAME, [DEFINITION])
+# -----------------------------------
+# Define macro NAME to die if invoked prior to AT_INIT, and to DEFINITION
+# after AT_INIT.
+m4_define([_AT_DEFINE_INIT],
+[m4_define($@)m4_pushdef([$1], [m4_fatal([$1: missing AT_INIT detected])])dnl
+m4_append([_AT_DEFINE_INIT_LIST], [[$1]], [,])])
+
+# _AT_DEFINE_SETUP(NAME, [DEFINITION])
+# ------------------------------------
+# Define macro NAME to die if invoked outside AT_SETUP/AT_CLEANUP, and
+# to DEFINITION otherwise.
+m4_define([_AT_DEFINE_SETUP],
+[m4_define([$1], [m4_ifndef([AT_ingroup],
+ [m4_fatal([$1: missing AT_SETUP detected])])$2])])
+
+
+# AT_INIT([TESTSUITE-NAME])
+# -------------------------
+# Begin test suite.
+m4_define([AT_INIT],
+[m4_pushdef([AT_INIT], [m4_fatal([$0: invoked multiple times])])]
+[m4_pattern_forbid([^_?AT_])]
+[m4_pattern_allow([^_ATEOF$])]
+[m4_ifndef([AT_PACKAGE_BUGREPORT], [m4_fatal(
+  [$1: AT_PACKAGE_BUGREPORT is missing, consider writing package.m4])])]
+[m4_define([AT_TESTSUITE_NAME],
+  m4_defn([AT_PACKAGE_STRING])[ test suite]m4_ifval([$1],
+   [m4_expand([: $1])]))]
+[m4_define([AT_ordinal], 0)]
+[m4_define([AT_banner_ordinal], 0)]
+[m4_define([AT_help_all], [])]
+[m4_map_args([_m4_popdef], _AT_DEFINE_INIT_LIST)]
+[m4_wrap([_AT_FINISH])]
+[AS_INIT[]]dnl
+dnl We don't use m4sh's BODY diversion, but AS_INIT sticks a banner there.
+dnl This trick removes that banner, since it adds nothing to autotest.
+[m4_cleardivert([BODY])]dnl
+[AS_ME_PREPARE[]]dnl
+[m4_divert_push([DEFAULTS])]dnl
+[AT_COPYRIGHT(m4_defn([_AT_COPYRIGHT_YEARS]), [
+m4_copyright_condense])]
+[AT_COPYRIGHT(
+[This test suite is free software; the Free Software Foundation gives
+unlimited permission to copy, distribute and modify it.], [m4_echo])]
+[AS_PREPARE
+
+SHELL=${CONFIG_SHELL-/bin/sh}
+
+# How were we run?
+at_cli_args="$[@]"
+
+m4_divert_push([BANNERS])dnl
+
+# Should we print banners?  Yes if more than one test is run.
+case $at_groups in #(
+  *$as_nl* )
+      at_print_banners=: ;; #(
+  * ) at_print_banners=false ;;
+esac
+# Text for banner N, set to a single space once printed.
+m4_divert_pop([BANNERS])dnl back to DEFAULTS
+m4_divert_push([PREPARE_TESTS])dnl
+
+m4_text_box([Autotest shell functions.])
+
+AS_FUNCTION_DESCRIBE([at_fn_banner], [NUMBER],
+[Output banner NUMBER, provided the testsuite is running multiple groups
+and this particular banner has not yet been printed.])
+at_fn_banner ()
+{
+  $at_print_banners || return 0
+  eval at_banner_text=\$at_banner_text_$[1]
+  test "x$at_banner_text" = "x " && return 0
+  eval "at_banner_text_$[1]=\" \""
+  if test -z "$at_banner_text"; then
+    $at_first || echo
+  else
+    AS_ECHO(["$as_nl$at_banner_text$as_nl"])
+  fi
+} # at_fn_banner
+
+AS_FUNCTION_DESCRIBE([at_fn_check_prepare_notrace], [REASON LINE],
+[Perform AT_CHECK preparations for the command at LINE for an
+untraceable command; REASON is the reason for disabling tracing.])
+at_fn_check_prepare_notrace ()
+{
+  $at_trace_echo "Not enabling shell tracing (command contains $[1])"
+  AS_ECHO(["$[2]"]) >"$at_check_line_file"
+  at_check_trace=: at_check_filter=:
+  : >"$at_stdout"; : >"$at_stderr"
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_check_prepare_trace], [LINE],
+[Perform AT_CHECK preparations for the command at LINE for a traceable
+command.])
+at_fn_check_prepare_trace ()
+{
+  AS_ECHO(["$[1]"]) >"$at_check_line_file"
+  at_check_trace=$at_traceon at_check_filter=$at_check_filter_trace
+  : >"$at_stdout"; : >"$at_stderr"
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_check_prepare_dynamic], [COMMAND LINE],
+[Decide if COMMAND at LINE is traceable at runtime, and call the
+appropriate preparation function.])
+at_fn_check_prepare_dynamic ()
+{
+  case $[1] in
+    *$as_nl*)
+      at_fn_check_prepare_notrace 'an embedded newline' "$[2]" ;;
+    *)
+      at_fn_check_prepare_trace "$[2]" ;;
+  esac
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_filter_trace], [],
+[Remove the lines in the file "$at_stderr" generated by "set -x" and print
+them to stderr.])
+at_fn_filter_trace ()
+{
+  mv "$at_stderr" "$at_stder1"
+  grep '^ *+' "$at_stder1" >&2
+  grep -v '^ *+' "$at_stder1" >"$at_stderr"
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_log_failure], [FILE-LIST],
+[Copy the files in FILE-LIST to stdout, prefixing each line with "> ", and
+exit the shell with a failure exit code.])
+at_fn_log_failure ()
+{
+  for file
+    do AS_ECHO(["$file:"]); sed 's/^/> /' "$file"; done
+  echo 1 > "$at_status_file"
+  exit 1
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_check_skip], [EXIT-CODE LINE],
+[Check whether EXIT-CODE is a special exit code (77 or 99), and if so exit
+the test group subshell with that same exit code.  Use LINE in any report
+about test failure.])
+at_fn_check_skip ()
+{
+  case $[1] in
+    99) echo 99 > "$at_status_file"; at_failed=:
+	AS_ECHO(["$[2]: hard failure"]); exit 99;;
+    77) echo 77 > "$at_status_file"; exit 77;;
+  esac
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_check_status], [EXPECTED EXIT-CODE LINE],
+[Check whether EXIT-CODE is the EXPECTED exit code, and if so do nothing.
+Otherwise, if it is 77 or 99, exit the test group subshell with that same
+exit code; if it is anything else print an error message referring to LINE,
+and fail the test.])
+at_fn_check_status ()
+{
+dnl This order ensures that we don't `skip' if we are precisely checking
+dnl $? = 77 or $? = 99.
+  case $[2] in
+    $[1] ) ;;
+    77) echo 77 > "$at_status_file"; exit 77;;
+    99) echo 99 > "$at_status_file"; at_failed=:
+	AS_ECHO(["$[3]: hard failure"]); exit 99;;
+    *) AS_ECHO(["$[3]: exit code was $[2], expected $[1]"])
+      at_failed=:;;
+  esac
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_diff_devnull], [FILE],
+[Emit a diff between /dev/null and FILE.  Uses "test -s" to avoid useless
+diff invocations.])
+at_fn_diff_devnull ()
+{
+  test -s "$[1]" || return 0
+  $at_diff "$at_devnull" "$[1]"
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_test], [NUMBER],
+[Parse out test NUMBER from the tail of this file.])
+at_fn_test ()
+{
+  eval at_sed=\$at_sed$[1]
+  sed "$at_sed" "$at_myself" > "$at_test_source"
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_create_debugging_script], [],
+[Create the debugging script $at_group_dir/run which will reproduce the
+current test group.])
+at_fn_create_debugging_script ()
+{
+  {
+    echo "#! /bin/sh" &&
+    echo 'test "${ZSH_VERSION+set}" = set dnl
+&& alias -g '\''${1+"$[@]"}'\''='\''"$[@]"'\''' &&
+    AS_ECHO(["cd '$at_dir'"]) &&
+    AS_ECHO(["exec \${CONFIG_SHELL-$SHELL} \"$at_myself\" -v -d ]dnl
+[$at_debug_args $at_group \${1+\"\$[@]\"}"]) &&
+    echo 'exit 1'
+  } >"$at_group_dir/run" &&
+  chmod +x "$at_group_dir/run"
+}
+
+m4_text_box([End of autotest shell functions.])
+m4_divert_pop([PREPARE_TESTS])dnl back to DEFAULTS
+
+# Not all shells have the 'times' builtin; the subshell is needed to make
+# sure we discard the 'times: not found' message from the shell.
+at_times_p=false
+(times) >/dev/null 2>&1 && at_times_p=:
+
+# CLI arguments to pass to the debugging scripts.
+at_debug_args=
+# -e sets this to true.
+at_errexit_p=false
+# Shall we be verbose?  ':' means no, empty means yes.
+at_verbose=:
+at_quiet=
+# Number of jobs to run in parallel; 0 means as many as there are test groups.
+at_jobs=1
+at_traceon=:
+at_trace_echo=:
+at_check_filter_trace=:
+
+# Shall we keep the debug scripts?  Must be `:' when the suite is
+# run by a debug script, so that the script doesn't remove itself.
+at_debug_p=false
+# Display help message?
+at_help_p=false
+# Display the version message?
+at_version_p=false
+# List test groups?
+at_list_p=false
+# --clean
+at_clean=false
+# Test groups to run
+at_groups=
+# Whether to rerun failed tests.
+at_recheck=
+# Whether a write failure occurred
+at_write_fail=0
+
+# The directory we run the suite in.  Default to . if no -C option.
+at_dir=`pwd`
+# An absolute reference to this testsuite script.
+dnl m4-double quote, to preserve []
+[case $as_myself in
+  [\\/]* | ?:[\\/]* ) at_myself=$as_myself ;;
+  * ) at_myself=$at_dir/$as_myself ;;
+esac]
+# Whether -C is in effect.
+at_change_dir=false
+m4_divert_pop([DEFAULTS])dnl
+m4_define([_AT_FINISH],
+[m4_ifdef([AT_ingroup], [m4_fatal([missing AT_CLEANUP detected])])dnl
+m4_divert_text([DEFAULTS],
+[
+# Whether to enable colored test results.
+at_color=m4_ifdef([AT_color], [AT_color], [no])
+# List of the tested programs.
+at_tested='m4_ifdef([AT_tested],
+  [m4_translit(m4_dquote(m4_defn([AT_tested])), [ ], m4_newline)])'
+# As many question marks as there are digits in the last test group number.
+# Used to normalize the test group numbers so that `ls' lists them in
+# numerical order.
+at_format='m4_bpatsubst(m4_defn([AT_ordinal]), [.], [?])'
+# Description of all the test groups.
+at_help_all="AS_ESCAPE(m4_dquote(m4_defn([AT_help_all])))"
+# List of all the test groups.
+at_groups_all=`AS_ECHO(["$at_help_all"]) | sed 's/;.*//'`
+
+AS_FUNCTION_DESCRIBE([at_fn_validate_ranges], [NAME...],
+[Validate and normalize the test group number contained in each
+variable NAME.  Leading zeroes are treated as decimal.])
+at_fn_validate_ranges ()
+{
+  for at_grp
+  do
+    eval at_value=\$$at_grp
+    if test $at_value -lt 1 || test $at_value -gt AT_ordinal; then
+      AS_ECHO(["invalid test group: $at_value"]) >&2
+      exit 1
+    fi
+    case $at_value in
+      0*) # We want to treat leading 0 as decimal, like expr and test, but
+	  # AS_VAR_ARITH treats it as octal if it uses $(( )).
+	  # With XSI shells, ${at_value#${at_value%%[1-9]*}} avoids the
+	  # expr fork, but it is not worth the effort to determine if the
+	  # shell supports XSI when the user can just avoid leading 0.
+	  eval $at_grp='`expr $at_value + 0`' ;;
+    esac
+  done
+}])])dnl
+m4_divert_push([PARSE_ARGS])dnl
+
+at_prev=
+for at_option
+do
+  # If the previous option needs an argument, assign it.
+  if test -n "$at_prev"; then
+    at_option=$at_prev=$at_option
+    at_prev=
+  fi
+
+  case $at_option in
+  *=?*) at_optarg=`expr "X$at_option" : '[[^=]]*=\(.*\)'` ;;
+  *)    at_optarg= ;;
+  esac
+
+  # Accept the important Cygnus configure options, so we can diagnose typos.
+
+  case $at_option in
+    --help | -h )
+	at_help_p=:
+	;;
+
+    --list | -l )
+	at_list_p=:
+	;;
+
+    --version | -V )
+	at_version_p=:
+	;;
+
+    --clean | -c )
+	at_clean=:
+	;;
+
+    --color )
+	at_color=always
+	;;
+    --color=* )
+	case $at_optarg in
+	no | never | none) at_color=never ;;
+	auto | tty | if-tty) at_color=auto ;;
+	always | yes | force) at_color=always ;;
+	*) at_optname=`echo " $at_option" | sed 's/^ //; s/=.*//'`
+	   AS_ERROR([unrecognized argument to $at_optname: $at_optarg]) ;;
+	esac
+	;;
+
+    --debug | -d )
+	at_debug_p=:
+	;;
+
+    --errexit | -e )
+	at_debug_p=:
+	at_errexit_p=:
+	;;
+
+    --verbose | -v )
+	at_verbose=; at_quiet=:
+	;;
+
+    --trace | -x )
+	at_traceon='set -x'
+	at_trace_echo=echo
+	at_check_filter_trace=at_fn_filter_trace
+	;;
+
+    [[0-9] | [0-9][0-9] | [0-9][0-9][0-9] | [0-9][0-9][0-9][0-9]])
+	at_fn_validate_ranges at_option
+	AS_VAR_APPEND([at_groups], ["$at_option$as_nl"])
+	;;
+
+    # Ranges
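+    # For instance, assuming the generated script is named `testsuite',
+    # `./testsuite 10-12' selects groups 10 through 12, `-3' everything up
+    # to and including group 3, and `15-' group 15 and everything after it.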
+    [[0-9]- | [0-9][0-9]- | [0-9][0-9][0-9]- | [0-9][0-9][0-9][0-9]-])
+	at_range_start=`echo $at_option |tr -d X-`
+	at_fn_validate_ranges at_range_start
+	at_range=`AS_ECHO(["$at_groups_all"]) | \
+	  sed -ne '/^'$at_range_start'$/,$p'`
+	AS_VAR_APPEND([at_groups], ["$at_range$as_nl"])
+	;;
+
+    [-[0-9] | -[0-9][0-9] | -[0-9][0-9][0-9] | -[0-9][0-9][0-9][0-9]])
+	at_range_end=`echo $at_option |tr -d X-`
+	at_fn_validate_ranges at_range_end
+	at_range=`AS_ECHO(["$at_groups_all"]) | \
+	  sed -ne '1,/^'$at_range_end'$/p'`
+	AS_VAR_APPEND([at_groups], ["$at_range$as_nl"])
+	;;
+
+    [[0-9]-[0-9] | [0-9]-[0-9][0-9] | [0-9]-[0-9][0-9][0-9]] | \
+    [[0-9]-[0-9][0-9][0-9][0-9] | [0-9][0-9]-[0-9][0-9]] | \
+    [[0-9][0-9]-[0-9][0-9][0-9] | [0-9][0-9]-[0-9][0-9][0-9][0-9]] | \
+    [[0-9][0-9][0-9]-[0-9][0-9][0-9]] | \
+    [[0-9][0-9][0-9]-[0-9][0-9][0-9][0-9]] | \
+    [[0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9]] )
+	at_range_start=`expr $at_option : '\(.*\)-'`
+	at_range_end=`expr $at_option : '.*-\(.*\)'`
+	if test $at_range_start -gt $at_range_end; then
+	  at_tmp=$at_range_end
+	  at_range_end=$at_range_start
+	  at_range_start=$at_tmp
+	fi
+	at_fn_validate_ranges at_range_start at_range_end
+	at_range=`AS_ECHO(["$at_groups_all"]) | \
+	  sed -ne '/^'$at_range_start'$/,/^'$at_range_end'$/p'`
+	AS_VAR_APPEND([at_groups], ["$at_range$as_nl"])
+	;;
+
+    # Directory selection.
+    --directory | -C )
+	at_prev=--directory
+	;;
+    --directory=* )
+	at_change_dir=:
+	at_dir=$at_optarg
+	if test x- = "x$at_dir" ; then
+	  at_dir=./-
+	fi
+	;;
+
+    # Parallel execution.
+    --jobs | -j )
+	at_jobs=0
+	;;
+    --jobs=* | -j[[0-9]]* )
+	if test -n "$at_optarg"; then
+	  at_jobs=$at_optarg
+	else
+	  at_jobs=`expr X$at_option : 'X-j\(.*\)'`
+	fi
+	case $at_jobs in *[[!0-9]]*)
+	  at_optname=`echo " $at_option" | sed 's/^ //; s/[[0-9=]].*//'`
+	  AS_ERROR([non-numeric argument to $at_optname: $at_jobs]) ;;
+	esac
+	;;
+
+    # Keywords.
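+    # For instance, `./testsuite -k autoconf,cross' selects only the groups
+    # whose keywords or titles match both words; prefixing a keyword with
+    # `!' excludes the matching groups instead.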
+    --keywords | -k )
+	at_prev=--keywords
+	;;
+    --keywords=* )
+	at_groups_selected=$at_help_all
+	at_save_IFS=$IFS
+	IFS=,
+	set X $at_optarg
+	shift
+	IFS=$at_save_IFS
+	for at_keyword
+	do
+	  at_invert=
+	  case $at_keyword in
+	  '!'*)
+	    at_invert="-v"
+	    at_keyword=`expr "X$at_keyword" : 'X!\(.*\)'`
+	    ;;
+	  esac
+	  # It is on purpose that we match the test group titles too.
+	  at_groups_selected=`AS_ECHO(["$at_groups_selected"]) |
+	      grep -i $at_invert ["^[1-9][^;]*;.*[; ]$at_keyword[ ;]"]`
+	done
+	# Smash the keywords.
+	at_groups_selected=`AS_ECHO(["$at_groups_selected"]) | sed 's/;.*//'`
+	AS_VAR_APPEND([at_groups], ["$at_groups_selected$as_nl"])
+	;;
+    --recheck)
+	at_recheck=:
+	;;
+m4_divert_pop([PARSE_ARGS])dnl
+dnl Process *=* last to allow for user specified --option=* type arguments.
+m4_divert_push([PARSE_ARGS_END])dnl
+
+    *=*)
+	at_envvar=`expr "x$at_option" : 'x\([[^=]]*\)='`
+	# Reject names that are not valid shell variable names.
+	case $at_envvar in
+	  '' | [[0-9]]* | *[[!_$as_cr_alnum]]* )
+	    AS_ERROR([invalid variable name: `$at_envvar']) ;;
+	esac
+	at_value=`AS_ECHO(["$at_optarg"]) | sed "s/'/'\\\\\\\\''/g"`
+	# Export now, but save eval for later and for debug scripts.
+	export $at_envvar
+	AS_VAR_APPEND([at_debug_args], [" $at_envvar='$at_value'"])
+	;;
+
+     *) AS_ECHO(["$as_me: invalid option: $at_option"]) >&2
+	AS_ECHO(["Try \`$[0] --help' for more information."]) >&2
+	exit 1
+	;;
+  esac
+done
+
+# Verify our last option didn't require an argument
+AS_IF([test -n "$at_prev"], [AS_ERROR([`$at_prev' requires an argument])])
+
+# The file containing the suite.
+at_suite_log=$at_dir/$as_me.log
+
+# Selected test groups.
+if test -z "$at_groups$at_recheck"; then
+  at_groups=$at_groups_all
+else
+  if test -n "$at_recheck" && test -r "$at_suite_log"; then
+    at_oldfails=`sed -n ['
+      /^Failed tests:$/,/^Skipped tests:$/{
+	s/^[ ]*\([1-9][0-9]*\):.*/\1/p
+      }
+      /^Unexpected passes:$/,/^## Detailed failed tests/{
+	s/^[ ]*\([1-9][0-9]*\):.*/\1/p
+      }
+      /^## Detailed failed tests/q
+      '] "$at_suite_log"`
+    AS_VAR_APPEND([at_groups], ["$at_oldfails$as_nl"])
+  fi
+  # Sort the tests, removing duplicates.
+  at_groups=`AS_ECHO(["$at_groups"]) | sort -nu | sed '/^$/d'`
+fi
+
+if test x"$at_color" = xalways \
+   || { test x"$at_color" = xauto && test -t 1; }; then
+  at_red=`printf '\033@<:@0;31m'`
+  at_grn=`printf '\033@<:@0;32m'`
+  at_lgn=`printf '\033@<:@1;32m'`
+  at_blu=`printf '\033@<:@1;34m'`
+  at_std=`printf '\033@<:@m'`
+else
+  at_red= at_grn= at_lgn= at_blu= at_std=
+fi
+m4_divert_pop([PARSE_ARGS_END])dnl
+m4_divert_push([HELP])dnl
+
+# Help message.
+if $at_help_p; then
+  cat <<_ATEOF || at_write_fail=1
+Usage: $[0] [[OPTION]... [VARIABLE=VALUE]... [TESTS]]
+
+Run all the tests, or the selected TESTS, given by numeric ranges, and
+save a detailed log file.  Upon failure, create debugging scripts.
+
+Do not change environment variables directly.  Instead, set them via
+command line arguments.  Set \`AUTOTEST_PATH' to select the executables
+to exercise.  Each relative directory is expanded as build and source
+directories relative to the top level of this distribution.
+E.g., from within the build directory /tmp/foo-1.0, invoking this:
+
+  $ $[0] AUTOTEST_PATH=bin
+
+is equivalent to the following, assuming the source directory is /src/foo-1.0:
+
+  PATH=/tmp/foo-1.0/bin:/src/foo-1.0/bin:\$PATH $[0]
+_ATEOF
+m4_divert_pop([HELP])dnl
+m4_divert_push([HELP_MODES])dnl
+cat <<_ATEOF || at_write_fail=1
+
+Operation modes:
+  -h, --help     print the help message, then exit
+  -V, --version  print version number, then exit
+  -c, --clean    remove all the files this test suite might create and exit
+  -l, --list     describe all the tests, or the selected TESTS
+_ATEOF
+m4_divert_pop([HELP_MODES])dnl
+m4_wrap([m4_divert_push([HELP_TUNING_BEGIN])dnl
+cat <<_ATEOF || at_write_fail=1
+
+dnl extra quoting prevents emacs whitespace mode from putting tabs in output
+Execution tuning:
+  -C, --directory=DIR
+[                 change to directory DIR before starting]
+      --color[[=never|auto|always]]
+[                 ]m4_ifdef([AT_color],
+		      [disable colored test results, or enable even without terminal],
+		      [enable colored test results on terminal, or always])
+  -j, --jobs[[=N]]
+[                 allow N jobs at once; with no N, as many jobs as test groups (default 1)]
+  -k, --keywords=KEYWORDS
+[                 select the tests matching all the comma-separated KEYWORDS]
+[                 multiple \`-k' accumulate; prefixed \`!' negates a KEYWORD]
+      --recheck  select all tests that failed or passed unexpectedly last time
+  -e, --errexit  abort as soon as a test fails; implies --debug
+  -v, --verbose  force more detailed output
+[                 default for debugging scripts]
+  -d, --debug    inhibit cleanup and top-level logging
+[                 default for debugging scripts]
+  -x, --trace    enable shell tracing of the tests
+_ATEOF
+m4_divert_pop([HELP_TUNING_BEGIN])])dnl
+m4_divert_push([HELP_END])dnl
+cat <<_ATEOF || at_write_fail=1
+
+Report bugs to <AT_PACKAGE_BUGREPORT>.dnl
+m4_ifdef([AT_PACKAGE_NAME],
+[m4_ifset([AT_PACKAGE_URL], [
+m4_defn([AT_PACKAGE_NAME]) home page: <AT_PACKAGE_URL>.])dnl
+m4_if(m4_index(m4_defn([AT_PACKAGE_NAME]), [GNU ]), [0], [
+General help using GNU software: <http://www.gnu.org/gethelp/>.])])
+_ATEOF
+  exit $at_write_fail
+fi
+
+# List of tests.
+if $at_list_p; then
+  cat <<_ATEOF || at_write_fail=1
+AT_TESTSUITE_NAME test groups:
+
+ NUM: FILE-NAME:LINE     TEST-GROUP-NAME
+      KEYWORDS
+
+_ATEOF
+  # Pass an empty line as separator between selected groups and help.
+  AS_ECHO(["$at_groups$as_nl$as_nl$at_help_all"]) |
+    awk 'NF == 1 && FS != ";" {
+	   selected[[$ 1]] = 1
+	   next
+	 }
+	 /^$/ { FS = ";" }
+	 NF > 0 {
+	   if (selected[[$ 1]]) {
+	     printf " %3d: %-18s %s\n", $ 1, $ 2, $ 3
+	     if ($ 4) {
+	       lmax = 79
+	       indent = "     "
+	       line = indent
+	       len = length (line)
+	       n = split ($ 4, a, " ")
+	       for (i = 1; i <= n; i++) {
+		 l = length (a[[i]]) + 1
+		 if (i > 1 && len + l > lmax) {
+		   print line
+		   line = indent " " a[[i]]
+		   len = length (line)
+		 } else {
+		   line = line " " a[[i]]
+		   len += l
+		 }
+	       }
+	       if (n)
+		 print line
+	     }
+	   }
+	 }' || at_write_fail=1
+  exit $at_write_fail
+fi
+m4_divert_pop([HELP_END])dnl
+m4_divert_push([VERSION])dnl
+if $at_version_p; then
+  AS_ECHO(["$as_me (AT_PACKAGE_STRING)"]) &&
+  cat <<\_ATEOF || at_write_fail=1
+m4_divert_pop([VERSION])dnl
+m4_divert_push([VERSION_END])dnl
+_ATEOF
+  exit $at_write_fail
+fi
+m4_divert_pop([VERSION_END])dnl
+m4_divert_push([TESTS_BEGIN])dnl
+
+# Take any -C into account.
+if $at_change_dir ; then
+  test x != "x$at_dir" && cd "$at_dir" \
+    || AS_ERROR([unable to change directory])
+  at_dir=`pwd`
+fi
+
+# Load the config files for any default variable assignments.
+for at_file in atconfig atlocal
+do
+  test -r $at_file || continue
+  . ./$at_file || AS_ERROR([invalid content: $at_file])
+done
+
+# Autoconf <=2.59b set at_top_builddir instead of at_top_build_prefix:
+: "${at_top_build_prefix=$at_top_builddir}"
+
+# Perform any assignments requested during argument parsing.
+eval "$at_debug_args"
+
+# atconfig delivers names relative to the directory the test suite is
+# in, but the groups themselves are run in testsuite-dir/group-dir.
+if test -n "$at_top_srcdir"; then
+  builddir=../..
+  for at_dir_var in srcdir top_srcdir top_build_prefix
+  do
+    AS_VAR_COPY([at_val], [at_$at_dir_var])
+    case $at_val in
+      [[\\/$]]* | ?:[[\\/]]* ) at_prefix= ;;
+      *) at_prefix=../../ ;;
+    esac
+    AS_VAR_SET([$at_dir_var], [$at_prefix$at_val])
+  done
+fi
+
+m4_text_box([Directory structure.])
+
+# This is the set of directories and files used by this script
+# (non-literals are capitalized):
+#
+# TESTSUITE         - the testsuite
+# TESTSUITE.log     - summarizes the complete testsuite run
+# TESTSUITE.dir/    - created during a run, remains after -d or failed test
+# + at-groups/      - during a run: status of all groups in run
+# | + NNN/          - during a run: meta-data about test group NNN
+# | | + check-line  - location (source file and line) of current AT_CHECK
+# | | + status      - exit status of current AT_CHECK
+# | | + stdout      - stdout of current AT_CHECK
+# | | + stder1      - stderr, including trace
+# | | + stderr      - stderr, with trace filtered out
+# | | + test-source - portion of testsuite that defines group
+# | | + times       - timestamps for computing duration
+# | | + pass        - created if group passed
+# | | + xpass       - created if group xpassed
+# | | + fail        - created if group failed
+# | | + xfail       - created if group xfailed
+# | | + skip        - created if group skipped
+# + at-stop         - during a run: end the run if this file exists
+# + at-source-lines - during a run: cache of TESTSUITE line numbers for extraction
+# + 0..NNN/         - created for each group NNN, remains after -d or failed test
+# | + TESTSUITE.log - summarizes the group results
+# | + ...           - files created during the group
+
+# The directory the whole suite works in.
+# Should be absolute to let the user `cd' at will.
+at_suite_dir=$at_dir/$as_me.dir
+# The file containing the suite ($at_dir might have changed since earlier).
+at_suite_log=$at_dir/$as_me.log
+# The directory containing helper files per test group.
+at_helper_dir=$at_suite_dir/at-groups
+# Stop file: if it exists, do not start new jobs.
+at_stop_file=$at_suite_dir/at-stop
+# The fifo used for the job dispatcher.
+at_job_fifo=$at_suite_dir/at-job-fifo
+
+if $at_clean; then
+  test -d "$at_suite_dir" &&
+    find "$at_suite_dir" -type d ! -perm -700 -exec chmod u+rwx \{\} \;
+  rm -f -r "$at_suite_dir" "$at_suite_log"
+  exit $?
+fi
+
+# Don't take risks: use only absolute directories in PATH.
+#
+# For stand-alone test suites (i.e., atconfig was not found),
+# AUTOTEST_PATH is relative to `.'.
+#
+# For embedded test suites, AUTOTEST_PATH is relative to the top level
+# of the package.  Then expand it into build/src parts, since users
+# may create executables in both places.
+AUTOTEST_PATH=`AS_ECHO(["$AUTOTEST_PATH"]) | sed "s|:|$PATH_SEPARATOR|g"`
+at_path=
+_AS_PATH_WALK([$AUTOTEST_PATH $PATH],
+[test -n "$at_path" && AS_VAR_APPEND([at_path], [$PATH_SEPARATOR])
+case $as_dir in
+  [[\\/]]* | ?:[[\\/]]* )
+    AS_VAR_APPEND([at_path], ["$as_dir"])
+    ;;
+  * )
+    if test -z "$at_top_build_prefix"; then
+      # Stand-alone test suite.
+      AS_VAR_APPEND([at_path], ["$as_dir"])
+    else
+      # Embedded test suite.
+      AS_VAR_APPEND([at_path], ["$at_top_build_prefix$as_dir$PATH_SEPARATOR"])
+      AS_VAR_APPEND([at_path], ["$at_top_srcdir/$as_dir"])
+    fi
+    ;;
+esac])
+
+# Now build and simplify PATH.
+#
+# There might be directories that don't exist, but don't redirect
+# builtins' (e.g., cd) stderr directly: Ultrix's sh hates that.
+at_new_path=
+_AS_PATH_WALK([$at_path],
+[test -d "$as_dir" || continue
+case $as_dir in
+  [[\\/]]* | ?:[[\\/]]* ) ;;
+  * ) as_dir=`(cd "$as_dir" && pwd) 2>/dev/null` ;;
+esac
+case $PATH_SEPARATOR$at_new_path$PATH_SEPARATOR in
+  *$PATH_SEPARATOR$as_dir$PATH_SEPARATOR*) ;;
+  $PATH_SEPARATOR$PATH_SEPARATOR) at_new_path=$as_dir ;;
+  *) AS_VAR_APPEND([at_new_path], ["$PATH_SEPARATOR$as_dir"]) ;;
+esac])
+PATH=$at_new_path
+export PATH
+
+# Setting up the FDs.
+m4_define([AS_MESSAGE_LOG_FD], [5])
+dnl The parent needs two fds to the same fifo, otherwise, there is a race
+dnl where the parent can read the fifo before a child opens it for writing
+m4_define([AT_JOB_FIFO_IN_FD], [6])
+m4_define([AT_JOB_FIFO_OUT_FD], [7])
+[#] AS_MESSAGE_LOG_FD is the log file.  Not to be overwritten if `-d'.
+if $at_debug_p; then
+  at_suite_log=/dev/null
+else
+  : >"$at_suite_log"
+fi
+exec AS_MESSAGE_LOG_FD>>"$at_suite_log"
+
+# Banners and logs.
+AS_BOX(m4_defn([AT_TESTSUITE_NAME])[.])
+{
+  AS_BOX(m4_defn([AT_TESTSUITE_NAME])[.])
+  echo
+
+  AS_ECHO(["$as_me: command line was:"])
+  AS_ECHO(["  \$ $[0] $at_cli_args"])
+  echo
+
+  # If ChangeLog exists, list a few lines in case it might help determine
+  # the exact version.
+  if test -n "$at_top_srcdir" && test -f "$at_top_srcdir/ChangeLog"; then
+    AS_BOX([ChangeLog.])
+    echo
+    sed 's/^/| /;10q' "$at_top_srcdir/ChangeLog"
+    echo
+  fi
+
+  AS_UNAME
+  echo
+
+  # Contents of the config files.
+  for at_file in atconfig atlocal
+  do
+    test -r $at_file || continue
+    AS_ECHO(["$as_me: $at_file:"])
+    sed 's/^/| /' $at_file
+    echo
+  done
+} >&AS_MESSAGE_LOG_FD
+
+m4_divert_pop([TESTS_BEGIN])dnl
+m4_divert_push([PREPARE_TESTS])dnl
+{
+  AS_BOX([Tested programs.])
+  echo
+} >&AS_MESSAGE_LOG_FD
+
+# Report what programs are being tested.
+for at_program in : $at_tested
+do
+  test "$at_program" = : && continue
+  case $at_program in
+    [[\\/]* | ?:[\\/]* ) at_program_=$at_program ;;]
+    * )
+    _AS_PATH_WALK([$PATH], [test -f "$as_dir/$at_program" && break])
+    at_program_=$as_dir/$at_program ;;
+  esac
+  if test -f "$at_program_"; then
+    {
+      AS_ECHO(["$at_srcdir/AT_LINE: $at_program_ --version"])
+      "$at_program_" --version </dev/null
+      echo
+    } >&AS_MESSAGE_LOG_FD 2>&1
+  else
+    AS_ERROR([cannot find $at_program])
+  fi
+done
+
+{
+  AS_BOX([Running the tests.])
+} >&AS_MESSAGE_LOG_FD
+
+at_start_date=`date`
+at_start_time=`date +%s 2>/dev/null`
+AS_ECHO(["$as_me: starting at: $at_start_date"]) >&AS_MESSAGE_LOG_FD
+m4_divert_pop([PREPARE_TESTS])dnl
+m4_divert_push([TESTS])dnl
+
+# Create the master directory if it doesn't already exist.
+AS_MKDIR_P(["$at_suite_dir"]) ||
+  AS_ERROR([cannot create `$at_suite_dir'])
+
+# Can we diff with `/dev/null'?  DU 5.0 refuses.
+if diff /dev/null /dev/null >/dev/null 2>&1; then
+  at_devnull=/dev/null
+else
+  at_devnull=$at_suite_dir/devnull
+  >"$at_devnull"
+fi
+
+# Use `diff -u' when possible.
+if at_diff=`diff -u "$at_devnull" "$at_devnull" 2>&1` && test -z "$at_diff"
+then
+  at_diff='diff -u'
+else
+  at_diff=diff
+fi
+
+# Get the last needed group.
+for at_group in : $at_groups; do :; done
+
+# Extract the start and end lines of each test group at the tail
+# of this file
+awk '
+BEGIN { FS="" }
+/^@%:@AT_START_/ {
+  start = NR
+}
+/^@%:@AT_STOP_/ {
+  test = substr ($ 0, 10)
+  print "at_sed" test "=\"1," start "d;" (NR-1) "q\""
+  if (test == "'"$at_group"'") exit
+}' "$at_myself" > "$at_suite_dir/at-source-lines" &&
+. "$at_suite_dir/at-source-lines" ||
+  AS_ERROR([cannot create test line number cache])
+rm -f "$at_suite_dir/at-source-lines"
+
+# Set number of jobs for `-j'; avoid more jobs than test groups.
+set X $at_groups; shift; at_max_jobs=$[@%:@]
+if test $at_max_jobs -eq 0; then
+  at_jobs=1
+fi
+if test $at_jobs -ne 1 &&
+   { test $at_jobs -eq 0 || test $at_jobs -gt $at_max_jobs; }; then
+  at_jobs=$at_max_jobs
+fi
+
+# In parallel mode, don't output banners and don't split summary lines.
+if test $at_jobs -ne 1; then
+  at_print_banners=false
+  at_quiet=:
+fi
+
+# Set up helper dirs.
+rm -rf "$at_helper_dir" &&
+mkdir "$at_helper_dir" &&
+cd "$at_helper_dir" &&
+{ test -z "$at_groups" || mkdir $at_groups; } ||
+AS_ERROR([testsuite directory setup failed])
+
+# Functions for running a test group.  We leave the actual
+# test group execution outside of a shell function in order
+# to avoid hitting zsh 4.x exit status bugs.
+
+AS_FUNCTION_DESCRIBE([at_fn_group_prepare], [],
+[Prepare for running a test group.])
+at_fn_group_prepare ()
+{
+  # The directory for additional per-group helper files.
+  at_job_dir=$at_helper_dir/$at_group
+  # The file containing the location of the last AT_CHECK.
+  at_check_line_file=$at_job_dir/check-line
+  # The file containing the exit status of the last command.
+  at_status_file=$at_job_dir/status
+  # The files containing the output of the tested commands.
+  at_stdout=$at_job_dir/stdout
+  at_stder1=$at_job_dir/stder1
+  at_stderr=$at_job_dir/stderr
+  # The file containing the code for a test group.
+  at_test_source=$at_job_dir/test-source
+  # The file containing dates.
+  at_times_file=$at_job_dir/times
+
+  # Be sure to come back to the top test directory.
+  cd "$at_suite_dir"
+
+  # Clearly separate the test groups when verbose.
+  $at_first || $at_verbose echo
+
+  at_group_normalized=$at_group
+  _AT_NORMALIZE_TEST_GROUP_NUMBER(at_group_normalized)
+
+  # Create a fresh directory for the next test group, and enter.
+  # If one already exists, the user may have invoked ./run from
+  # within that directory; we remove the contents, but not the
+  # directory itself, so that we aren't pulling the rug out from
+  # under the shell's notion of the current directory.
+  at_group_dir=$at_suite_dir/$at_group_normalized
+  at_group_log=$at_group_dir/$as_me.log
+  _AS_CLEAN_DIR("$at_group_dir") ||
+    AS_WARN([test directory for $at_group_normalized could not be cleaned])
+  # Be tolerant if the above `rm' was not able to remove the directory.
+  AS_MKDIR_P(["$at_group_dir"])
+
+  echo 0 > "$at_status_file"
+
+  # In verbose mode, append to the log file *and* show on
+  # the standard output; in quiet mode only write to the log.
+  if test -z "$at_verbose"; then
+    at_tee_pipe='tee -a "$at_group_log"'
+  else
+    at_tee_pipe='cat >> "$at_group_log"'
+  fi
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_group_banner], [[ORDINAL LINE DESC PAD [BANNER]]],
+[Declare the test group ORDINAL, located at LINE with group description
+DESC, and residing under BANNER.  Use PAD to align the status column.])
+at_fn_group_banner ()
+{
+  at_setup_line="$[2]"
+  test -n "$[5]" && at_fn_banner $[5]
+  at_desc="$[3]"
+  case $[1] in
+    [[0-9]])      at_desc_line="  $[1]: ";;
+    [[0-9][0-9]]) at_desc_line=" $[1]: " ;;
+    [*])          at_desc_line="$[1]: "  ;;
+  esac
+  AS_VAR_APPEND([at_desc_line], ["$[3]$[4]"])
+  $at_quiet AS_ECHO_N(["$at_desc_line"])
+  echo "#                             -*- compilation -*-" >> "$at_group_log"
+}
+
+AS_FUNCTION_DESCRIBE([at_fn_group_postprocess], [],
+[Perform cleanup after running a test group.])
+at_fn_group_postprocess ()
+{
+  # Be sure to come back to the suite directory, in particular
+  # since below we might `rm' the group directory we are currently in.
+  cd "$at_suite_dir"
+
+  if test ! -f "$at_check_line_file"; then
+    sed "s/^ */$as_me: WARNING: /" <<_ATEOF
+      A failure happened in a test group before any test could be
+      run.  This means that the test suite is improperly designed.  Please
+      report this failure to <AT_PACKAGE_BUGREPORT>.
+_ATEOF
+    AS_ECHO(["$at_setup_line"]) >"$at_check_line_file"
+    at_status=99
+  fi
+  $at_verbose AS_ECHO_N(["$at_group. $at_setup_line: "])
+  AS_ECHO_N(["$at_group. $at_setup_line: "]) >> "$at_group_log"
+  case $at_xfail:$at_status in
+    yes:0)
+	at_msg="UNEXPECTED PASS"
+	at_res=xpass
+	at_errexit=$at_errexit_p
+	at_color=$at_red
+	;;
+    no:0)
+	at_msg="ok"
+	at_res=pass
+	at_errexit=false
+	at_color=$at_grn
+	;;
+    *:77)
+	at_msg='skipped ('`cat "$at_check_line_file"`')'
+	at_res=skip
+	at_errexit=false
+	at_color=$at_blu
+	;;
+    no:* | *:99)
+	at_msg='FAILED ('`cat "$at_check_line_file"`')'
+	at_res=fail
+	at_errexit=$at_errexit_p
+	at_color=$at_red
+	;;
+    yes:*)
+	at_msg='expected failure ('`cat "$at_check_line_file"`')'
+	at_res=xfail
+	at_errexit=false
+	at_color=$at_lgn
+	;;
+  esac
+  echo "$at_res" > "$at_job_dir/$at_res"
+  # In parallel mode, output the summary line only afterwards.
+  if test $at_jobs -ne 1 && test -n "$at_verbose"; then
+    AS_ECHO(["$at_desc_line $at_color$at_msg$at_std"])
+  else
+    # Make sure there is a separator even with long titles.
+    AS_ECHO([" $at_color$at_msg$at_std"])
+  fi
+  at_log_msg="$at_group. $at_desc ($at_setup_line): $at_msg"
+  case $at_status in
+    0|77)
+      # $at_times_file is only available if the group succeeded.
+      # We're not including the group log, so the success message
+      # is written in the global log separately.  But we also
+      # write to the group log in case they're using -d.
+      if test -f "$at_times_file"; then
+	at_log_msg="$at_log_msg     ("`sed 1d "$at_times_file"`')'
+	rm -f "$at_times_file"
+      fi
+      AS_ECHO(["$at_log_msg"]) >> "$at_group_log"
+      AS_ECHO(["$at_log_msg"]) >&AS_MESSAGE_LOG_FD
+
+      # Clean up the group directory, unless the user wants the files
+      # or the success was unexpected.
+      if $at_debug_p || test $at_res = xpass; then
+	at_fn_create_debugging_script
+	if test $at_res = xpass && $at_errexit; then
+	  echo stop > "$at_stop_file"
+	fi
+      else
+	if test -d "$at_group_dir"; then
+	  find "$at_group_dir" -type d ! -perm -700 -exec chmod u+rwx \{\} \;
+	  rm -fr "$at_group_dir"
+	fi
+	rm -f "$at_test_source"
+      fi
+      ;;
+    *)
+      # Upon failure, include the log in the testsuite's global
+      # log.  The failure message is written in the group log.  It
+      # is later included in the global log.
+      AS_ECHO(["$at_log_msg"]) >> "$at_group_log"
+
+      # Upon failure, keep the group directory for autopsy, and create
+      # the debugging script.  With -e, do not start any further tests.
+      at_fn_create_debugging_script
+      if $at_errexit; then
+	echo stop > "$at_stop_file"
+      fi
+      ;;
+  esac
+}
+
+
+m4_text_box([Driver loop.])
+
+dnl Catching signals correctly:
+dnl
+dnl The first idea was: trap the signal, send it to all spawned jobs,
+dnl then reset the handler and reraise the signal for ourselves.
+dnl However, before exiting, ksh will then send the signal to all
+dnl process group members, potentially killing the outer testsuite
+dnl and/or the 'make' process driving us.
+dnl So now the strategy is: trap the signal, send it to all spawned jobs,
+dnl then exit the script with the right status.
+dnl
+dnl In order to let the jobs know about the signal, we cannot just send it
+dnl to the current process group (kill $SIG 0), for the same reason as above.
+dnl Also, sending the signal to the spawned processes does not reliably
+dnl stop the suite, because they might not propagate it further
+dnl (maybe this can be fixed?).
+dnl
+dnl So what we do is enable shell job control if available, which causes the
+dnl shell to start each parallel task as its own shell job, thus as a new
+dnl process group leader.  We then send the signal to all new process groups.
+
+dnl Do we have job control?
+if (set -m && set +m && set +b) >/dev/null 2>&1; then
+  set +b
+  at_job_control_on='set -m' at_job_control_off='set +m' at_job_group=-
+else
+  at_job_control_on=: at_job_control_off=: at_job_group=
+fi
+
+for at_signal in 1 2 15; do
+dnl This signal handler is not suitable for PIPE: it causes writes.
+dnl The code that was interrupted may have the errexit, monitor, or xtrace
+dnl flags enabled, so sanitize.
+  trap 'set +x; set +e
+	$at_job_control_off
+	at_signal='"$at_signal"'
+dnl Safety belt: even with runaway processes, prevent starting new jobs.
+	echo stop > "$at_stop_file"
+dnl Do not enter this area multiple times, do not kill self prematurely.
+	trap "" $at_signal
+dnl Gather process group IDs of currently running jobs.
+	at_pgids=
+	for at_pgid in `jobs -p 2>/dev/null`; do
+	  at_pgids="$at_pgids $at_job_group$at_pgid"
+	done
+dnl Ignore `kill' errors, as some jobs may have finished in the meantime.
+	test -z "$at_pgids" || kill -$at_signal $at_pgids 2>/dev/null
+dnl Wait until all jobs have exited.
+	wait
+dnl Status output.  Do this after waiting for the jobs, for ordered output.
+dnl Avoid scribbling onto the end of a possibly incomplete line.
+	if test "$at_jobs" -eq 1 || test -z "$at_verbose"; then
+	  echo >&2
+	fi
+	at_signame=`kill -l $at_signal 2>&1 || echo $at_signal`
+	set x $at_signame
+	test $# -gt 2 && at_signame=$at_signal
+	AS_WARN([caught signal $at_signame, bailing out])
+dnl Do not reinstall the default handler here and reraise the signal to
+dnl let the default handler do its job, see the note about ksh above.
+dnl	trap - $at_signal
+dnl	kill -$at_signal $$
+dnl Instead, exit with appropriate status.
+	AS_VAR_ARITH([exit_status], [128 + $at_signal])
+	AS_EXIT([$exit_status])' $at_signal
+done
+
+rm -f "$at_stop_file"
+at_first=:
+
+if test $at_jobs -ne 1 &&
+     rm -f "$at_job_fifo" &&
+     test -n "$at_job_group" &&
+     ( mkfifo "$at_job_fifo" && trap 'exit 1' PIPE STOP TSTP ) 2>/dev/null
+then
+  # FIFO job dispatcher.
+
+dnl Since we use job control, we need to propagate TSTP.
+dnl This handler need not be used for serial execution.
+dnl Again, we should stop all processes in the job groups, otherwise
+dnl the stopping will not be effective while one test group is running.
+dnl Apparently ksh does not honor the TSTP trap.
+dnl As a safety measure, do not use the same variable names as in the
+dnl termination handlers above; one might get called during execution
+dnl of the other.
+  trap 'at_pids=
+	for at_pid in `jobs -p`; do
+	  at_pids="$at_pids $at_job_group$at_pid"
+	done
+dnl Send it to all spawned jobs, ignoring those that finished in the meantime.
+	if test -n "$at_pids"; then
+dnl Unfortunately, ksh93 fork-bombs when we send TSTP, so send STOP
+dnl if this might be ksh (STOP prevents possible TSTP handlers inside
+dnl AT_CHECKs from running).  Then stop ourselves.
+	  at_sig=TSTP
+	  test "${TMOUT+set}" = set && at_sig=STOP
+	  kill -$at_sig $at_pids 2>/dev/null
+	fi
+	kill -STOP $$
+dnl We got a CONT, so let's go again.  Passing this to all processes
+dnl in the groups is necessary (because we stopped them), but it may
+dnl cause changed test semantics; e.g., a sleep will be interrupted.
+	test -z "$at_pids" || kill -CONT $at_pids 2>/dev/null' TSTP
+
+  echo
+  # Turn jobs into a list of numbers, starting from 1.
+  at_joblist=`AS_ECHO(["$at_groups"]) | sed -n 1,${at_jobs}p`
+
+  set X $at_joblist
+  shift
+  for at_group in $at_groups; do
+dnl Enable job control only for spawning the test group:
+dnl let the jobs run in separate process groups, but
+dnl avoid all the status output from the shell.
+    $at_job_control_on 2>/dev/null
+    (
+      # Start one test group.
+      $at_job_control_off
+dnl First child must open the fifo to avoid blocking parent; all other
+dnl children inherit it already opened from the parent.
+      if $at_first; then
+	exec AT_JOB_FIFO_OUT_FD>"$at_job_fifo"
+      else
+dnl Children do not need the parent's copy of the fifo.
+	exec AT_JOB_FIFO_IN_FD<&-
+      fi
+dnl When a child receives PIPE, be sure to write back the token,
+dnl so the master does not hang waiting for it.
+dnl errexit and xtrace should not be set in this shell instance,
+dnl except as debug measures.  However, shells such as dash may
+dnl optimize away the _AT_CHECK subshell, so normalize here.
+      trap 'set +x; set +e
+dnl Ignore PIPE signals that stem from writing back the token.
+	    trap "" PIPE
+	    echo stop > "$at_stop_file"
+	    echo >&AT_JOB_FIFO_OUT_FD
+dnl Do not reraise the default PIPE handler.
+dnl It wreaks havoc with ksh, see above.
+dnl	    trap - 13
+dnl	    kill -13 $$
+	    AS_EXIT([141])' PIPE
+      at_fn_group_prepare
+      if cd "$at_group_dir" &&
+	 at_fn_test $at_group &&
+	 . "$at_test_source"
+      then :; else
+	AS_WARN([unable to parse test group: $at_group])
+	at_failed=:
+      fi
+      at_fn_group_postprocess
+      echo >&AT_JOB_FIFO_OUT_FD
+    ) &
+    $at_job_control_off
+    if $at_first; then
+      at_first=false
+      exec AT_JOB_FIFO_IN_FD<"$at_job_fifo" AT_JOB_FIFO_OUT_FD>"$at_job_fifo"
+    fi
+    shift # Consume one token.
+    if test $[@%:@] -gt 0; then :; else
+      read at_token <&AT_JOB_FIFO_IN_FD || break
+      set x $[*]
+    fi
+    test -f "$at_stop_file" && break
+  done
+  exec AT_JOB_FIFO_OUT_FD>&-
+  # Read back the remaining ($at_jobs - 1) tokens.
+  set X $at_joblist
+  shift
+  if test $[@%:@] -gt 0; then
+    shift
+    for at_job
+    do
+      read at_token
+    done <&AT_JOB_FIFO_IN_FD
+  fi
+  exec AT_JOB_FIFO_IN_FD<&-
+  wait
+else
+  # Run serially, avoid forks and other potential surprises.
+  for at_group in $at_groups; do
+    at_fn_group_prepare
+    if cd "$at_group_dir" &&
+       at_fn_test $at_group &&
+       . "$at_test_source"; then :; else
+      AS_WARN([unable to parse test group: $at_group])
+      at_failed=:
+    fi
+    at_fn_group_postprocess
+    test -f "$at_stop_file" && break
+    at_first=false
+  done
+fi
+
+# Wrap up the test suite with summary statistics.
+cd "$at_helper_dir"
+
+# Use ?..???? when the list must remain sorted, the faster * otherwise.
+at_pass_list=`for f in */pass; do echo $f; done | sed '/\*/d; s,/pass,,'`
+at_skip_list=`for f in */skip; do echo $f; done | sed '/\*/d; s,/skip,,'`
+at_xfail_list=`for f in */xfail; do echo $f; done | sed '/\*/d; s,/xfail,,'`
+at_xpass_list=`for f in ?/xpass ??/xpass ???/xpass ????/xpass; do
+		 echo $f; done | sed '/?/d; s,/xpass,,'`
+at_fail_list=`for f in ?/fail ??/fail ???/fail ????/fail; do
+		echo $f; done | sed '/?/d; s,/fail,,'`
+
+set X $at_pass_list $at_xpass_list $at_xfail_list $at_fail_list $at_skip_list
+shift; at_group_count=$[@%:@]
+set X $at_xpass_list; shift; at_xpass_count=$[@%:@]; at_xpass_list=$[*]
+set X $at_xfail_list; shift; at_xfail_count=$[@%:@]
+set X $at_fail_list; shift; at_fail_count=$[@%:@]; at_fail_list=$[*]
+set X $at_skip_list; shift; at_skip_count=$[@%:@]
+
+AS_VAR_ARITH([at_run_count], [$at_group_count - $at_skip_count])
+AS_VAR_ARITH([at_unexpected_count], [$at_xpass_count + $at_fail_count])
+AS_VAR_ARITH([at_total_fail_count], [$at_xfail_count + $at_fail_count])
+
+# Back to the top directory.
+cd "$at_dir"
+rm -rf "$at_helper_dir"
+
+# Compute the duration of the suite.
+at_stop_date=`date`
+at_stop_time=`date +%s 2>/dev/null`
+AS_ECHO(["$as_me: ending at: $at_stop_date"]) >&AS_MESSAGE_LOG_FD
+case $at_start_time,$at_stop_time in
+  [[0-9]*,[0-9]*])
+    AS_VAR_ARITH([at_duration_s], [$at_stop_time - $at_start_time])
+    AS_VAR_ARITH([at_duration_m], [$at_duration_s / 60])
+    AS_VAR_ARITH([at_duration_h], [$at_duration_m / 60])
+    AS_VAR_ARITH([at_duration_s], [$at_duration_s % 60])
+    AS_VAR_ARITH([at_duration_m], [$at_duration_m % 60])
+    at_duration="${at_duration_h}h ${at_duration_m}m ${at_duration_s}s"
+    AS_ECHO(["$as_me: test suite duration: $at_duration"]) >&AS_MESSAGE_LOG_FD
+    ;;
+esac
+
+echo
+AS_BOX([Test results.])
+echo
+{
+  echo
+  AS_BOX([Test results.])
+  echo
+} >&AS_MESSAGE_LOG_FD
+
+dnl
+dnl FIXME: this code is as far from i18n-cleanness as one
+dnl could imagine...
+dnl
+if test $at_run_count = 1; then
+  at_result="1 test"
+  at_were=was
+else
+  at_result="$at_run_count tests"
+  at_were=were
+fi
+if $at_errexit_p && test $at_unexpected_count != 0; then
+  if test $at_xpass_count = 1; then
+    at_result="$at_result $at_were run, one passed"
+  else
+    at_result="$at_result $at_were run, one failed"
+  fi
+  at_result="$at_result unexpectedly and inhibited subsequent tests."
+  at_color=$at_red
+else
+  # Don't you just love the exponential explosion of the number of cases?
+  at_color=$at_red
+  case $at_xpass_count:$at_fail_count:$at_xfail_count in
+    # So far, so good.
+    0:0:0) at_result="$at_result $at_were successful." at_color=$at_grn ;;
+    0:0:*) at_result="$at_result behaved as expected." at_color=$at_lgn ;;
+
+    # Some unexpected failures
+    0:*:0) at_result="$at_result $at_were run,
+$at_fail_count failed unexpectedly." ;;
+
+    # Some failures, both expected and unexpected
+    0:*:1) at_result="$at_result $at_were run,
+$at_total_fail_count failed ($at_xfail_count expected failure)." ;;
+    0:*:*) at_result="$at_result $at_were run,
+$at_total_fail_count failed ($at_xfail_count expected failures)." ;;
+
+    # No unexpected failures, but some xpasses
+    *:0:*) at_result="$at_result $at_were run,
+$at_xpass_count passed unexpectedly." ;;
+
+    # No expected failures, but failures and xpasses
+    *:1:0) at_result="$at_result $at_were run,
+$at_unexpected_count did not behave as expected dnl
+($at_fail_count unexpected failure)." ;;
+    *:*:0) at_result="$at_result $at_were run,
+$at_unexpected_count did not behave as expected dnl
+($at_fail_count unexpected failures)." ;;
+
+    # All of them.
+    *:*:1) at_result="$at_result $at_were run,
+$at_xpass_count passed unexpectedly,
+$at_total_fail_count failed ($at_xfail_count expected failure)." ;;
+    *:*:*) at_result="$at_result $at_were run,
+$at_xpass_count passed unexpectedly,
+$at_total_fail_count failed ($at_xfail_count expected failures)." ;;
+  esac
+
+  if test $at_skip_count = 0 && test $at_run_count -gt 1; then
+    at_result="All $at_result"
+  fi
+fi
+
+# Now put skips in the mix.
+case $at_skip_count in
+  0) ;;
+  1) at_result="$at_result
+1 test was skipped." ;;
+  *) at_result="$at_result
+$at_skip_count tests were skipped." ;;
+esac
+
+if test $at_unexpected_count = 0; then
+  echo "$at_color$at_result$at_std"
+  echo "$at_result" >&AS_MESSAGE_LOG_FD
+else
+  echo "${at_color}ERROR: $at_result$at_std" >&2
+  echo "ERROR: $at_result" >&AS_MESSAGE_LOG_FD
+  {
+    echo
+    AS_BOX([Summary of the failures.])
+
+    # Summary of failed and skipped tests.
+    if test $at_fail_count != 0; then
+      echo "Failed tests:"
+      $SHELL "$at_myself" $at_fail_list --list
+      echo
+    fi
+    if test $at_skip_count != 0; then
+      echo "Skipped tests:"
+      $SHELL "$at_myself" $at_skip_list --list
+      echo
+    fi
+    if test $at_xpass_count != 0; then
+      echo "Unexpected passes:"
+      $SHELL "$at_myself" $at_xpass_list --list
+      echo
+    fi
+    if test $at_fail_count != 0; then
+      AS_BOX([Detailed failed tests.])
+      echo
+      for at_group in $at_fail_list
+      do
+	at_group_normalized=$at_group
+	_AT_NORMALIZE_TEST_GROUP_NUMBER(at_group_normalized)
+	cat "$at_suite_dir/$at_group_normalized/$as_me.log"
+	echo
+      done
+      echo
+    fi
+    if test -n "$at_top_srcdir"; then
+      AS_BOX([${at_top_build_prefix}config.log])
+      sed 's/^/| /' ${at_top_build_prefix}config.log
+      echo
+    fi
+  } >&AS_MESSAGE_LOG_FD
+
+  AS_BOX([$as_me.log was created.])
+
+  echo
+  if $at_debug_p; then
+    at_msg='per-test log files'
+  else
+    at_msg="\`${at_testdir+${at_testdir}/}$as_me.log'"
+  fi
+  AS_ECHO(["Please send $at_msg and all information you think might help:
+
+   To: <AT_PACKAGE_BUGREPORT>
+   Subject: @<:@AT_PACKAGE_STRING@:>@ $as_me: dnl
+$at_fail_list${at_fail_list:+ failed${at_xpass_list:+, }}dnl
+$at_xpass_list${at_xpass_list:+ passed unexpectedly}
+
+You may investigate any problem if you feel able to do so, in which
+case the test suite provides a good starting point.  Its output may
+be found below \`${at_testdir+${at_testdir}/}$as_me.dir'.
+"])
+  exit 1
+fi
+
+exit 0
+
+m4_text_box([Actual tests.])
+m4_divert_pop([TESTS])dnl
+dnl End of AT_INIT: divert to KILL, only test groups are to be
+dnl output, the rest is ignored.  Current diversion is BODY, inherited
+dnl from M4sh.
+m4_divert([KILL])
+])# AT_INIT
+
+
+# _AT_ARG_OPTION(OPTIONS,HELP-TEXT,[ARGS],[ACTION-IF-GIVEN],
+#                [ACTION-IF-NOT-GIVEN])
+# ----------------------------------------------------------
+# Internal implementation of AT_ARG_OPTION & AT_ARG_OPTION_ARG
+m4_defun([_AT_ARG_OPTION],
+[m4_divert_once([HELP_OTHER],
+[cat <<_ATEOF || at_write_fail=1
+
+Other options:
+_ATEOF
+])dnl m4_divert_once HELP_OTHER
+m4_divert_text([HELP_OTHER],
+[cat <<_ATEOF || at_write_fail=1
+$2
+_ATEOF])dnl
+dnl Turn our options into our desired strings
+m4_ifdef([AT_first_option],[m4_undefine([AT_first_option])])dnl
+m4_ifdef([AT_case],[m4_undefine([AT_case])])dnl
+m4_ifdef([AT_case_no],[m4_undefine([AT_case_no])])dnl
+m4_ifdef([AT_case_arg],[m4_undefine([AT_case_arg])])dnl
+m4_foreach([AT_option], m4_split(m4_normalize([$1]),[[ \|]+]),
+[m4_define_default([AT_first_option],AT_option)dnl
+m4_define_default([AT_first_option_tr],
+		  [m4_bpatsubst(m4_defn([AT_first_option]), -, [_])])dnl
+m4_append([AT_case],m4_if(m4_len(AT_option),1,[],[-])[-]AT_option, [ | ])dnl
+m4_append([AT_case_no],[--no-]AT_option, [ | ])dnl
+m4_append([AT_case_arg],
+	  m4_if(m4_len(AT_option),1,[],[-])[-]AT_option[=*], [ | ])dnl
+])dnl m4_foreach AT_option
+dnl keep track so we or the user may process ACTION-IF-NOT-GIVEN
+m4_divert_once([PARSE_ARGS_BEGIN],
+[
+##
+## Set up package specific options.
+##
+])dnl
+m4_divert_text([PARSE_ARGS_BEGIN],
+[dnl Provide a default value for options without arguments.
+m4_ifvaln([$3],,[at_arg_[]AT_first_option_tr=false])dnl
+at_arg_given_[]AT_first_option_tr=false
+])dnl m4_divert_text DEFAULTS
+m4_divert_text([PARSE_ARGS],
+[dnl Parse the options and args when necessary.
+m4_ifvaln([$3],
+[    AT_case )
+	at_prev=--AT_first_option_tr
+	;;
+    AT_case_arg )
+	at_arg_[]AT_first_option_tr=$at_optarg
+	at_arg_given_[]AT_first_option_tr=:
+	$4
+	;;],
+[    AT_case )
+	at_optarg=:
+	at_arg_[]AT_first_option_tr=:
+	at_arg_given_[]AT_first_option_tr=:
+	m4_ifval([$4],[$4])[]dnl
+	;;
+    AT_case_no )
+	at_optarg=false
+	at_arg_[]AT_first_option_tr=false
+	at_arg_given_[]AT_first_option_tr=:
+	m4_ifval([$4],[$4])[]dnl
+	;;])dnl m4_ifvaln $3
+])dnl m4_divert_text PARSE_ARGS
+m4_ifvaln([$5],
+[m4_divert_once([PARSE_ARGS_END],
+[
+##
+## Process package specific options when _not_ supplied.
+##])dnl m4_divert_once PARSE_ARGS_END
+m4_divert_text([PARSE_ARGS_END],
+[
+AS_IF([$at_arg_given_[]AT_first_option_tr],,[$5])dnl
+])dnl m4_divert_text PARSE_ARGS_END
+])dnl m4_ifvaln $5
+])dnl _AT_ARG_OPTION
+
+
+# AT_ARG_OPTION(OPTIONS,HELP-TEXT,[ACTION-IF-GIVEN],[ACTION-IF-NOT-GIVEN])
+# ------------------------------------------------------------------------
+# Accept a list of space-separated OPTIONS, all aliases of the first one.
+# Add HELP-TEXT to the HELP_OTHER diversion.
+#
+# Leading dashes should not be passed in OPTIONS.  Users will be required
+# to pass `--' before long options and `-' before single character options.
+#
+# $at_arg_OPTION will be set to `:' if this option is received, `false' if
+# --no-OPTION is received, and `false' by default.
+#
+# Run ACTION-IF-GIVEN each time an option in OPTIONS is encountered; here,
+# $at_optarg will be set to `:' or `false' as appropriate.  $at_optarg is
+# actually just a copy of $at_arg_OPTION.
+#
+# ACTION-IF-NOT-GIVEN will be run once after option parsing is complete and
+# if no option from OPTIONS was used.
+m4_defun([AT_ARG_OPTION],[_AT_ARG_OPTION([$1],[$2],,[$3],[$4])])
+
+
+# AT_ARG_OPTION_ARG(OPTIONS,HELP-TEXT,[ACTION-IF-GIVEN],[ACTION-IF-NOT-GIVEN])
+# ----------------------------------------------------------------------------
+# Accept a set of space-separated OPTIONS with arguments, all aliases of the
+# first one.  Add HELP-TEXT to the HELP_OTHER diversion.
+#
+# Leading dashes should not be passed in OPTIONS.  Users will be required
+# to pass `--' before long options and `-' before single character options.
+#
+# By default, any argument to these options will be assigned to the shell
+# variable $at_arg_OPTION, where OPTION is the first option in OPTIONS with
+# any `-' characters replaced with `_'.
+#
+# Run ACTION-IF-GIVEN each time an option in OPTIONS is encountered; here,
+# $at_optarg will be set to the option argument.  $at_optarg is actually just
+# a copy of $at_arg_OPTION.
+#
+# ACTION-IF-NOT-GIVEN will be run once after option parsing is complete
+# and if no option from OPTIONS was used.
+m4_defun([AT_ARG_OPTION_ARG],[_AT_ARG_OPTION([$1],[$2],1,[$3],[$4])])
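+
+# A usage sketch (the option names, help strings, and actions below are
+# illustrative, not part of autotest itself):
+#
+#   AT_ARG_OPTION([valgrind],
+#     [  --valgrind            run the tests under valgrind])
+#   AT_ARG_OPTION_ARG([compiler],
+#     [  --compiler=COMPILER   exercise the given C compiler],
+#     [CC=$at_optarg])
+#
+# After option parsing, $at_arg_valgrind is `:' or `false', and
+# $at_arg_compiler holds the argument given to --compiler, if any.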
+
+
+# AT_TESTED(PROGRAMS)
+# -------------------
+# Specify the list of programs exercised by the test suite.  Their
+# versions are logged, and in the case of an embedded test suite, they
+# must correspond to the version of the package.  PATH should already
+# be preset so that the proper executable will be selected.
+m4_define([AT_TESTED],
+[m4_append_uniq_w([AT_tested], [$1])])
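+
+# For instance (the program names are illustrative):
+#
+#   AT_TESTED([frobnicate frobnicate-config])
+#
+# logs the `--version' output of both programs before the tests run and
+# aborts the suite if either cannot be found in PATH.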
+
+
+# AT_COPYRIGHT(TEXT, [FILTER = m4_newline])
+# -----------------------------------------
+# Emit TEXT, a copyright notice, at the top of the test suite and in
+# --version output.  Macros in TEXT are evaluated once.  Process
+# the --version output through FILTER (m4_newline, m4_do, and
+# m4_copyright_condense are common filters).
+m4_define([AT_COPYRIGHT],
+[AS_COPYRIGHT([$1])[]]dnl
+[m4_divert_text([VERSION_NOTICES],
+[m4_default([$2], [m4_newline])([$1])])])# AT_COPYRIGHT
+
+
+# AT_COLOR_TESTS
+# --------------
+# Enable colored test results if standard error is connected to a terminal.
+m4_define([AT_COLOR_TESTS],
+[m4_define([AT_color], [auto])])
+
+# AT_SETUP(DESCRIPTION)
+# ---------------------
+# Start a group of related tests, all to be executed in the same subshell.
+# The group is testing what DESCRIPTION says.
+_AT_DEFINE_INIT([AT_SETUP],
+[m4_ifdef([AT_ingroup], [m4_fatal([$0: nested AT_SETUP detected])],
+  [m4_define([AT_ingroup], [AS_ECHO(["$at_setup_line"]) >"$at_check_line_file"
+])])
+m4_ifdef([AT_keywords], [m4_undefine([AT_keywords])])
+m4_define([AT_capture_files], [])
+m4_define([AT_line], AT_LINE)
+m4_define([AT_xfail], [at_xfail=no])
+m4_define([AT_description], m4_expand([$1]))
+m4_define([AT_ordinal], m4_incr(AT_ordinal))
+m4_divert_push([TEST_GROUPS])dnl
+[#AT_START_]AT_ordinal
+at_fn_group_banner AT_ordinal 'm4_defn([AT_line])' \
+  "AS_ESCAPE(m4_dquote(m4_defn([AT_description])))" m4_format(["%*s"],
+  m4_max(0, m4_eval(47 - m4_qlen(m4_defn([AT_description])))), [])m4_if(
+  AT_banner_ordinal, [0], [], [ AT_banner_ordinal])
+m4_divert_push([TEST_SCRIPT])dnl
+])
+
+
+# AT_FAIL_IF(SHELL-EXPRESSION)
+# ----------------------------
+# Make the test die with hard failure if SHELL-EXPRESSION evaluates to
+# true (exitcode = 0).
+_AT_DEFINE_SETUP([AT_FAIL_IF],
+[dnl
+dnl Try to limit the amount of conditionals that we emit.
+m4_case([$1],
+      [], [],
+      [false], [],
+      [:], [_AT_CHECK_EXIT([], [99])],
+      [true], [_AT_CHECK_EXIT([], [99])],
+      [_AT_CHECK_EXIT([$1], [99])])])
+
+
+# AT_SKIP_IF(SHELL-EXPRESSION)
+# ----------------------------
+# Skip the rest of the group if SHELL-EXPRESSION evaluates to true
+# (exitcode = 0).
+_AT_DEFINE_SETUP([AT_SKIP_IF],
+[dnl
+dnl Try to limit the amount of conditionals that we emit.
+m4_case([$1],
+      [], [],
+      [false], [],
+      [:], [_AT_CHECK_EXIT([], [77])],
+      [true], [_AT_CHECK_EXIT([], [77])],
+      [_AT_CHECK_EXIT([$1], [77])])])
+
+
+# AT_XFAIL_IF(SHELL-EXPRESSION)
+# -----------------------------
+# Set up the test to be expected to fail if SHELL-EXPRESSION evaluates to
+# true (exitcode = 0).
+_AT_DEFINE_SETUP([AT_XFAIL_IF],
+[dnl
+dnl Try to limit the amount of conditionals that we emit.
+m4_case([$1],
+      [], [],
+      [false], [],
+      [:], [m4_define([AT_xfail], [at_xfail=yes])],
+      [true], [m4_define([AT_xfail], [at_xfail=yes])],
+      [m4_append([AT_xfail], [
+      $1 && at_xfail=yes])])])
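+
+# Illustrative use inside a test group (the conditions shown are
+# hypothetical):
+#
+#   AT_SKIP_IF([test ! -x /usr/bin/perl])
+#   AT_XFAIL_IF([test "$CC" = tcc])
+#   AT_FAIL_IF([test ! -f required-fixture])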
+
+
+# AT_KEYWORDS(KEYWORDS)
+# ---------------------
+# Declare a list of keywords associated with the current test group.
+# Since the -k option is case-insensitive, the list is stored in lower case
+# to avoid duplicates that differ only by case.
+_AT_DEFINE_SETUP([AT_KEYWORDS],
+[m4_append_uniq_w([AT_keywords], m4_tolower(_m4_expand([$1
+])))])
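+
+# For example, `AT_KEYWORDS([autom4te m4sh])' inside a group lets
+# `./testsuite -k m4sh' select that group.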
+
+
+# AT_CAPTURE_FILE(FILE)
+# ---------------------
+# If the current test group does not behave as expected, save the contents of
+# FILE in the test suite log.
+_AT_DEFINE_SETUP([AT_CAPTURE_FILE],
+[m4_append_uniq([AT_capture_files], ["$1"], [ \
+])])
+
+
+# AT_CLEANUP
+# ----------
+# Complete a group of related tests.
+_AT_DEFINE_INIT([AT_CLEANUP],
+[m4_ifdef([AT_ingroup], [AT_ingroup[]_m4_undefine([AT_ingroup])],
+  [m4_fatal([$0: missing AT_SETUP detected])])dnl
+m4_append([AT_help_all],
+m4_defn([AT_ordinal]);m4_defn([AT_line]);m4_defn([AT_description]);dnl
+m4_ifdef([AT_keywords], [m4_defn([AT_keywords])]);
+)dnl
+m4_divert_pop([TEST_SCRIPT])dnl Back to TEST_GROUPS
+AT_xfail
+(
+  AS_ECHO(["AT_ordinal. $at_setup_line: testing $at_desc ..."])
+  $at_traceon
+m4_undivert([TEST_SCRIPT])dnl Insert the code here
+  set +x
+  $at_times_p && times >"$at_times_file"
+) AS_MESSAGE_LOG_FD>&1 2>&1 AT_JOB_FIFO_OUT_FD>&- | eval $at_tee_pipe
+read at_status <"$at_status_file"
+[#AT_STOP_]AT_ordinal
+m4_divert_pop([TEST_GROUPS])dnl Back to KILL.
+])# AT_CLEANUP
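+
+# A minimal group, as it might appear in a user-level `.at' file (the
+# description and command are illustrative):
+#
+#   AT_SETUP([frobnicate handles empty input])
+#   AT_KEYWORDS([frobnicate])
+#   AT_CHECK([frobnicate </dev/null], [0], [], [])
+#   AT_CLEANUP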
+
+
+# AT_BANNER([TEXT])
+# -----------------
+# Start a category of related test groups.  If multiple groups are executed,
+# output TEXT as a banner without any shell expansion, prior to any test
+# from the category.  If TEXT is empty, no banner is printed.
+_AT_DEFINE_INIT([AT_BANNER],
+[m4_ifdef([AT_ingroup], [m4_fatal([$0: nested AT_SETUP detected])])dnl
+m4_define([AT_banner_ordinal], m4_incr(AT_banner_ordinal))
+m4_divert_text([BANNERS],
+[@%:@ Banner AT_banner_ordinal. AT_LINE
+@%:@ Category starts at test group m4_incr(AT_ordinal).
+at_banner_text_[]AT_banner_ordinal="AS_ESCAPE([$1])"])dnl
+])# AT_BANNER
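+
+# For example, `AT_BANNER([String handling.])' prints that heading before
+# the groups that follow it, provided more than one group is run.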
+
+
+# AT_DATA(FILE, CONTENTS)
+# -----------------------
+# Initialize an input data FILE with given CONTENTS, which should be
+# empty or end with a newline.
+# This macro is not robust to active symbols in CONTENTS *on purpose*.
+# If you don't want CONTENTS to be evaluated, quote it twice.
+_AT_DEFINE_SETUP([AT_DATA],
+[m4_if([$2], [], [: >$1],
+       [$2], [[]], [: >$1],
+[cat >$1 <<'_ATEOF'
+$2[]_ATEOF
+])])
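+
+# For illustration, a minimal sketch; the double quoting keeps the contents
+# from being expanded as m4 input (the file name is a placeholder):
+#
+#   AT_DATA([input.txt], [[first line
+#   second line
+#   ]])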
+
+
+# AT_CHECK(COMMANDS, [STATUS = 0], STDOUT, STDERR,
+#          [RUN-IF-FAIL], [RUN-IF-PASS])
+# ------------------------------------------------
+# Execute a test by performing given shell COMMANDS.  These commands
+# should normally exit with STATUS, while producing expected STDOUT and
+# STDERR contents.  Shell metacharacters in STDOUT and STDERR are
+# _not_ processed by the shell, but are treated as string literals.
+#
+# STATUS, STDOUT, and STDERR are not checked if equal to `ignore'.
+#
+# If STDOUT is `expout', then stdout is compared to the content of the file
+# `expout'.  Likewise for STDERR and `experr'.
+#
+# If STDOUT is `stdout', then the stdout is left in the file `stdout',
+# likewise for STDERR and `stderr'.  Don't do this:
+#
+#    AT_CHECK([command >out])
+#    # Some checks on `out'
+#
+# do this instead:
+#
+#    AT_CHECK([command], [], [stdout])
+#    # Some checks on `stdout'
+#
+# You might wonder why you can't just use `ignore', then directly use stdout
+# and stderr left by the test suite:
+#
+#    AT_CHECK([command], [], [ignore])
+#    AT_CHECK([check stdout])
+#
+# If the test suite always captured data in the file `stdout', then the
+# second command would be trying to read from and write to the same file, with
+# undefined behavior.  Therefore, the test suite actually captures data in
+# an internal file of a different name, and only creates `stdout' when
+# explicitly requested.
+#
+# Any line of stderr starting with leading blanks and a `+' is filtered
+# out, since most shells, when tracing, include subshell traces in stderr.
+# This may cause spurious failures when the test suite is run with `-x'.
+#
+_AT_DEFINE_SETUP([AT_CHECK],
+[_AT_CHECK(m4_expand([$1]), [$2], AS_ESCAPE(m4_dquote(m4_expand([$3]))),
+  AS_ESCAPE(m4_dquote(m4_expand([$4]))), [$5], [$6])])
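+
+# For illustration, two minimal sketches of the conventions described above:
+#
+#   # Exact expected stdout given inline (note the trailing newline).
+#   AT_CHECK([echo hello], [0], [hello
+#   ])
+#
+#   # Expected stdout kept in a generated `expout' file.
+#   AT_DATA([expout], [[hello
+#   ]])
+#   AT_CHECK([echo hello], [0], [expout])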
+
+# AT_CHECK_UNQUOTED(COMMANDS, [STATUS = 0], STDOUT, STDERR,
+#                   [RUN-IF-FAIL], [RUN-IF-PASS])
+# ---------------------------------------------------------
+# Like AT_CHECK, but do not AS_ESCAPE shell metacharacters in the STDOUT
+# and STDERR arguments before running the comparison.
+_AT_DEFINE_SETUP([AT_CHECK_UNQUOTED],
+[_AT_CHECK(m4_expand([$1]), [$2], AS_ESCAPE(m4_dquote(m4_expand([$3])), [""]),
+  AS_ESCAPE(m4_dquote(m4_expand([$4])), [""]), [$5], [$6])])
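+
+# For illustration, a minimal sketch: the expected output may reference a
+# shell variable, which is expanded when the comparison is performed.
+#
+#   AT_CHECK_UNQUOTED([echo "$HOME"], [0], [$HOME
+#   ])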
+
+# AT_CHECK_NOESCAPE(COMMANDS, [STATUS = 0], STDOUT, STDERR,
+#                   [RUN-IF-FAIL], [RUN-IF-PASS])
+# ---------------------------------------------------------
+# Obsolete spelling of AT_CHECK_UNQUOTED.
+m4_define([AT_CHECK_NOESCAPE],
+[m4_warn([obsolete], [consider using AT_CHECK_UNQUOTED instead of $0])]dnl
+[_AT_CHECK(m4_expand([$1]), [$2], m4_expand([$3]),
+  m4_expand([$4]), [$5], [$6])])
+
+
+# _AT_DECIDE_TRACEABLE(COMMANDS)
+# ------------------------------
+# Worker for _AT_CHECK that expands to shell code.  If COMMANDS are safe to
+# trace with `set -x', the shell code will evaluate to true.  Otherwise,
+# the shell code will print a message stating an aspect of COMMANDS that makes
+# tracing them unsafe, and evaluate to false.
+#
+# Tracing COMMANDS is not safe if they contain a command that spans multiple
+# lines.  When the test suite user passes `-x' or `--trace', the test suite
+# precedes every command with a `set -x'.  Since most tests expect a specific
+# stderr, if only to confirm that it is empty, the test suite filters ^+ from
+# the captured stderr before comparing with the expected stderr.  If a command
+# spans multiple lines, so will its trace, but a `+' only prefixes the first
+# line of that trace:
+#
+# $ echo 'foo
+# bar'
+# => stdout
+# foo
+# bar
+# => stderr
+# + foo
+# bar
+#
+# In a subset of cases, one could filter such extended shell traces from
+# stderr.  Since test commands spanning several lines are rare, I chose
+# instead to simply not trace COMMANDS that could yield multiple trace lines.
+# Distinguishing such COMMANDS became the task at hand.
+#
+# These features may cause a shell command to span multiple lines:
+#
+# (a) A quoted literal newline.
+# Example:
+#   echo foo'
+#   'bar
+# M4 is a hostile language for the job of parsing COMMANDS to determine whether
+# each literal newline is quoted, so we simply disable tracing for all COMMANDS
+# that bear literal newlines.
+#
+# (b) A command substitution not subject to word splitting.
+# Example:
+#   var=$(printf 'foo\nbar')
+# Example:
+#   echo "`printf 'foo\\nbar`"
+# One cannot know in general the number of lines a command substitution will
+# yield without executing the substituted command.  As such, we disable tracing
+# for all COMMANDS containing these constructs.
+#
+# (c) A parameter expansion not subject to word splitting.
+# Example:
+#   var=foo'
+#   'bar
+#   echo "$var"
+# Parameter expansions appear in COMMANDS with much greater frequency than do
+# newlines and command substitutions, so disabling tracing for all such
+# COMMANDS would much more substantially devalue `testsuite -x'.  To determine
+# which parameter expansions yield multiple lines, we escape all ``', `"',
+# and `\' in a copy of COMMANDS and expand that string within double quotes
+# at runtime.  If the result of that expansion contains multiple lines, the
+# test suite disables tracing for the command in question.
+#
+# This method leads the test suite to expand some parameters that the shell
+# itself will never expand due to single-quotes or backslash escapes.  This is
+# not a problem for `$foo' expansions, which will simply yield the empty string
+# or some unrelated value.  A `${...}' expansion could actually form invalid
+# shell code, however; consider `${=foo}'.  Therefore, we disable tracing for
+# all COMMANDS containing `${...}'.  This affects few COMMANDS.
+#
+# This macro falls in a very hot path; the Autoconf test suite expands it 1640
+# times as of this writing.  To give a sense of the impact of the heuristics I
+# just described, the test suite preemptively disables tracing for 31 of those,
+# and 268 contain parameter expansions that require runtime evaluation.  The
+# balance is always safe to trace.
+m4_define([_AT_DECIDE_TRACEABLE],
+dnl Utility macro.
+dnl
+dnl Examine COMMANDS for a reason to never trace COMMANDS.
+[m4_pushdef([at_reason],
+m4_cond([m4_eval(m4_index([$1], [`]) >= 0)], [1],
+		[[a `...` command substitution]],
+	[m4_eval(m4_index([$1], [$(]) >= 0)], [1],
+		[[a $(...) command substitution]],
+	[m4_eval(m4_index([$1], [${]) >= 0)], [1],
+		[[a ${...} parameter expansion]],
+	[m4_eval(m4_index([$1], m4_newline) >= 0)], [1],
+		[[an embedded newline]],
+	[m4_eval(m4_bregexp([$1], [[^|]|[^|]]) >= 0)], [1],
+		[[a shell pipeline]],
+	[]))]dnl No reason.
+[m4_if(m4_index(_m4_defn([at_reason]), [a]), [0],]dnl
+dnl We know at build time that tracing COMMANDS is never safe.
+[[at_fn_check_prepare_notrace '_m4_defn([at_reason])'],
+       m4_index([$1], [$]), [-1],]dnl
+dnl We know at build time that tracing COMMANDS is always safe.
+[[at_fn_check_prepare_trace],]dnl
+dnl COMMANDS may contain parameter expansions; expand them at runtime.
+[[at_fn_check_prepare_dynamic "AS_ESCAPE([[$1]], [`\"])"])[]]dnl
+[_m4_popdef([at_reason])])
+
+
+# AT_DIFF_STDERR/AT_DIFF_STDOUT
+# -----------------------------
+# These are subroutines of AT_CHECK.  Using indirect dispatch is a tad
+# faster than using m4_case, and these are called very frequently.
+m4_define([AT_DIFF_STDERR(stderr)],
+	  [echo stderr:; tee stderr <"$at_stderr"])
+m4_define([AT_DIFF_STDERR(stderr-nolog)],
+	  [echo stderr captured; cp "$at_stderr" stderr])
+m4_define([AT_DIFF_STDERR(ignore)],
+	  [echo stderr:; cat "$at_stderr"])
+m4_define([AT_DIFF_STDERR(ignore-nolog)])
+m4_define([AT_DIFF_STDERR(experr)],
+	  [$at_diff experr "$at_stderr" || at_failed=:])
+m4_define([AT_DIFF_STDERR()],
+	  [at_fn_diff_devnull "$at_stderr" || at_failed=:])
+
+m4_define([AT_DIFF_STDOUT(stdout)],
+	  [echo stdout:; tee stdout <"$at_stdout"])
+m4_define([AT_DIFF_STDOUT(stdout-nolog)],
+	  [echo stdout captured; cp "$at_stdout" stdout])
+m4_define([AT_DIFF_STDOUT(ignore)],
+	  [echo stdout:; cat "$at_stdout"])
+m4_define([AT_DIFF_STDOUT(ignore-nolog)])
+m4_define([AT_DIFF_STDOUT(expout)],
+	  [$at_diff expout "$at_stdout" || at_failed=:])
+m4_define([AT_DIFF_STDOUT()],
+	  [at_fn_diff_devnull "$at_stdout" || at_failed=:])
+
+# _AT_CHECK(COMMANDS, [STATUS = 0], STDOUT, STDERR,
+#           [RUN-IF-FAIL], [RUN-IF-PASS])
+# -------------------------------------------------
+# Worker for AT_CHECK and AT_CHECK_UNQUOTED, with COMMANDS, STDOUT, and
+# STDERR pre-expanded.
+#
+# Implementation Details
+# ----------------------
+# Ideally, we would like to run
+#
+#    ( $at_traceon; COMMANDS >at-stdout 2> at-stderr )
+#
+# but we must group COMMANDS as it is not limited to a single command, and
+# then the shells will save the traces in at-stderr. So we have to filter
+# them out when checking stderr, and we must send them into the test suite's
+# stderr to honor -x properly. Since only the first line of the trace of a
+# multiline command starts with a `+', and I know of no straightforward way to
+# filter out the unadorned trace lines, we disable shell tracing entirely for
+# commands that could span multiple lines.
+#
+# Limiting COMMANDS to a single command is not good either, since then
+# the user herself would use {} or (), and then we face the same problem.
+#
+# But then, there is no point in running
+#
+#   ( $at_traceon { $1 ; } >at-stdout 2>at-stderr )
+#
+# instead of the simpler
+#
+#  ( $at_traceon; $1 ) >at-stdout 2>at-stderr
+#
+# Note that we truncate and append to the output files, to avoid losing
+# output from multiple concurrent processes, e.g., an inner testsuite
+# with parallel jobs.
+m4_define([_AT_CHECK],
+[m4_define([AT_ingroup])]dnl
+[{ set +x
+AS_ECHO(["$at_srcdir/AT_LINE: AS_ESCAPE([[$1]])"])
+_AT_DECIDE_TRACEABLE([$1]) _AT_LINE_ESCAPED
+( $at_check_trace; [$1]
+) >>"$at_stdout" 2>>"$at_stderr"
+at_status=$? at_failed=false
+$at_check_filter
+m4_ifdef([AT_DIFF_STDERR($4)], [m4_indir([AT_DIFF_STDERR($4)])],
+  [echo >>"$at_stderr"; AS_ECHO([["$4"]]) | \
+  $at_diff - "$at_stderr" || at_failed=:])
+m4_ifdef([AT_DIFF_STDOUT($3)], [m4_indir([AT_DIFF_STDOUT($3)])],
+  [echo >>"$at_stdout"; AS_ECHO([["$3"]]) | \
+  $at_diff - "$at_stdout" || at_failed=:])
+m4_if([$2], [ignore], [at_fn_check_skip],
+  [at_fn_check_status m4_default([$2], [0])]) $at_status "$at_srcdir/AT_LINE"
+m4_ifvaln([$5$6], [AS_IF($at_failed, [$5], [$6])])]dnl
+[$at_failed && at_fn_log_failure AT_capture_files
+$at_traceon; }
+])# _AT_CHECK
+
+# _AT_CHECK_EXIT(COMMANDS, [EXIT-STATUS-IF-PASS])
+# -----------------------------------------------
+# Minimal version of _AT_CHECK for AT_SKIP_IF and AT_FAIL_IF.
+m4_define([_AT_CHECK_EXIT],
+[m4_define([AT_ingroup])]dnl
+[AS_ECHO(_AT_LINE_ESCAPED) >"$at_check_line_file"
+m4_ifval([$1], [($1) \
+  && ])at_fn_check_skip $2 "$at_srcdir/AT_LINE"])# _AT_CHECK_EXIT
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/specific.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/specific.m4
new file mode 100644
index 0000000..11eb88b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/autotest/specific.m4
@@ -0,0 +1,74 @@
+# This file is part of Autoconf.                          -*- Autoconf -*-
+# M4 macros used in running tests using third-party testing tools.
+m4_define([_AT_COPYRIGHT_YEARS],
+[Copyright (C) 2009, 2010 Free Software Foundation, Inc.])
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+
+## ------------------------ ##
+## Erlang EUnit unit tests. ##
+## ------------------------ ##
+
+# AT_CHECK_EUNIT(MODULE, SPEC, [ERLFLAGS], [RUN-IF-FAIL], [RUN-IF-PASS])
+# ----------------------------------------------------------------------
+# Check that the EUnit test specification SPEC passes.  The optional
+# ERLFLAGS are passed on the Erlang interpreter command line when
+# executing the test.  The test is executed from an automatically
+# generated Erlang module named MODULE. Each call to this macro should
+# have a distinct MODULE name within each test group, to ease
+# debugging.
+# An Erlang/OTP version which contains the eunit library must be
+# installed, in order to execute this macro in a test suite.  The ERL,
+# ERLC, and ERLCFLAGS variables must be defined in atconfig,
+# typically by using the AC_ERLANG_PATH_ERL and AC_ERLANG_PATH_ERLC
+# Autoconf macros.
+_AT_DEFINE_SETUP([AT_CHECK_EUNIT],
+[AT_SKIP_IF([test ! -f "$ERL" || test ! -f "$ERLC"])
+## A wrapper to EUnit, to exit the Erlang VM with the right exit code:
+AT_DATA([$1.erl],
+[[-module($1).
+-export([test/0, test/1]).
+test() -> test([]).
+test(Options) ->
+  TestSpec = $2,
+  ReturnValue = case code:load_file(eunit) of
+    {module, _} -> case eunit:test(TestSpec, Options) of
+        ok -> "0\n"; %% test passes
+        _  -> "1\n"  %% test fails
+      end;
+    _ -> "77\n" %% EUnit not found, test skipped
+  end,
+  file:write_file("$1.result", ReturnValue),
+  init:stop().
+]])
+AT_CHECK(["$ERLC" $ERLCFLAGS -b beam $1.erl])
+## Make EUnit verbose when testsuite is verbose:
+if test -z "$at_verbose"; then
+  at_eunit_options="verbose"
+else
+  at_eunit_options=""
+fi
+AT_CHECK(["$ERL" $3 -s $1 test $at_eunit_options -noshell], [0], [ignore], [],
+         [$4], [$5])
+AT_CAPTURE_FILE([$1.result])
+AT_CHECK([test -f "$1.result" && (exit `cat "$1.result"`)])
+])
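+
+# For illustration, a minimal sketch (the module name, test specification,
+# and ebin path are placeholders):
+#
+#   AT_CHECK_EUNIT([t_mylib], [{module, mylib}],
+#                  [-pa "${abs_top_builddir}/ebin"])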
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/foreach.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/foreach.m4
new file mode 100644
index 0000000..0109abf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/foreach.m4
@@ -0,0 +1,362 @@
+#                                                  -*- Autoconf -*-
+# This file is part of Autoconf.
+# foreach-based replacements for recursive functions.
+# Speeds up GNU M4 1.4.x by avoiding quadratic $@ recursion, but penalizes
+# GNU M4 1.6 by requiring more memory and macro expansions.
+#
+# Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Eric Blake.
+
+# In M4 1.4.x, every byte of $@ is rescanned.  This means that an
+# algorithm on n arguments that recurses with one less argument each
+# iteration will scan n * (n + 1) / 2 arguments, for O(n^2) time.  In
+# M4 1.6, this was fixed so that $@ is only scanned once, then
+# back-references are made to information stored about the scan.
+# Thus, n iterations need only scan n arguments, for O(n) time.
+# Additionally, in M4 1.4.x, recursive algorithms did not clean up
+# memory very well, requiring O(n^2) memory rather than O(n) for n
+# iterations.
+#
+# This file is designed to overcome the quadratic nature of $@
+# recursion by writing a variant of m4_foreach that uses m4_for rather
+# than $@ recursion to operate on the list.  This involves more macro
+# expansions, but avoids the need to rescan a quadratic number of
+# arguments, making these replacements very attractive for M4 1.4.x.
+# On the other hand, in any version of M4, expanding additional macros
+# costs additional time; therefore, in M4 1.6, where $@ recursion uses
+# fewer macros, these replacements actually pessimize performance.
+# Additionally, the use of $10 to mean the tenth argument violates
+# POSIX; although all versions of m4 1.4.x support this meaning, a
+# future m4 version may switch to take it as the first argument
+# concatenated with a literal 0, so the implementations in this file
+# are not future-proof.  Thus, this file is conditionally included as
+# part of m4_init(), only when it is detected that M4 probably has
+# quadratic behavior (i.e., it lacks the macro __m4_version__).
+#
+# Please keep this file in sync with m4sugar.m4.
+
+# _m4_foreach(PRE, POST, IGNORED, ARG...)
+# ---------------------------------------
+# Form the common basis of the m4_foreach and m4_map macros.  For each
+# ARG, expand PRE[ARG]POST[].  The IGNORED argument makes recursion
+# easier, and must be supplied explicitly rather than left implicit.
+#
+# This version minimizes the number of times that $@ is evaluated by
+# using m4_for to generate a boilerplate into _m4_f then passing $@ to
+# that temporary macro.  Thus, the recursion is done in m4_for without
+# reparsing any user input, and is not quadratic.  For an idea of how
+# this works, note that m4_foreach(i,[1,2],[i]) calls
+#   _m4_foreach([m4_define([i],],[)i],[],[1],[2])
+# which defines _m4_f:
+#   $1[$4]$2[]$1[$5]$2[]_m4_popdef([_m4_f])
+# then calls _m4_f([m4_define([i],],[)i],[],[1],[2]) for a net result:
+#   m4_define([i],[1])i[]m4_define([i],[2])i[]_m4_popdef([_m4_f]).
+m4_define([_m4_foreach],
+[m4_if([$#], [3], [],
+       [m4_pushdef([_m4_f], _m4_for([4], [$#], [1],
+   [$0_([1], [2],], [)])[_m4_popdef([_m4_f])])_m4_f($@)])])
+
+m4_define([_m4_foreach_],
+[[$$1[$$3]$$2[]]])
+
+# m4_case(SWITCH, VAL1, IF-VAL1, VAL2, IF-VAL2, ..., DEFAULT)
+# -----------------------------------------------------------
+# Find the first VAL that SWITCH matches, and expand the corresponding
+# IF-VAL.  If there are no matches, expand DEFAULT.
+#
+# Use m4_for to create a temporary macro in terms of a boilerplate
+# m4_if with final cleanup.  If $# is even, we have DEFAULT; if it is
+# odd, then rounding the last $# up in the temporary macro is
+# harmless.  For example, both m4_case(1,2,3,4,5) and
+# m4_case(1,2,3,4,5,6) result in the intermediate _m4_case being
+#   m4_if([$1],[$2],[$3],[$1],[$4],[$5],_m4_popdef([_m4_case])[$6])
+m4_define([m4_case],
+[m4_if(m4_eval([$# <= 2]), [1], [$2],
+[m4_pushdef([_$0], [m4_if(]_m4_for([2], m4_eval([($# - 1) / 2 * 2]), [2],
+     [_$0_(], [)])[_m4_popdef(
+	 [_$0])]m4_dquote($m4_eval([($# + 1) & ~1]))[)])_$0($@)])])
+
+m4_define([_m4_case_],
+[$0_([1], [$1], m4_incr([$1]))])
+
+m4_define([_m4_case__],
+[[[$$1],[$$2],[$$3],]])
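+
+# For illustration, a minimal sketch of dispatching on a macro argument
+# inside some macro body (the branch values are placeholders):
+#
+#   m4_case([$1],
+#           [yes], [enabled],
+#           [no],  [disabled],
+#           [m4_fatal([unexpected value: $1])])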
+
+# m4_bmatch(SWITCH, RE1, VAL1, RE2, VAL2, ..., DEFAULT)
+# -----------------------------------------------------
+# m4 equivalent of
+#
+# if (SWITCH =~ RE1)
+#   VAL1;
+# elif (SWITCH =~ RE2)
+#   VAL2;
+# elif ...
+#   ...
+# else
+#   DEFAULT
+#
+# We build the temporary macro _m4_b:
+#   m4_define([_m4_b], _m4_defn([_m4_bmatch]))_m4_b([$1], [$2], [$3])...
+#   _m4_b([$1], [$m-1], [$m])_m4_b([], [], [$m+1]_m4_popdef([_m4_b]))
+# then invoke m4_unquote(_m4_b($@)), for concatenation with later text.
+m4_define([m4_bmatch],
+[m4_if([$#], 0, [m4_fatal([$0: too few arguments: $#])],
+       [$#], 1, [m4_fatal([$0: too few arguments: $#: $1])],
+       [$#], 2, [$2],
+       [m4_pushdef([_m4_b], [m4_define([_m4_b],
+  _m4_defn([_$0]))]_m4_for([3], m4_eval([($# + 1) / 2 * 2 - 1]),
+  [2], [_$0_(], [)])[_m4_b([], [],]m4_dquote([$]m4_eval(
+  [($# + 1) / 2 * 2]))[_m4_popdef([_m4_b]))])m4_unquote(_m4_b($@))])])
+
+m4_define([_m4_bmatch],
+[m4_if(m4_bregexp([$1], [$2]), [-1], [], [[$3]m4_define([$0])])])
+
+m4_define([_m4_bmatch_],
+[$0_([1], m4_decr([$1]), [$1])])
+
+m4_define([_m4_bmatch__],
+[[_m4_b([$$1], [$$2], [$$3])]])
+
+
+# m4_cond(TEST1, VAL1, IF-VAL1, TEST2, VAL2, IF-VAL2, ..., [DEFAULT])
+# -------------------------------------------------------------------
+# Similar to m4_if, except that each TEST is expanded when encountered.
+# If the expansion of TESTn matches the string VALn, the result is IF-VALn.
+# The result is DEFAULT if no tests passed.  This macro allows
+# short-circuiting of expensive tests, where it pays to arrange quick
+# filter tests to run first.
+#
+# m4_cond already guarantees either 3*n or 3*n + 1 arguments, 1 <= n.
+# We only have to speed up _m4_cond, by building the temporary _m4_c:
+#   m4_define([_m4_c], _m4_defn([m4_unquote]))_m4_c([m4_if(($1), [($2)],
+#   [[$3]m4_define([_m4_c])])])_m4_c([m4_if(($4), [($5)],
+#   [[$6]m4_define([_m4_c])])])..._m4_c([m4_if(($m-2), [($m-1)],
+#   [[$m]m4_define([_m4_c])])])_m4_c([[$m+1]]_m4_popdef([_m4_c]))
+# We invoke m4_unquote(_m4_c($@)), for concatenation with later text.
+m4_define([_m4_cond],
+[m4_pushdef([_m4_c], [m4_define([_m4_c],
+  _m4_defn([m4_unquote]))]_m4_for([2], m4_eval([$# / 3 * 3 - 1]), [3],
+  [$0_(], [)])[_m4_c(]m4_dquote(m4_dquote(
+  [$]m4_eval([$# / 3 * 3 + 1])))[_m4_popdef([_m4_c]))])m4_unquote(_m4_c($@))])
+
+m4_define([_m4_cond_],
+[$0_(m4_decr([$1]), [$1], m4_incr([$1]))])
+
+m4_define([_m4_cond__],
+[[_m4_c([m4_if(($$1), [($$2)], [[$$3]m4_define([_m4_c])])])]])
+
+# m4_bpatsubsts(STRING, RE1, SUBST1, RE2, SUBST2, ...)
+# ----------------------------------------------------
+# m4 equivalent of
+#
+#   $_ = STRING;
+#   s/RE1/SUBST1/g;
+#   s/RE2/SUBST2/g;
+#   ...
+#
+# m4_bpatsubsts already validated an odd number of arguments; we only
+# need to speed up _m4_bpatsubsts.  To avoid nesting, we build the
+# temporary _m4_p:
+#   m4_define([_m4_p], [$1])m4_define([_m4_p],
+#   m4_bpatsubst(m4_dquote(_m4_defn([_m4_p])), [$2], [$3]))m4_define([_m4_p],
+#   m4_bpatsubst(m4_dquote(_m4_defn([_m4_p])), [$4], [$5]))m4_define([_m4_p],...
+#   m4_bpatsubst(m4_dquote(_m4_defn([_m4_p])), [$m-1], [$m]))m4_unquote(
+#   _m4_defn([_m4_p])_m4_popdef([_m4_p]))
+m4_define([_m4_bpatsubsts],
+[m4_pushdef([_m4_p], [m4_define([_m4_p],
+  ]m4_dquote([$]1)[)]_m4_for([3], [$#], [2], [$0_(],
+  [)])[m4_unquote(_m4_defn([_m4_p])_m4_popdef([_m4_p]))])_m4_p($@)])
+
+m4_define([_m4_bpatsubsts_],
+[$0_(m4_decr([$1]), [$1])])
+
+m4_define([_m4_bpatsubsts__],
+[[m4_define([_m4_p],
+m4_bpatsubst(m4_dquote(_m4_defn([_m4_p])), [$$1], [$$2]))]])
+
+# m4_shiftn(N, ...)
+# -----------------
+# Returns ... shifted N times.  Useful for recursive "varargs" constructs.
+#
+# m4_shiftn already validated arguments; we only need to speed up
+# _m4_shiftn.  If N is 3, then we build the temporary _m4_s, defined as
+#   ,[$5],[$6],...,[$m]_m4_popdef([_m4_s])
+# before calling m4_shift(_m4_s($@)).
+m4_define([_m4_shiftn],
+[m4_if(m4_incr([$1]), [$#], [], [m4_pushdef([_m4_s],
+  _m4_for(m4_eval([$1 + 2]), [$#], [1],
+  [[,]m4_dquote($], [)])[_m4_popdef([_m4_s])])m4_shift(_m4_s($@))])])
+
+# m4_do(STRING, ...)
+# ------------------
+# This macro invokes all its arguments (in sequence, of course).  It is
+# useful for making your macros more structured and readable by dropping
+# unnecessary dnl's and having the macros indented properly.
+#
+# Here, we use the temporary macro _m4_do, defined as
+#   $1[]$2[]...[]$n[]_m4_popdef([_m4_do])
+m4_define([m4_do],
+[m4_if([$#], [0], [],
+       [m4_pushdef([_$0], _m4_for([1], [$#], [1],
+		   [$], [[[]]])[_m4_popdef([_$0])])_$0($@)])])
+
+# m4_dquote_elt(ARGS)
+# -------------------
+# Return ARGS as an unquoted list of double-quoted arguments.
+#
+# _m4_foreach to the rescue.
+m4_define([m4_dquote_elt],
+[m4_if([$#], [0], [], [[[$1]]_m4_foreach([,m4_dquote(], [)], $@)])])
+
+# m4_reverse(ARGS)
+# ----------------
+# Output ARGS in reverse order.
+#
+# Invoke _m4_r($@) with the temporary _m4_r built as
+#   [$m], [$m-1], ..., [$2], [$1]_m4_popdef([_m4_r])
+m4_define([m4_reverse],
+[m4_if([$#], [0], [], [$#], [1], [[$1]],
+[m4_pushdef([_m4_r], [[$$#]]_m4_for(m4_decr([$#]), [1], [-1],
+    [[, ]m4_dquote($], [)])[_m4_popdef([_m4_r])])_m4_r($@)])])
+
+
+# m4_map_args_pair(EXPRESSION, [END-EXPR = EXPRESSION], ARG...)
+# -------------------------------------------------------------
+# Perform a pairwise grouping of consecutive ARGs, by expanding
+# EXPRESSION([ARG1], [ARG2]).  If there are an odd number of ARGs, the
+# final argument is expanded with END-EXPR([ARGn]).
+#
+# Build the temporary macro _m4_map_args_pair, with the $2([$m+1])
+# only output if $# is odd:
+#   $1([$3], [$4])[]$1([$5], [$6])[]...$1([$m-1],
+#   [$m])[]m4_default([$2], [$1])([$m+1])[]_m4_popdef([_m4_map_args_pair])
+m4_define([m4_map_args_pair],
+[m4_if([$#], [0], [m4_fatal([$0: too few arguments: $#])],
+       [$#], [1], [m4_fatal([$0: too few arguments: $#: $1])],
+       [$#], [2], [],
+       [$#], [3], [m4_default([$2], [$1])([$3])[]],
+       [m4_pushdef([_$0], _m4_for([3],
+   m4_eval([$# / 2 * 2 - 1]), [2], [_$0_(], [)])_$0_end(
+   [1], [2], [$#])[_m4_popdef([_$0])])_$0($@)])])
+
+m4_define([_m4_map_args_pair_],
+[$0_([1], [$1], m4_incr([$1]))])
+
+m4_define([_m4_map_args_pair__],
+[[$$1([$$2], [$$3])[]]])
+
+m4_define([_m4_map_args_pair_end],
+[m4_if(m4_eval([$3 & 1]), [1], [[m4_default([$$2], [$$1])([$$3])[]]])])
+
+# m4_join(SEP, ARG1, ARG2...)
+# ---------------------------
+# Produce ARG1SEPARG2...SEPARGn.  Avoid back-to-back SEP when a given ARG
+# is the empty string.  No expansion is performed on SEP or ARGs.
+#
+# Use a self-modifying separator, since we don't know how many
+# arguments might be skipped before a separator is first printed, but
+# be careful if the separator contains $.  _m4_foreach to the rescue.
+m4_define([m4_join],
+[m4_pushdef([_m4_sep], [m4_define([_m4_sep], _m4_defn([m4_echo]))])]dnl
+[_m4_foreach([_$0([$1],], [)], $@)_m4_popdef([_m4_sep])])
+
+m4_define([_m4_join],
+[m4_if([$2], [], [], [_m4_sep([$1])[$2]])])
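+
+# For illustration: m4_join([, ], [one], [], [two]) expands to `one, two';
+# the empty argument does not produce a doubled separator.  Contrast with
+# m4_joinall below, where m4_joinall([, ], [one], [], [two]) yields
+# `one, , two'.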
+
+# m4_joinall(SEP, ARG1, ARG2...)
+# ------------------------------
+# Produce ARG1SEPARG2...SEPARGn.  An empty ARG results in back-to-back SEP.
+# No expansion is performed on SEP or ARGs.
+#
+# A bit easier than m4_join.  _m4_foreach to the rescue.
+m4_define([m4_joinall],
+[[$2]m4_if(m4_eval([$# <= 2]), [1], [],
+	   [_m4_foreach([$1], [], m4_shift($@))])])
+
+# m4_list_cmp(A, B)
+# -----------------
+# Compare the two lists of integer expressions A and B.
+#
+# m4_list_cmp takes care of any side effects; we only override
+# _m4_list_cmp_raw, where we can safely expand lists multiple times.
+# First, insert padding so that both lists are the same length; the
+# trailing +0 is necessary to handle a missing list.  Next, create a
+# temporary macro to perform pairwise comparisons until an inequality
+# is found.  For example, m4_list_cmp([1], [1,2]) creates _m4_cmp as
+#   m4_if(m4_eval([($1) != ($3)]), [1], [m4_cmp([$1], [$3])],
+#         m4_eval([($2) != ($4)]), [1], [m4_cmp([$2], [$4])],
+#         [0]_m4_popdef([_m4_cmp]))
+# then calls _m4_cmp([1+0], [0*2], [1], [2+0])
+m4_define([_m4_list_cmp_raw],
+[m4_if([$1], [$2], 0,
+       [_m4_list_cmp($1+0_m4_list_pad(m4_count($1), m4_count($2)),
+		     $2+0_m4_list_pad(m4_count($2), m4_count($1)))])])
+
+m4_define([_m4_list_pad],
+[m4_if(m4_eval($1 < $2), [1],
+       [_m4_for(m4_incr([$1]), [$2], [1], [,0*])])])
+
+m4_define([_m4_list_cmp],
+[m4_pushdef([_m4_cmp], [m4_if(]_m4_for(
+   [1], m4_eval([$# >> 1]), [1], [$0_(], [,]m4_eval([$# >> 1])[)])[
+      [0]_m4_popdef([_m4_cmp]))])_m4_cmp($@)])
+
+m4_define([_m4_list_cmp_],
+[$0_([$1], m4_eval([$1 + $2]))])
+
+m4_define([_m4_list_cmp__],
+[[m4_eval([($$1) != ($$2)]), [1], [m4_cmp([$$1], [$$2])],
+]])
+
+# m4_max(EXPR, ...)
+# m4_min(EXPR, ...)
+# -----------------
+# Return the decimal value of the maximum (or minimum) in a series of
+# integer expressions.
+#
+# _m4_foreach to the rescue; we only need to replace _m4_minmax.  Here,
+# we need a temporary macro to track the best answer so far, so that
+# the foreach expression is tractable.
+m4_define([_m4_minmax],
+[m4_pushdef([_m4_best], m4_eval([$2]))_m4_foreach(
+  [m4_define([_m4_best], $1(_m4_best,], [))], m4_shift($@))]dnl
+[_m4_best[]_m4_popdef([_m4_best])])
+
+# m4_set_add_all(SET, VALUE...)
+# -----------------------------
+# Add each VALUE into SET.  This is O(n) in the number of VALUEs, and
+# can be faster than calling m4_set_add for each VALUE.
+#
+# _m4_foreach to the rescue.  If no deletions have occurred, then
+# avoid the speed penalty of m4_set_add.
+m4_define([m4_set_add_all],
+[m4_if([$#], [0], [], [$#], [1], [],
+       [m4_define([_m4_set_size($1)], m4_eval(m4_set_size([$1])
+	  + m4_len(_m4_foreach(m4_ifdef([_m4_set_cleanup($1)],
+  [[m4_set_add]], [[_$0]])[([$1],], [)], $@))))])])
+
+m4_define([_m4_set_add_all],
+[m4_ifdef([_m4_set([$1],$2)], [],
+	  [m4_define([_m4_set([$1],$2)],
+		     [1])m4_pushdef([_m4_set([$1])], [$2])-])])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sh.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sh.m4
new file mode 100644
index 0000000..13ad849
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sh.m4
@@ -0,0 +1,2149 @@
+# This file is part of Autoconf.                          -*- Autoconf -*-
+# M4 sugar for common shell constructs.
+# Requires GNU M4 and M4sugar.
+#
+# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,
+# 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Akim Demaille, Pavel Roskin, Alexandre Oliva, Lars J. Aas
+# and many other people.
+
+
+# We heavily use m4's diversions both for the initializations and for
+# required macros, because in both cases we have to emit early in the
+# output something which is only discovered late.
+#
+#
+# KILL is only used to suppress output.
+#
+# - BINSH
+#   AC_REQUIRE'd #! /bin/sh line
+# - HEADER-REVISION
+#   RCS keywords etc.
+# - HEADER-COMMENT
+#   Purpose of the script etc.
+# - HEADER-COPYRIGHT
+#   Copyright notice(s)
+# - M4SH-SANITIZE
+#   M4sh's shell setup
+# - M4SH-INIT-FN
+#   M4sh initialization (shell functions)
+# - M4SH-INIT
+#   M4sh initialization (detection code)
+# - BODY
+#   The body of the script.
+
+
+# _m4_divert(DIVERSION-NAME)
+# --------------------------
+# Convert a diversion name into its number.  Otherwise, return
+# DIVERSION-NAME which is supposed to be an actual diversion number.
+# Of course it would be nicer to use m4_case here, instead of zillions
+# of little macros, but it then takes twice as long to run `autoconf'!
+m4_define([_m4_divert(BINSH)],             0)
+m4_define([_m4_divert(HEADER-REVISION)],   1)
+m4_define([_m4_divert(HEADER-COMMENT)],    2)
+m4_define([_m4_divert(HEADER-COPYRIGHT)],  3)
+m4_define([_m4_divert(M4SH-SANITIZE)],     4)
+m4_define([_m4_divert(M4SH-INIT-FN)],      5)
+m4_define([_m4_divert(M4SH-INIT)],         6)
+m4_define([_m4_divert(BODY)],           1000)
+
+# Aaarg.  Yet it starts with compatibility issues...  Libtool wants to
+# use NOTICE to insert its own LIBTOOL-INIT stuff.  People should ask
+# before diving into our internals :(
+m4_copy([_m4_divert(M4SH-INIT)], [_m4_divert(NOTICE)])
+
+
+
+## ------------------------- ##
+## 1. Sanitizing the shell.  ##
+## ------------------------- ##
+# Please maintain lexicographic sorting of this section, ignoring leading _.
+
+# AS_BOURNE_COMPATIBLE
+# --------------------
+# Try to be as Bourne and/or POSIX as possible.
+#
+# This does not set BIN_SH, due to the problems described in
+# <http://lists.gnu.org/archive/html/autoconf-patches/2006-03/msg00081.html>.
+# People who need BIN_SH should set it in their environment before invoking
+# configure; apparently this would include UnixWare, as described in
+# <http://lists.gnu.org/archive/html/bug-autoconf/2006-06/msg00025.html>.
+m4_define([AS_BOURNE_COMPATIBLE],
+[# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+_$0
+])
+
+# _AS_BOURNE_COMPATIBLE
+# ---------------------
+# This is the part of AS_BOURNE_COMPATIBLE which has to be repeated inside
+# each instance.
+m4_define([_AS_BOURNE_COMPATIBLE],
+[AS_IF([test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1],
+ [emulate sh
+  NULLCMD=:
+  [#] Pre-4.2 versions of Zsh do word splitting on ${1+"$[@]"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$[@]"}'='"$[@]"'
+  setopt NO_GLOB_SUBST],
+ [AS_CASE([`(set -o) 2>/dev/null`], [*posix*], [set -o posix])])
+])
+
+
+# _AS_CLEANUP
+# -----------
+# Expanded as the last thing before m4sugar cleanup begins.  Macros
+# may append m4sh cleanup hooks to this as appropriate.
+m4_define([_AS_CLEANUP],
+[m4_divert_text([M4SH-SANITIZE], [_AS_DETECT_BETTER_SHELL])])
+
+
+# AS_COPYRIGHT(TEXT)
+# ------------------
+# Emit TEXT, a copyright notice, as a shell comment near the top of the
+# script.  TEXT is evaluated once; to accomplish that, we do not prepend
+# `# ' but `@%:@ '.
+m4_define([AS_COPYRIGHT],
+[m4_divert_text([HEADER-COPYRIGHT],
+[m4_bpatsubst([
+$1], [^], [@%:@ ])])])
+
+
+# _AS_DETECT_EXPAND(VAR, SET)
+# ---------------------------
+# Assign the contents of VAR from the contents of SET, expanded in such
+# a manner that VAR can be passed to _AS_RUN.  In order to make
+# _AS_LINENO_WORKS operate correctly, we must specially handle the
+# first instance of $LINENO within any line being expanded (the first
+# instance is important to tests using the current shell, leaving
+# remaining instances for tests using a candidate shell).  Bash loses
+# track of line numbers if a double quote contains a newline, hence,
+# we must piece-meal the assignment of VAR such that $LINENO expansion
+# occurs in a single line.
+m4_define([_AS_DETECT_EXPAND],
+[$1="m4_bpatsubst(m4_dquote(AS_ESCAPE(_m4_expand(m4_set_contents([$2], [
+])))), [\\\$LINENO\(.*\)$], [";$1=$$1$LINENO;$1=$$1"\1])"])
+
+
+# _AS_DETECT_REQUIRED(TEST)
+# -------------------------
+# Refuse to execute under a shell that does not pass the given TEST.
+# Does not do AS_REQUIRE for the better-shell detection code.
+#
+# M4sh should never require something not required by POSIX, although
+# other clients are free to do so.
+m4_defun([_AS_DETECT_REQUIRED],
+[m4_set_add([_AS_DETECT_REQUIRED_BODY], [$1 || AS_EXIT])])
+
+
+# _AS_DETECT_SUGGESTED(TEST)
+# --------------------------
+# Prefer to execute under a shell that passes the given TEST.
+# Does not do AS_REQUIRE for the better-shell detection code.
+#
+# M4sh should never suggest something not required by POSIX, although
+# other clients are free to do so.
+m4_defun([_AS_DETECT_SUGGESTED],
+[m4_set_add([_AS_DETECT_SUGGESTED_BODY], [$1 || AS_EXIT])])
+
+
+# _AS_DETECT_SUGGESTED_PRUNE(TEST)
+# --------------------------------
+# If TEST is also a required test, remove it from the set of suggested tests.
+m4_define([_AS_DETECT_SUGGESTED_PRUNE],
+[m4_set_contains([_AS_DETECT_REQUIRED_BODY], [$1],
+		 [m4_set_remove([_AS_DETECT_SUGGESTED_BODY], [$1])])])
+
+
+# _AS_DETECT_BETTER_SHELL
+# -----------------------
+# The real workhorse for detecting a shell with the correct
+# features.
+#
+# In previous versions, we prepended /usr/posix/bin to the path, but that
+# caused a regression on OpenServer 6.0.0
+# <http://lists.gnu.org/archive/html/bug-autoconf/2006-06/msg00017.html>
+# and on HP-UX 11.11, see the failure of test 120 in
+# <http://lists.gnu.org/archive/html/bug-autoconf/2006-10/msg00003.html>
+#
+# FIXME: The code should test for the OSF bug described in
+# <http://lists.gnu.org/archive/html/autoconf-patches/2006-03/msg00081.html>.
+#
+# This code is run outside any trap 0 context, hence we can simplify AS_EXIT.
+m4_defun([_AS_DETECT_BETTER_SHELL],
+dnl Remove any tests from suggested that are also required
+[m4_set_map([_AS_DETECT_SUGGESTED_BODY], [_AS_DETECT_SUGGESTED_PRUNE])]dnl
+[m4_pushdef([AS_EXIT], [exit m4_default(]m4_dquote([$][1])[, 1)])]dnl
+[if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="AS_ESCAPE(_m4_expand([_AS_BOURNE_COMPATIBLE]))"
+  _AS_DETECT_EXPAND([as_required], [_AS_DETECT_REQUIRED_BODY])
+  _AS_DETECT_EXPAND([as_suggested], [_AS_DETECT_SUGGESTED_BODY])
+  AS_IF([_AS_RUN(["$as_required"])],
+	[as_have_required=yes],
+	[as_have_required=no])
+  AS_IF([test x$as_have_required = xyes && _AS_RUN(["$as_suggested"])],
+    [],
+    [_AS_PATH_WALK([/bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH],
+      [case $as_dir in @%:@(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     AS_IF([{ test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    _AS_RUN(["$as_required"], ["$as_shell"])],
+		   [CONFIG_SHELL=$as_shell as_have_required=yes
+		   m4_set_empty([_AS_DETECT_SUGGESTED_BODY], [break 2],
+		     [AS_IF([_AS_RUN(["$as_suggested"], ["$as_shell"])],
+			    [break 2])])])
+	   done;;
+       esac],
+      [AS_IF([{ test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      _AS_RUN(["$as_required"], ["$SHELL"])],
+	     [CONFIG_SHELL=$SHELL as_have_required=yes])])
+
+      AS_IF([test "x$CONFIG_SHELL" != x],
+	[# We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in @%:@ ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$[@]"}])
+
+dnl Unfortunately, $as_me isn't available here.
+    AS_IF([test x$as_have_required = xno],
+      [AS_ECHO(["$[]0: This script requires a shell more modern than all"])
+  AS_ECHO(["$[]0: the shells that I found on your system."])
+  if test x${ZSH_VERSION+set} = xset ; then
+    AS_ECHO(["$[]0: In particular, zsh $ZSH_VERSION has bugs and should"])
+    AS_ECHO(["$[]0: be upgraded to zsh 4.3.4 or later."])
+  else
+    AS_ECHO("m4_text_wrap([Please tell ]_m4_defn([m4_PACKAGE_BUGREPORT])
+m4_ifset([AC_PACKAGE_BUGREPORT], [m4_if(_m4_defn([m4_PACKAGE_BUGREPORT]),
+_m4_defn([AC_PACKAGE_BUGREPORT]), [], [and _m4_defn([AC_PACKAGE_BUGREPORT])])])
+[about your system, including any error possibly output before this message.
+Then install a modern shell, or manually run the script under such a
+shell if you do have one.], [$[]0: ], [], [62])")
+  fi
+  AS_EXIT])])
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+_m4_popdef([AS_EXIT])])# _AS_DETECT_BETTER_SHELL
+
+
+# _AS_PREPARE
+# -----------
+# This macro has a very special status.  Normal use of M4sh relies
+# heavily on AS_REQUIRE, so that needed initializations (such as
+# _AS_TEST_PREPARE) are performed on need, not on demand.  But
+# Autoconf is the first client of M4sh, and for two reasons: configure
+# and config.status.  Relying on AS_REQUIRE is of course fine for
+# configure, but fails for config.status (which is created by
+# configure).  So we need a means to force the inclusion of the
+# various _AS_*_PREPARE on top of config.status.  That's basically why
+# there are so many _AS_*_PREPARE below, and that's also why it is
+# important not to forget some: config.status needs them.
+# List any preparations that create shell functions first, then
+# topologically sort the others by their dependencies.
+#
+# Special case: we do not need _AS_LINENO_PREPARE, because the
+# parent will have substituted $LINENO for us when processing its
+# own invocation of _AS_LINENO_PREPARE.
+#
+# Special case: the full definition of _AS_ERROR_PREPARE is not output
+# unless AS_MESSAGE_LOG_FD is non-empty, although the value of
+# AS_MESSAGE_LOG_FD is not relevant.
+m4_defun([_AS_PREPARE],
+[m4_pushdef([AS_REQUIRE])]dnl
+[m4_pushdef([AS_REQUIRE_SHELL_FN], _m4_defn([_AS_REQUIRE_SHELL_FN])
+)]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD], [-1])]dnl
+[_AS_ERROR_PREPARE
+_m4_popdef([AS_MESSAGE_LOG_FD])]dnl
+[_AS_EXIT_PREPARE
+_AS_UNSET_PREPARE
+_AS_VAR_APPEND_PREPARE
+_AS_VAR_ARITH_PREPARE
+
+_AS_EXPR_PREPARE
+_AS_BASENAME_PREPARE
+_AS_DIRNAME_PREPARE
+_AS_ME_PREPARE
+_AS_CR_PREPARE
+_AS_ECHO_N_PREPARE
+_AS_LN_S_PREPARE
+_AS_MKDIR_P_PREPARE
+_AS_TEST_PREPARE
+_AS_TR_CPP_PREPARE
+_AS_TR_SH_PREPARE
+_m4_popdef([AS_REQUIRE], [AS_REQUIRE_SHELL_FN])])
+
+# AS_PREPARE
+# ----------
+# Output all the M4sh possible initialization into the initialization
+# diversion.  We do not use _AS_PREPARE so that the m4_provide symbols for
+# AS_REQUIRE and AS_REQUIRE_SHELL_FN are defined properly, and so that
+# shell functions are placed in M4SH-INIT-FN.
+m4_defun([AS_PREPARE],
+[m4_divert_push([KILL])
+m4_append_uniq([_AS_CLEANUP],
+  [m4_divert_text([M4SH-INIT-FN], [_AS_ERROR_PREPARE[]])])
+AS_REQUIRE([_AS_EXPR_PREPARE])
+AS_REQUIRE([_AS_BASENAME_PREPARE])
+AS_REQUIRE([_AS_DIRNAME_PREPARE])
+AS_REQUIRE([_AS_ME_PREPARE])
+AS_REQUIRE([_AS_CR_PREPARE])
+AS_REQUIRE([_AS_LINENO_PREPARE])
+AS_REQUIRE([_AS_ECHO_N_PREPARE])
+AS_REQUIRE([_AS_EXIT_PREPARE])
+AS_REQUIRE([_AS_LN_S_PREPARE])
+AS_REQUIRE([_AS_MKDIR_P_PREPARE])
+AS_REQUIRE([_AS_TEST_PREPARE])
+AS_REQUIRE([_AS_TR_CPP_PREPARE])
+AS_REQUIRE([_AS_TR_SH_PREPARE])
+AS_REQUIRE([_AS_UNSET_PREPARE])
+AS_REQUIRE([_AS_VAR_APPEND_PREPARE], [], [M4SH-INIT-FN])
+AS_REQUIRE([_AS_VAR_ARITH_PREPARE], [], [M4SH-INIT-FN])
+m4_divert_pop[]])
+
+
+# AS_REQUIRE(NAME-TO-CHECK, [BODY-TO-EXPAND = NAME-TO-CHECK],
+#            [DIVERSION = M4SH-INIT])
+# -----------------------------------------------------------
+# BODY-TO-EXPAND is some initialization which must be expanded in the
+# given diversion when expanded (required or not).  The expansion
+# goes in the named diversion or an earlier one.
+#
+# Since $2 can be quite large, this is factored for faster execution, giving
+# either m4_require([$1], [$2]) or m4_divert_require(desired, [$1], [$2]).
+m4_defun([AS_REQUIRE],
+[m4_define([_m4_divert_desired], [m4_default_quoted([$3], [M4SH-INIT])])]dnl
+[m4_if(m4_eval(_m4_divert_dump - 0 <= _m4_divert(_m4_divert_desired, [-])),
+       1, [m4_require(],
+	  [m4_divert_require(_m4_divert_desired,]) [$1], [$2])])
+
+# _AS_REQUIRE_SHELL_FN(NAME-TO-CHECK, COMMENT, BODY-TO-EXPAND)
+# ------------------------------------------------------------
+# Core of AS_REQUIRE_SHELL_FN, but without diversion support.
+m4_define([_AS_REQUIRE_SHELL_FN], [
+m4_n([$2])$1 ()
+{
+$3
+} @%:@ $1[]])
+
+# AS_REQUIRE_SHELL_FN(NAME-TO-CHECK, COMMENT, BODY-TO-EXPAND,
+#                     [DIVERSION = M4SH-INIT-FN])
+# -----------------------------------------------------------
+# BODY-TO-EXPAND is the body of a shell function to be emitted in the
+# given diversion when expanded (required or not).  Unlike other
+# xx_REQUIRE macros, BODY-TO-EXPAND is mandatory.  If COMMENT is
+# provided (often via AS_FUNCTION_DESCRIBE), it is listed with a
+# newline before the function name.
+m4_define([AS_REQUIRE_SHELL_FN],
+[m4_provide_if([AS_SHELL_FN_$1], [],
+[AS_REQUIRE([AS_SHELL_FN_$1],
+[m4_provide([AS_SHELL_FN_$1])_$0($@)],
+m4_default_quoted([$4], [M4SH-INIT-FN]))])])
+
+
+# _AS_RUN(TEST, [SHELL])
+# ----------------------
+# Run TEST under the current shell (if one parameter is used)
+# or under the given SHELL, protecting it from syntax errors.
+# Set as_run in order to assist _AS_LINENO_WORKS.
+m4_define([_AS_RUN],
+[m4_ifval([$2], [{ $as_echo "$as_bourne_compatible"$1 | as_run=a $2; }],
+		[(eval $1)]) 2>/dev/null])
+
+
+# _AS_SHELL_FN_WORK
+# -----------------
+# This is a spy to detect "in the wild" shells that do not support shell
+# functions correctly.  It is based on the m4sh.at Autotest testcases.
+m4_define([_AS_SHELL_FN_WORK],
+[as_fn_return () { (exit [$]1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+AS_IF([( set x; as_fn_ret_success y && test x = "[$]1" )], [],
+      [exitcode=1; echo positional parameters were not saved.])
+test x$exitcode = x0[]])# _AS_SHELL_FN_WORK
+
+
+# _AS_SHELL_SANITIZE
+# ------------------
+# This is the prolog that is emitted by AS_INIT and AS_INIT_GENERATED;
+# it is executed prior to shell function definitions, hence the
+# temporary redefinition of AS_EXIT.
+m4_defun([_AS_SHELL_SANITIZE],
+[m4_pushdef([AS_EXIT], [exit m4_default(]m4_dquote([$][1])[, 1)])]dnl
+[m4_text_box([M4sh Initialization.])
+
+AS_BOURNE_COMPATIBLE
+_AS_ECHO_PREPARE
+_AS_PATH_SEPARATOR_PREPARE
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $[0] in @%:@((
+  *[[\\/]]* ) as_myself=$[0] ;;
+  *) _AS_PATH_WALK([],
+		   [test -r "$as_dir/$[0]" && as_myself=$as_dir/$[0] && break])
+     ;;
+esac
+# We did not find ourselves; most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$[0]
+fi
+if test ! -f "$as_myself"; then
+  AS_ECHO(["$as_myself: error: cannot find myself; rerun with an absolute file name"]) >&2
+  AS_EXIT
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+_m4_popdef([AS_EXIT])])# _AS_SHELL_SANITIZE
+
+
+# AS_SHELL_SANITIZE
+# -----------------
+# This is only needed for the sake of Libtool, which screws up royally
+# in its usage of M4sh internals.
+m4_define([AS_SHELL_SANITIZE],
+[_AS_SHELL_SANITIZE
+m4_provide_if([AS_INIT], [],
+[m4_provide([AS_INIT])
+_AS_DETECT_REQUIRED([_AS_SHELL_FN_WORK])
+_AS_DETECT_BETTER_SHELL
+_AS_UNSET_PREPARE
+])])
+
+
+## ----------------------------- ##
+## 2. Wrappers around builtins.  ##
+## ----------------------------- ##
+
+# This section is lexicographically sorted.
+
+
+# AS_CASE(WORD, [PATTERN1], [IF-MATCHED1]...[DEFAULT])
+# ----------------------------------------------------
+# Expand into
+# | case WORD in #(
+# |   PATTERN1) IF-MATCHED1 ;; #(
+# |   ...
+# |   *) DEFAULT ;;
+# | esac
+# The shell comments are intentional, to work around people who don't
+# realize the impacts of using insufficient m4 quoting.  This macro
+# always uses : and provides a default case, to work around Solaris
+# /bin/sh bugs regarding the exit status.
+m4_define([_AS_CASE],
+[ [@%:@(]
+  $1[)] :
+    $2 ;;])
+m4_define([_AS_CASE_DEFAULT],
+[ [@%:@(]
+  *[)] :
+    $1 ;;])
+
+m4_defun([AS_CASE],
+[case $1 in[]m4_map_args_pair([_$0], [_$0_DEFAULT],
+   m4_shift($@m4_if(m4_eval([$# & 1]), [1], [,])))
+esac])# AS_CASE
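+
+# For illustration, a minimal sketch; $host_os is assumed to have been set
+# elsewhere (e.g. by AC_CANONICAL_HOST in a configure script):
+#
+#   AS_CASE([$host_os],
+#           [linux*],  [poller=epoll],
+#           [darwin*], [poller=kqueue],
+#           [poller=select])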
+
+
+# _AS_EXIT_PREPARE
+# ----------------
+# Ensure AS_EXIT and AS_SET_STATUS will work.
+#
+# We cannot simply use "exit N" because some shells (zsh and Solaris sh)
+# will not set $? to N while running the code set by "trap 0".
+# Some shells fork even for (exit N), so we use a helper function
+# to set $? prior to the exit.
+# Then there are shells that don't inherit $? correctly into the start of
+# a shell function, so we must always be given an argument.
+# Other shells don't use `$?' as default for `exit', hence just repeating
+# the exit value can only help improve portability.
+m4_defun([_AS_EXIT_PREPARE],
+[AS_REQUIRE_SHELL_FN([as_fn_set_status],
+  [AS_FUNCTION_DESCRIBE([as_fn_set_status], [STATUS],
+    [Set $? to STATUS, without forking.])], [  return $[]1])]dnl
+[AS_REQUIRE_SHELL_FN([as_fn_exit],
+  [AS_FUNCTION_DESCRIBE([as_fn_exit], [STATUS],
+    [Exit the shell with STATUS, even in a "trap 0" or "set -e" context.])],
+[  set +e
+  as_fn_set_status $[1]
+  exit $[1]])])#_AS_EXIT_PREPARE
+
+
+# AS_EXIT([EXIT-CODE = $?])
+# -------------------------
+# Exit, with status set to EXIT-CODE in the way that it's seen
+# within "trap 0", and without interference from "set -e".  If
+# EXIT-CODE is omitted, then use $?.
+m4_defun([AS_EXIT],
+[AS_REQUIRE([_AS_EXIT_PREPARE])[]as_fn_exit m4_ifval([$1], [$1], [$][?])])
+
+
+# AS_FOR(MACRO, SHELL-VAR, [LIST = "$@"], [BODY = :])
+# ---------------------------------------------------
+# Expand to a shell loop that assigns SHELL-VAR to each of the
+# whitespace-separated entries in LIST (or "$@" if LIST is empty),
+# then executes BODY.  BODY may call break to abort the loop, or
+# continue to proceed with the next element of LIST.  Requires that
+# IFS be set to the normal space-tab-newline.  As an optimization,
+# BODY should access MACRO rather than $SHELL-VAR.  Normally, MACRO
+# expands to $SHELL-VAR, but if LIST contains only a single element
+# that needs no additional shell quoting, then MACRO will expand to
+# that element, thus providing a direct value rather than a shell
+# variable indirection.
+#
+# Only use the optimization if LIST can be used without additional
+# shell quoting in either a literal or double-quoted context (that is,
+# we give up on default IFS chars, parameter expansion, command
+# substitution, shell quoting, globs, or quadrigraphs).  Inline the
+# m4_defn for speed.
+m4_defun([AS_FOR],
+[m4_pushdef([$1], m4_if([$3], [], [[$$2]], m4_translit([$3], ]dnl
+m4_dquote(_m4_defn([m4_cr_symbols2]))[[%+=:,./-]), [], [[$3]], [[$$2]]))]dnl
+[for $2[]m4_ifval([$3], [ in $3])
+do :
+  $4
+done[]_m4_popdef([$1])])
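+
+# For illustration, a minimal sketch (the variable names in the list are
+# placeholders):
+#
+#   AS_FOR([AS_Var], [as_var], [CC CFLAGS LDFLAGS],
+#          [AS_ECHO(["inspecting AS_Var"])])
+#
+# Here AS_Var expands to $as_var, since the list holds several elements;
+# with a single literal element it would expand to that element directly.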
+
+
+# AS_IF(TEST1, [IF-TRUE1 = :]...[IF-FALSE = :])
+# ---------------------------------------------
+# Expand into
+# | if TEST1; then
+# |   IF-TRUE1
+# | elif TEST2; then
+# |   IF-TRUE2
+# [...]
+# | else
+# |   IF-FALSE
+# | fi
+# with simplifications if IF-TRUE1 and/or IF-FALSE is empty.
+#
+m4_define([_AS_IF],
+[elif $1; then :
+  $2
+])
+m4_define([_AS_IF_ELSE],
+[m4_ifnblank([$1],
+[else
+  $1
+])])
+
+m4_defun([AS_IF],
+[if $1; then :
+  $2
+m4_map_args_pair([_$0], [_$0_ELSE], m4_shift2($@))]dnl
+[fi[]])# AS_IF
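+
+# For illustration, a minimal sketch; $enable_debug is assumed to have been
+# set elsewhere (e.g. by AC_ARG_ENABLE in a configure script):
+#
+#   AS_IF([test "x$enable_debug" = xyes],
+#         [CFLAGS="$CFLAGS -g -O0"],
+#         [CFLAGS="$CFLAGS -O2"])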
+
+
+# AS_SET_STATUS(STATUS)
+# ---------------------
+# Set the shell status ($?) to STATUS, without forking.
+m4_defun([AS_SET_STATUS],
+[AS_REQUIRE([_AS_EXIT_PREPARE])[]as_fn_set_status $1])
+
+
+# _AS_UNSET_PREPARE
+# -----------------
+# Define $as_unset to execute AS_UNSET, for backwards compatibility
+# with older versions of M4sh.
+m4_defun([_AS_UNSET_PREPARE],
+[AS_FUNCTION_DESCRIBE([as_fn_unset], [VAR], [Portably unset VAR.])
+as_fn_unset ()
+{
+  AS_UNSET([$[1]])
+}
+as_unset=as_fn_unset])
+
+
+# AS_UNSET(VAR)
+# -------------
+# Unset the env VAR, working around shells that do not allow unsetting
+# a variable that is not already set.  You should not unset MAIL and
+# MAILCHECK, as that triggers a bug in Bash 2.01.
+m4_defun([AS_UNSET],
+[{ AS_LITERAL_WORD_IF([$1], [], [eval ])$1=; unset $1;}])
+
+
+
+
+
+
+## ------------------------------------------ ##
+## 3. Error and warnings at the shell level.  ##
+## ------------------------------------------ ##
+
+
+# AS_MESSAGE_FD
+# -------------
+# Must expand to the fd where messages will be sent.  Defaults to 1,
+# although a script may reassign this value and use exec to either
+# copy stdout to the new fd, or open the new fd on /dev/null.
+m4_define([AS_MESSAGE_FD], [1])
+
+# AS_MESSAGE_LOG_FD
+# -----------------
+# Must expand to either the empty string (when no logging is
+# performed), or to the fd of a log file.  Defaults to empty, although
+# a script may reassign this value and use exec to open a log.  When
+# not empty, messages to AS_MESSAGE_FD are duplicated to the log,
+# along with a LINENO reference.
+m4_define([AS_MESSAGE_LOG_FD])
+
+
+# AS_ORIGINAL_STDIN_FD
+# --------------------
+# Must expand to the fd of the script's original stdin.  Defaults to
+# 0, although the script may reassign this value and use exec to
+# shuffle fd's.
+m4_define([AS_ORIGINAL_STDIN_FD], [0])
+
+
+# AS_ESCAPE(STRING, [CHARS = `\"$])
+# ---------------------------------
+# Add backslash escaping to the CHARS in STRING.  In an effort to
+# optimize use of this macro inside double-quoted shell constructs,
+# the behavior is intentionally undefined if CHARS is longer than 4
+# bytes, or contains bytes outside of the set [`\"$].  However,
+# repeated bytes within the set are permissible (AS_ESCAPE([$1], [""])
+# being a common way to be nice to syntax highlighting).
+#
+# Avoid the m4_bpatsubst if there are no interesting characters to escape.
+# _AS_ESCAPE bypasses argument defaulting.
+m4_define([AS_ESCAPE],
+[_$0([$1], m4_if([$2], [], [[`], [\"$]], [m4_substr([$2], [0], [1]), [$2]]))])
+
+# _AS_ESCAPE(STRING, KEY, SET)
+# ----------------------------
+# Backslash-escape all instances of the single byte KEY or up to four
+# bytes in SET occurring in STRING.  Although a character can occur
+# multiple times, optimum efficiency occurs when KEY and SET are
+# distinct, and when SET does not exceed two bytes.  These particular
+# semantics allow for the fewest number of parses of STRING, as well
+# as taking advantage of the optimizations in m4 1.4.13+ when
+# m4_translit is passed SET of size 2 or smaller.
+m4_define([_AS_ESCAPE],
+[m4_if(m4_index(m4_translit([[$1]], [$3], [$2$2$2$2]), [$2]), [-1],
+       [$0_], [m4_bpatsubst])([$1], [[$2$3]], [\\\&])])
+m4_define([_AS_ESCAPE_], [$1])
+
+
+# _AS_QUOTE(STRING)
+# -----------------
+# If there are quoted (via backslash) backquotes, output STRING
+# literally and warn; otherwise, output STRING with ` and " quoted.
+#
+# Compatibility glue between the old AS_MSG suite which did not
+# quote anything, and the modern suite which quotes the quotes.
+# If STRING contains `\\' or `\$', it's modern.
+# If STRING contains `\"' or `\`', it's old.
+# Otherwise it's modern.
+#
+# Profiling shows that m4_index is 5 to 8x faster than m4_bregexp.  The
+# slower implementation used:
+# m4_bmatch([$1],
+#	    [\\[\\$]], [$2],
+#	    [\\[`"]], [$3],
+#	    [$2])
+# The current implementation caters to the common case of no backslashes,
+# to minimize m4_index expansions (hence the nested if).
+m4_define([_AS_QUOTE],
+[m4_cond([m4_index([$1], [\])], [-1], [_AS_QUOTE_MODERN],
+	 [m4_eval(m4_index(m4_translit([[$1]], [$], [\]), [\\]) >= 0)],
+[1], [_AS_QUOTE_MODERN],
+	 [m4_eval(m4_index(m4_translit([[$1]], ["], [`]), [\`]) >= 0)],dnl"
+[1], [_AS_QUOTE_OLD],
+	 [_AS_QUOTE_MODERN])([$1])])
+
+m4_define([_AS_QUOTE_MODERN],
+[_AS_ESCAPE([$1], [`], [""])])
+
+m4_define([_AS_QUOTE_OLD],
+[m4_warn([obsolete],
+   [back quotes and double quotes must not be escaped in: $1])$1])
+
+
+# _AS_ECHO_UNQUOTED(STRING, [FD = AS_MESSAGE_FD])
+# -----------------------------------------------
+# Perform shell expansions on STRING and echo the string to FD.
+m4_define([_AS_ECHO_UNQUOTED],
+[AS_ECHO(["$1"]) >&m4_default([$2], [AS_MESSAGE_FD])])
+
+
+# _AS_ECHO(STRING, [FD = AS_MESSAGE_FD])
+# --------------------------------------
+# Protect STRING from backquote expansion, echo the result to FD.
+m4_define([_AS_ECHO],
+[_AS_ECHO_UNQUOTED([_AS_QUOTE([$1])], [$2])])
+
+
+# _AS_ECHO_LOG(STRING)
+# --------------------
+# Log the string to AS_MESSAGE_LOG_FD.
+m4_defun_init([_AS_ECHO_LOG],
+[AS_REQUIRE([_AS_LINENO_PREPARE])],
+[_AS_ECHO([$as_me:${as_lineno-$LINENO}: $1], AS_MESSAGE_LOG_FD)])
+
+
+# _AS_ECHO_N_PREPARE
+# ------------------
+# Check whether to use -n, \c, or newline-tab to separate
+# checking messages from result messages.
+# Don't try to cache, since the results of this macro are needed to
+# display the checking message.  In addition, caching something used only
+# once is of little interest.
+# Idea borrowed from dist 3.0.  Use `*c*,', not `*c,' because if `\c'
+# failed there is also a newline to match.  Use `xy' because `\c' echoed
+# in a command substitution prints only the first character of the output
+# with ksh version M-11/16/88f on AIX 6.1; it needs to be reset by another
+# backquoted echo.
+m4_defun([_AS_ECHO_N_PREPARE],
+[ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in @%:@(((((
+-n*)
+  case `echo 'xy\c'` in
+  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
+  xy)  ECHO_C='\c';;
+  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
+       ECHO_T='	';;
+  esac;;
+*)
+  ECHO_N='-n';;
+esac
+])# _AS_ECHO_N_PREPARE
+
+
+# _AS_ECHO_N(STRING, [FD = AS_MESSAGE_FD])
+# ----------------------------------------
+# Same as _AS_ECHO, but do not output the trailing newline.
+m4_define([_AS_ECHO_N],
+[AS_ECHO_N(["_AS_QUOTE([$1])"]) >&m4_default([$2], [AS_MESSAGE_FD])])
+
+
+# AS_MESSAGE(STRING, [FD = AS_MESSAGE_FD])
+# ----------------------------------------
+# Output "`basename $0`: STRING" to the open file FD, and if logging
+# is enabled, copy it to the log with a reference to LINENO.
+m4_defun_init([AS_MESSAGE],
+[AS_REQUIRE([_AS_ME_PREPARE])],
+[m4_ifval(AS_MESSAGE_LOG_FD,
+	  [{ _AS_ECHO_LOG([$1])
+_AS_ECHO([$as_me: $1], [$2]);}],
+	  [_AS_ECHO([$as_me: $1], [$2])])[]])
+
+
+# AS_WARN(PROBLEM)
+# ----------------
+# Output "`basename $0`: WARNING: PROBLEM" to stderr.
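+#
+# For instance, a script might warn but keep going (the message text
+# here is illustrative):
+#
+#   AS_WARN([libfoo headers not found; continuing without them])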
+m4_define([AS_WARN],
+[AS_MESSAGE([WARNING: $1], [2])])# AS_WARN
+
+
+# _AS_ERROR_PREPARE
+# -----------------
+# Output the shell function used by AS_ERROR.  This is designed to be
+# expanded during the m4_wrap cleanup.
+#
+# If AS_MESSAGE_LOG_FD is non-empty at the end of the script, then
+# make this function take optional parameters that use LINENO at the
+# points where AS_ERROR was expanded with non-empty AS_MESSAGE_LOG_FD;
+# otherwise, assume the entire script does not do logging.
+m4_define([_AS_ERROR_PREPARE],
+[AS_REQUIRE_SHELL_FN([as_fn_error],
+  [AS_FUNCTION_DESCRIBE([as_fn_error], [STATUS ERROR]m4_ifval(AS_MESSAGE_LOG_FD,
+      [[ [[LINENO LOG_FD]]]]),
+    [Output "`basename @S|@0`: error: ERROR" to stderr.]
+m4_ifval(AS_MESSAGE_LOG_FD,
+    [[If LINENO and LOG_FD are provided, also output the error to LOG_FD,
+      referencing LINENO.]])
+    [Then exit the script with STATUS, using 1 if that was 0.])],
+[  as_status=$[1]; test $as_status -eq 0 && as_status=1
+m4_ifval(AS_MESSAGE_LOG_FD,
+[m4_pushdef([AS_MESSAGE_LOG_FD], [$[4]])dnl
+  if test "$[4]"; then
+    AS_LINENO_PUSH([$[3]])
+    _AS_ECHO_LOG([error: $[2]])
+  fi
+m4_define([AS_MESSAGE_LOG_FD])], [m4_pushdef([AS_MESSAGE_LOG_FD])])dnl
+  AS_MESSAGE([error: $[2]], [2])
+_m4_popdef([AS_MESSAGE_LOG_FD])dnl
+  AS_EXIT([$as_status])])])
+
+# AS_ERROR(ERROR, [EXIT-STATUS = max($?/1)])
+# ------------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr, then exit the
+# script with EXIT-STATUS.
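+#
+# Typical calls look like this (the messages and the explicit exit
+# status are illustrative):
+#
+#   AS_ERROR([cannot find a working C compiler])
+#   AS_ERROR([giving up], [77])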
+m4_defun_init([AS_ERROR],
+[m4_append_uniq([_AS_CLEANUP],
+  [m4_divert_text([M4SH-INIT-FN], [_AS_ERROR_PREPARE[]])])],
+[as_fn_error m4_default([$2], [$?]) "_AS_QUOTE([$1])"m4_ifval(AS_MESSAGE_LOG_FD,
+  [ "$LINENO" AS_MESSAGE_LOG_FD])])
+
+
+# AS_LINENO_PUSH([LINENO])
+# ------------------------
+# If this is the outermost call to AS_LINENO_PUSH, make sure that
+# AS_MESSAGE will print LINENO as the line number.
+m4_defun([AS_LINENO_PUSH],
+[as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack])
+
+
+# AS_LINENO_POP([LINENO])
+# -----------------------
+# If this call balances the outermost call to AS_LINENO_PUSH,
+# AS_MESSAGE will restart printing $LINENO as the line number.
+#
+# No need to use AS_UNSET, since as_lineno is necessarily set.
+m4_defun([AS_LINENO_POP],
+[eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno])
+
+
+
+## -------------------------------------- ##
+## 4. Portable versions of common tools.  ##
+## -------------------------------------- ##
+
+# This section is lexicographically sorted.
+
+
+# AS_BASENAME(FILE-NAME)
+# ----------------------
+# Simulate the command 'basename FILE-NAME'.  Not all systems have basename.
+# Also see the comments for AS_DIRNAME.
+
+m4_defun([_AS_BASENAME_EXPR],
+[$as_expr X/[]$1 : '.*/\([[^/][^/]*]\)/*$' \| \
+	 X[]$1 : 'X\(//\)$' \| \
+	 X[]$1 : 'X\(/\)' \| .])
+
+m4_defun([_AS_BASENAME_SED],
+[AS_ECHO([X/[]$1]) |
+    sed ['/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q']])
+
+m4_defun_init([AS_BASENAME],
+[AS_REQUIRE([_$0_PREPARE])],
+[$as_basename -- $1 ||
+_AS_BASENAME_EXPR([$1]) 2>/dev/null ||
+_AS_BASENAME_SED([$1])])
+
+
+# _AS_BASENAME_PREPARE
+# --------------------
+# Avoid Solaris 9 /usr/ucb/basename, as `basename /' outputs an empty line.
+# Also, traditional basename mishandles --.  Require here _AS_EXPR_PREPARE,
+# to avoid problems when _AS_BASENAME is called from the M4SH-INIT diversion.
+m4_defun([_AS_BASENAME_PREPARE],
+[AS_REQUIRE([_AS_EXPR_PREPARE])]dnl
+[if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+])# _AS_BASENAME_PREPARE
+
+
+# AS_DIRNAME(FILE-NAME)
+# ---------------------
+# Simulate the command 'dirname FILE-NAME'.  Not all systems have dirname.
+# This macro must be usable from inside ` `.
+#
+# Prefer expr to echo|sed, since expr is usually faster and it handles
+# backslashes and newlines correctly.  However, older expr
+# implementations (e.g. SunOS 4 expr and Solaris 8 /usr/ucb/expr) have
+# a silly length limit that causes expr to fail if the matched
+# substring is longer than 120 bytes.  So fall back on echo|sed if
+# expr fails.
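+#
+# Both AS_DIRNAME and AS_BASENAME are normally used inside backquotes,
+# as in this illustrative fragment (assuming $as_myself holds a path):
+#
+#   as_dir=`AS_DIRNAME(["$as_myself"])`
+#   as_file=`AS_BASENAME(["$as_myself"])`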
+m4_defun_init([_AS_DIRNAME_EXPR],
+[AS_REQUIRE([_AS_EXPR_PREPARE])],
+[$as_expr X[]$1 : 'X\(.*[[^/]]\)//*[[^/][^/]]*/*$' \| \
+	 X[]$1 : 'X\(//\)[[^/]]' \| \
+	 X[]$1 : 'X\(//\)$' \| \
+	 X[]$1 : 'X\(/\)' \| .])
+
+m4_defun([_AS_DIRNAME_SED],
+[AS_ECHO([X[]$1]) |
+    sed ['/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q']])
+
+m4_defun_init([AS_DIRNAME],
+[AS_REQUIRE([_$0_PREPARE])],
+[$as_dirname -- $1 ||
+_AS_DIRNAME_EXPR([$1]) 2>/dev/null ||
+_AS_DIRNAME_SED([$1])])
+
+
+# _AS_DIRNAME_PREPARE
+# -------------------
+m4_defun([_AS_DIRNAME_PREPARE],
+[AS_REQUIRE([_AS_EXPR_PREPARE])]dnl
+[if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+  as_dirname=dirname
+else
+  as_dirname=false
+fi
+])# _AS_DIRNAME_PREPARE
+
+
+# AS_ECHO(WORD)
+# -------------
+# Output WORD followed by a newline.  WORD must be a single shell word
+# (typically a quoted string).  The bytes of WORD are output as-is, even
+# if it starts with "-" or contains "\".
+m4_defun_init([AS_ECHO],
+[AS_REQUIRE([_$0_PREPARE])],
+[$as_echo $1])
+
+
+# AS_ECHO_N(WORD)
+# ---------------
+# Like AS_ECHO(WORD), except do not output the trailing newline.
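+#
+# AS_ECHO_N and AS_ECHO pair naturally for "checking..." output, as in
+# this illustrative fragment ($my_result is a hypothetical variable):
+#
+#   AS_ECHO_N(["checking for working foo... "])
+#   AS_ECHO(["$my_result"])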
+m4_defun_init([AS_ECHO_N],
+[AS_REQUIRE([_AS_ECHO_PREPARE])],
+[$as_echo_n $1])
+
+
+# _AS_ECHO_PREPARE
+# ----------------
+# Arrange for $as_echo 'FOO' to echo FOO without escape-interpretation;
+# and similarly for $as_echo_n, which omits the trailing newline.
+# 'FOO' is an optional single argument; a missing FOO is treated as empty.
+m4_defun([_AS_ECHO_PREPARE],
+[[as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$][1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$][1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$][1;
+      case $arg in @%:@(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+]])# _AS_ECHO_PREPARE
+
+
+# AS_TEST_X
+# ---------
+# Check whether a file has executable or search permissions.
+m4_defun_init([AS_TEST_X],
+[AS_REQUIRE([_AS_TEST_PREPARE])],
+[$as_test_x $1[]])# AS_TEST_X
+
+
+# AS_EXECUTABLE_P
+# ---------------
+# Check whether a file is a regular file that has executable permissions.
+m4_defun_init([AS_EXECUTABLE_P],
+[AS_REQUIRE([_AS_TEST_PREPARE])],
+[{ test -f $1 && AS_TEST_X([$1]); }])# AS_EXECUTABLE_P
+
+
+# _AS_EXPR_PREPARE
+# ----------------
+# QNX 4.25 expr computes and issues the right result but exits with failure.
+# Tru64 expr mishandles leading zeros in numeric strings.
+# Detect these flaws.
+m4_defun([_AS_EXPR_PREPARE],
+[if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+])# _AS_EXPR_PREPARE
+
+
+# _AS_ME_PREPARE
+# --------------
+# Define $as_me to the basename of the executable file's name.
+m4_defun([AS_ME_PREPARE], [AS_REQUIRE([_$0])])
+m4_defun([_AS_ME_PREPARE],
+[AS_REQUIRE([_AS_BASENAME_PREPARE])]dnl
+[as_me=`AS_BASENAME("$[0]")`
+])
+
+# _AS_LINENO_WORKS
+# ----------------
+# Succeed if the currently executing shell supports LINENO.
+# This macro does not expand to a single shell command, so be careful
+# when using it.  Surrounding the body of this macro with {} would
+# cause "bash -c '_ASLINENO_WORKS'" to fail (with Bash 2.05, anyway),
+# but that bug is irrelevant to our use of LINENO.  We can't use
+# AS_VAR_ARITH, as this is expanded prior to shell functions.
+#
+# Testing for LINENO support is hard; we use _AS_LINENO_WORKS inside
+# _AS_RUN, which sometimes eval's its argument (pdksh gives false
+# negatives if $LINENO is expanded by eval), and sometimes passes the
+# argument to another shell (if the current shell supports LINENO,
+# then expanding $LINENO prior to the string leads to false
+# positives).  Hence, we perform two tests, and coordinate with
+# _AS_DETECT_EXPAND (which ensures that only the first of two LINENO
+# is expanded in advance) and _AS_RUN (which sets $as_run to 'a' when
+# handing the test to another shell), so that we know which test to
+# trust.
+m4_define([_AS_LINENO_WORKS],
+[  as_lineno_1=$LINENO as_lineno_1a=$LINENO
+  as_lineno_2=$LINENO as_lineno_2a=$LINENO
+  eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
+  test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"'])
+
+
+# _AS_LINENO_PREPARE
+# ------------------
+# If LINENO is not supported by the shell, produce a version of this
+# script where LINENO is hard coded.
+# Comparing LINENO against _oline_ is not a good solution, since in
+# the case of embedded executables (such as config.status within
+# configure) you'd compare LINENO wrt config.status vs. _oline_ wrt
+# configure.
+#
+# AS_ERROR normally uses LINENO if logging, but AS_LINENO_PREPARE uses
+# AS_ERROR.  Besides, if the logging fd is open, we don't want to use
+# $LINENO in the log complaining about broken LINENO.  We break the
+# circular require by changing AS_ERROR and AS_MESSAGE_LOG_FD.
+m4_defun([AS_LINENO_PREPARE], [AS_REQUIRE([_$0])])
+m4_defun([_AS_LINENO_PREPARE],
+[AS_REQUIRE([_AS_CR_PREPARE])]dnl
+[AS_REQUIRE([_AS_ME_PREPARE])]dnl
+[_AS_DETECT_SUGGESTED([_AS_LINENO_WORKS])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[m4_pushdef([AS_ERROR],
+  [{ AS_MESSAGE(]m4_dquote([error: $][1])[, [2]); AS_EXIT([1]); }])]dnl
+dnl Create $as_me.lineno as a copy of $as_myself, but with $LINENO
+dnl uniformly replaced by the line number.  The first 'sed' inserts a
+dnl line-number line after each line using $LINENO; the second 'sed'
+dnl does the real work.  The second script uses 'N' to pair each
+dnl line-number line with the line containing $LINENO, and appends
+dnl trailing '-' during substitution so that $LINENO is not a special
+dnl case at line end.  (Raja R Harinath suggested sed '=', and Paul
+dnl Eggert wrote the scripts with optimization help from Paolo Bonzini).
+[_AS_LINENO_WORKS || {
+[  # Blame Lee E. McMahon (1931-1989) for sed's syntax.  :-)
+  sed -n '
+    p
+    /[$]LINENO/=
+  ' <$as_myself |
+    sed '
+      s/[$]LINENO.*/&-/
+      t lineno
+      b
+      :lineno
+      N
+      :loop
+      s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
+      t loop
+      s/-\n.*//
+    ' >$as_me.lineno &&
+  chmod +x "$as_me.lineno"] ||
+    AS_ERROR([cannot create $as_me.lineno; rerun with a POSIX shell])
+
+  # Don't try to exec as it changes $[0], causing all sorts of problems
+  # (the dirname of $[0] is not the place where we might find the
+  # original and so on.  Autoconf is especially sensitive to this).
+  . "./$as_me.lineno"
+  # Exit status is that of the last command.
+  exit
+}
+_m4_popdef([AS_MESSAGE_LOG_FD], [AS_ERROR])])# _AS_LINENO_PREPARE
+
+
+# _AS_LN_S_PREPARE
+# ----------------
+# Don't use conftest.sym to avoid file name issues on DJGPP, where this
+# would yield conftest.sym.exe for DJGPP < 2.04.  And don't use `conftest'
+# as base name to avoid prohibiting concurrency (e.g., concurrent
+# config.statuses).  On read-only media, assume 'cp -p' and hope we
+# are just running --help anyway.
+m4_defun([_AS_LN_S_PREPARE],
+[rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+  rm -f conf$$.dir/conf$$.file
+else
+  rm -f conf$$.dir
+  mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+  if ln -s conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s='ln -s'
+    # ... but there are two gotchas:
+    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+    # In both cases, we have to default to `cp -p'.
+    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+      as_ln_s='cp -p'
+  elif ln conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s=ln
+  else
+    as_ln_s='cp -p'
+  fi
+else
+  as_ln_s='cp -p'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+])# _AS_LN_S_PREPARE
+
+
+# AS_LN_S(FILE, LINK)
+# -------------------
+# FIXME: Should we add the glue code to handle properly relative symlinks
+# simulated with `ln' or `cp'?
+m4_defun_init([AS_LN_S],
+[AS_REQUIRE([_AS_LN_S_PREPARE])],
+[$as_ln_s $1 $2])
+
+
+# _AS_MKDIR_P
+# -----------
+# Emit code that can be used to emulate `mkdir -p` with plain `mkdir';
+# the code assumes that "$as_dir" contains the directory to create.
+# $as_dir is normalized, so there is no need to worry about using --.
+m4_define([_AS_MKDIR_P],
+[case $as_dir in #(
+  -*) as_dir=./$as_dir;;
+  esac
+  test -d "$as_dir" || eval $as_mkdir_p || {
+    as_dirs=
+    while :; do
+      case $as_dir in #(
+      *\'*) as_qdir=`AS_ECHO(["$as_dir"]) | sed "s/'/'\\\\\\\\''/g"`;; #'(
+      *) as_qdir=$as_dir;;
+      esac
+      as_dirs="'$as_qdir' $as_dirs"
+      as_dir=`AS_DIRNAME("$as_dir")`
+      test -d "$as_dir" && break
+    done
+    test -z "$as_dirs" || eval "mkdir $as_dirs"
+  } || test -d "$as_dir" || AS_ERROR([cannot create directory $as_dir])
+])
+
+# AS_MKDIR_P(DIR)
+# ---------------
+# Emulate `mkdir -p' with plain `mkdir' if needed.
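+#
+# For example (the directory name is illustrative):
+#
+#   AS_MKDIR_P(["$my_outdir/deep/subdir"])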
+m4_defun_init([AS_MKDIR_P],
+[AS_REQUIRE([_$0_PREPARE])],
+[as_dir=$1; as_fn_mkdir_p])# AS_MKDIR_P
+
+
+# _AS_MKDIR_P_PREPARE
+# -------------------
+m4_defun([_AS_MKDIR_P_PREPARE],
+[AS_REQUIRE_SHELL_FN([as_fn_mkdir_p],
+  [AS_FUNCTION_DESCRIBE([as_fn_mkdir_p], [],
+    [Create "$as_dir" as a directory, including parents if necessary.])],
+[
+  _AS_MKDIR_P
+])]dnl
+[if mkdir -p . 2>/dev/null; then
+  as_mkdir_p='mkdir -p "$as_dir"'
+else
+  test -d ./-p && rmdir ./-p
+  as_mkdir_p=false
+fi
+])# _AS_MKDIR_P_PREPARE
+
+
+# _AS_PATH_SEPARATOR_PREPARE
+# --------------------------
+# Compute the path separator.
+m4_defun([_AS_PATH_SEPARATOR_PREPARE],
+[# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+])# _AS_PATH_SEPARATOR_PREPARE
+
+
+# _AS_PATH_WALK([PATH = $PATH], BODY, [IF-NOT-FOUND])
+# ---------------------------------------------------
+# Walk through PATH running BODY for each `as_dir'.  If BODY never does a
+# `break', evaluate IF-NOT-FOUND.
+#
+# Still very private as its interface looks quite bad.
+#
+# `$as_dummy' forces splitting on constant user-supplied paths.
+# POSIX.2 field splitting is done only on the result of word
+# expansions, not on literal text.  This closes a longstanding sh security
+# hole.  Optimize it away when not needed, i.e., if there are no literal
+# path separators.
+m4_defun_init([_AS_PATH_WALK],
+[AS_REQUIRE([_AS_PATH_SEPARATOR_PREPARE])],
+[as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+m4_ifvaln([$3], [as_found=false])dnl
+m4_bmatch([$1], [[:;]],
+[as_dummy="$1"
+for as_dir in $as_dummy],
+[for as_dir in m4_default([$1], [$PATH])])
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  m4_ifvaln([$3], [as_found=:])dnl
+  $2
+  m4_ifvaln([$3], [as_found=false])dnl
+done
+m4_ifvaln([$3], [$as_found || { $3; }])dnl
+IFS=$as_save_IFS
+])
+
+
+# AS_SET_CATFILE(VAR, DIR-NAME, FILE-NAME)
+# ----------------------------------------
+# Set VAR to DIR-NAME/FILE-NAME.
+# Optimize the common case where $2 or $3 is '.'.
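+#
+# For example (the variable and file names are illustrative):
+#
+#   AS_SET_CATFILE([my_path], [$srcdir], [config.h.in])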
+m4_define([AS_SET_CATFILE],
+[case $2 in @%:@((
+.) AS_VAR_SET([$1], [$3]);;
+*)
+  case $3 in @%:@(((
+  .) AS_VAR_SET([$1], [$2]);;
+  [[\\/]]* | ?:[[\\/]]* ) AS_VAR_SET([$1], [$3]);;
+  *) AS_VAR_SET([$1], [$2/$3]);;
+  esac;;
+esac[]])# AS_SET_CATFILE
+
+
+# _AS_TEST_PREPARE
+# ----------------
+# Find out whether `test -x' works.  If not, prepare a substitute
+# that should work well enough for most scripts.
+#
+# Here are some of the problems with the substitute.
+# The 'ls' tests whether the owner, not the current user, can execute/search.
+# The eval means that '*', '?', and '[' cause inadvertent file name globbing
+# after the 'eval', so we jam together as many tokens as we can to minimize
+# the likelihood that the inadvertent globbing will actually do anything.
+# Luckily, this gorp is needed only on really ancient hosts.
+#
+m4_defun([_AS_TEST_PREPARE],
+[if test -x / >/dev/null 2>&1; then
+  as_test_x='test -x'
+else
+  if ls -dL / >/dev/null 2>&1; then
+    as_ls_L_option=L
+  else
+    as_ls_L_option=
+  fi
+  as_test_x='
+    eval sh -c '\''
+      if test -d "$[]1"; then
+	test -d "$[]1/.";
+      else
+	case $[]1 in @%:@(
+	-*)set "./$[]1";;
+	esac;
+	case `ls -ld'$as_ls_L_option' "$[]1" 2>/dev/null` in @%:@((
+	???[[sx]]*):;;*)false;;esac;fi
+    '\'' sh
+  '
+fi
+dnl as_executable_p is present for backward compatibility with Libtool
+dnl 1.5.22, but it should go away at some point.
+as_executable_p=$as_test_x
+])# _AS_TEST_PREPARE
+
+
+
+
+## ------------------ ##
+## 5. Common idioms.  ##
+## ------------------ ##
+
+# This section is lexicographically sorted.
+
+
+# AS_BOX(MESSAGE, [FRAME-CHARACTER = `-'])
+# ----------------------------------------
+# Output MESSAGE, a single line text, framed with FRAME-CHARACTER (which
+# must not be `/').
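+#
+# For example (illustrative messages; the second call uses `=' as the
+# frame character):
+#
+#   AS_BOX([Configuration summary.])
+#   AS_BOX([Running the test suite.], [=])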
+m4_define([AS_BOX],
+[_$0(m4_expand([$1]), [$2])])
+
+m4_define([_AS_BOX],
+[m4_if(m4_index(m4_translit([[$1]], [`\"], [$$$]), [$]),
+  [-1], [$0_LITERAL], [$0_INDIR])($@)])
+
+
+# _AS_BOX_LITERAL(MESSAGE, [FRAME-CHARACTER = `-'])
+# -------------------------------------------------
+m4_define([_AS_BOX_LITERAL],
+[AS_ECHO(["_AS_ESCAPE(m4_dquote(m4_expand([m4_text_box($@)])), [`], [\"$])"])])
+
+
+# _AS_BOX_INDIR(MESSAGE, [FRAME-CHARACTER = `-'])
+# -----------------------------------------------
+m4_define([_AS_BOX_INDIR],
+[sed 'h;s/./m4_default([$2], [-])/g;s/^.../@%:@@%:@ /;s/...$/ @%:@@%:@/;p;x;p;x' <<_ASBOX
+@%:@@%:@ $1 @%:@@%:@
+_ASBOX])
+
+
+# _AS_CLEAN_DIR(DIR)
+# ------------------
+# Remove all contents from within DIR, including any unwritable
+# subdirectories, but leave DIR itself untouched.
+m4_define([_AS_CLEAN_DIR],
+[if test -d $1; then
+  find $1 -type d ! -perm -700 -exec chmod u+rwx {} \;
+  rm -fr $1/* $1/.[[!.]] $1/.??*
+fi])
+
+
+# AS_FUNCTION_DESCRIBE(NAME, [ARGS], DESCRIPTION, [WRAP-COLUMN = 79])
+# -------------------------------------------------------------------
+# Output a shell comment describing NAME and its arguments ARGS, then
+# a separator line, then the DESCRIPTION wrapped at a decimal
+# WRAP-COLUMN.  The output resembles:
+#  # NAME ARGS
+#  # ---------
+#  # Wrapped DESCRIPTION text
+# NAME and ARGS are expanded, while DESCRIPTION is treated as a
+# whitespace-separated list of strings that are not expanded.
+m4_define([AS_FUNCTION_DESCRIBE],
+[@%:@ $1[]m4_ifval([$2], [ $2])
+@%:@ m4_translit(m4_format([%*s],
+	   m4_decr(m4_qlen(_m4_expand([$1[]m4_ifval([$2], [ $2])
+]))), []), [ ], [-])
+m4_text_wrap([$3], [@%:@ ], [], [$4])])
+
+
+# AS_HELP_STRING(LHS, RHS, [INDENT-COLUMN = 26], [WRAP-COLUMN = 79])
+# ------------------------------------------------------------------
+#
+# Format a help string so that it looks pretty when the user executes
+# "script --help".  This macro takes up to four arguments, a
+# "left hand side" (LHS), a "right hand side" (RHS), a decimal
+# INDENT-COLUMN which is the column where wrapped lines should begin
+# (the default of 26 is recommended), and a decimal WRAP-COLUMN which is
+# the column where lines should wrap (the default of 79 is recommended).
+# LHS is expanded, RHS is not.
+#
+# For backwards compatibility not documented in the manual, INDENT-COLUMN
+# can also be specified as a string of white spaces, whose width
+# determines the indentation column.  Using TABs in INDENT-COLUMN is not
+# recommended, since the screen width of a TAB is not computed.
+#
+# The resulting string is suitable for use in other macros that require
+# a help string (e.g. AC_ARG_WITH).
+#
+# Here is the sample string from the Autoconf manual (Node: External
+# Software) which shows the proper spacing for help strings.
+#
+#    --with-readline         support fancy command line editing
+#  ^ ^                       ^
+#  | |                       |
+#  | column 2                column 26
+#  |
+#  column 0
+#
+# A help string is made up of a "left hand side" (LHS) and a "right
+# hand side" (RHS).  In the example above, the LHS is
+# "--with-readline", while the RHS is "support fancy command line
+# editing".
+#
+# If the LHS contains more than (INDENT-COLUMN - 3) characters, then the
+# LHS is terminated with a newline so that the RHS starts on a line of its
+# own beginning at INDENT-COLUMN.  In the default case, this corresponds to an
+# LHS with more than 23 characters.
+#
+# Therefore, in the example, if the LHS were instead
+# "--with-readline-blah-blah-blah", then the AS_HELP_STRING macro would
+# expand into:
+#
+#
+#    --with-readline-blah-blah-blah
+#  ^ ^                       support fancy command line editing
+#  | |                       ^
+#  | column 2                |
+#  column 0                  column 26
+#
+#
+# m4_text_wrap hacks^Wworks around the fact that m4_format does not
+# know quadrigraphs.
+#
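+# As an illustration, the manual's example above would be produced by
+# a call such as the following, typically inside AC_ARG_WITH:
+#
+#   AS_HELP_STRING([--with-readline],
+#                  [support fancy command line editing])
+#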
+m4_define([AS_HELP_STRING],
+[m4_text_wrap([$2], m4_cond([[$3]], [], [                          ],
+			    [m4_eval([$3]+0)], [0], [[$3]],
+			    [m4_format([[%*s]], [$3], [])]),
+	      m4_expand([  $1 ]), [$4])])# AS_HELP_STRING
+
+
+# AS_IDENTIFIER_IF(EXPRESSION, IF-IDENT, IF-NOT-IDENT)
+# ----------------------------------------------------
+# If EXPRESSION serves as an identifier (i.e., after removal of @&t@, it
+# matches the regex `^[a-zA-Z_][a-zA-Z_0-9]*$'), execute IF-IDENT,
+# otherwise IF-NOT-IDENT.
+#
+# This is generally faster than the alternative:
+#   m4_bmatch(m4_bpatsubst([[$1]], [@&t@]), ^m4_defn([m4_re_word])$,
+#             [$2], [$3])
+#
+# Rather than expand m4_defn every time AS_IDENTIFIER_IF is expanded, we
+# inline its expansion up front.  Only use a regular expression if we
+# detect a potential quadrigraph.
+#
+# First, check if the entire string matches m4_cr_symbol2.  Only then do
+# we worry if the first character also matches m4_cr_symbol1 (ie. does not
+# match m4_cr_digit).
+m4_define([AS_IDENTIFIER_IF],
+[m4_if(_$0(m4_if(m4_index([$1], [@]), [-1],
+  [[$1]], [m4_bpatsubst([[$1]], [@&t@])])), [-], [$2], [$3])])
+
+m4_define([_AS_IDENTIFIER_IF],
+[m4_cond([[$1]], [], [],
+	 [m4_eval(m4_len(m4_translit([[$1]], ]]dnl
+m4_dquote(m4_dquote(m4_defn([m4_cr_symbols2])))[[)) > 0)], [1], [],
+	 [m4_len(m4_translit(m4_format([[%.1s]], [$1]), ]]dnl
+m4_dquote(m4_dquote(m4_defn([m4_cr_symbols1])))[[))], [0], [-])])
+
+
+# AS_LITERAL_IF(EXPRESSION, IF-LITERAL, IF-NOT-LITERAL,
+#               [IF-SIMPLE-REF = IF-NOT-LITERAL])
+# -----------------------------------------------------
+# If EXPRESSION has no shell indirections ($var or `expr`), expand
+# IF-LITERAL, else IF-NOT-LITERAL.  In some cases, IF-NOT-LITERAL
+# must be complex to safely deal with ``, while a simpler
+# expression IF-SIMPLE-REF can be used if the indirection
+# involves only shell variable expansion (as in ${varname}).
+#
+# EXPRESSION is treated as a literal if it results in the same
+# interpretation whether it is unquoted or contained within double
+# quotes, with the exception that whitespace is ignored (on the
+# assumption that it will be flattened to _).  Therefore, neither `\$'
+# nor `a''b' is a literal, since both backslash and single quotes have
+# different quoting behavior in the two contexts; and `a*' is not a
+# literal, because it has different globbing.  Note, however, that
+# while `${a+b}' is neither a literal nor a simple ref, `a+b' is a
+# literal.  This macro is an *approximation*: it is possible that
+# there are some EXPRESSIONs which the shell would treat as literals,
+# but which this macro does not recognize.
+#
+# Why do we reject EXPRESSION expanding with `[' or `]' as a literal?
+# Because AS_TR_SH is MUCH faster if it can use m4_translit on literals
+# instead of m4_bpatsubst; but m4_translit is much tougher to do safely
+# if `[' is translated.  That, and file globbing matters.
+#
+# Note that the quadrigraph @S|@ can result in non-literals, but outright
+# rejecting all @ would make AC_INIT complain on its bug report address.
+#
+# We used to use m4_bmatch(m4_quote($1), [[`$]], [$3], [$2]), but
+# profiling shows that it is faster to use m4_translit.
+#
+# Because the translit is stripping quotes, it must also neutralize
+# anything that might be in a macro name, as well as comments, commas,
+# or unbalanced parentheses.  Valid shell variable characters and
+# unambiguous literal characters are deleted (`a.b'), and remaining
+# characters are normalized into `$' if they can form simple refs
+# (${a}), `+' if they can potentially form literals (a+b), ``' if they
+# can interfere with m4 parsing, or left alone otherwise.  If both `$'
+# and `+' are left, it is treated as a complex reference (${a+b}),
+# even though it could technically be a simple reference (${a}+b).
+# _AS_LITERAL_IF_ only has to check for an empty string after removing
+# one of the two normalized characters.
+#
+# Rather than expand m4_defn every time AS_LITERAL_IF is expanded, we
+# inline its expansion up front.  _AS_LITERAL_IF expands to the name
+# of a macro that takes three arguments: IF-SIMPLE-REF,
+# IF-NOT-LITERAL, IF-LITERAL.  It also takes an optional argument of
+# any additional characters to allow as literals (useful for AS_TR_SH
+# and AS_TR_CPP to perform inline conversion of whitespace to _).  The
+# order of the arguments allows reuse of m4_default.
+m4_define([AS_LITERAL_IF],
+[_$0(m4_expand([$1]), [	 ][
+])([$4], [$3], [$2])])
+
+m4_define([_AS_LITERAL_IF],
+[m4_if(m4_index([$1], [@S|@]), [-1], [$0_(m4_translit([$1],
+  [-:%/@{}[]#(),.$2]]]m4_dquote(m4_dquote(m4_defn([m4_cr_symbols2])))[[,
+  [+++++$$`````]))], [$0_NO])])
+
+m4_define([_AS_LITERAL_IF_],
+[m4_if(m4_translit([$1], [+]), [], [$0YES],
+       m4_translit([$1], [$]), [], [m4_default], [$0NO])])
+
+m4_define([_AS_LITERAL_IF_YES], [$3])
+m4_define([_AS_LITERAL_IF_NO], [$2])
+
+# AS_LITERAL_WORD_IF(EXPRESSION, IF-LITERAL, IF-NOT-LITERAL,
+#                    [IF-SIMPLE-REF = IF-NOT-LITERAL])
+# ----------------------------------------------------------
+# Like AS_LITERAL_IF, except that spaces and tabs in EXPRESSION
+# are treated as non-literal.
+m4_define([AS_LITERAL_WORD_IF],
+[_AS_LITERAL_IF(m4_expand([$1]))([$4], [$3], [$2])])
+
+# AS_LITERAL_HEREDOC_IF(EXPRESSION, IF-LITERAL, IF-NOT-LITERAL)
+# -------------------------------------------------------------
+# Like AS_LITERAL_IF, except that a string is considered literal
+# if it results in the same output in both quoted and unquoted
+# here-documents.
+m4_define([AS_LITERAL_HEREDOC_IF],
+[_$0(m4_expand([$1]))([$2], [$3])])
+
+m4_define([_AS_LITERAL_HEREDOC_IF],
+[m4_if(m4_index([$1], [@S|@]), [-1],
+  [m4_if(m4_index(m4_translit([[$1]], [\`], [$]), [$]), [-1],
+    [$0_YES], [$0_NO])],
+  [$0_NO])])
+
+m4_define([_AS_LITERAL_HEREDOC_IF_YES], [$1])
+m4_define([_AS_LITERAL_HEREDOC_IF_NO], [$2])
+
+
+# AS_TMPDIR(PREFIX, [DIRECTORY = $TMPDIR [= /tmp]])
+# -------------------------------------------------
+# Create, as safely as possible, a temporary directory in DIRECTORY
+# whose name is derived from PREFIX (which should be 2-4 chars at most).
+#
+# Even though $tmp does not fit our normal naming scheme of $as_*,
+# it is a documented part of the public API and must not be changed.
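+#
+# For example (the prefix and the cleanup are illustrative):
+#
+#   AS_TMPDIR([cs])
+#   # ... create files under "$tmp" ...
+#   rm -fr "$tmp"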
+m4_define([AS_TMPDIR],
+[# Create a (secure) tmp directory for tmp files.
+m4_if([$2], [], [: "${TMPDIR:=/tmp}"])
+{
+  tmp=`(umask 077 && mktemp -d "m4_default([$2],
+    [$TMPDIR])/$1XXXXXX") 2>/dev/null` &&
+  test -d "$tmp"
+}  ||
+{
+  tmp=m4_default([$2], [$TMPDIR])/$1$$-$RANDOM
+  (umask 077 && mkdir "$tmp")
+} || AS_ERROR([cannot create a temporary directory in m4_default([$2],
+	      [$TMPDIR])])])# AS_TMPDIR
+
+
+# AS_UNAME
+# --------
+# Try to describe this machine.  Meant for logs.
+m4_define([AS_UNAME],
+[{
+cat <<_ASUNAME
+m4_text_box([Platform.])
+
+hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null     || echo unknown`
+
+/bin/arch              = `(/bin/arch) 2>/dev/null              || echo unknown`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null       || echo unknown`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
+/usr/bin/hostinfo      = `(/usr/bin/hostinfo) 2>/dev/null      || echo unknown`
+/bin/machine           = `(/bin/machine) 2>/dev/null           || echo unknown`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null       || echo unknown`
+/bin/universe          = `(/bin/universe) 2>/dev/null          || echo unknown`
+
+_ASUNAME
+
+_AS_PATH_WALK([$PATH], [AS_ECHO(["PATH: $as_dir"])])
+}])
+
+
+# _AS_VERSION_COMPARE_PREPARE
+# ---------------------------
+# Output variables for comparing version numbers.
+m4_defun([_AS_VERSION_COMPARE_PREPARE],
+[[as_awk_strverscmp='
+  # Use only awk features that work with 7th edition Unix awk (1978).
+  # My, what an old awk you have, Mr. Solaris!
+  END {
+    while (length(v1) && length(v2)) {
+      # Set d1 to be the next thing to compare from v1, and likewise for d2.
+      # Normally this is a single character, but if v1 and v2 contain digits,
+      # compare them as integers and fractions as strverscmp does.
+      if (v1 ~ /^[0-9]/ && v2 ~ /^[0-9]/) {
+	# Split v1 and v2 into their leading digit string components d1 and d2,
+	# and advance v1 and v2 past the leading digit strings.
+	for (len1 = 1; substr(v1, len1 + 1) ~ /^[0-9]/; len1++) continue
+	for (len2 = 1; substr(v2, len2 + 1) ~ /^[0-9]/; len2++) continue
+	d1 = substr(v1, 1, len1); v1 = substr(v1, len1 + 1)
+	d2 = substr(v2, 1, len2); v2 = substr(v2, len2 + 1)
+	if (d1 ~ /^0/) {
+	  if (d2 ~ /^0/) {
+	    # Compare two fractions.
+	    while (d1 ~ /^0/ && d2 ~ /^0/) {
+	      d1 = substr(d1, 2); len1--
+	      d2 = substr(d2, 2); len2--
+	    }
+	    if (len1 != len2 && ! (len1 && len2 && substr(d1, 1, 1) == substr(d2, 1, 1))) {
+	      # The two components differ in length, and the common prefix
+	      # contains only leading zeros.  Consider the longer to be less.
+	      d1 = -len1
+	      d2 = -len2
+	    } else {
+	      # Otherwise, compare as strings.
+	      d1 = "x" d1
+	      d2 = "x" d2
+	    }
+	  } else {
+	    # A fraction is less than an integer.
+	    exit 1
+	  }
+	} else {
+	  if (d2 ~ /^0/) {
+	    # An integer is greater than a fraction.
+	    exit 2
+	  } else {
+	    # Compare two integers.
+	    d1 += 0
+	    d2 += 0
+	  }
+	}
+      } else {
+	# The normal case, without worrying about digits.
+	d1 = substr(v1, 1, 1); v1 = substr(v1, 2)
+	d2 = substr(v2, 1, 1); v2 = substr(v2, 2)
+      }
+      if (d1 < d2) exit 1
+      if (d1 > d2) exit 2
+    }
+    # Beware Solaris /usr/xpg4/bin/awk (at least through Solaris 10),
+    # which mishandles some comparisons of empty strings to integers.
+    if (length(v2)) exit 1
+    if (length(v1)) exit 2
+  }
+']])# _AS_VERSION_COMPARE_PREPARE
+
+
+# AS_VERSION_COMPARE(VERSION-1, VERSION-2,
+#                    [ACTION-IF-LESS], [ACTION-IF-EQUAL], [ACTION-IF-GREATER])
+# ----------------------------------------------------------------------------
+# Compare two strings possibly containing shell variables as version strings.
+#
+# This usage is portable even to ancient awk,
+# so don't worry about finding a "nice" awk version.
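+#
+# For example ($my_tool_version is a hypothetical variable):
+#
+#   AS_VERSION_COMPARE([$my_tool_version], [2.64],
+#     [AS_ERROR([version 2.64 or newer is required])])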
+m4_defun_init([AS_VERSION_COMPARE],
+[AS_REQUIRE([_$0_PREPARE])],
+[as_arg_v1=$1
+as_arg_v2=$2
+awk "$as_awk_strverscmp" v1="$as_arg_v1" v2="$as_arg_v2" /dev/null
+AS_CASE([$?],
+	[1], [$3],
+	[0], [$4],
+	[2], [$5])])# AS_VERSION_COMPARE
+
+
+
+## --------------------------------------- ##
+## 6. Common m4/sh character translation.  ##
+## --------------------------------------- ##
+
+# The point of this section is to provide high level macros comparable
+# to m4's `translit' primitive, but m4/sh polymorphic.
+# Transliteration of literal strings should be handled by m4, while
+# shell variables' content will be translated at runtime (tr or sed).
+
+
+# _AS_CR_PREPARE
+# --------------
+# Output variables defining common character ranges.
+# See m4_cr_letters etc.
+m4_defun([_AS_CR_PREPARE],
+[# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+])
+
+
+# _AS_TR_SH_PREPARE
+# -----------------
+m4_defun([_AS_TR_SH_PREPARE],
+[AS_REQUIRE([_AS_CR_PREPARE])]dnl
+[# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[[^_$as_cr_alnum]]%_%g'"
+])
+
+
+# AS_TR_SH(EXPRESSION)
+# --------------------
+# Transform EXPRESSION into a valid shell variable name.
+# sh/m4 polymorphic.
+# Be sure to update the definition of `$as_tr_sh' if you change this.
+#
+# AS_LITERAL_IF guarantees that a literal does not have any nested quotes,
+# once $1 is expanded.  m4_translit silently uses only the first occurrence
+# of a character that appears multiple times in argument 2, since we know
+# that m4_cr_not_symbols2 also contains [ and ].  m4_translit also silently
+# ignores characters in argument 3 that do not match argument 2; we use this
+# fact to skip worrying about the length of m4_cr_not_symbols2.
+#
+# For speed, we inline the literal definitions that can be computed up front.
+m4_defun_init([AS_TR_SH],
+[AS_REQUIRE([_$0_PREPARE])],
+[_$0(m4_expand([$1]))])
+
+m4_define([_AS_TR_SH],
+[_AS_LITERAL_IF([$1], [*][	 ][
+])([], [$0_INDIR], [$0_LITERAL])([$1])])
+
+m4_define([_AS_TR_SH_LITERAL],
+[m4_translit([[$1]],
+  [*+[]]]m4_dquote(m4_defn([m4_cr_not_symbols2]))[,
+  [pp[]]]m4_dquote(m4_for(,1,255,,[[_]]))[)])
+
+m4_define([_AS_TR_SH_INDIR],
+[`AS_ECHO(["_AS_ESCAPE([[$1]], [`], [\])"]) | $as_tr_sh`])
+
+
+# _AS_TR_CPP_PREPARE
+# ------------------
+m4_defun([_AS_TR_CPP_PREPARE],
+[AS_REQUIRE([_AS_CR_PREPARE])]dnl
+[# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[[^_$as_cr_alnum]]%_%g'"
+])
+
+
+# AS_TR_CPP(EXPRESSION)
+# ---------------------
+# Map EXPRESSION to an upper case string which is valid as rhs for a
+# `#define'.  sh/m4 polymorphic.  Be sure to update the definition
+# of `$as_tr_cpp' if you change this.
+#
+# See implementation comments in AS_TR_SH.
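+#
+# For instance (hypothetical literal inputs):
+#
+#   AS_TR_SH([with-foo.bar])    => with_foo_bar
+#   AS_TR_CPP([have-foo.bar])   => HAVE_FOO_BAR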
+m4_defun_init([AS_TR_CPP],
+[AS_REQUIRE([_$0_PREPARE])],
+[_$0(m4_expand([$1]))])
+
+m4_define([_AS_TR_CPP],
+[_AS_LITERAL_IF([$1], [*][	 ][
+])([], [$0_INDIR], [$0_LITERAL])([$1])])
+
+m4_define([_AS_TR_CPP_LITERAL],
+[m4_translit([[$1]],
+  [*[]]]m4_dquote(m4_defn([m4_cr_letters])m4_defn([m4_cr_not_symbols2]))[,
+  [P[]]]m4_dquote(m4_defn([m4_cr_LETTERS])m4_for(,1,255,,[[_]]))[)])
+
+m4_define([_AS_TR_CPP_INDIR],
+[`AS_ECHO(["_AS_ESCAPE([[$1]], [`], [\])"]) | $as_tr_cpp`])
+
+
+# _AS_TR_PREPARE
+# --------------
+m4_defun([_AS_TR_PREPARE],
+[AS_REQUIRE([_AS_TR_SH_PREPARE])AS_REQUIRE([_AS_TR_CPP_PREPARE])])
+
+
+
+
+## ------------------------------------------------------ ##
+## 7. Common m4/sh handling of variables (indirections).  ##
+## ------------------------------------------------------ ##
+
+
+# The purpose of this section is to provide a uniform API for
+# reading/setting sh variables with or without indirection.
+# Typically, one can write
+#   AS_VAR_SET(var, val)
+# or
+#   AS_VAR_SET(as_$var, val)
+# and expect the right thing to happen.  In the descriptions below,
+# a literal name matches the regex [a-zA-Z_][a-zA-Z0-9_]*, an
+# indirect name is a shell expression that produces a literal name
+# when passed through eval, and a polymorphic name is either type.
+
+
+# _AS_VAR_APPEND_PREPARE
+# ----------------------
+# Define as_fn_append to the optimum definition for the current
+# shell (bash and zsh provide the += assignment operator to avoid
+# quadratic append growth over repeated appends).
+m4_defun([_AS_VAR_APPEND_PREPARE],
+[AS_FUNCTION_DESCRIBE([as_fn_append], [VAR VALUE],
+[Append the text in VALUE to the end of the definition contained in
+VAR.  Take advantage of any shell optimizations that allow amortized
+linear growth over repeated appends, instead of the typical quadratic
+growth present in naive implementations.])
+AS_IF([_AS_RUN(["AS_ESCAPE(m4_quote(_AS_VAR_APPEND_WORKS))"])],
+[eval 'as_fn_append ()
+  {
+    eval $[]1+=\$[]2
+  }'],
+[as_fn_append ()
+  {
+    eval $[]1=\$$[]1\$[]2
+  }]) # as_fn_append
+])
+
+# _AS_VAR_APPEND_WORKS
+# --------------------
+# Output a shell test to discover whether += works.
+m4_define([_AS_VAR_APPEND_WORKS],
+[as_var=1; as_var+=2; test x$as_var = x12])
+
+# AS_VAR_APPEND(VAR, VALUE)
+# -------------------------
+# Append the shell expansion of VALUE to the end of the existing
+# contents of the polymorphic shell variable VAR, taking advantage of
+# any shell optimizations that allow repeated appends to result in
+# amortized linear scaling rather than quadratic behavior.  This macro
+# is not worth the overhead unless the expected final size of the
+# contents of VAR outweigh the typical VALUE size of repeated appends.
+# Note that unlike AS_VAR_SET, VALUE must be properly quoted to avoid
+# field splitting and file name expansion.
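+#
+# For example (the variable name and flags are illustrative; note the
+# quoting of VALUE):
+#
+#   AS_VAR_APPEND([my_cppflags], [" -I$srcdir/include"])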
+m4_defun_init([AS_VAR_APPEND],
+[AS_REQUIRE([_AS_VAR_APPEND_PREPARE], [], [M4SH-INIT-FN])],
+[as_fn_append $1 $2])
+
+
+# _AS_VAR_ARITH_PREPARE
+# ---------------------
+# Define as_fn_arith to the optimum definition for the current
+# shell (using POSIX $(()) where supported).
+m4_defun([_AS_VAR_ARITH_PREPARE],
+[AS_FUNCTION_DESCRIBE([as_fn_arith], [ARG...],
+[Perform arithmetic evaluation on the ARGs, and store the result in
+the global $as_val.  Take advantage of shells that can avoid forks.
+The arguments must be portable across $(()) and expr.])
+AS_IF([_AS_RUN(["AS_ESCAPE(m4_quote(_AS_VAR_ARITH_WORKS))"])],
+[eval 'as_fn_arith ()
+  {
+    as_val=$(( $[]* ))
+  }'],
+[as_fn_arith ()
+  {
+    as_val=`expr "$[]@" || test $? -eq 1`
+  }]) # as_fn_arith
+])
+
+# _AS_VAR_ARITH_WORKS
+# -------------------
+# Output a shell test to discover whether $(()) works.
+m4_define([_AS_VAR_ARITH_WORKS],
+[test $(( 1 + 1 )) = 2])
+
+# AS_VAR_ARITH(VAR, EXPR)
+# -----------------------
+# Perform the arithmetic evaluation of the arguments in EXPR, and set
+# contents of the polymorphic shell variable VAR to the result, taking
+# advantage of any shell optimizations that perform arithmetic without
+# forks.  Note that numbers occurring within EXPR must be written in
+# decimal, and without leading zeroes; variables containing numbers
+# must be expanded prior to arithmetic evaluation; the first argument
+# must not be a negative number; there is no portable equality
+# operator; and operators must be given as separate arguments and
+# properly quoted.
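+#
+# For example ($my_count is a hypothetical variable already holding a
+# decimal number):
+#
+#   AS_VAR_ARITH([my_count], [$my_count + 1])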
+m4_defun_init([AS_VAR_ARITH],
+[_AS_DETECT_SUGGESTED([_AS_VAR_ARITH_WORKS])]dnl
+[AS_REQUIRE([_AS_VAR_ARITH_PREPARE], [], [M4SH-INIT-FN])],
+[as_fn_arith $2 && AS_VAR_SET([$1], [$as_val])])
+
+
+# AS_VAR_COPY(DEST, SOURCE)
+# -------------------------
+# Set the polymorphic shell variable DEST to the contents of the polymorphic
+# shell variable SOURCE.
+m4_define([AS_VAR_COPY],
+[AS_LITERAL_WORD_IF([$1[]$2], [$1=$$2], [eval $1=\$$2])])
+
+
+# AS_VAR_GET(VARIABLE)
+# --------------------
+# Get the value of the shell VARIABLE.
+# Evaluates to $VARIABLE if there is no indirection in VARIABLE,
+# else to the appropriate `eval' sequence.
+# This macro is deprecated because it sometimes mishandles trailing newlines;
+# use AS_VAR_COPY instead.
+m4_define([AS_VAR_GET],
+[AS_LITERAL_WORD_IF([$1],
+	       [$$1],
+  [`eval 'as_val=${'_AS_ESCAPE([[$1]], [`], [\])'};AS_ECHO(["$as_val"])'`])])
+
+
+# AS_VAR_IF(VARIABLE, VALUE, IF-TRUE, IF-FALSE)
+# ---------------------------------------------
+# Implement a shell `if test $VARIABLE = VALUE; then-else'.
+# Polymorphic, and avoids sh expansion error upon interrupt or term signal.
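+#
+# For example (`enable_debug' stands in for a real variable):
+#
+#   AS_VAR_IF([enable_debug], [yes],
+#     [AS_ECHO(["debugging enabled"])],
+#     [AS_ECHO(["debugging disabled"])])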
+m4_define([AS_VAR_IF],
+[AS_LITERAL_WORD_IF([$1],
+  [AS_IF(m4_ifval([$2], [[test "x$$1" = x[]$2]], [[${$1:+false} :]])],
+  [AS_VAR_COPY([as_val], [$1])
+   AS_IF(m4_ifval([$2], [[test "x$as_val" = x[]$2]], [[${as_val:+false} :]])],
+  [AS_IF(m4_ifval([$2],
+    [[eval test \"x\$"$1"\" = x"_AS_ESCAPE([$2], [`], [\"$])"]],
+    [[eval \${$1:+false} :]])]),
+[$3], [$4])])
+
+
+# AS_VAR_PUSHDEF and AS_VAR_POPDEF
+# --------------------------------
+#
+
+# Sometimes we may have to handle literals (e.g. `stdlib.h'), while at
+# other moments, the same code may have to get the value from a
+# variable (e.g., `ac_header').  To have a uniform handling of both
+# cases, when a new value is about to be processed, declare a local
+# variable, e.g.:
+#
+#   AS_VAR_PUSHDEF([header], [ac_cv_header_$1])
+#
+# and then in the body of the macro, use `header' as is.  It is of
+# first importance to use `AS_VAR_*' to access this variable.
+#
+# If the value `$1' was a literal (e.g. `stdlib.h'), then `header' is
+# in fact the value `ac_cv_header_stdlib_h'.  If `$1' was indirect,
+# then `header's value in m4 is in fact `$as_header', the shell
+# variable that holds all of the magic to get the expansion right.
+#
+# At the end of the block, free the variable with
+#
+#   AS_VAR_POPDEF([header])
+
+
+# AS_VAR_POPDEF(VARNAME)
+# ----------------------
+# Free the shell variable accessor VARNAME.  To be dnl'ed.
+m4_define([AS_VAR_POPDEF],
+[m4_popdef([$1])])
+
+
+# AS_VAR_PUSHDEF(VARNAME, VALUE)
+# ------------------------------
+# Define the m4 macro VARNAME to an accessor to the shell variable
+# named VALUE.  VALUE does not need to be a valid shell variable name:
+# the transliteration is handled here.  To be dnl'ed.
+#
+# AS_TR_SH attempts to play with diversions if _AS_TR_SH_PREPARE has
+# not been expanded.  However, users are expected to do subsequent
+# calls that trigger AS_LITERAL_IF([VARNAME]), and that macro performs
+# expansion inside an argument collection context, where diversions
+# don't work.  Therefore, we must require the preparation ourselves.
+m4_defun_init([AS_VAR_PUSHDEF],
+[AS_REQUIRE([_AS_TR_SH_PREPARE])],
+[_$0([$1], m4_expand([$2]))])
+
+m4_define([_AS_VAR_PUSHDEF],
+[_AS_LITERAL_IF([$2], [	 ][
+])([], [as_$1=_AS_TR_SH_INDIR([$2])
+m4_pushdef([$1], [$as_[$1]])],
+[m4_pushdef([$1], [_AS_TR_SH_LITERAL([$2])])])])
+
+
+# AS_VAR_SET(VARIABLE, VALUE)
+# ---------------------------
+# Set the contents of the polymorphic shell VARIABLE to the shell
+# expansion of VALUE.  VALUE is immune to field splitting and file
+# name expansion.
+m4_define([AS_VAR_SET],
+[AS_LITERAL_WORD_IF([$1],
+	       [$1=$2],
+	       [eval "$1=_AS_ESCAPE([$2], [`], [\"$])"])])
+
+
+# AS_VAR_SET_IF(VARIABLE, IF-TRUE, IF-FALSE)
+# ------------------------------------------
+# Implement a shell `if-then-else' depending on whether VARIABLE is set
+# or not.  Polymorphic.
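+#
+# For example (`my_cv_result' is a hypothetical cache variable):
+#
+#   AS_VAR_SET_IF([my_cv_result],
+#     [AS_ECHO(["(cached) $my_cv_result"])],
+#     [AS_VAR_SET([my_cv_result], [unknown])])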
+m4_define([AS_VAR_SET_IF],
+[AS_IF([AS_VAR_TEST_SET([$1])], [$2], [$3])])
+
+
+# AS_VAR_TEST_SET(VARIABLE)
+# -------------------------
+# Expands into an expression which is true if VARIABLE
+# is set.  Polymorphic.
+m4_define([AS_VAR_TEST_SET],
+[AS_LITERAL_WORD_IF([$1],
+  [${$1+:} false],
+  [{ as_var=$1; eval \${$as_var+:} false; }],
+  [eval \${$1+:} false])])
+
+
+## -------------------- ##
+## 8. Setting M4sh up.  ##
+## -------------------- ##
+
+
+# AS_INIT_GENERATED(FILE, [COMMENT])
+# ----------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable.  If COMMENT is supplied, it is inserted after the
+# `#!' sequence but before initialization text begins.  After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script.  The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_defun([AS_INIT_GENERATED],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[as_write_fail=0
+cat >$1 <<_ASEOF || as_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || as_write_fail=1
+_AS_SHELL_SANITIZE
+_AS_PREPARE
+m4_if(AS_MESSAGE_FD, [1], [], [exec AS_MESSAGE_FD>&1
+])]dnl
+[m4_text_box([Main body of $1 script.])
+_ASEOF
+test $as_write_fail = 0 && chmod +x $1[]dnl
+_m4_popdef([AS_MESSAGE_LOG_FD])])# AS_INIT_GENERATED
+
+
+# AS_INIT
+# -------
+# Initialize m4sh.
+m4_define([AS_INIT],
+[# Wrap our cleanup prior to m4sugar's cleanup.
+m4_wrap([_AS_CLEANUP])
+m4_init
+m4_provide([AS_INIT])
+
+# Forbidden tokens and exceptions.
+m4_pattern_forbid([^_?AS_])
+
+# Bangshe and minimal initialization.
+m4_divert_text([BINSH], [@%:@! /bin/sh])
+m4_divert_text([HEADER-COMMENT],
+	       [@%:@ Generated from __file__ by m4_PACKAGE_STRING.])
+m4_divert_text([M4SH-SANITIZE], [_AS_SHELL_SANITIZE])
+m4_divert_text([M4SH-INIT-FN], [m4_text_box([M4sh Shell Functions.])])
+
+# Let's go!
+m4_divert([BODY])dnl
+m4_text_box([Main body of script.])
+_AS_DETECT_REQUIRED([_AS_SHELL_FN_WORK])dnl
+AS_REQUIRE([_AS_UNSET_PREPARE], [], [M4SH-INIT-FN])dnl
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sh.m4f b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sh.m4f
new file mode 100644
index 0000000..8fad3ac
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sh.m4f
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sugar.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sugar.m4
new file mode 100644
index 0000000..282cd79
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sugar.m4
@@ -0,0 +1,3301 @@
+divert(-1)#                                                  -*- Autoconf -*-
+# This file is part of Autoconf.
+# Base M4 layer.
+# Requires GNU M4.
+#
+# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
+# 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This file is part of Autoconf.  This program is free
+# software; you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the Autoconf Configure Script Exception,
+# version 3.0, as published by the Free Software Foundation.
+#
+# You should have received a copy of the GNU General Public License
+# and a copy of the Autoconf Configure Script Exception along with
+# this program; see the files COPYINGv3 and COPYING.EXCEPTION
+# respectively.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Akim Demaille.
+
+# Set the quotes, whatever the current quoting system.
+changequote()
+changequote([, ])
+
+# Some old m4's don't support m4exit.  But they provide
+# equivalent functionality by core dumping because of the
+# long macros we define.
+ifdef([__gnu__], ,
+[errprint(M4sugar requires GNU M4. Install it before installing M4sugar or
+set the M4 environment variable to its absolute file name.)
+m4exit(2)])
+
+
+## ------------------------------- ##
+## 1. Simulate --prefix-builtins.  ##
+## ------------------------------- ##
+
+# m4_define
+# m4_defn
+# m4_undefine
+define([m4_define],   defn([define]))
+define([m4_defn],     defn([defn]))
+define([m4_undefine], defn([undefine]))
+
+m4_undefine([define])
+m4_undefine([defn])
+m4_undefine([undefine])
+
+
+# m4_copy(SRC, DST)
+# -----------------
+# Define DST as the definition of SRC.
+# What's the difference between:
+# 1. m4_copy([from], [to])
+# 2. m4_define([to], [from($@)])
+# Well, obviously 1 is more expensive in space.  Maybe 2 is more expensive
+# in time, but because of the space cost of 1, it's not that obvious.
+# Nevertheless, one huge difference is the handling of `$0'.  If `from'
+# uses `$0', then with 1, `to''s `$0' is `to', while it is `from' in 2.
+# The user would certainly prefer to see `to'.
+#
+# This definition is in effect during m4sugar initialization, when
+# there are no pushdef stacks; later on, we redefine it to something
+# more powerful for all other clients to use.
+m4_define([m4_copy],
+[m4_define([$2], m4_defn([$1]))])
+
+
+# m4_rename(SRC, DST)
+# -------------------
+# Rename the macro SRC to DST.
+m4_define([m4_rename],
+[m4_copy([$1], [$2])m4_undefine([$1])])
+
+
+# m4_rename_m4(MACRO-NAME)
+# ------------------------
+# Rename MACRO-NAME to m4_MACRO-NAME.
+m4_define([m4_rename_m4],
+[m4_rename([$1], [m4_$1])])
+
+
+# m4_copy_unm4(m4_MACRO-NAME)
+# ---------------------------
+# Copy m4_MACRO-NAME to MACRO-NAME.
+m4_define([m4_copy_unm4],
+[m4_copy([$1], m4_bpatsubst([$1], [^m4_\(.*\)], [[\1]]))])
+
+
+# Some m4 internals have names colliding with tokens we might use.
+# Rename them à la `m4 --prefix-builtins'.  Conditionals first, since
+# some subsequent renames are conditional.
+m4_rename_m4([ifdef])
+m4_rename([ifelse], [m4_if])
+
+m4_rename_m4([builtin])
+m4_rename_m4([changecom])
+m4_rename_m4([changequote])
+m4_ifdef([changeword],dnl conditionally available in 1.4.x
+[m4_undefine([changeword])])
+m4_rename_m4([debugfile])
+m4_rename_m4([debugmode])
+m4_rename_m4([decr])
+m4_rename_m4([divnum])
+m4_rename_m4([dumpdef])
+m4_rename_m4([errprint])
+m4_rename_m4([esyscmd])
+m4_rename_m4([eval])
+m4_rename_m4([format])
+m4_undefine([include])
+m4_rename_m4([incr])
+m4_rename_m4([index])
+m4_rename_m4([indir])
+m4_rename_m4([len])
+m4_rename([m4exit], [m4_exit])
+m4_undefine([m4wrap])
+m4_ifdef([mkstemp],dnl added in M4 1.4.8
+[m4_rename_m4([mkstemp])
+m4_copy([m4_mkstemp], [m4_maketemp])
+m4_undefine([maketemp])],
+[m4_rename_m4([maketemp])
+m4_copy([m4_maketemp], [m4_mkstemp])])
+m4_rename([patsubst], [m4_bpatsubst])
+m4_rename_m4([popdef])
+m4_rename_m4([pushdef])
+m4_rename([regexp], [m4_bregexp])
+m4_rename_m4([shift])
+m4_undefine([sinclude])
+m4_rename_m4([substr])
+m4_ifdef([symbols],dnl present only in alpha-quality 1.4o
+[m4_rename_m4([symbols])])
+m4_rename_m4([syscmd])
+m4_rename_m4([sysval])
+m4_rename_m4([traceoff])
+m4_rename_m4([traceon])
+m4_rename_m4([translit])
+
+# _m4_defn(ARG)
+# -------------
+# _m4_defn is for internal use only - it bypasses the wrapper, so it
+# must only be used on one argument at a time, and only on macros
+# known to be defined.  Make sure this still works if the user renames
+# m4_defn but not _m4_defn.
+m4_copy([m4_defn], [_m4_defn])
+
+# _m4_divert_raw(NUM)
+# -------------------
+# _m4_divert_raw is for internal use only.  Use this instead of
+# m4_builtin([divert], NUM), so that tracing diversion flow is easier.
+m4_rename([divert], [_m4_divert_raw])
+
+# _m4_popdef(ARG...)
+# ------------------
+# _m4_popdef is for internal use only - it bypasses the wrapper, so it
+# must only be used on macros known to be defined.  Make sure this
+# still works if the user renames m4_popdef but not _m4_popdef.
+m4_copy([m4_popdef], [_m4_popdef])
+
+# _m4_undefine(ARG...)
+# --------------------
+# _m4_undefine is for internal use only - it bypasses the wrapper, so
+# it must only be used on macros known to be defined.  Make sure this
+# still works if the user renames m4_undefine but not _m4_undefine.
+m4_copy([m4_undefine], [_m4_undefine])
+
+# _m4_undivert(NUM...)
+# --------------------
+# _m4_undivert is for internal use only, and should always be given
+# arguments.  Use this instead of m4_builtin([undivert], NUM...), so
+# that tracing diversion flow is easier.
+m4_rename([undivert], [_m4_undivert])
+
+
+## ------------------- ##
+## 2. Error messages.  ##
+## ------------------- ##
+
+
+# m4_location
+# -----------
+# Output the current file, colon, and the current line number.
+m4_define([m4_location],
+[__file__:__line__])
+
+
+# m4_errprintn(MSG)
+# -----------------
+# Same as `errprint', but appends the end of line that `errprint' omits.
+m4_define([m4_errprintn],
+[m4_errprint([$1
+])])
+
+
+# m4_warning(MSG)
+# ---------------
+# Warn the user.
+m4_define([m4_warning],
+[m4_errprintn(m4_location[: warning: $1])])
+
+
+# m4_fatal(MSG, [EXIT-STATUS])
+# ----------------------------
+# Report a fatal error to the user and exit.                           :)
+m4_define([m4_fatal],
+[m4_errprintn(m4_location[: error: $1]
+m4_expansion_stack)m4_exit(m4_if([$2],, 1, [$2]))])
+
+
+# m4_assert(EXPRESSION, [EXIT-STATUS = 1])
+# ----------------------------------------
+# This macro ensures that EXPRESSION evaluates to true, and exits if
+# EXPRESSION evaluates to false.
+m4_define([m4_assert],
+[m4_if(m4_eval([$1]), 0,
+       [m4_fatal([assert failed: $1], [$2])])])
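+
+# A rough sketch of the expected behavior (arbitrary expressions):
+#   m4_assert([1 + 1 == 2])    expands to nothing
+#   m4_assert([2 < 1])         reports `assert failed: 2 < 1' and exits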
+
+
+
+## ------------- ##
+## 3. Warnings.  ##
+## ------------- ##
+
+
+# _m4_warn(CATEGORY, MESSAGE, [STACK-TRACE])
+# ------------------------------------------
+# Report a MESSAGE to the user if the CATEGORY of warnings is enabled.
+# This is for traces only.
+# If present, STACK-TRACE is a \n-separated list of "LOCATION: MESSAGE",
+# where the last line (and no other) ends with "the top level".
+#
+# Within m4, the macro is a no-op.  This macro really matters
+# when autom4te post-processes the trace output.
+m4_define([_m4_warn], [])
+
+
+# m4_warn(CATEGORY, MESSAGE)
+# --------------------------
+# Report a MESSAGE to the user if the CATEGORY of warnings is enabled.
+m4_define([m4_warn],
+[_m4_warn([$1], [$2],
+m4_ifdef([_m4_expansion_stack], [m4_expansion_stack]))])
+
+
+
+## ------------------- ##
+## 4. File inclusion.  ##
+## ------------------- ##
+
+
+# We also want to neutralize include (and sinclude for symmetry),
+# but we want to extend them slightly: warn when a file is included
+# several times.  This is, in general, a dangerous operation, because
+# too many people forget to quote the first argument of m4_define.
+#
+# For instance in the following case:
+#   m4_define(foo, [bar])
+# then a second reading will turn into
+#   m4_define(bar, [bar])
+# which is certainly not what was meant.
+
+# m4_include_unique(FILE)
+# -----------------------
+# Declare that FILE has been loaded, and warn if it has already
+# been included.
+m4_define([m4_include_unique],
+[m4_ifdef([m4_include($1)],
+	  [m4_warn([syntax], [file `$1' included several times])])dnl
+m4_define([m4_include($1)])])
+
+
+# m4_include(FILE)
+# ----------------
+# Like the builtin include, but warns against multiple inclusions.
+m4_define([m4_include],
+[m4_include_unique([$1])dnl
+m4_builtin([include], [$1])])
+
+
+# m4_sinclude(FILE)
+# -----------------
+# Like the builtin sinclude, but warns against multiple inclusions.
+m4_define([m4_sinclude],
+[m4_include_unique([$1])dnl
+m4_builtin([sinclude], [$1])])
+
+
+
+## ------------------------------------ ##
+## 5. Additional branching constructs.  ##
+## ------------------------------------ ##
+
+# Both `m4_ifval' and `m4_ifset' test against the empty string.  The
+# difference is that `m4_ifset' is specialized on macros.
+#
+# For macro arguments, e.g. $1, it makes little difference.  For a
+# macro `FOO', you don't want to check `m4_ifval(FOO, TRUE)', because
+# if `FOO' expands with commas, the arguments are shifted.  So you
+# want to run `m4_ifval([FOO])', but then you just compare the
+# *string* `FOO' against `', which, of course, fails.
+#
+# So you want the variant `m4_ifset', which expects a macro name as $1.
+# If that macro is both defined and defined to a nonempty value, then
+# it expands IF-TRUE, etc.
+
+
+# m4_ifblank(COND, [IF-BLANK], [IF-TEXT])
+# m4_ifnblank(COND, [IF-TEXT], [IF-BLANK])
+# ----------------------------------------
+# If COND is empty, or consists only of blanks (space, tab, newline),
+# then expand IF-BLANK, otherwise expand IF-TEXT.  This differs from
+# m4_ifval only if COND has just whitespace, but it helps optimize in
+# spite of users who mistakenly leave trailing space after what they
+# thought was an empty argument:
+#   macro(
+#         []
+#        )
+#
+# Writing one macro in terms of the other causes extra overhead, so
+# we inline both definitions.
+m4_define([m4_ifblank],
+[m4_if(m4_translit([[$1]],  [ ][	][
+]), [], [$2], [$3])])
+
+m4_define([m4_ifnblank],
+[m4_if(m4_translit([[$1]],  [ ][	][
+]), [], [$3], [$2])])
+
+
+# m4_ifval(COND, [IF-TRUE], [IF-FALSE])
+# -------------------------------------
+# If COND is not the empty string, expand IF-TRUE, otherwise IF-FALSE.
+# Comparable to m4_ifdef.
+m4_define([m4_ifval],
+[m4_if([$1], [], [$3], [$2])])
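+
+# A rough sketch of the distinction described above (expected results):
+#   m4_ifval([ ], [nonempty], [empty])    => nonempty
+#   m4_ifblank([ ], [blank], [nonblank])  => blank
+#   m4_ifblank([x], [blank], [nonblank])  => nonblank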
+
+
+# m4_n(TEXT)
+# ----------
+# If TEXT is not empty, return TEXT and a new line, otherwise nothing.
+m4_define([m4_n],
+[m4_if([$1],
+       [], [],
+	   [$1
+])])
+
+
+# m4_ifvaln(COND, [IF-TRUE], [IF-FALSE])
+# --------------------------------------
+# Same as `m4_ifval', but add an extra newline to IF-TRUE or IF-FALSE
+# unless that argument is empty.
+m4_define([m4_ifvaln],
+[m4_if([$1],
+       [],   [m4_n([$3])],
+	     [m4_n([$2])])])
+
+
+# m4_ifset(MACRO, [IF-TRUE], [IF-FALSE])
+# --------------------------------------
+# If MACRO has no definition, or if its definition is the empty string,
+# expand IF-FALSE, otherwise IF-TRUE.
+m4_define([m4_ifset],
+[m4_ifdef([$1],
+	  [m4_ifval(_m4_defn([$1]), [$2], [$3])],
+	  [$3])])
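+
+# An illustrative sketch (hypothetical macro names; expected results):
+#   m4_define([vara], [])m4_define([varb], [x])dnl
+#   m4_ifset([vara], [set], [unset])   => unset
+#   m4_ifset([varb], [set], [unset])   => set
+#   m4_ifset([novar], [set], [unset])  => unset   (novar is undefined)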
+
+
+# m4_ifndef(NAME, [IF-NOT-DEFINED], [IF-DEFINED])
+# -----------------------------------------------
+m4_define([m4_ifndef],
+[m4_ifdef([$1], [$3], [$2])])
+
+
+# m4_case(SWITCH, VAL1, IF-VAL1, VAL2, IF-VAL2, ..., DEFAULT)
+# -----------------------------------------------------------
+# m4 equivalent of
+# switch (SWITCH)
+# {
+#   case VAL1:
+#     IF-VAL1;
+#     break;
+#   case VAL2:
+#     IF-VAL2;
+#     break;
+#   ...
+#   default:
+#     DEFAULT;
+#     break;
+# }.
+# All the values are optional, and the macro is robust to active
+# symbols properly quoted.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_case],
+[m4_if([$#], 0, [],
+       [$#], 1, [],
+       [$#], 2, [$2],
+       [$1], [$2], [$3],
+       [$0([$1], m4_shift3($@))])])
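+
+# A rough sketch of expected results (arbitrary values):
+#   m4_case([foo], [bar], [one], [foo], [two], [three])  => two
+#   m4_case([qux], [bar], [one], [foo], [two], [three])  => three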
+
+
+# m4_bmatch(SWITCH, RE1, VAL1, RE2, VAL2, ..., DEFAULT)
+# -----------------------------------------------------
+# m4 equivalent of
+#
+# if (SWITCH =~ RE1)
+#   VAL1;
+# elif (SWITCH =~ RE2)
+#   VAL2;
+# elif ...
+#   ...
+# else
+#   DEFAULT
+#
+# All the values are optional, and the macro is robust to active symbols
+# properly quoted.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_bmatch],
+[m4_if([$#], 0, [m4_fatal([$0: too few arguments: $#])],
+       [$#], 1, [m4_fatal([$0: too few arguments: $#: $1])],
+       [$#], 2, [$2],
+       [m4_if(m4_bregexp([$1], [$2]), -1, [$0([$1], m4_shift3($@))],
+	      [$3])])])
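+
+# A rough sketch of expected results (arbitrary values):
+#   m4_bmatch([abc], [^a], [starts with a], [other])  => starts with a
+#   m4_bmatch([xyz], [^a], [starts with a], [other])  => other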
+
+# m4_argn(N, ARGS...)
+# -------------------
+# Extract argument N (greater than 0) from ARGS.  Example:
+#   m4_define([b], [B])
+#   m4_argn([2], [a], [b], [c]) => b
+#
+# Rather than using m4_car(m4_shiftn([$1], $@)), we exploit the fact that
+# GNU m4 can directly reference any argument, through an indirect macro.
+m4_define([m4_argn],
+[m4_assert([0 < $1])]dnl
+[m4_pushdef([_$0], [_m4_popdef([_$0])]m4_dquote([$]m4_incr([$1])))_$0($@)])
+
+
+# m4_car(ARGS...)
+# m4_cdr(ARGS...)
+# ---------------
+# Manipulate m4 lists.  m4_car returns the first argument.  m4_cdr
+# bundles all but the first argument into a quoted list.  These two
+# macros are generally used with list arguments, with quoting removed
+# to break the list into multiple m4 ARGS.
+m4_define([m4_car], [[$1]])
+m4_define([m4_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+       [$#], 1, [],
+       [m4_dquote(m4_shift($@))])])
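+
+# An illustrative sketch (expected expansions):
+#   m4_car([a], [b], [c])  => a
+#   m4_cdr([a], [b], [c])  expands to the quoted list [[b],[c]]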
+
+# _m4_cdr(ARGS...)
+# ----------------
+# Like m4_cdr, except include a leading comma unless only one argument
+# remains.  Why?  Because comparing a large list against [] is more
+# expensive in expansion time than comparing the number of arguments; so
+# _m4_cdr can be used to reduce the number of arguments when it is time
+# to end recursion.
+m4_define([_m4_cdr],
+[m4_if([$#], 1, [],
+       [, m4_dquote(m4_shift($@))])])
+
+
+
+# m4_cond(TEST1, VAL1, IF-VAL1, TEST2, VAL2, IF-VAL2, ..., [DEFAULT])
+# -------------------------------------------------------------------
+# Similar to m4_if, except that each TEST is expanded when encountered.
+# If the expansion of TESTn matches the string VALn, the result is IF-VALn.
+# The result is DEFAULT if no tests passed.  This macro allows
+# short-circuiting of expensive tests, where it pays to arrange quick
+# filter tests to run first.
+#
+# For an example, consider a previous implementation of _AS_QUOTE_IFELSE:
+#
+#    m4_if(m4_index([$1], [\]), [-1], [$2],
+#          m4_eval(m4_index([$1], [\\]) >= 0), [1], [$2],
+#          m4_eval(m4_index([$1], [\$]) >= 0), [1], [$2],
+#          m4_eval(m4_index([$1], [\`]) >= 0), [1], [$3],
+#          m4_eval(m4_index([$1], [\"]) >= 0), [1], [$3],
+#          [$2])
+#
+# Here, m4_index is computed 5 times, and m4_eval 4, even if $1 contains
+# no backslash.  It is more efficient to do:
+#
+#    m4_cond([m4_index([$1], [\])], [-1], [$2],
+#            [m4_eval(m4_index([$1], [\\]) >= 0)], [1], [$2],
+#            [m4_eval(m4_index([$1], [\$]) >= 0)], [1], [$2],
+#            [m4_eval(m4_index([$1], [\`]) >= 0)], [1], [$3],
+#            [m4_eval(m4_index([$1], [\"]) >= 0)], [1], [$3],
+#            [$2])
+#
+# In the common case of $1 with no backslash, only one m4_index expansion
+# occurs, and m4_eval is avoided altogether.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_cond],
+[m4_if([$#], [0], [m4_fatal([$0: cannot be called without arguments])],
+       [$#], [1], [$1],
+       m4_eval([$# % 3]), [2], [m4_fatal([$0: missing an argument])],
+       [_$0($@)])])
+
+m4_define([_m4_cond],
+[m4_if(($1), [($2)], [$3],
+       [$#], [3], [],
+       [$#], [4], [$4],
+       [$0(m4_shift3($@))])])
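+
+# A rough sketch of expected results:
+#   m4_cond([m4_eval(2 > 1)], [1], [yes], [no])  => yes
+#   m4_cond([m4_eval(2 < 1)], [1], [yes], [no])  => no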
+
+
+## ---------------------------------------- ##
+## 6. Enhanced version of some primitives.  ##
+## ---------------------------------------- ##
+
+# m4_bpatsubsts(STRING, RE1, SUBST1, RE2, SUBST2, ...)
+# ----------------------------------------------------
+# m4 equivalent of
+#
+#   $_ = STRING;
+#   s/RE1/SUBST1/g;
+#   s/RE2/SUBST2/g;
+#   ...
+#
+# All the values are optional, and the macro is robust to active symbols
+# properly quoted.
+#
+# I would have liked to name this macro `m4_bpatsubst'; unfortunately,
+# due to quotation problems, I need to double quote $1 below, and
+# therefore the anchors are broken :(  I can't let users be trapped by that.
+#
+# Recall that m4_shift3 always results in an argument.  Hence, we need
+# to distinguish between a final deletion vs. ending recursion.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_bpatsubsts],
+[m4_if([$#], 0, [m4_fatal([$0: too few arguments: $#])],
+       [$#], 1, [m4_fatal([$0: too few arguments: $#: $1])],
+       [$#], 2, [m4_unquote(m4_builtin([patsubst], [[$1]], [$2]))],
+       [$#], 3, [m4_unquote(m4_builtin([patsubst], [[$1]], [$2], [$3]))],
+       [_$0($@m4_if(m4_eval($# & 1), 0, [,]))])])
+m4_define([_m4_bpatsubsts],
+[m4_if([$#], 2, [$1],
+       [$0(m4_builtin([patsubst], [[$1]], [$2], [$3]),
+	   m4_shift3($@))])])
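+
+# A rough sketch of expected results (arbitrary values):
+#   m4_bpatsubsts([oats], [a], [4])            => o4ts
+#   m4_bpatsubsts([oats], [a], [4], [t], [7])  => o47s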
+
+
+# m4_copy(SRC, DST)
+# -----------------
+# Define the pushdef stack DST as a copy of the pushdef stack SRC;
+# give an error if DST is already defined.  This is particularly nice
+# for copying self-modifying pushdef stacks, where the top definition
+# includes one-shot initialization that is later popped to the normal
+# definition.  This version intentionally does nothing if SRC is
+# undefined.
+#
+# Some macros simply can't be renamed with this method: namely, anything
+# involved in the implementation of m4_stack_foreach_sep.
+m4_define([m4_copy],
+[m4_ifdef([$2], [m4_fatal([$0: won't overwrite defined macro: $2])],
+	  [m4_stack_foreach_sep([$1], [m4_pushdef([$2],], [)])])]dnl
+[m4_ifdef([m4_location($1)], [m4_define([m4_location($2)], m4_location)])])
+
+
+# m4_copy_force(SRC, DST)
+# m4_rename_force(SRC, DST)
+# -------------------------
+# Like m4_copy/m4_rename, except blindly overwrite any existing DST.
+# Note that m4_copy_force tolerates undefined SRC, while m4_rename_force
+# does not.
+m4_define([m4_copy_force],
+[m4_ifdef([$2], [_m4_undefine([$2])])m4_copy($@)])
+
+m4_define([m4_rename_force],
+[m4_ifdef([$2], [_m4_undefine([$2])])m4_rename($@)])
+
+
+# m4_define_default(MACRO, VALUE)
+# -------------------------------
+# If MACRO is undefined, set it to VALUE.
+m4_define([m4_define_default],
+[m4_ifndef([$1], [m4_define($@)])])
+
+
+# m4_default(EXP1, EXP2)
+# m4_default_nblank(EXP1, EXP2)
+# -----------------------------
+# Returns EXP1 if not empty/blank, otherwise EXP2.  Expand the result.
+#
+# m4_default is called on hot paths, so inline the contents of m4_ifval,
+# for one less round of expansion.
+m4_define([m4_default],
+[m4_if([$1], [], [$2], [$1])])
+
+m4_define([m4_default_nblank],
+[m4_ifblank([$1], [$2], [$1])])
+
+
+# m4_default_quoted(EXP1, EXP2)
+# m4_default_nblank_quoted(EXP1, EXP2)
+# ------------------------------------
+# Returns EXP1 if non empty/blank, otherwise EXP2.  Leave the result quoted.
+#
+# For comparison:
+#   m4_define([active], [ACTIVE])
+#   m4_default([active], [default]) => ACTIVE
+#   m4_default([], [active]) => ACTIVE
+#   -m4_default([ ], [active])- => - -
+#   -m4_default_nblank([ ], [active])- => -ACTIVE-
+#   m4_default_quoted([active], [default]) => active
+#   m4_default_quoted([], [active]) => active
+#   -m4_default_quoted([ ], [active])- => - -
+#   -m4_default_nblank_quoted([ ], [active])- => -active-
+#
+# m4_default macro is called on hot paths, so inline the contents of m4_ifval,
+# for one less round of expansion.
+m4_define([m4_default_quoted],
+[m4_if([$1], [], [[$2]], [[$1]])])
+
+m4_define([m4_default_nblank_quoted],
+[m4_ifblank([$1], [[$2]], [[$1]])])
+
+
+# m4_defn(NAME)
+# -------------
+# Like the original, except guarantee a warning when using something which is
+# undefined (unlike M4 1.4.x).  This replacement is not a full-featured
+# replacement: if any of the defined macros contain unbalanced quoting, but
+# when pasted together result in a well-quoted string, then only native m4
+# support is able to get it correct.  But that's where quadrigraphs come in
+# handy, if you really need unbalanced quotes inside your macros.
+#
+# This macro is called frequently, so minimize the amount of additional
+# expansions by skipping m4_ifndef.  Better yet, if __m4_version__ exists,
+# (added in M4 1.6), then let m4 do the job for us (see m4_init).
+m4_define([m4_defn],
+[m4_if([$#], [0], [[$0]],
+       [$#], [1], [m4_ifdef([$1], [_m4_defn([$1])],
+			    [m4_fatal([$0: undefined macro: $1])])],
+       [m4_map_args([$0], $@)])])
+
+
+# m4_dumpdef(NAME...)
+# -------------------
+# In m4 1.4.x, dumpdef writes to the current debugfile, rather than
+# stderr.  This in turn royally confuses autom4te; so we follow the
+# lead of newer m4 and always dump to stderr.  Unlike the original,
+# this version requires an argument, since there is no convenient way
+# in m4 1.4.x to grab the names of all defined macros.  Newer m4
+# always dumps to stderr, regardless of the current debugfile; it also
+# provides m4symbols as a way to grab all current macro names.  But
+# dumpdefs is not frequently called, so we don't need to worry about
+# conditionally using these newer features.  Also, this version
+# doesn't sort multiple arguments.
+#
+# If we detect m4 1.6 or newer, then provide an alternate definition,
+# installed during m4_init, that allows builtins through.
+# Unfortunately, there is no nice way in m4 1.4.x to dump builtins.
+m4_define([m4_dumpdef],
+[m4_if([$#], [0], [m4_fatal([$0: missing argument])],
+       [$#], [1], [m4_ifdef([$1], [m4_errprintn(
+  [$1:	]m4_dquote(_m4_defn([$1])))], [m4_fatal([$0: undefined macro: $1])])],
+       [m4_map_args([$0], $@)])])
+
+m4_define([_m4_dumpdef],
+[m4_if([$#], [0], [m4_fatal([$0: missing argument])],
+       [$#], [1], [m4_builtin([dumpdef], [$1])],
+       [m4_map_args_sep([m4_builtin([dumpdef],], [)], [], $@)])])
+
+
+# m4_dumpdefs(NAME...)
+# --------------------
+# Similar to `m4_dumpdef(NAME)', but if NAME was m4_pushdef'ed, display its
+# value stack (most recent displayed first).  Also, this version silently
+# ignores undefined macros, rather than erroring out.
+#
+# This macro cheats, because it relies on the current definition of NAME
+# while the second argument of m4_stack_foreach_lifo is evaluated (which
+# would be undefined according to the API).
+m4_define([m4_dumpdefs],
+[m4_if([$#], [0], [m4_fatal([$0: missing argument])],
+       [$#], [1], [m4_stack_foreach_lifo([$1], [m4_dumpdef([$1])m4_ignore])],
+       [m4_map_args([$0], $@)])])
+
+# m4_esyscmd_s(COMMAND)
+# ---------------------
+# Like m4_esyscmd, except strip any trailing newlines, thus behaving
+# more like shell command substitution.
+m4_define([m4_esyscmd_s],
+[m4_chomp_all(m4_esyscmd([$1]))])
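+
+# For illustration, assuming a POSIX shell is available:
+#   m4_esyscmd_s([echo hello])  => hello   (trailing newline stripped)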
+
+
+# m4_popdef(NAME)
+# ---------------
+# Like the original, except guarantee a warning when using something which is
+# undefined (unlike M4 1.4.x).
+#
+# This macro is called frequently, so minimize the amount of additional
+# expansions by skipping m4_ifndef.  Better yet, if __m4_version__ exists,
+# (added in M4 1.6), then let m4 do the job for us (see m4_init).
+m4_define([m4_popdef],
+[m4_if([$#], [0], [[$0]],
+       [$#], [1], [m4_ifdef([$1], [_m4_popdef([$1])],
+			    [m4_fatal([$0: undefined macro: $1])])],
+       [m4_map_args([$0], $@)])])
+
+
+# m4_shiftn(N, ...)
+# -----------------
+# Returns ... shifted N times.  Useful for recursive "varargs" constructs.
+#
+# Autoconf does not use this macro, because it is inherently slower than
+# calling the common cases of m4_shift2 or m4_shift3 directly.  But it
+# might as well be fast for other clients, such as Libtool.  One way to
+# do this is to expand $@ only once in _m4_shiftn (otherwise, for long
+# lists, the expansion of m4_if takes twice as much memory as what the
+# list itself occupies, only to throw away the unused branch).  The end
+# result is strictly equivalent to
+#   m4_if([$1], 1, [m4_shift(,m4_shift(m4_shift($@)))],
+#         [_m4_shiftn(m4_decr([$1]), m4_shift(m4_shift($@)))])
+# but with the final `m4_shift(m4_shift($@)))' shared between the two
+# paths.  The first leg uses a no-op m4_shift(,$@) to balance out the ().
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_shiftn],
+[m4_assert(0 < $1 && $1 < $#)_$0($@)])
+
+m4_define([_m4_shiftn],
+[m4_if([$1], 1, [m4_shift(],
+       [$0(m4_decr([$1])]), m4_shift(m4_shift($@)))])
+
+# m4_shift2(...)
+# m4_shift3(...)
+# --------------
+# Returns ... shifted twice, and three times.  Faster than m4_shiftn.
+m4_define([m4_shift2], [m4_shift(m4_shift($@))])
+m4_define([m4_shift3], [m4_shift(m4_shift(m4_shift($@)))])
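+
+# A rough sketch of expected results:
+#   m4_shift2([a], [b], [c], [d])       => c,d
+#   m4_shift3([a], [b], [c], [d])       => d
+#   m4_shiftn([2], [a], [b], [c], [d])  => c,d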
+
+# _m4_shift2(...)
+# _m4_shift3(...)
+# ---------------
+# Like m4_shift2 or m4_shift3, except include a leading comma unless shifting
+# consumes all arguments.  Why?  Because in recursion, it is nice to
+# distinguish between 1 element left and 0 elements left, based on how many
+# arguments this shift expands to.
+m4_define([_m4_shift2],
+[m4_if([$#], [2], [],
+       [, m4_shift(m4_shift($@))])])
+m4_define([_m4_shift3],
+[m4_if([$#], [3], [],
+       [, m4_shift(m4_shift(m4_shift($@)))])])
+
+
+# m4_undefine(NAME)
+# -----------------
+# Like the original, except guarantee a warning when using something which is
+# undefined (unlike M4 1.4.x).
+#
+# This macro is called frequently, so minimize the amount of additional
+# expansions by skipping m4_ifndef.  Better yet, if __m4_version__ exists,
+# (added in M4 1.6), then let m4 do the job for us (see m4_init).
+m4_define([m4_undefine],
+[m4_if([$#], [0], [[$0]],
+       [$#], [1], [m4_ifdef([$1], [_m4_undefine([$1])],
+			    [m4_fatal([$0: undefined macro: $1])])],
+       [m4_map_args([$0], $@)])])
+
+# _m4_wrap(PRE, POST)
+# -------------------
+# Helper macro for m4_wrap and m4_wrap_lifo.  Allows nested calls to
+# m4_wrap within wrapped text.  Use _m4_defn and _m4_popdef for speed.
+m4_define([_m4_wrap],
+[m4_ifdef([$0_text],
+	  [m4_define([$0_text], [$1]_m4_defn([$0_text])[$2])],
+	  [m4_builtin([m4wrap], [m4_unquote(
+  _m4_defn([$0_text])_m4_popdef([$0_text]))])m4_define([$0_text], [$1$2])])])
+
+# m4_wrap(TEXT)
+# -------------
+# Append TEXT to the list of hooks to be executed at the end of input.
+# Whereas the order of the original may be LIFO in the underlying m4,
+# this version is always FIFO.
+m4_define([m4_wrap],
+[_m4_wrap([], [$1[]])])
+
+# m4_wrap_lifo(TEXT)
+# ------------------
+# Prepend TEXT to the list of hooks to be executed at the end of input.
+# Whereas the order of m4_wrap may be FIFO in the underlying m4, this
+# version is always LIFO.
+m4_define([m4_wrap_lifo],
+[_m4_wrap([$1[]])])
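+
+# A rough sketch of the expected ordering:
+#   m4_wrap([1])m4_wrap([2])            emits `12' at the end of input
+#   m4_wrap_lifo([1])m4_wrap_lifo([2])  emits `21' at the end of input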
+
+## ------------------------- ##
+## 7. Quoting manipulation.  ##
+## ------------------------- ##
+
+
+# m4_apply(MACRO, LIST)
+# ---------------------
+# Invoke MACRO, with arguments provided from the quoted list of
+# comma-separated quoted arguments.  If LIST is empty, invoke MACRO
+# without arguments.  The expansion will not be concatenated with
+# subsequent text.
+m4_define([m4_apply],
+[m4_if([$2], [], [$1], [$1($2)])[]])
+
+# _m4_apply(MACRO, LIST)
+# ----------------------
+# Like m4_apply, except do nothing if LIST is empty.
+m4_define([_m4_apply],
+[m4_if([$2], [], [], [$1($2)[]])])
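+
+# An illustrative sketch (`pair' is a hypothetical helper):
+#   m4_define([pair], [$1-$2])dnl
+#   m4_apply([pair], [[a], [b]])  => a-b
+#   m4_apply([pair], [])          => -    (pair expanded without arguments)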
+
+
+# m4_count(ARGS)
+# --------------
+# Return a count of how many ARGS are present.
+m4_define([m4_count], [$#])
+
+
+# m4_curry(MACRO, ARG...)
+# -----------------------
+# Perform argument currying.  The expansion of this macro is another
+# macro that takes exactly one argument, appends it to the end of the
+# original ARG list, then invokes MACRO.  For example:
+#   m4_curry([m4_curry], [m4_reverse], [1])([2])([3]) => 3, 2, 1
+# Not quite as practical as m4_incr, but you could also do:
+#   m4_define([add], [m4_eval(([$1]) + ([$2]))])
+#   m4_define([add_one], [m4_curry([add], [1])])
+#   add_one()([2]) => 3
+m4_define([m4_curry], [$1(m4_shift($@,)_$0])
+m4_define([_m4_curry],               [[$1])])
+
+
+# m4_do(STRING, ...)
+# ------------------
+# This macro invokes all its arguments (in sequence, of course).  It is
+# useful for making your macros more structured and readable by dropping
+# unnecessary dnl's and have the macros indented properly.  No concatenation
+# occurs after a STRING; use m4_unquote(m4_join(,STRING)) for that.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_do],
+[m4_if([$#], 0, [],
+       [$#], 1, [$1[]],
+       [$1[]$0(m4_shift($@))])])
+
+
+# m4_dquote(ARGS)
+# ---------------
+# Return ARGS as a quoted list of quoted arguments.
+m4_define([m4_dquote],  [[$@]])
+
+
+# m4_dquote_elt(ARGS)
+# -------------------
+# Return ARGS as an unquoted list of double-quoted arguments.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_dquote_elt],
+[m4_if([$#], [0], [],
+       [$#], [1], [[[$1]]],
+       [[[$1]],$0(m4_shift($@))])])
+
+
+# m4_echo(ARGS)
+# -------------
+# Return the ARGS, with the same level of quoting.  Whitespace after
+# unquoted commas is consumed.
+m4_define([m4_echo], [$@])
+
+
+# m4_expand(ARG)
+# _m4_expand(ARG)
+# ---------------
+# Return the expansion of ARG as a single string.  Unlike
+# m4_quote($1), this preserves whitespace following single-quoted
+# commas that appear within ARG.  It also deals with shell case
+# statements.
+#
+#   m4_define([active], [ACT, IVE])
+#   m4_define([active2], [[ACT, IVE]])
+#   m4_quote(active, active2)
+#   => ACT,IVE,ACT, IVE
+#   m4_expand([active, active2])
+#   => ACT, IVE, ACT, IVE
+#
+# Unfortunately, due to limitations in m4, ARG must expand to
+# something with balanced quotes (use quadrigraphs to get around
+# this), and should not contain the unlikely delimiters -=<{( or
+# )}>=-.  It is possible to have unbalanced quoted `(' or `)', as well
+# as unbalanced unquoted `)'.  m4_expand can handle unterminated
+# comments or dnl on the final line, at the expense of speed; it also
+# aids in detecting attempts to incorrectly change the current
+# diversion inside ARG.  Meanwhile, _m4_expand is faster but must be
+# given a terminated expansion, and has no safety checks for
+# mis-diverted text.
+#
+# Exploit that extra unquoted () will group unquoted commas and the
+# following whitespace.  m4_bpatsubst can't handle newlines inside $1,
+# and m4_substr strips quoting.  So we (ab)use m4_changequote, using
+# temporary quotes to remove the delimiters that conveniently included
+# the unquoted () that were added prior to the changequote.
+#
+# Thanks to shell case statements, too many people are prone to pass
+# underquoted `)', so we try to detect that by passing a marker as a
+# fourth argument; if the marker is not present, then we assume that
+# we encountered an early `)', and re-expand the first argument, but
+# this time with one more `(' in the second argument and in the
+# open-quote delimiter.  We must also ignore the slop from the
+# previous try.  The final macro is thus half line-noise, half art.
+m4_define([m4_expand],
+[m4_pushdef([m4_divert], _m4_defn([_m4_divert_unsafe]))]dnl
+[m4_pushdef([m4_divert_push], _m4_defn([_m4_divert_unsafe]))]dnl
+[m4_chomp(_$0([$1
+]))_m4_popdef([m4_divert], [m4_divert_push])])
+
+m4_define([_m4_expand], [$0_([$1], [(], -=<{($1)}>=-, [}>=-])])
+
+m4_define([_m4_expand_],
+[m4_if([$4], [}>=-],
+       [m4_changequote([-=<{$2], [)}>=-])$3m4_changequote([, ])],
+       [$0([$1], [($2], -=<{($2$1)}>=-, [}>=-])m4_ignore$2])])
+
+
+# m4_ignore(ARGS)
+# ---------------
+# Expands to nothing.  Useful for conditionally ignoring an arbitrary
+# number of arguments (see _m4_list_cmp for an example).
+m4_define([m4_ignore])
+
+
+# m4_make_list(ARGS)
+# ------------------
+# Similar to m4_dquote, this creates a quoted list of quoted ARGS.  This
+# version is less efficient than m4_dquote, but separates each argument
+# with a comma and newline, rather than just comma, for readability.
+# When developing an m4sugar algorithm, you could temporarily use
+#   m4_pushdef([m4_dquote],m4_defn([m4_make_list]))
+# around your code to make debugging easier.
+m4_define([m4_make_list], [m4_join([,
+], m4_dquote_elt($@))])
+
+
+# m4_noquote(STRING)
+# ------------------
+# Return the result of ignoring all quotes in STRING and invoking the
+# macros it contains.  Amongst other things, this is useful for enabling
+# macro invocations inside strings with [] blocks (for instance regexps
+# and help-strings).  On the other hand, since all quotes are disabled,
+# any macro expanded during this time that relies on nested [] quoting
+# will likely crash and burn.  This macro is seldom useful; consider
+# m4_unquote or m4_expand instead.
+m4_define([m4_noquote],
+[m4_changequote([-=<{(],[)}>=-])$1-=<{()}>=-m4_changequote([,])])
+
+
+# m4_quote(ARGS)
+# --------------
+# Return ARGS as a single argument.  Any whitespace after unquoted commas
+# is stripped.  There is always output, even when there were no arguments.
+#
+# It is important to realize the difference between `m4_quote(exp)' and
+# `[exp]': in the first case you obtain the quoted *result* of the
+# expansion of EXP, while in the latter you just obtain the string
+# `exp'.
+m4_define([m4_quote],  [[$*]])
+
+
+# _m4_quote(ARGS)
+# ---------------
+# Like m4_quote, except that when there are no arguments, there is no
+# output.  For conditional scenarios (such as passing _m4_quote as the
+# macro name in m4_mapall), this feature can be used to distinguish between
+# one argument of the empty string vs. no arguments.  However, in the
+# normal case with arguments present, this is less efficient than m4_quote.
+m4_define([_m4_quote],
+[m4_if([$#], [0], [], [[$*]])])
+
+
+# m4_reverse(ARGS)
+# ----------------
+# Output ARGS in reverse order.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_reverse],
+[m4_if([$#], [0], [], [$#], [1], [[$1]],
+       [$0(m4_shift($@)), [$1]])])
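+
+# A rough sketch of the expected result:
+#   m4_reverse([a], [b], [c])  => c, b, a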
+
+
+# m4_unquote(ARGS)
+# ----------------
+# Remove one layer of quotes from each ARG, performing one level of
+# expansion.  For one argument, m4_unquote([arg]) is more efficient than
+# m4_do([arg]), but for multiple arguments, the difference is that
+# m4_unquote separates arguments with commas while m4_do concatenates.
+# Follow this macro with [] if concatenation with subsequent text is
+# undesired.
+m4_define([m4_unquote], [$*])
+
+
+## -------------------------- ##
+## 8. Implementing m4 loops.  ##
+## -------------------------- ##
+
+
+# m4_for(VARIABLE, FIRST, LAST, [STEP = +/-1], EXPRESSION)
+# --------------------------------------------------------
+# Expand EXPRESSION defining VARIABLE to FIRST, FIRST + STEP, ...,
+# LAST.  Both limits are included, and bounds are
+# checked for consistency.  The algorithm is robust to indirect
+# VARIABLE names.  Changing VARIABLE inside EXPRESSION will not impact
+# the number of iterations.
+#
+# Uses _m4_defn for speed, and avoids dnl in the macro body.  Factor
+# the _m4_for call so that EXPRESSION is only parsed once.
+m4_define([m4_for],
+[m4_pushdef([$1], m4_eval([$2]))]dnl
+[m4_cond([m4_eval(([$3]) > ([$2]))], 1,
+	   [m4_pushdef([_m4_step], m4_eval(m4_default_quoted([$4],
+	      1)))m4_assert(_m4_step > 0)_$0(_m4_defn([$1]),
+  m4_eval((([$3]) - ([$2])) / _m4_step * _m4_step + ([$2])), _m4_step,],
+	 [m4_eval(([$3]) < ([$2]))], 1,
+	   [m4_pushdef([_m4_step], m4_eval(m4_default_quoted([$4],
+	      -1)))m4_assert(_m4_step < 0)_$0(_m4_defn([$1]),
+  m4_eval((([$2]) - ([$3])) / -(_m4_step) * _m4_step + ([$2])), _m4_step,],
+	 [m4_pushdef([_m4_step])_$0(_m4_defn([$1]), _m4_defn([$1]), 0,])]dnl
+[[m4_define([$1],], [)$5])m4_popdef([_m4_step], [$1])])
+
+# _m4_for(COUNT, LAST, STEP, PRE, POST)
+# -------------------------------------
+# Core of the loop, no consistency checks, all arguments are plain
+# numbers.  Expand PRE[COUNT]POST, then alter COUNT by STEP and
+# iterate if COUNT is not LAST.
+m4_define([_m4_for],
+[$4[$1]$5[]m4_if([$1], [$2], [],
+		 [$0(m4_eval([$1 + $3]), [$2], [$3], [$4], [$5])])])
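+
+# A rough sketch of expected results (arbitrary bounds):
+#   m4_for([i], [1], [4], [], [(i)])     => (1)(2)(3)(4)
+#   m4_for([i], [10], [1], [-3], [(i)])  => (10)(7)(4)(1)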
+
+
+# Implementing `foreach' loops in m4 is much more tricky than it may
+# seem.  For example, the old M4 1.4.4 manual had an incorrect example,
+# which looked like this (when translated to m4sugar):
+#
+# | # foreach(VAR, (LIST), STMT)
+# | m4_define([foreach],
+# |   [m4_pushdef([$1])_foreach([$1], [$2], [$3])m4_popdef([$1])])
+# | m4_define([_arg1], [$1])
+# | m4_define([_foreach],
+# |   [m4_if([$2], [()], ,
+# |     [m4_define([$1], _arg1$2)$3[]_foreach([$1], (m4_shift$2), [$3])])])
+#
+# But then if you run
+#
+# | m4_define(a, 1)
+# | m4_define(b, 2)
+# | m4_define(c, 3)
+# | foreach([f], [([a], [(b], [c)])], [echo f
+# | ])
+#
+# it gives
+#
+#  => echo 1
+#  => echo (2,3)
+#
+# which is not what is expected.
+#
+# Of course the problem is that many quotes are missing.  So you add
+# plenty of quotes at random places, until you reach the expected
+# result.  Alternatively, if you are a quoting wizard, you directly
+# reach the following implementation (but if you really did, then
+# apply to the maintenance of m4sugar!).
+#
+# | # foreach(VAR, (LIST), STMT)
+# | m4_define([foreach], [m4_pushdef([$1])_foreach($@)m4_popdef([$1])])
+# | m4_define([_arg1], [[$1]])
+# | m4_define([_foreach],
+# |  [m4_if($2, [()], ,
+# |    [m4_define([$1], [_arg1$2])$3[]_foreach([$1], [(m4_shift$2)], [$3])])])
+#
+# which this time answers
+#
+#  => echo a
+#  => echo (b
+#  => echo c)
+#
+# Bingo!
+#
+# Well, not quite.
+#
+# With a better look, you realize that the parens are more a pain than
+# a help: since anyway you need to quote properly the list, you end up
+# with always using an outermost pair of parens and an outermost pair
+# of quotes.  Rejecting the parens both eases the implementation, and
+# simplifies the use:
+#
+# | # foreach(VAR, (LIST), STMT)
+# | m4_define([foreach], [m4_pushdef([$1])_foreach($@)m4_popdef([$1])])
+# | m4_define([_arg1], [$1])
+# | m4_define([_foreach],
+# |  [m4_if($2, [], ,
+# |    [m4_define([$1], [_arg1($2)])$3[]_foreach([$1], [m4_shift($2)], [$3])])])
+#
+#
+# Now, just replace the `$2' with `m4_quote($2)' in the outer `m4_if'
+# to improve robustness, and you come up with a nice implementation
+# that doesn't require extra parentheses in the user's LIST.
+#
+# But wait -  now the algorithm is quadratic, because every recursion of
+# the algorithm keeps the entire LIST and merely adds another m4_shift to
+# the quoted text.  If the user has a lot of elements in LIST, you can
+# bring the system to its knees with the memory m4 then requires, or trip
+# the m4 --nesting-limit recursion factor.  The only way to avoid
+# quadratic growth is ensure m4_shift is expanded prior to the recursion.
+# Hence the design below.
+#
+# The M4 manual now includes a chapter devoted to this issue, with
+# the lessons learned from m4sugar.  And still, this design is only
+# optimal for M4 1.6; see foreach.m4 for yet more comments on why
+# M4 1.4.x uses yet another implementation.
+
+
+# m4_foreach(VARIABLE, LIST, EXPRESSION)
+# --------------------------------------
+#
+# Expand EXPRESSION assigning each value of the LIST to VARIABLE.
+# LIST should have the form `item_1, item_2, ..., item_n', i.e. the
+# whole list must be *quoted*.  Quote the members too if you don't want
+# them to be expanded.
+#
+# This macro is robust to active symbols:
+#      | m4_define(active, [ACT, IVE])
+#      | m4_foreach(Var, [active, active], [-Var-])
+#     => -ACT--IVE--ACT--IVE-
+#
+#      | m4_foreach(Var, [[active], [active]], [-Var-])
+#     => -ACT, IVE--ACT, IVE-
+#
+#      | m4_foreach(Var, [[[active]], [[active]]], [-Var-])
+#     => -active--active-
+#
+# This macro is called frequently, so avoid extra expansions such as
+# m4_ifval and dnl.  Also, since $2 might be quite large, try to use it
+# as little as possible in _m4_foreach; each extra use requires that much
+# more memory for expansion.  So, rather than directly compare $2 against
+# [] and use m4_car/m4_cdr for recursion, we instead unbox the list (which
+# requires swapping the argument order in the helper), insert an ignored
+# third argument, and use m4_shift3 to detect when recursion is complete,
+# at which point this looks very much like m4_map_args.
+m4_define([m4_foreach],
+[m4_if([$2], [], [],
+       [m4_pushdef([$1])_$0([m4_define([$1],], [)$3], [],
+  $2)m4_popdef([$1])])])
+
+# _m4_foreach(PRE, POST, IGNORED, ARG...)
+# ---------------------------------------
+# Form the common basis of the m4_foreach and m4_map macros.  For each
+# ARG, expand PRE[ARG]POST[].  The IGNORED argument makes recursion
+# easier, and must be supplied explicitly rather than left implicit.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([_m4_foreach],
+[m4_if([$#], [3], [],
+       [$1[$4]$2[]$0([$1], [$2], m4_shift3($@))])])
+
+
+# m4_foreach_w(VARIABLE, LIST, EXPRESSION)
+# ----------------------------------------
+# Like m4_foreach, but the list is whitespace separated.  Depending on
+# EXPRESSION, it may be more efficient to use m4_map_args_w.
+#
+# This macro is robust to active symbols:
+#    m4_foreach_w([Var], [ active
+#    b	act\
+#    ive  ], [-Var-])end
+#    => -active--b--active-end
+#
+# This used to use a slower implementation based on m4_foreach:
+#   m4_foreach([$1], m4_split(m4_normalize([$2]), [ ]), [$3])
+m4_define([m4_foreach_w],
+[m4_pushdef([$1])m4_map_args_w([$2],
+  [m4_define([$1],], [)$3])m4_popdef([$1])])
+
+
+# m4_map(MACRO, LIST)
+# m4_mapall(MACRO, LIST)
+# ----------------------
+# Invoke MACRO($1), MACRO($2) etc. where $1, $2... are the elements of
+# LIST.  $1, $2... must in turn be lists, appropriate for m4_apply.
+# If LIST contains an empty sublist, m4_map skips the expansion of
+# MACRO, while m4_mapall expands MACRO with no arguments.
+#
+# Since LIST may be quite large, we want to minimize how often it
+# appears in the expansion.  Rather than use m4_car/m4_cdr iteration,
+# we unbox the list, and use _m4_foreach for iteration.  For m4_map,
+# an empty list behaves like an empty sublist and gets ignored; for
+# m4_mapall, we must special-case the empty list.
+m4_define([m4_map],
+[_m4_foreach([_m4_apply([$1],], [)], [], $2)])
+
+m4_define([m4_mapall],
+[m4_if([$2], [], [],
+       [_m4_foreach([m4_apply([$1],], [)], [], $2)])])
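+
+# An illustrative sketch (`pair' is a hypothetical helper):
+#   m4_define([pair], [($1,$2)])dnl
+#   m4_map([pair], [[[a], [b]], [[c], [d]]])  => (a,b)(c,d)
+#   m4_map([pair], [[], [[a], [b]]])          => (a,b)   (empty sublist skipped)
+#   m4_mapall([pair], [[], [[a], [b]]])       => (,)(a,b)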
+
+
+# m4_map_sep(MACRO, [SEPARATOR], LIST)
+# m4_mapall_sep(MACRO, [SEPARATOR], LIST)
+# ---------------------------------------
+# Invoke MACRO($1), SEPARATOR, MACRO($2), ..., MACRO($N) where $1,
+# $2... $N are the elements of LIST, and are in turn lists appropriate
+# for m4_apply.  SEPARATOR is expanded, in order to allow the creation
+# of a list of arguments by using a single-quoted comma as the
+# separator.  For each empty sublist, m4_map_sep skips the expansion
+# of MACRO and SEPARATOR, while m4_mapall_sep expands MACRO with no
+# arguments.
+#
+# For m4_mapall_sep, merely expand the first iteration without the
+# separator, then include separator as part of subsequent recursion;
+# but avoid extra expansion of LIST's side-effects via a helper macro.
+# For m4_map_sep, things are trickier - we don't know if the first
+# list element is an empty sublist, so we must define a self-modifying
+# helper macro and use that as the separator instead.
+m4_define([m4_map_sep],
+[m4_pushdef([m4_Sep], [m4_define([m4_Sep], _m4_defn([m4_unquote]))])]dnl
+[_m4_foreach([_m4_apply([m4_Sep([$2])[]$1],], [)], [], $3)m4_popdef([m4_Sep])])
+
+m4_define([m4_mapall_sep],
+[m4_if([$3], [], [], [_$0([$1], [$2], $3)])])
+
+m4_define([_m4_mapall_sep],
+[m4_apply([$1], [$3])_m4_foreach([m4_apply([$2[]$1],], [)], m4_shift2($@))])
+
+# m4_map_args(EXPRESSION, ARG...)
+# -------------------------------
+# Expand EXPRESSION([ARG]) for each argument.  More efficient than
+#   m4_foreach([var], [ARG...], [EXPRESSION(m4_defn([var]))])
+# Shorthand for m4_map_args_sep([EXPRESSION(], [)], [], ARG...).
+m4_define([m4_map_args],
+[m4_if([$#], [0], [m4_fatal([$0: too few arguments: $#])],
+       [$#], [1], [],
+       [$#], [2], [$1([$2])[]],
+       [_m4_foreach([$1(], [)], $@)])])
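+
+# A rough sketch of the expected result (m4_echo merely returns its arguments):
+#   m4_map_args([m4_echo], [a], [b], [c])  => abc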
+
+
+# m4_map_args_pair(EXPRESSION, [END-EXPR = EXPRESSION], ARG...)
+# -------------------------------------------------------------
+# Perform a pairwise grouping of consecutive ARGs, by expanding
+# EXPRESSION([ARG1], [ARG2]).  If there are an odd number of ARGs, the
+# final argument is expanded with END-EXPR([ARGn]).
+#
+# For example:
+#   m4_define([show], [($*)m4_newline])dnl
+#   m4_map_args_pair([show], [], [a], [b], [c], [d], [e])dnl
+#   => (a,b)
+#   => (c,d)
+#   => (e)
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_map_args_pair],
+[m4_if([$#], [0], [m4_fatal([$0: too few arguments: $#])],
+       [$#], [1], [m4_fatal([$0: too few arguments: $#: $1])],
+       [$#], [2], [],
+       [$#], [3], [m4_default([$2], [$1])([$3])[]],
+       [$#], [4], [$1([$3], [$4])[]],
+       [$1([$3], [$4])[]$0([$1], [$2], m4_shift(m4_shift3($@)))])])
+
+
+# m4_map_args_sep([PRE], [POST], [SEP], ARG...)
+# ---------------------------------------------
+# Expand PRE[ARG]POST for each argument, with SEP between arguments.
+m4_define([m4_map_args_sep],
+[m4_if([$#], [0], [m4_fatal([$0: too few arguments: $#])],
+       [$#], [1], [],
+       [$#], [2], [],
+       [$#], [3], [],
+       [$#], [4], [$1[$4]$2[]],
+       [$1[$4]$2[]_m4_foreach([$3[]$1], [$2], m4_shift3($@))])])
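+
+# A rough sketch of the expected result (m4_echo merely returns its arguments):
+#   m4_map_args_sep([m4_echo(], [)], [,], [a], [b], [c])  => a,b,c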
+
+
+# m4_map_args_w(STRING, [PRE], [POST], [SEP])
+# -------------------------------------------
+# Perform the expansion of PRE[word]POST[] for each word in STRING
+# separated by whitespace.  More efficient than:
+#   m4_foreach_w([var], [STRING], [PRE[]m4_defn([var])POST])
+# Additionally, expand SEP between words.
+#
+# As long as we have to use m4_bpatsubst to split the string, we might
+# as well make it also apply PRE and POST; this avoids iteration
+# altogether.  But we must be careful of any \ in PRE or POST.
+# _m4_strip returns a quoted string, but that's okay, since it also
+# supplies an empty leading and trailing argument due to our
+# intentional whitespace around STRING.  We use m4_substr to strip the
+# empty elements and remove the extra layer of quoting.
+m4_define([m4_map_args_w],
+[_$0(_m4_split([ ]m4_flatten([$1])[ ], [[	 ]+],
+	       m4_if(m4_index([$2$3$4], [\]), [-1], [[$3[]$4[]$2]],
+		     [m4_bpatsubst([[$3[]$4[]$2]], [\\], [\\\\])])),
+     m4_len([[]$3[]$4]), m4_len([$4[]$2[]]))])
+
+m4_define([_m4_map_args_w],
+[m4_substr([$1], [$2], m4_eval(m4_len([$1]) - [$2] - [$3]))])
+
+
+# m4_stack_foreach(MACRO, FUNC)
+# m4_stack_foreach_lifo(MACRO, FUNC)
+# ----------------------------------
+# Pass each stacked definition of MACRO to the one-argument macro FUNC.
+# m4_stack_foreach proceeds in FIFO order, while m4_stack_foreach_lifo
+# processes the topmost definitions first.  In addition, FUNC should
+# not push or pop definitions of MACRO, and should not expect anything about
+# the active definition of MACRO (it will not be the topmost, and may not
+# be the one passed to FUNC either).
+#
+# Some macros simply can't be examined with this method: namely,
+# anything involved in the implementation of _m4_stack_reverse.
+m4_define([m4_stack_foreach],
+[_m4_stack_reverse([$1], [m4_tmp-$1])]dnl
+[_m4_stack_reverse([m4_tmp-$1], [$1], [$2(_m4_defn([m4_tmp-$1]))])])
+
+m4_define([m4_stack_foreach_lifo],
+[_m4_stack_reverse([$1], [m4_tmp-$1], [$2(_m4_defn([m4_tmp-$1]))])]dnl
+[_m4_stack_reverse([m4_tmp-$1], [$1])])
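+
+# A rough sketch of the expected visiting order:
+#   m4_pushdef([x], [1])m4_pushdef([x], [2])dnl
+#   m4_stack_foreach([x], [m4_incr])       => 23
+#   m4_stack_foreach_lifo([x], [m4_incr])  => 32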
+
+# m4_stack_foreach_sep(MACRO, [PRE], [POST], [SEP])
+# m4_stack_foreach_sep_lifo(MACRO, [PRE], [POST], [SEP])
+# ------------------------------------------------------
+# Similar to m4_stack_foreach and m4_stack_foreach_lifo, in that every
+# definition of a pushdef stack will be visited.  But rather than
+# passing the definition as a single argument to a macro, this variant
+# expands the concatenation of PRE[]definition[]POST, and expands SEP
+# between consecutive expansions.  Note that m4_stack_foreach([a], [b])
+# is equivalent to m4_stack_foreach_sep([a], [b(], [)]).
+m4_define([m4_stack_foreach_sep],
+[_m4_stack_reverse([$1], [m4_tmp-$1])]dnl
+[_m4_stack_reverse([m4_tmp-$1], [$1], [$2[]_m4_defn([m4_tmp-$1])$3], [$4[]])])
+
+m4_define([m4_stack_foreach_sep_lifo],
+[_m4_stack_reverse([$1], [m4_tmp-$1], [$2[]_m4_defn([m4_tmp-$1])$3], [$4[]])]dnl
+[_m4_stack_reverse([m4_tmp-$1], [$1])])
+
+
+# _m4_stack_reverse(OLD, NEW, [ACTION], [SEP])
+# --------------------------------------------
+# A recursive worker for pushdef stack manipulation.  Destructively
+# copy the OLD stack into the NEW, expanding ACTION for each
+# iteration.  After the first iteration, SEP is promoted to the front
+# of ACTION (note that SEP should include a trailing [] if it is to
+# avoid interfering with ACTION).  The current definition is examined
+# after the NEW has been pushed but before OLD has been popped; this
+# order is important, as ACTION is permitted to operate on either
+# _m4_defn([OLD]) or _m4_defn([NEW]).  Since the operation is
+# destructive, this macro is generally used twice, with a temporary
+# macro name holding the swapped copy.
+m4_define([_m4_stack_reverse],
+[m4_ifdef([$1], [m4_pushdef([$2],
+  _m4_defn([$1]))$3[]_m4_popdef([$1])$0([$1], [$2], [$4$3])])])
+
+
+
+## --------------------------- ##
+## 9. More diversion support.  ##
+## --------------------------- ##
+
+
+# m4_cleardivert(DIVERSION-NAME...)
+# ---------------------------------
+# Discard any text in DIVERSION-NAME.
+#
+# This works even inside m4_expand.
+m4_define([m4_cleardivert],
+[m4_if([$#], [0], [m4_fatal([$0: missing argument])],
+       [_m4_divert_raw([-1])m4_undivert($@)_m4_divert_raw(
+	 _m4_divert(_m4_defn([_m4_divert_diversion]), [-]))])])
+
+
+# _m4_divert(DIVERSION-NAME or NUMBER, [NOWARN])
+# ----------------------------------------------
+# If DIVERSION-NAME is the name of a diversion, return its number,
+# otherwise if it is a NUMBER return it.  Issue a warning about
+# the use of a number instead of a name, unless NOWARN is provided.
+m4_define([_m4_divert],
+[m4_ifdef([_m4_divert($1)],
+	  [m4_indir([_m4_divert($1)])],
+	  [m4_if([$2], [], [m4_warn([syntax],
+	     [prefer named diversions])])$1])])
+
+# KILL is only used to suppress output.
+m4_define([_m4_divert(KILL)],           -1)
+
+# The empty diversion name is a synonym for 0.
+m4_define([_m4_divert()],                0)
+
+
+# m4_divert_stack
+# ---------------
+# Print the diversion stack, if it's nonempty.  The caller is
+# responsible for any leading or trailing newline.
+m4_define([m4_divert_stack],
+[m4_stack_foreach_sep_lifo([_m4_divert_stack], [], [], [
+])])
+
+
+# m4_divert_stack_push(MACRO-NAME, DIVERSION-NAME)
+# ------------------------------------------------
+# Form a diversion-stack entry from the caller MACRO-NAME and the
+# DIVERSION-NAME being entered, and push it.
+m4_define([m4_divert_stack_push],
+[m4_pushdef([_m4_divert_stack], m4_location[: $1: $2])])
+
+
+# m4_divert(DIVERSION-NAME)
+# -------------------------
+# Change the diversion stream to DIVERSION-NAME.
+m4_define([m4_divert],
+[m4_popdef([_m4_divert_stack])]dnl
+[m4_define([_m4_divert_diversion], [$1])]dnl
+[m4_divert_stack_push([$0], [$1])]dnl
+[_m4_divert_raw(_m4_divert([$1]))])
+
+
+# m4_divert_push(DIVERSION-NAME, [NOWARN])
+# ----------------------------------------
+# Change the diversion stream to DIVERSION-NAME, while stacking old values.
+# For internal use only: if NOWARN is not empty, DIVERSION-NAME can be a
+# number instead of a name.
+m4_define([m4_divert_push],
+[m4_divert_stack_push([$0], [$1])]dnl
+[m4_pushdef([_m4_divert_diversion], [$1])]dnl
+[_m4_divert_raw(_m4_divert([$1], [$2]))])
+
+
+# m4_divert_pop([DIVERSION-NAME])
+# -------------------------------
+# Change the diversion stream to its previous value, unstacking it.
+# If specified, verify we left DIVERSION-NAME.
+# When we pop the last value from the stack, we divert to -1.
+m4_define([m4_divert_pop],
+[m4_if([$1], [], [],
+       [$1], _m4_defn([_m4_divert_diversion]), [],
+       [m4_fatal([$0($1): diversion mismatch:
+]m4_divert_stack)])]dnl
+[_m4_popdef([_m4_divert_stack], [_m4_divert_diversion])]dnl
+[m4_ifdef([_m4_divert_diversion], [],
+	   [m4_fatal([too many m4_divert_pop])])]dnl
+[_m4_divert_raw(_m4_divert(_m4_defn([_m4_divert_diversion]), [-]))])
+
+
+# m4_divert_text(DIVERSION-NAME, CONTENT)
+# ---------------------------------------
+# Output CONTENT into DIVERSION-NAME (which may be a number actually).
+# An end of line is appended for free to CONTENT.
+m4_define([m4_divert_text],
+[m4_divert_push([$1])$2
+m4_divert_pop([$1])])
+
+
+# m4_divert_once(DIVERSION-NAME, CONTENT)
+# ---------------------------------------
+# Output CONTENT into DIVERSION-NAME once, if not already there.
+# An end of line is appended for free to CONTENT.
+m4_define([m4_divert_once],
+[m4_expand_once([m4_divert_text([$1], [$2])])])
+
+
+# _m4_divert_unsafe(DIVERSION-NAME)
+# ---------------------------------
+# Issue a warning that the attempt to change the current diversion to
+# DIVERSION-NAME is unsafe, because this macro is being expanded
+# during argument collection of m4_expand.
+m4_define([_m4_divert_unsafe],
+[m4_fatal([$0: cannot change diversion to `$1' inside m4_expand])])
+
+
+# m4_undivert(DIVERSION-NAME...)
+# ------------------------------
+# Undivert DIVERSION-NAME.  Unlike the M4 version, this requires at
+# least one DIVERSION-NAME; also, due to support for named diversions,
+# this should not be used to undivert files.
+m4_define([m4_undivert],
+[m4_if([$#], [0], [m4_fatal([$0: missing argument])],
+       [$#], [1], [_m4_undivert(_m4_divert([$1]))],
+       [m4_map_args([$0], $@)])])
+
+
+## --------------------------------------------- ##
+## 10. Defining macros with bells and whistles.  ##
+## --------------------------------------------- ##
+
+# `m4_defun' is basically `m4_define' but it equips the macro with the
+# needed machinery for `m4_require'.  A macro must be m4_defun'd if
+# either it is m4_require'd, or it m4_require's.
+#
+# Two things deserve attention and are detailed below:
+#  1. Implementation of m4_require
+#  2. Keeping track of the expansion stack
+#
+# 1. Implementation of m4_require
+# ===============================
+#
+# Of course m4_defun calls m4_provide, so that a macro which has
+# been expanded is not expanded again when m4_require'd, but the
+# difficult part is the proper expansion of macros when they are
+# m4_require'd.
+#
+# The implementation is based on three ideas, (i) using diversions to
+# prepare the expansion of the macro and its dependencies (by François
+# Pinard), (ii) expand the most recently m4_require'd macros _after_
+# the previous macros (by Axel Thimm), and (iii) track instances of
+# provide before require (by Eric Blake).
+#
+#
+# The first idea: why use diversions?
+# -----------------------------------
+#
+# When a macro requires another, the other macro is expanded in new
+# diversion, GROW.  When the outer macro is fully expanded, we first
+# undivert the most nested diversions (GROW - 1...), and finally
+# undivert GROW.  To understand why we need several diversions,
+# consider the following example:
+#
+# | m4_defun([TEST1], [Test...m4_require([TEST2])1])
+# | m4_defun([TEST2], [Test...m4_require([TEST3])2])
+# | m4_defun([TEST3], [Test...3])
+#
+# Because m4_require is not required to be first in the outer macros, we
+# must keep the expansions of the various levels of m4_require separated.
+# Right before executing the epilogue of TEST1, we have:
+#
+#	   GROW - 2: Test...3
+#	   GROW - 1: Test...2
+#	   GROW:     Test...1
+#	   BODY:
+#
+# Finally the epilogue of TEST1 undiverts GROW - 2, GROW - 1, and
+# GROW into the regular flow, BODY.
+#
+#	   GROW - 2:
+#	   GROW - 1:
+#	   GROW:
+#	   BODY:        Test...3; Test...2; Test...1
+#
+# (The semicolons are here for clarification, but of course are not
+# emitted.)  This is what Autoconf 2.0 (I think) to 2.13 (I'm sure)
+# implement.
+#
+#
+# The second idea: first required first out
+# -----------------------------------------
+#
+# The natural implementation of the idea above is buggy and produces
+# very surprising results in some situations.  Let's consider the
+# following example to explain the bug:
+#
+# | m4_defun([TEST1],  [m4_require([TEST2a])m4_require([TEST2b])])
+# | m4_defun([TEST2a], [])
+# | m4_defun([TEST2b], [m4_require([TEST3])])
+# | m4_defun([TEST3],  [m4_require([TEST2a])])
+# |
+# | AC_INIT
+# | TEST1
+#
+# The dependencies between the macros are:
+#
+#		 3 --- 2b
+#		/        \              is m4_require'd by
+#	       /          \       left -------------------- right
+#	    2a ------------ 1
+#
+# If you strictly apply the rules given in the previous section you get:
+#
+#	   GROW - 2: TEST3
+#	   GROW - 1: TEST2a; TEST2b
+#	   GROW:     TEST1
+#	   BODY:
+#
+# (TEST2a, although required by TEST3, is not expanded in GROW - 3
+# because it has already been expanded before in GROW - 1, so it has
+# been AC_PROVIDE'd, so it is not expanded again) so when you undivert
+# the stack of diversions, you get:
+#
+#	   GROW - 2:
+#	   GROW - 1:
+#	   GROW:
+#	   BODY:        TEST3; TEST2a; TEST2b; TEST1
+#
+# i.e., TEST2a is emitted after TEST3 although the latter required the
+# former.
+#
+# Starting from 2.50, we use an implementation provided by Axel Thimm.
+# The idea is simple: the order in which macros are emitted must be the
+# same as the one in which macros are expanded.  (The bug above can
+# indeed be described as: a macro has been m4_provide'd before the
+# macro that requires it, but it is emitted after it; the lack of
+# correlation between emission and expansion order is to blame.)
+#
+# How to do that?  You keep the stack of diversions to elaborate the
+# macros, but each time a macro is fully expanded, emit it immediately.
+#
+# In the example above, when TEST2a is expanded, but its epilogue is
+# not run yet, you have:
+#
+#	   GROW - 2:
+#	   GROW - 1: TEST2a
+#	   GROW:     Elaboration of TEST1
+#	   BODY:
+#
+# The epilogue of TEST2a emits it immediately:
+#
+#	   GROW - 2:
+#	   GROW - 1:
+#	   GROW:     Elaboration of TEST1
+#	   BODY:     TEST2a
+#
+# TEST2b then requires TEST3, so right before the epilogue of TEST3, you
+# have:
+#
+#	   GROW - 2: TEST3
+#	   GROW - 1: Elaboration of TEST2b
+#	   GROW:     Elaboration of TEST1
+#	   BODY:      TEST2a
+#
+# The epilogue of TEST3 emits it:
+#
+#	   GROW - 2:
+#	   GROW - 1: Elaboration of TEST2b
+#	   GROW:     Elaboration of TEST1
+#	   BODY:     TEST2a; TEST3
+#
+# TEST2b is now completely expanded, and emitted:
+#
+#	   GROW - 2:
+#	   GROW - 1:
+#	   GROW:     Elaboration of TEST1
+#	   BODY:     TEST2a; TEST3; TEST2b
+#
+# and finally, TEST1 is finished and emitted:
+#
+#	   GROW - 2:
+#	   GROW - 1:
+#	   GROW:
+#	   BODY:     TEST2a; TEST3; TEST2b; TEST1
+#
+# The idea is simple, but the implementation is a bit involved.  If
+# you are like me, you will want to see the actual functioning of this
+# implementation to be convinced.  The next section gives the full
+# details.
+#
+#
+# The Axel Thimm implementation at work
+# -------------------------------------
+#
+# We consider the macros above, and this configure.ac:
+#
+#	    AC_INIT
+#	    TEST1
+#
+# You should keep the definitions of _m4_defun_pro, _m4_defun_epi, and
+# m4_require at hand to follow the steps.
+#
+# This implementation tries not to assume that the current diversion is
+# BODY, so as soon as a macro (m4_defun'd) is expanded, we first
+# record the current diversion under the name _m4_divert_dump (denoted
+# DUMP below for short).  This introduces an important difference with
+# the previous versions of Autoconf: you cannot use m4_require if you
+# are not inside an m4_defun'd macro, and especially, you cannot
+# m4_require directly from the top level.
+#
+# We have not tried to simulate the old behavior (better yet, we
+# diagnose it), because it is too dangerous: a macro m4_require'd from
+# the top level is expanded before the body of `configure', i.e., before
+# any other test is run.  I let you imagine the result of requiring
+# AC_STDC_HEADERS for instance, before AC_PROG_CC was actually run....
+#
+# After AC_INIT was run, the current diversion is BODY.
+# * AC_INIT was run
+#   DUMP:                undefined
+#   diversion stack:     BODY |-
+#
+# * TEST1 is expanded
+# The prologue of TEST1 sets _m4_divert_dump, which is the diversion
+# where the current elaboration will be dumped, to the current
+# diversion.  It also m4_divert_push to GROW, where the full
+# expansion of TEST1 and its dependencies will be elaborated.
+#   DUMP:        BODY
+#   BODY:        empty
+#   diversions:  GROW, BODY |-
+#
+# * TEST1 requires TEST2a
+# _m4_require_call m4_divert_pushes another temporary diversion,
+# GROW - 1, and expands TEST2a in there.
+#   DUMP:        BODY
+#   BODY:        empty
+#   GROW - 1:    TEST2a
+#   diversions:  GROW - 1, GROW, BODY |-
+# Then the content of the temporary diversion is moved to DUMP and the
+# temporary diversion is popped.
+#   DUMP:        BODY
+#   BODY:        TEST2a
+#   diversions:  GROW, BODY |-
+#
+# * TEST1 requires TEST2b
+# Again, _m4_require_call pushes GROW - 1 and heads to expand TEST2b.
+#   DUMP:        BODY
+#   BODY:        TEST2a
+#   diversions:  GROW - 1, GROW, BODY |-
+#
+# * TEST2b requires TEST3
+# _m4_require_call pushes GROW - 2 and expands TEST3 here.
+# (TEST3 requires TEST2a, but TEST2a has already been m4_provide'd, so
+# nothing happens.)
+#   DUMP:        BODY
+#   BODY:        TEST2a
+#   GROW - 2:    TEST3
+#   diversions:  GROW - 2, GROW - 1, GROW, BODY |-
+# Then the diversion is appended to DUMP, and popped.
+#   DUMP:        BODY
+#   BODY:        TEST2a; TEST3
+#   diversions:  GROW - 1, GROW, BODY |-
+#
+# * TEST1 requires TEST2b (contd.)
+# The content of TEST2b is expanded...
+#   DUMP:        BODY
+#   BODY:        TEST2a; TEST3
+#   GROW - 1:    TEST2b,
+#   diversions:  GROW - 1, GROW, BODY |-
+# ... and moved to DUMP.
+#   DUMP:        BODY
+#   BODY:        TEST2a; TEST3; TEST2b
+#   diversions:  GROW, BODY |-
+#
+# * TEST1 is expanded: epilogue
+# TEST1's own content is in GROW...
+#   DUMP:        BODY
+#   BODY:        TEST2a; TEST3; TEST2b
+#   GROW:        TEST1
+#   diversions:  BODY |-
+# ... and its epilogue moves it to DUMP and then undefines DUMP.
+#   DUMP:       undefined
+#   BODY:       TEST2a; TEST3; TEST2b; TEST1
+#   diversions: BODY |-
+#
+#
+# The third idea: track macros provided before they were required
+# ---------------------------------------------------------------
+#
+# Using just the first two ideas, Autoconf 2.50 through 2.63 still had
+# a subtle bug for more than seven years.  Let's consider the
+# following example to explain the bug:
+#
+# | m4_defun([TEST1], [1])
+# | m4_defun([TEST2], [2[]m4_require([TEST1])])
+# | m4_defun([TEST3], [3 TEST1 m4_require([TEST2])])
+# | TEST3
+#
+# After the prologue of TEST3, we are collecting text in GROW with the
+# intent of dumping it in BODY during the epilogue.  Next, we
+# encounter the direct invocation of TEST1, which provides the macro
+# in place in GROW.  From there, we encounter a requirement for TEST2,
+# which must be collected in a new diversion.  While expanding TEST2,
+# we encounter a requirement for TEST1, but since it has already been
+# expanded, the Axel Thimm algorithm states that we can treat it as a
+# no-op.  But that would lead to an end result of `2 3 1', meaning
+# that we have once again output a macro (TEST2) prior to its
+# requirements (TEST1).
+#
+# The problem can only occur if a single defun'd macro first provides,
+# then later indirectly requires, the same macro.  Note that directly
+# expanding then requiring a macro is okay: because the dependency was
+# met, the require phase can be a no-op.  For that matter, the outer
+# macro can even require two helpers, where the first helper expands
+# the macro, and the second helper indirectly requires the macro.
+# Out-of-order expansion is only present if the inner macro is
+# required by something that will be hoisted in front of where the
+# direct expansion occurred.  In other words, we must be careful not
+# to warn on:
+#
+# | m4_defun([TEST4], [4])
+# | m4_defun([TEST5], [5 TEST4 m4_require([TEST4])])
+# | TEST5 => 5 4
+#
+# or even the more complex:
+#
+# | m4_defun([TEST6], [6])
+# | m4_defun([TEST7], [7 TEST6])
+# | m4_defun([TEST8], [8 m4_require([TEST6])])
+# | m4_defun([TEST9], [9 m4_require([TEST8])])
+# | m4_defun([TEST10], [10 m4_require([TEST7]) m4_require([TEST9])])
+# | TEST10 => 7 6 8 9 10
+#
+# So, to detect whether a require was direct or indirect, m4_defun and
+# m4_require track the name of the macro that caused a diversion to be
+# created (using the stack _m4_diverting, coupled with an O(1) lookup
+# _m4_diverting([NAME])), and m4_provide stores the name associated
+# with the diversion at which a macro was provided.  A require call is
+# direct if it occurs within the same diversion where the macro was
+# provided, or if the diversion associated with the providing context
+# has been collected.
+#
+# The implementation of the warning involves tracking the set of
+# macros which have been provided since the start of the outermost
+# defun'd macro (the set is named _m4_provide).  When starting an
+# outermost macro, the set is emptied; when a macro is provided, it is
+# added to the set; when require expands the body of a macro, it is
+# removed from the set; and when a macro is indirectly required, the
+# set is checked.  If a macro is in the set, then it has been provided
+# before it was required, and we satisfy dependencies by expanding the
+# macro as if it had never been provided; in the example given above,
+# this means we now output `1 2 3 1'.  Meanwhile, a warning is issued
+# to inform the user that her macros trigger the bug in older autoconf
+# versions, and that her output file now contains redundant contents
+# (and possibly new problems, if the repeated macro was not
+# idempotent).  Meanwhile, macros defined by m4_defun_once instead of
+# m4_defun are idempotent, avoiding any warning or duplicate output.
+#
+#
+# 2. Keeping track of the expansion stack
+# =======================================
+#
+# When M4 expansion goes wrong, it is often extremely hard to find the
+# path through the macros that led to the failure.  What is needed is
+# the stack of macro `calls'.  One could imagine that GNU M4 would
+# maintain such a stack of macro expansions; unfortunately it doesn't,
+# so we do it by hand.  This is of course extremely costly, but the
+# help this stack provides is worth it.  Nevertheless, to limit the
+# performance penalty, this is implemented only for m4_defun'd macros,
+# not for define'd macros.
+#
+# Each time we enter an m4_defun'd macro, we add a definition in
+# _m4_expansion_stack, and when we exit the macro, we remove it (thanks
+# to pushdef/popdef).  m4_stack_foreach is used to print the expansion
+# stack in the rare cases when it's needed.
+#
+# In addition, we want to detect circular m4_require dependencies.
+# Each time we expand a macro FOO we define _m4_expanding(FOO); and
+# m4_require(BAR) simply checks whether _m4_expanding(BAR) is defined.
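+#
+# For instance (hypothetical macro names), a dependency cycle such as
+#
+#   m4_defun([MY_A], [m4_require([MY_B])])
+#   m4_defun([MY_B], [m4_require([MY_A])])
+#   MY_A
+#
+# should be rejected with an error along the lines of
+# `m4_require: circular dependency of MY_A'.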
+
+
+# m4_expansion_stack
+# ------------------
+# Expands to the entire contents of the expansion stack.  The caller
+# must supply a trailing newline.  This macro always prints a
+# location; check whether _m4_expansion_stack is defined to filter out
+# the case when no defun'd macro is in force.
+m4_define([m4_expansion_stack],
+[m4_stack_foreach_sep_lifo([_$0], [_$0_entry(], [)
+])m4_location[: the top level]])
+
+# _m4_expansion_stack_entry(MACRO)
+# --------------------------------
+# Format an entry for MACRO found on the expansion stack.
+m4_define([_m4_expansion_stack_entry],
+[_m4_defn([m4_location($1)])[: $1 is expanded from...]])
+
+# m4_expansion_stack_push(MACRO)
+# ------------------------------
+# Form an entry of the expansion stack on entry to MACRO and push it.
+m4_define([m4_expansion_stack_push],
+[m4_pushdef([_m4_expansion_stack], [$1])])
+
+
+# _m4_divert(GROW)
+# ----------------
+# This diversion is used by the m4_defun/m4_require machinery.  It is
+# important to keep room before GROW because for each nested
+# AC_REQUIRE we use an additional diversion (i.e., two m4_require's
+# will use GROW - 2; more than 3 levels have never seemed to be
+# needed).
+#
+# ...
+# - GROW - 2
+#   m4_require'd code, 2 level deep
+# - GROW - 1
+#   m4_require'd code, 1 level deep
+# - GROW
+#   m4_defun'd macros are elaborated here.
+
+m4_define([_m4_divert(GROW)],       10000)
+
+
+# _m4_defun_pro(MACRO-NAME)
+# -------------------------
+# The prologue for Autoconf macros.
+#
+# This is called frequently, so minimize the number of macro invocations
+# by avoiding dnl and m4_defn overhead.
+m4_define([_m4_defun_pro],
+[m4_ifdef([_m4_expansion_stack], [], [_m4_defun_pro_outer([$1])])]dnl
+[m4_expansion_stack_push([$1])m4_pushdef([_m4_expanding($1)])])
+
+m4_define([_m4_defun_pro_outer],
+[m4_set_delete([_m4_provide])]dnl
+[m4_pushdef([_m4_diverting([$1])])m4_pushdef([_m4_diverting], [$1])]dnl
+[m4_pushdef([_m4_divert_dump], m4_divnum)m4_divert_push([GROW])])
+
+# _m4_defun_epi(MACRO-NAME)
+# -------------------------
+# The Epilogue for Autoconf macros.  MACRO-NAME only helps tracing
+# the PRO/EPI pairs.
+#
+# This is called frequently, so minimize the number of macro invocations
+# by avoiding dnl and m4_popdef overhead.
+m4_define([_m4_defun_epi],
+[_m4_popdef([_m4_expanding($1)], [_m4_expansion_stack])]dnl
+[m4_ifdef([_m4_expansion_stack], [], [_m4_defun_epi_outer([$1])])]dnl
+[m4_provide([$1])])
+
+m4_define([_m4_defun_epi_outer],
+[_m4_popdef([_m4_divert_dump], [_m4_diverting([$1])], [_m4_diverting])]dnl
+[m4_divert_pop([GROW])m4_undivert([GROW])])
+
+
+# _m4_divert_dump
+# ---------------
+# If blank, we are outside of any defun'd macro.  Otherwise, expands
+# to the diversion number (not name) where require'd macros should be
+# moved once completed.
+m4_define([_m4_divert_dump])
+
+
+# m4_divert_require(DIVERSION, NAME-TO-CHECK, [BODY-TO-EXPAND])
+# -------------------------------------------------------------
+# Same as m4_require, but BODY-TO-EXPAND goes into the named DIVERSION;
+# requirements still go in the current diversion though.
+#
+m4_define([m4_divert_require],
+[m4_ifdef([_m4_expanding($2)],
+  [m4_fatal([$0: circular dependency of $2])])]dnl
+[m4_if(_m4_divert_dump, [],
+  [m4_fatal([$0($2): cannot be used outside of an m4_defun'd macro])])]dnl
+[m4_provide_if([$2], [],
+  [_m4_require_call([$2], [$3], _m4_divert([$1], [-]))])])
+
+
+# m4_defun(NAME, EXPANSION, [MACRO = m4_define])
+# ----------------------------------------------
+# Define a macro NAME which automatically provides itself.  Add
+# machinery so the macro automatically switches expansion to the
+# diversion stack if it is not already using it, prior to EXPANSION.
+# In this case, once finished, it will bring back all the code
+# accumulated in the diversion stack.  This, combined with m4_require,
+# achieves the topological ordering of macros.  We don't use this
+# macro to define some frequently called macros that are not involved
+# in ordering constraints, to save m4 processing.
+#
+# MACRO is an undocumented argument; when set to m4_pushdef, and NAME
+# is already defined, the new definition is added to the pushdef
+# stack, rather than overwriting the current definition.  It can thus
+# be used to write self-modifying macros, which pop themselves to a
+# previously m4_define'd definition so that subsequent use of the
+# macro is faster.
+m4_define([m4_defun],
+[m4_define([m4_location($1)], m4_location)]dnl
+[m4_default([$3], [m4_define])([$1],
+  [_m4_defun_pro(]m4_dquote($[0])[)$2[]_m4_defun_epi(]m4_dquote($[0])[)])])
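+
+# As an informal illustration (hypothetical macro names), a required
+# macro is hoisted in front of the requiring macro's own expansion:
+#
+#   m4_defun([MY_DEP], [dep-code])
+#   m4_defun([MY_MAIN], [m4_require([MY_DEP])main-code])
+#   MY_MAIN
+#   => dep-code
+#   => main-code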
+
+
+# m4_defun_init(NAME, INIT, COMMON)
+# ---------------------------------
+# Like m4_defun, but split EXPANSION into two portions: INIT which is
+# done only the first time NAME is invoked, and COMMON which is
+# expanded every time.
+#
+# For now, the COMMON definition is always m4_define'd, giving an even
+# lighter-weight definition.  m4_defun allows self-providing, but once
+# a macro is provided, m4_require no longer cares if it is m4_define'd
+# or m4_defun'd.  m4_defun also provides location tracking to identify
+# dependency bugs, but once the INIT has been expanded, we know there
+# are no dependency bugs.  However, if a future use needs COMMON to be
+# m4_defun'd, we can add a parameter, similar to the third parameter
+# to m4_defun.
+m4_define([m4_defun_init],
+[m4_define([$1], [$3[]])m4_defun([$1],
+   [$2[]_m4_popdef(]m4_dquote($[0])[)m4_indir(]m4_dquote($[0])dnl
+[m4_if(]m4_dquote($[#])[, [0], [], ]m4_dquote([,$]@)[))], [m4_pushdef])])
+
+
+# m4_defun_once(NAME, EXPANSION)
+# ------------------------------
+# Like m4_defun, but guarantee that EXPANSION only happens once
+# (thereafter, using NAME is a no-op).
+#
+# If _m4_divert_dump is empty, we are called at the top level;
+# otherwise, we must ensure that we are required in front of the
+# current defun'd macro.  Use a helper macro so that EXPANSION need
+# only occur once in the definition of NAME, since it might be large.
+m4_define([m4_defun_once],
+[m4_define([m4_location($1)], m4_location)]dnl
+[m4_define([$1], [_m4_defun_once([$1], [$2], m4_if(_m4_divert_dump, [],
+  [[_m4_defun_pro([$1])m4_unquote(], [)_m4_defun_epi([$1])]],
+m4_ifdef([_m4_diverting([$1])], [-]), [-], [[m4_unquote(], [)]],
+  [[_m4_require_call([$1],], [, _m4_divert_dump)]]))])])
+
+m4_define([_m4_defun_once],
+[m4_pushdef([$1])$3[$2[]m4_provide([$1])]$4])
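+
+# A brief sketch (hypothetical macro name): only the first use produces
+# any output, subsequent uses expand to nothing.
+#
+#   m4_defun_once([MY_NOTICE], [notice-text])
+#   MY_NOTICE
+#   MY_NOTICE
+#   => notice-text
+#   =>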
+
+
+# m4_pattern_forbid(ERE, [WHY])
+# -----------------------------
+# Declare that no token matching the forbidden extended regular
+# expression ERE should be seen in the output unless...
+m4_define([m4_pattern_forbid], [])
+
+
+# m4_pattern_allow(ERE)
+# ---------------------
+# ... that token also matches the allowed extended regular expression ERE.
+# Both used via traces.
+m4_define([m4_pattern_allow], [])
+
+
+## --------------------------------- ##
+## 11. Dependencies between macros.  ##
+## --------------------------------- ##
+
+
+# m4_before(THIS-MACRO-NAME, CALLED-MACRO-NAME)
+# ---------------------------------------------
+# Issue a warning if CALLED-MACRO-NAME was called before THIS-MACRO-NAME.
+m4_define([m4_before],
+[m4_provide_if([$2],
+	       [m4_warn([syntax], [$2 was called before $1])])])
+
+
+# m4_require(NAME-TO-CHECK, [BODY-TO-EXPAND = NAME-TO-CHECK])
+# -----------------------------------------------------------
+# If NAME-TO-CHECK has never been expanded (actually, if it is not
+# m4_provide'd), expand BODY-TO-EXPAND *before* the current macro
+# expansion; follow the expansion with a newline.  Once expanded, emit
+# it in _m4_divert_dump.  Keep track of the m4_require chain in
+# _m4_expansion_stack.
+#
+# The normal cases are:
+#
+# - NAME-TO-CHECK == BODY-TO-EXPAND
+#   Which you can use for regular macros with or without arguments, e.g.,
+#     m4_require([AC_PROG_CC], [AC_PROG_CC])
+#     m4_require([AC_CHECK_HEADERS(limits.h)], [AC_CHECK_HEADERS(limits.h)])
+#   which is just the same as
+#     m4_require([AC_PROG_CC])
+#     m4_require([AC_CHECK_HEADERS(limits.h)])
+#
+# - BODY-TO-EXPAND == m4_indir([NAME-TO-CHECK])
+#   In the case of macros with irregular names.  For instance:
+#     m4_require([AC_LANG_COMPILER(C)], [m4_indir([AC_LANG_COMPILER(C)])])
+#   which means `if the macro named `AC_LANG_COMPILER(C)' (the parens are
+#   part of the name, it is not an argument) has not been run, then
+#   call it.'
+#   Had you used
+#     m4_require([AC_LANG_COMPILER(C)], [AC_LANG_COMPILER(C)])
+#   then m4_require would have tried to expand `AC_LANG_COMPILER(C)', i.e.,
+#   call the macro `AC_LANG_COMPILER' with `C' as argument.
+#
+#   You could argue that `AC_LANG_COMPILER', when it receives an argument
+#   such as `C' should dispatch the call to `AC_LANG_COMPILER(C)'.  But this
+#   `extension' prevents `AC_LANG_COMPILER' from having actual arguments that
+#   it passes to `AC_LANG_COMPILER(C)'.
+#
+# This is called frequently, so minimize the number of macro invocations
+# by avoiding dnl and other overhead on the common path.
+m4_define([m4_require],
+[m4_ifdef([_m4_expanding($1)],
+  [m4_fatal([$0: circular dependency of $1])])]dnl
+[m4_if(_m4_divert_dump, [],
+  [m4_fatal([$0($1): cannot be used outside of an ]dnl
+m4_if([$0], [m4_require], [[m4_defun]], [[AC_DEFUN]])['d macro])])]dnl
+[m4_provide_if([$1], [m4_set_contains([_m4_provide], [$1],
+    [_m4_require_check([$1], _m4_defn([m4_provide($1)]), [$0])], [m4_ignore])],
+  [_m4_require_call])([$1], [$2], _m4_divert_dump)])
+
+
+# _m4_require_call(NAME-TO-CHECK, [BODY-TO-EXPAND = NAME-TO-CHECK],
+#                  DIVERSION-NUMBER)
+# -----------------------------------------------------------------
+# If m4_require decides to expand the body, it calls this macro.  The
+# expansion is placed in DIVERSION-NUMBER.
+#
+# This is called frequently, so minimize the number of macro invocations
+# by avoiding dnl and other overhead on the common path.
+m4_define([_m4_require_call],
+[m4_pushdef([_m4_divert_grow], m4_decr(_m4_divert_grow))]dnl
+[m4_pushdef([_m4_diverting([$1])])m4_pushdef([_m4_diverting], [$1])]dnl
+[m4_divert_push(_m4_divert_grow, [-])]dnl
+[m4_if([$2], [], [$1], [$2])
+m4_provide_if([$1], [m4_set_remove([_m4_provide], [$1])],
+  [m4_warn([syntax], [$1 is m4_require'd but not m4_defun'd])])]dnl
+[_m4_divert_raw($3)_m4_undivert(_m4_divert_grow)]dnl
+[m4_divert_pop(_m4_divert_grow)_m4_popdef([_m4_divert_grow],
+[_m4_diverting([$1])], [_m4_diverting])])
+
+
+# _m4_require_check(NAME-TO-CHECK, OWNER, CALLER)
+# -----------------------------------------------
+# NAME-TO-CHECK has been identified as previously expanded in the
+# diversion owned by OWNER.  If this is a problem, warn on behalf of
+# CALLER and return _m4_require_call; otherwise return m4_ignore.
+m4_define([_m4_require_check],
+[m4_if(_m4_defn([_m4_diverting]), [$2], [m4_ignore],
+       m4_ifdef([_m4_diverting([$2])], [-]), [-], [m4_warn([syntax],
+   [$3: `$1' was expanded before it was required])_m4_require_call],
+       [m4_ignore])])
+
+
+# _m4_divert_grow
+# ---------------
+# The counter for _m4_require_call.
+m4_define([_m4_divert_grow], _m4_divert([GROW]))
+
+
+# m4_expand_once(TEXT, [WITNESS = TEXT])
+# --------------------------------------
+# If TEXT has never been expanded, expand it *here*.  Use WITNESS as
+# a reminder that TEXT has already been expanded.
+m4_define([m4_expand_once],
+[m4_provide_if(m4_default_quoted([$2], [$1]),
+	       [],
+	       [m4_provide(m4_default_quoted([$2], [$1]))[]$1])])
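+
+# For example (hypothetical macro name), only the first expansion
+# produces output:
+#
+#   m4_define([my_notice], [notice ])
+#   m4_expand_once([my_notice])m4_expand_once([my_notice])
+#   => notice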
+
+
+# m4_provide(MACRO-NAME)
+# ----------------------
+m4_define([m4_provide],
+[m4_ifdef([m4_provide($1)], [],
+[m4_set_add([_m4_provide], [$1], [m4_define([m4_provide($1)],
+  m4_ifdef([_m4_diverting], [_m4_defn([_m4_diverting])]))])])])
+
+
+# m4_provide_if(MACRO-NAME, IF-PROVIDED, IF-NOT-PROVIDED)
+# -------------------------------------------------------
+# If MACRO-NAME is provided do IF-PROVIDED, else IF-NOT-PROVIDED.
+# The purpose of this macro is to provide the user with a means to
+# check macros which are provided without letting her know how the
+# information is coded.
+m4_define([m4_provide_if],
+[m4_ifdef([m4_provide($1)],
+	  [$2], [$3])])
+
+
+## --------------------- ##
+## 12. Text processing.  ##
+## --------------------- ##
+
+
+# m4_cr_letters
+# m4_cr_LETTERS
+# m4_cr_Letters
+# -------------
+m4_define([m4_cr_letters], [abcdefghijklmnopqrstuvwxyz])
+m4_define([m4_cr_LETTERS], [ABCDEFGHIJKLMNOPQRSTUVWXYZ])
+m4_define([m4_cr_Letters],
+m4_defn([m4_cr_letters])dnl
+m4_defn([m4_cr_LETTERS])dnl
+)
+
+
+# m4_cr_digits
+# ------------
+m4_define([m4_cr_digits], [0123456789])
+
+
+# m4_cr_alnum
+# -----------
+m4_define([m4_cr_alnum],
+m4_defn([m4_cr_Letters])dnl
+m4_defn([m4_cr_digits])dnl
+)
+
+
+# m4_cr_symbols1
+# m4_cr_symbols2
+# --------------
+m4_define([m4_cr_symbols1],
+m4_defn([m4_cr_Letters])dnl
+_)
+
+m4_define([m4_cr_symbols2],
+m4_defn([m4_cr_symbols1])dnl
+m4_defn([m4_cr_digits])dnl
+)
+
+# m4_cr_all
+# ---------
+# The character range representing everything, with `-' as the last
+# character, since it is special to m4_translit.  Use with care, because
+# it contains characters special to M4 (fortunately, both ASCII and EBCDIC
+# have [] in order, so m4_defn([m4_cr_all]) remains a valid string).  It
+# also contains characters special to terminals, so it should never be
+# displayed in an error message.  Also, attempts to map [ and ] to other
+# characters via m4_translit must deal with the fact that m4_translit does
+# not add quotes to the output.
+#
+# In EBCDIC, $ is immediately followed by *, which leads to problems
+# if m4_cr_all is inlined into a macro definition; so swap them.
+#
+# It is mainly useful in generating inverted character range maps, for use
+# in places where m4_translit is faster than an equivalent m4_bpatsubst;
+# the regex `[^a-z]' is equivalent to:
+#  m4_translit(m4_dquote(m4_defn([m4_cr_all])), [a-z])
+m4_define([m4_cr_all],
+m4_translit(m4_dquote(m4_format(m4_dquote(m4_for(
+  ,1,255,,[[%c]]))m4_for([i],1,255,,[,i]))), [$*-], [*$])-)
+
+
+# _m4_define_cr_not(CATEGORY)
+# ---------------------------
+# Define m4_cr_not_CATEGORY as the inverse of m4_cr_CATEGORY.
+m4_define([_m4_define_cr_not],
+[m4_define([m4_cr_not_$1],
+	   m4_translit(m4_dquote(m4_defn([m4_cr_all])),
+		       m4_defn([m4_cr_$1])))])
+
+
+# m4_cr_not_letters
+# m4_cr_not_LETTERS
+# m4_cr_not_Letters
+# m4_cr_not_digits
+# m4_cr_not_alnum
+# m4_cr_not_symbols1
+# m4_cr_not_symbols2
+# ------------------
+# Inverse character sets
+_m4_define_cr_not([letters])
+_m4_define_cr_not([LETTERS])
+_m4_define_cr_not([Letters])
+_m4_define_cr_not([digits])
+_m4_define_cr_not([alnum])
+_m4_define_cr_not([symbols1])
+_m4_define_cr_not([symbols2])
+
+
+# m4_newline([STRING])
+# --------------------
+# Expands to a newline, possibly followed by STRING.  Exists mostly for
+# formatting reasons.
+m4_define([m4_newline], [
+$1])
+
+
+# m4_re_escape(STRING)
+# --------------------
+# Escape RE active characters in STRING.
+m4_define([m4_re_escape],
+[m4_bpatsubst([$1],
+	      [[][*+.?\^$]], [\\\&])])
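+
+# For example:
+#   m4_re_escape([a.b*c]) => a\.b\*c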
+
+
+# m4_re_string
+# ------------
+# Regexp for `[a-zA-Z_0-9]*'
+# m4_dquote provides literal [] for the character class.
+m4_define([m4_re_string],
+m4_dquote(m4_defn([m4_cr_symbols2]))dnl
+[*]dnl
+)
+
+
+# m4_re_word
+# ----------
+# Regexp for `[a-zA-Z_][a-zA-Z_0-9]*'
+m4_define([m4_re_word],
+m4_dquote(m4_defn([m4_cr_symbols1]))dnl
+m4_defn([m4_re_string])dnl
+)
+
+
+# m4_tolower(STRING)
+# m4_toupper(STRING)
+# ------------------
+# These macros convert STRING to lowercase or uppercase.
+#
+# Rather than expand the m4_defn each time, we inline them up front.
+m4_define([m4_tolower],
+[m4_translit([[$1]], ]m4_dquote(m4_defn([m4_cr_LETTERS]))[,
+		     ]m4_dquote(m4_defn([m4_cr_letters]))[)])
+m4_define([m4_toupper],
+[m4_translit([[$1]], ]m4_dquote(m4_defn([m4_cr_letters]))[,
+		     ]m4_dquote(m4_defn([m4_cr_LETTERS]))[)])
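+
+# For example:
+#   m4_toupper([gnu m4]) => GNU M4
+#   m4_tolower([GNU M4]) => gnu m4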
+
+
+# m4_split(STRING, [REGEXP])
+# --------------------------
+# Split STRING into an m4 list of quoted elements.  The elements are
+# quoted with [ and ].  Beginning spaces and end spaces *are kept*.
+# Use m4_strip to remove them.
+#
+# REGEXP specifies where to split.  Default is [\t ]+.
+#
+# If STRING is empty, the result is an empty list.
+#
+# Pay attention to the m4_changequotes.  When m4 reads the definition of
+# m4_split, it still has quotes set to [ and ].  Luckily, these are matched
+# in the macro body, so the definition is stored correctly.  Use the same
+# alternate quotes as m4_noquote; it must be unlikely to appear in $1.
+#
+# Also, notice that $1 is quoted twice, since we want the result to
+# be quoted.  Then you should understand that the argument of
+# m4_bpatsubst is -=<{(STRING)}>=- (i.e., with additional -=<{( and )}>=-).
+#
+# This macro is safe on active symbols, i.e.:
+#   m4_define(active, ACTIVE)
+#   m4_split([active active ])end
+#   => [active], [active], []end
+#
+# Optimize on regex of ` ' (space), since m4_foreach_w already guarantees
+# that the list contains single space separators, and a common case is
+# splitting a single-element list.  This macro is called frequently,
+# so avoid unnecessary dnl inside the definition.
+m4_define([m4_split],
+[m4_if([$1], [], [],
+       [$2], [ ], [m4_if(m4_index([$1], [ ]), [-1], [[[$1]]],
+			 [_$0([$1], [$2], [, ])])],
+       [$2], [], [_$0([$1], [[	 ]+], [, ])],
+       [_$0([$1], [$2], [, ])])])
+
+m4_define([_m4_split],
+[m4_changequote([-=<{(],[)}>=-])]dnl
+[[m4_bpatsubst(-=<{(-=<{($1)}>=-)}>=-, -=<{($2)}>=-,
+	       -=<{(]$3[)}>=-)]m4_changequote([, ])])
+
+
+# m4_chomp(STRING)
+# m4_chomp_all(STRING)
+# --------------------
+# Return STRING quoted, but without a trailing newline.  m4_chomp
+# removes at most one newline, while m4_chomp_all removes all
+# consecutive trailing newlines.  Embedded newlines are not touched,
+# and a trailing backslash-newline leaves just a trailing backslash.
+#
+# m4_bregexp is slower than m4_index, and we don't always want to
+# remove all newlines; hence the two variants.  We massage characters
+# to give a nicer pattern to match, particularly since m4_bregexp is
+# line-oriented.  Both versions must guarantee a match, to avoid bugs
+# with precision -1 in m4_format in older m4.
+m4_define([m4_chomp],
+[m4_format([[%.*s]], m4_index(m4_translit([[$1]], [
+/.], [/  ])[./.], [/.]), [$1])])
+
+m4_define([m4_chomp_all],
+[m4_format([[%.*s]], m4_bregexp(m4_translit([[$1]], [
+/], [/ ]), [/*$]), [$1])])
+
+
+# m4_flatten(STRING)
+# ------------------
+# If STRING contains end of lines, replace them with spaces.  If there
+# are backslashed end of lines, remove them.  This macro is safe with
+# active symbols.
+#    m4_define(active, ACTIVE)
+#    m4_flatten([active
+#    act\
+#    ive])end
+#    => active activeend
+#
+# In m4, m4_bpatsubst is expensive, so first check for a newline.
+m4_define([m4_flatten],
+[m4_if(m4_index([$1], [
+]), [-1], [[$1]],
+       [m4_translit(m4_bpatsubst([[[$1]]], [\\
+]), [
+], [ ])])])
+
+
+# m4_strip(STRING)
+# ----------------
+# Expands into STRING with runs of tabs and spaces collapsed into a
+# single space, and with leading and trailing spaces removed.
+#
+# This macro is robust to active symbols.
+#    m4_define(active, ACTIVE)
+#    m4_strip([  active <tab> <tab>active ])end
+#    => active activeend
+#
+# First, notice that we guarantee trailing space.  Why?  Because regular
+# expressions are greedy, and `.* ?' would always group the space into the
+# .* portion.  The algorithm is simpler by avoiding `?' at the end.  The
+# algorithm correctly strips everything if STRING is just ` '.
+#
+# Then notice the second pattern: it is in charge of removing the
+# leading/trailing spaces.  Why not just `[^ ]'?  Because they are
+# applied to over-quoted strings, i.e. more or less [STRING], due
+# to the limitations of m4_bpatsubsts.  So the leading space in STRING
+# is the *second* character; equally for the trailing space.
+m4_define([m4_strip],
+[m4_bpatsubsts([$1 ],
+	       [[	 ]+], [ ],
+	       [^. ?\(.*\) .$], [[[\1]]])])
+
+
+# m4_normalize(STRING)
+# --------------------
+# Apply m4_flatten and m4_strip to STRING.
+#
+# The argument is quoted, so that the macro is robust to active symbols:
+#
+#    m4_define(active, ACTIVE)
+#    m4_normalize([  act\
+#    ive
+#    active ])end
+#    => active activeend
+
+m4_define([m4_normalize],
+[m4_strip(m4_flatten([$1]))])
+
+
+
+# m4_join(SEP, ARG1, ARG2...)
+# ---------------------------
+# Produce ARG1SEPARG2...SEPARGn.  Avoid back-to-back SEP when a given ARG
+# is the empty string.  No expansion is performed on SEP or ARGs.
+#
+# Since the number of arguments to join can be arbitrarily long, we
+# want to avoid having more than one $@ in the macro definition;
+# otherwise, the expansion would require twice the memory of the already
+# long list.  Hence, m4_join merely looks for the first non-empty element,
+# and outputs just that element; while _m4_join looks for all non-empty
+# elements, and outputs them following a separator.  The final trick to
+# note is that we decide between recursing with $0 or _$0 based on the
+# nested m4_if ending with `_'.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_join],
+[m4_if([$#], [1], [],
+       [$#], [2], [[$2]],
+       [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift2($@))])])
+m4_define([_m4_join],
+[m4_if([$#$2], [2], [],
+       [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift2($@))])])
+
+# m4_joinall(SEP, ARG1, ARG2...)
+# ------------------------------
+# Produce ARG1SEPARG2...SEPARGn.  An empty ARG results in back-to-back SEP.
+# No expansion is performed on SEP or ARGs.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_joinall], [[$2]_$0([$1], m4_shift($@))])
+m4_define([_m4_joinall],
+[m4_if([$#], [2], [], [[$1$3]$0([$1], m4_shift2($@))])])
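+
+# For example, note how the two macros treat empty arguments:
+#   m4_join([-], [a], [], [b])    => a-b
+#   m4_joinall([-], [a], [], [b]) => a--b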
+
+# m4_combine([SEPARATOR], PREFIX-LIST, [INFIX], SUFFIX...)
+# --------------------------------------------------------
+# Produce the pairwise combination of every element in the quoted,
+# comma-separated PREFIX-LIST with every element from the SUFFIX arguments.
+# Each pair is joined with INFIX, and pairs are separated by SEPARATOR.
+# No expansion occurs on SEPARATOR, INFIX, or elements of either list.
+#
+# For example:
+#   m4_combine([, ], [[a], [b], [c]], [-], [1], [2], [3])
+#   => a-1, a-2, a-3, b-1, b-2, b-3, c-1, c-2, c-3
+#
+# This definition is a bit hairy; the thing to realize is that we want
+# to construct m4_map_args_sep([[prefix$3]], [], [[$1]], m4_shift3($@))
+# as the inner loop, using each prefix generated by the outer loop,
+# and without recalculating m4_shift3 every outer iteration.
+m4_define([m4_combine],
+[m4_if([$2], [], [], m4_eval([$# > 3]), [1],
+[m4_map_args_sep([m4_map_args_sep(m4_dquote(], [)[[$3]], [], [[$1]],]]]dnl
+[m4_dquote(m4_dquote(m4_shift3($@)))[[)], [[$1]], $2)])])
+
+
+# m4_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR`'STRING'
+# at the end.  It is valid to use this macro with MACRO-NAME undefined,
+# in which case no SEPARATOR is added.  Be aware that the criterion is
+# `not being defined', and not `not being empty'.
+#
+# Note that neither STRING nor SEPARATOR are expanded here; rather, when
+# you expand MACRO-NAME, they will be expanded at that point in time.
+#
+# This macro is robust to active symbols.  It can be used to grow
+# strings.
+#
+#    | m4_define(active, ACTIVE)dnl
+#    | m4_append([sentence], [This is an])dnl
+#    | m4_append([sentence], [ active ])dnl
+#    | m4_append([sentence], [symbol.])dnl
+#    | sentence
+#    | m4_undefine([active])dnl
+#    | sentence
+#    => This is an ACTIVE symbol.
+#    => This is an active symbol.
+#
+# It can be used to define hooks.
+#
+#    | m4_define(active, ACTIVE)dnl
+#    | m4_append([hooks], [m4_define([act1], [act2])])dnl
+#    | m4_append([hooks], [m4_define([act2], [active])])dnl
+#    | m4_undefine([active])dnl
+#    | act1
+#    | hooks
+#    | act1
+#    => act1
+#    =>
+#    => active
+#
+# It can also be used to create lists, although this particular usage was
+# broken prior to autoconf 2.62.
+#    | m4_append([list], [one], [, ])dnl
+#    | m4_append([list], [two], [, ])dnl
+#    | m4_append([list], [three], [, ])dnl
+#    | list
+#    | m4_dquote(list)
+#    => one, two, three
+#    => [one],[two],[three]
+#
+# Note that m4_append can benefit from amortized O(n) m4 behavior, if
+# the underlying m4 implementation is smart enough to avoid copying existing
+# contents when enlarging a macro's definition into any pre-allocated storage
+# (m4 1.4.x unfortunately does not implement this optimization).  We do
+# not implement m4_prepend, since it is inherently O(n^2) (pre-allocated
+# storage only occurs at the end of a macro, so the existing contents must
+# always be moved).
+#
+# Use _m4_defn for speed.
+m4_define([m4_append],
+[m4_define([$1], m4_ifdef([$1], [_m4_defn([$1])[$3]])[$2])])
+
+
+# m4_append_uniq(MACRO-NAME, STRING, [SEPARATOR], [IF-UNIQ], [IF-DUP])
+# --------------------------------------------------------------------
+# Like `m4_append', but append only if not yet present.  Additionally,
+# expand IF-UNIQ if STRING was appended, or IF-DUP if STRING was already
+# present.  Also, warn if SEPARATOR is not empty and occurs within STRING,
+# as the algorithm no longer guarantees uniqueness.
+#
+# Note that while m4_append can be O(n) (depending on the quality of the
+# underlying M4 implementation), m4_append_uniq is inherently O(n^2)
+# because each append operation searches the entire string.
+m4_define([m4_append_uniq],
+[m4_ifval([$3], [m4_if(m4_index([$2], [$3]), [-1], [],
+		       [m4_warn([syntax],
+				[$0: `$2' contains `$3'])])])_$0($@)])
+m4_define([_m4_append_uniq],
+[m4_ifdef([$1],
+	  [m4_if(m4_index([$3]_m4_defn([$1])[$3], [$3$2$3]), [-1],
+		 [m4_append([$1], [$2], [$3])$4], [$5])],
+	  [m4_define([$1], [$2])$4])])
+
+# m4_append_uniq_w(MACRO-NAME, STRINGS)
+# -------------------------------------
+# For each of the words in the whitespace separated list STRINGS, append
+# only the unique strings to the definition of MACRO-NAME.
+#
+# Use _m4_defn for speed.
+m4_define([m4_append_uniq_w],
+[m4_map_args_w([$2], [_m4_append_uniq([$1],], [, [ ])])])
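+
+# For example (hypothetical macro name):
+#   m4_append_uniq_w([my_list], [foo bar foo])dnl
+#   my_list
+#   => foo bar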
+
+
+# m4_escape(STRING)
+# -----------------
+# Output quoted STRING, but with embedded #, $, [ and ] turned into
+# quadrigraphs.
+#
+# It is faster to check if STRING is already good using m4_translit
+# than to blindly perform four m4_bpatsubst.
+#
+# Because the translit is stripping quotes, it must also neutralize
+# anything that might be in a macro name, as well as comments, commas,
+# and parentheses.  All the problem characters are unified so that a
+# single m4_index can scan the result.
+#
+# Rather than expand m4_defn every time m4_escape is expanded, we
+# inline its expansion up front.
+m4_define([m4_escape],
+[m4_if(m4_index(m4_translit([$1],
+   [[]#,()]]m4_dquote(m4_defn([m4_cr_symbols2]))[, [$$$]), [$]),
+  [-1], [m4_echo], [_$0])([$1])])
+
+m4_define([_m4_escape],
+[m4_changequote([-=<{(],[)}>=-])]dnl
+[m4_bpatsubst(m4_bpatsubst(m4_bpatsubst(m4_bpatsubst(
+	  -=<{(-=<{(-=<{(-=<{(-=<{($1)}>=-)}>=-)}>=-)}>=-)}>=-,
+	-=<{(#)}>=-, -=<{(@%:@)}>=-),
+      -=<{(\[)}>=-, -=<{(@<:@)}>=-),
+    -=<{(\])}>=-, -=<{(@:>@)}>=-),
+  -=<{(\$)}>=-, -=<{(@S|@)}>=-)m4_changequote([,])])
+
+
+# m4_text_wrap(STRING, [PREFIX], [FIRST-PREFIX], [WIDTH])
+# -------------------------------------------------------
+# Expands into STRING wrapped to hold in WIDTH columns (default = 79).
+# If PREFIX is given, each line is prefixed with it.  If FIRST-PREFIX is
+# specified, then the first line is prefixed with it.  As a special case,
+# if the length of FIRST-PREFIX is greater than that of PREFIX, then
+# FIRST-PREFIX will be left alone on the first line.
+#
+# No expansion occurs on the contents of STRING, PREFIX, or FIRST-PREFIX,
+# although quadrigraphs are correctly recognized.  More precisely,
+# you may redefine m4_qlen to recognize whatever escape sequences that
+# you will post-process.
+#
+# Typical outputs are:
+#
+# m4_text_wrap([Short string */], [   ], [/* ], 20)
+#  => /* Short string */
+#
+# m4_text_wrap([Much longer string */], [   ], [/* ], 20)
+#  => /* Much longer
+#  =>    string */
+#
+# m4_text_wrap([Short doc.], [          ], [  --short ], 30)
+#  =>   --short Short doc.
+#
+# m4_text_wrap([Short doc.], [          ], [  --too-wide ], 30)
+#  =>   --too-wide
+#  =>           Short doc.
+#
+# m4_text_wrap([Super long documentation.], [          ], [  --too-wide ], 30)
+#  =>   --too-wide
+#  =>      Super long
+#  =>      documentation.
+#
+# FIXME: there is no checking for a PREFIX longer than WIDTH, but do
+# we really want to bother with people probing every single corner
+# of the software?
+#
+# This macro does not leave a trailing space behind the last word of a line,
+# which complicates it a bit.  The algorithm is otherwise stupid and simple:
+# all the words are preceded by m4_Separator which is defined to empty for
+# the first word, and then ` ' (single space) for all the others.
+#
+# The algorithm uses a helper that uses $2 through $4 directly, rather than
+# using local variables, to avoid m4_defn overhead, or expansion swallowing
+# any $.  It also bypasses m4_popdef overhead with _m4_popdef since no user
+# macro expansion occurs in the meantime.  Also, the definition is written
+# with m4_do, to avoid time wasted on dnl during expansion (since this is
+# already a time-consuming macro).
+m4_define([m4_text_wrap],
+[_$0(m4_escape([$1]), [$2], m4_default_quoted([$3], [$2]),
+     m4_default_quoted([$4], [79]))])
+
+m4_define([_m4_text_wrap],
+m4_do(dnl set up local variables, to avoid repeated calculations
+[[m4_pushdef([m4_Indent], m4_qlen([$2]))]],
+[[m4_pushdef([m4_Cursor], m4_qlen([$3]))]],
+[[m4_pushdef([m4_Separator], [m4_define([m4_Separator], [ ])])]],
+dnl expand the first prefix, then check its length vs. regular prefix
+dnl same length: nothing special
+dnl prefix1 longer: output on line by itself, and reset cursor
+dnl prefix1 shorter: pad to length of prefix, and reset cursor
+[[[$3]m4_cond([m4_Cursor], m4_Indent, [],
+	      [m4_eval(m4_Cursor > m4_Indent)], [1], [
+[$2]m4_define([m4_Cursor], m4_Indent)],
+	      [m4_format([%*s], m4_max([0],
+  m4_eval(m4_Indent - m4_Cursor)), [])m4_define([m4_Cursor], m4_Indent)])]],
+dnl now, for each word, compute the cursor after the word is output, then
+dnl check if the cursor would exceed the wrap column
+dnl if so, reset cursor, and insert newline and prefix
+dnl if not, insert the separator (usually a space)
+dnl either way, insert the word
+[[m4_map_args_w([$1], [$0_word(], [, [$2], [$4])])]],
+dnl finally, clean up the local variables
+[[_m4_popdef([m4_Separator], [m4_Cursor], [m4_Indent])]]))
+
+m4_define([_m4_text_wrap_word],
+[m4_define([m4_Cursor], m4_eval(m4_Cursor + m4_qlen([$1]) + 1))]dnl
+[m4_if(m4_eval(m4_Cursor > ([$3])),
+      [1], [m4_define([m4_Cursor], m4_eval(m4_Indent + m4_qlen([$1]) + 1))
+[$2]],
+      [m4_Separator[]])[$1]])
+
+# m4_text_box(MESSAGE, [FRAME-CHARACTER = `-'])
+# ---------------------------------------------
+# Turn MESSAGE into:
+#  ## ------- ##
+#  ## MESSAGE ##
+#  ## ------- ##
+# using FRAME-CHARACTER in the border.
+#
+# Quadrigraphs are correctly recognized.  More precisely, you may
+# redefine m4_qlen to recognize whatever escape sequences that you
+# will post-process.
+m4_define([m4_text_box],
+[m4_pushdef([m4_Border],
+	    m4_translit(m4_format([[[%*s]]], m4_decr(m4_qlen(_m4_expand([$1
+]))), []), [ ], m4_default_quoted([$2], [-])))]dnl
+[[##] _m4_defn([m4_Border]) [##]
+[##] $1 [##]
+[##] _m4_defn([m4_Border]) [##]_m4_popdef([m4_Border])])
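+
+# For example:
+#   m4_text_box([Hello])
+#   => ## ----- ##
+#   => ## Hello ##
+#   => ## ----- ##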
+
+
+# m4_qlen(STRING)
+# ---------------
+# Expands to the length of STRING after autom4te converts all quadrigraphs.
+#
+# If you use some other means of post-processing m4 output rather than
+# autom4te, then you may redefine this macro to recognize whatever
+# escape sequences your post-processor will handle.  For that matter,
+# m4_define([m4_qlen], m4_defn([m4_len])) is sufficient if you don't
+# do any post-processing.
+#
+# Avoid bpatsubsts for the common case of no quadrigraphs.  Cache
+# results, as configure scripts tend to ask about lengths of common
+# strings like `/*' and `*/' rather frequently.  Minimize the number
+# of times that $1 occurs in m4_qlen, so there is less text to parse
+# on a cache hit.
+m4_define([m4_qlen],
+[m4_ifdef([$0-$1], [_m4_defn([$0-]], [_$0(])[$1])])
+m4_define([_m4_qlen],
+[m4_define([m4_qlen-$1],
+m4_if(m4_index([$1], [@]), [-1], [m4_len([$1])],
+      [m4_len(m4_bpatsubst([[$1]],
+			   [@\(\(<:\|:>\|S|\|%:\|\{:\|:\}\)\(@\)\|&t@\)],
+			   [\3]))]))_m4_defn([m4_qlen-$1])])
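+
+# For example, each quadrigraph counts as a single character:
+#   m4_qlen([abc])    => 3
+#   m4_qlen([@<:@])   => 1
+#   m4_qlen([a@S|@b]) => 3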
+
+# m4_copyright_condense(TEXT)
+# ---------------------------
+# Condense the copyright notice in TEXT to only display the final
+# year, wrapping the results to fit in 80 columns.
+m4_define([m4_copyright_condense],
+[m4_text_wrap(m4_bpatsubst(m4_flatten([[$1]]),
+[(C)[-	 ,0-9]*\([1-9][0-9][0-9][0-9]\)], [(C) \1]))])
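+
+# For example (a sketch with a hypothetical notice; only the final year
+# is kept):
+#   m4_copyright_condense([Copyright (C) 1990, 1992, 1994 Free Foo, Inc.])
+#   => Copyright (C) 1994 Free Foo, Inc.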
+
+## ----------------------- ##
+## 13. Number processing.  ##
+## ----------------------- ##
+
+# m4_cmp(A, B)
+# ------------
+# Compare two integer expressions.
+# A < B -> -1
+# A = B ->  0
+# A > B ->  1
+m4_define([m4_cmp],
+[m4_eval((([$1]) > ([$2])) - (([$1]) < ([$2])))])
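+
+# For example:
+#   m4_cmp([1+1], [3])  => -1
+#   m4_cmp([2], [0x2])  => 0
+#   m4_cmp([10], [0x2]) => 1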
+
+
+# m4_list_cmp(A, B)
+# -----------------
+#
+# Compare the two lists of integer expressions A and B.  For instance:
+#   m4_list_cmp([1, 0],     [1])    ->  0
+#   m4_list_cmp([1, 0],     [1, 0]) ->  0
+#   m4_list_cmp([1, 2],     [1, 0]) ->  1
+#   m4_list_cmp([1, 2, 3],  [1, 2]) ->  1
+#   m4_list_cmp([1, 2, -3], [1, 2]) -> -1
+#   m4_list_cmp([1, 0],     [1, 2]) -> -1
+#   m4_list_cmp([1],        [1, 2]) -> -1
+#   m4_define([xa], [oops])dnl
+#   m4_list_cmp([[0xa]],    [5+5])  -> 0
+#
+# Rather than face the overhead of m4_case, we use a helper function whose
+# expansion includes the name of the macro to invoke on the tail, either
+# m4_ignore or m4_unquote.  This is particularly useful when comparing
+# long lists, since less text is being expanded for deciding when to end
+# recursion.  The recursion is between a pair of macros that alternate
+# which list is trimmed by one element; this is more efficient than
+# calling m4_cdr on both lists from a single macro.  Guarantee exactly
+# one expansion of both lists' side effects.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_list_cmp],
+[_$0_raw(m4_dquote($1), m4_dquote($2))])
+
+m4_define([_m4_list_cmp_raw],
+[m4_if([$1], [$2], [0], [_m4_list_cmp_1([$1], $2)])])
+
+m4_define([_m4_list_cmp],
+[m4_if([$1], [], [0m4_ignore], [$2], [0], [m4_unquote], [$2m4_ignore])])
+
+m4_define([_m4_list_cmp_1],
+[_m4_list_cmp_2([$2], [m4_shift2($@)], $1)])
+
+m4_define([_m4_list_cmp_2],
+[_m4_list_cmp([$1$3], m4_cmp([$3+0], [$1+0]))(
+  [_m4_list_cmp_1(m4_dquote(m4_shift3($@)), $2)])])
+
+# m4_max(EXPR, ...)
+# m4_min(EXPR, ...)
+# -----------------
+# Return the decimal value of the maximum (or minimum) in a series of
+# integer expressions.
+#
+# M4 1.4.x doesn't provide ?:.  Hence this huge m4_eval.  Avoid m4_eval
+# if both arguments are identical, but be aware of m4_max(0xa, 10) (hence
+# the use of <=, not just <, in the second multiply).
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_max],
+[m4_if([$#], [0], [m4_fatal([too few arguments to $0])],
+       [$#], [1], [m4_eval([$1])],
+       [$#$1], [2$2], [m4_eval([$1])],
+       [$#], [2], [_$0($@)],
+       [_m4_minmax([_$0], $@)])])
+
+m4_define([_m4_max],
+[m4_eval((([$1]) > ([$2])) * ([$1]) + (([$1]) <= ([$2])) * ([$2]))])
+
+m4_define([m4_min],
+[m4_if([$#], [0], [m4_fatal([too few arguments to $0])],
+       [$#], [1], [m4_eval([$1])],
+       [$#$1], [2$2], [m4_eval([$1])],
+       [$#], [2], [_$0($@)],
+       [_m4_minmax([_$0], $@)])])
+
+m4_define([_m4_min],
+[m4_eval((([$1]) < ([$2])) * ([$1]) + (([$1]) >= ([$2])) * ([$2]))])
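+
+# For example, arguments are integer expressions and the result is decimal:
+#   m4_max([1+1], [3], [0x5]) => 5
+#   m4_min([1+1], [3], [0x5]) => 2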
+
+# _m4_minmax(METHOD, ARG1, ARG2...)
+# ---------------------------------
+# Common recursion code for m4_max and m4_min.  METHOD must be _m4_max
+# or _m4_min, and there must be at least two arguments to combine.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([_m4_minmax],
+[m4_if([$#], [3], [$1([$2], [$3])],
+       [$0([$1], $1([$2], [$3]), m4_shift3($@))])])
+
+
+# m4_sign(A)
+# ----------
+# The sign of the integer expression A.
+m4_define([m4_sign],
+[m4_eval((([$1]) > 0) - (([$1]) < 0))])
+
+
+
+## ------------------------ ##
+## 14. Version processing.  ##
+## ------------------------ ##
+
+
+# m4_version_unletter(VERSION)
+# ----------------------------
+# Normalize beta version numbers with letters to numeric expressions, which
+# can then be handed to m4_eval for the purpose of comparison.
+#
+#   Nl -> (N+1).-1.(l#)
+#
+# for example:
+#   [2.14a] -> [0,2,14+1,-1,[0r36:a]] -> 2.15.-1.10
+#   [2.14b] -> [0,2,15+1,-1,[0r36:b]] -> 2.15.-1.11
+#   [2.61aa.b] -> [0,2.61,1,-1,[0r36:aa],+1,-1,[0r36:b]] -> 2.62.-1.370.1.-1.11
+#   [08] -> [0,[0r10:0]8] -> 8
+#
+# This macro expects reasonable version numbers, but can handle double
+# letters and does not expand any macros.  Original version strings can
+# use both `.' and `-' separators.
+#
+# Inline constant expansions, to avoid m4_defn overhead.
+# _m4_version_unletter is the real workhorse used by m4_version_compare,
+# but since [0r36:a] and commas are less readable than 10 and dots, we
+# provide a wrapper for human use.
+m4_define([m4_version_unletter],
+[m4_substr(m4_map_args([.m4_eval], m4_unquote(_$0([$1]))), [3])])
+m4_define([_m4_version_unletter],
+[m4_bpatsubst(m4_bpatsubst(m4_translit([[[[0,$1]]]], [.-], [,,]),]dnl
+m4_dquote(m4_dquote(m4_defn([m4_cr_Letters])))[[+],
+	      [+1,-1,[0r36:\&]]), [,0], [,[0r10:0]])])
+
+
+# m4_version_compare(VERSION-1, VERSION-2)
+# ----------------------------------------
+# Compare the two version numbers and expand into
+#  -1 if VERSION-1 < VERSION-2
+#   0 if           =
+#   1 if           >
+#
+# Since _m4_version_unletter does not output side effects, we can
+# safely bypass the overhead of m4_version_cmp.
+m4_define([m4_version_compare],
+[_m4_list_cmp_raw(_m4_version_unletter([$1]), _m4_version_unletter([$2]))])
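+
+# For example:
+#   m4_version_compare([1.1], [2.0])  => -1
+#   m4_version_compare([2.0], [2.0])  => 0
+#   m4_version_compare([2.0b], [2.0]) => 1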
+
+
+# m4_PACKAGE_NAME
+# m4_PACKAGE_TARNAME
+# m4_PACKAGE_VERSION
+# m4_PACKAGE_STRING
+# m4_PACKAGE_BUGREPORT
+# --------------------
+# If m4sugar/version.m4 is present, then define version strings.  This
+# file is optional, provided by Autoconf but absent in Bison.
+m4_sinclude([m4sugar/version.m4])
+
+
+# m4_version_prereq(VERSION, [IF-OK], [IF-NOT = FAIL])
+# ----------------------------------------------------
+# Check this Autoconf version against VERSION.
+m4_define([m4_version_prereq],
+m4_ifdef([m4_PACKAGE_VERSION],
+[[m4_if(m4_version_compare(]m4_dquote(m4_defn([m4_PACKAGE_VERSION]))[, [$1]),
+	[-1],
+	[m4_default([$3],
+		    [m4_fatal([Autoconf version $1 or higher is required],
+			      [63])])],
+	[$2])]],
+[[m4_fatal([m4sugar/version.m4 not found])]]))
+
+
+## ------------------ ##
+## 15. Set handling.  ##
+## ------------------ ##
+
+# Autoconf likes to create arbitrarily large sets; for example, as of
+# this writing, the configure.ac for coreutils tracks a set of more
+# than 400 AC_SUBST.  How do we track all of these set members,
+# without introducing duplicates?  We could use m4_append_uniq, with
+# the set NAME residing in the contents of the macro NAME.
+# Unfortunately, m4_append_uniq is quadratic for set creation, because
+# it costs O(n) to search the string for each of O(n) insertions; not
+# to mention that with m4 1.4.x, even using m4_append is slow, costing
+# O(n) rather than O(1) per insertion.  Other set operations, not used
+# by Autoconf but still possible by manipulation of the definition
+# tracked in macro NAME, include O(n) deletion of one element and O(n)
+# computation of set size.  Because the set is exposed to the user via
+# the definition of a single macro, we cannot cache any data about the
+# set without risking the cache being invalidated by the user
+# redefining NAME.
+#
+# Can we do better?  Yes, because m4 gives us an O(1) search function
+# for free: ifdef.  Additionally, even m4 1.4.x gives us an O(1)
+# insert operation for free: pushdef.  But to use these, we must
+# represent the set via a group of macros; to keep the set consistent,
+# we must hide the set so that the user can only manipulate it through
+# accessor macros.  The contents of the set are maintained through two
+# access points; _m4_set([name]) is a pushdef stack of values in the
+# set, useful for O(n) traversal of the set contents; while the
+# existence of _m4_set([name],value) with no particular value is
+# useful for O(1) querying of set membership.  And since the user
+# cannot externally manipulate the set, we are free to add additional
+# caching macros for other performance improvements.  Deletion can be
+# O(1) per element rather than O(n), by reworking the definition of
+# _m4_set([name],value) to be 0 or 1 based on current membership, and
+# adding _m4_set_cleanup(name) to defer the O(n) cleanup of
+# _m4_set([name]) until we have another reason to do an O(n)
+# traversal.  The existence of _m4_set_cleanup(name) can then be used
+# elsewhere to determine if we must dereference _m4_set([name],value),
+# or assume that definition implies set membership.  Finally, size can
+# be tracked in an O(1) fashion with _m4_set_size(name).
+#
+# The quoting in _m4_set([name],value) is chosen so that there is no
+# ambiguity with a set whose name contains a comma, and so that we can
+# supply the value via _m4_defn([_m4_set([name])]) without needing any
+# quote manipulation.
+
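+# A short usage sketch of the public accessors defined below (hypothetical
+# set name; the order in which elements are listed is not guaranteed):
+#
+#   m4_set_add([shells], [sh])
+#   m4_set_add([shells], [bash])
+#   m4_set_contains([shells], [sh], [yes], [no]) => yes
+#   m4_set_size([shells])                        => 2
+#   m4_set_listc([shells])                       => ,sh,bash
+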
+# m4_set_add(SET, VALUE, [IF-UNIQ], [IF-DUP])
+# -------------------------------------------
+# Add VALUE as an element of SET.  Expand IF-UNIQ on the first
+# addition, and IF-DUP if it is already in the set.  Addition of one
+# element is O(1), such that overall set creation is O(n).
+#
+# We do not want to add a duplicate for a previously deleted but
+# unpruned element, but it is just as easy to check existence directly
+# as it is to query _m4_set_cleanup($1).
+m4_define([m4_set_add],
+[m4_ifdef([_m4_set([$1],$2)],
+	  [m4_if(m4_indir([_m4_set([$1],$2)]), [0],
+		 [m4_define([_m4_set([$1],$2)],
+			    [1])_m4_set_size([$1], [m4_incr])$3], [$4])],
+	  [m4_define([_m4_set([$1],$2)],
+		     [1])m4_pushdef([_m4_set([$1])],
+				    [$2])_m4_set_size([$1], [m4_incr])$3])])
+
+# m4_set_add_all(SET, VALUE...)
+# -----------------------------
+# Add each VALUE into SET.  This is O(n) in the number of VALUEs, and
+# can be faster than calling m4_set_add for each VALUE.
+#
+# Implement two recursion helpers; the check variant is slower but
+# handles the case where an element has previously been removed but
+# not pruned.  The recursion helpers ignore their second argument, so
+# that we can use the faster m4_shift2 and 2 arguments, rather than
+# _m4_shift2 and one argument, as the signal to end recursion.
+#
+# Please keep foreach.m4 in sync with any adjustments made here.
+m4_define([m4_set_add_all],
+[m4_define([_m4_set_size($1)], m4_eval(m4_set_size([$1])
+  + m4_len(m4_ifdef([_m4_set_cleanup($1)], [_$0_check], [_$0])([$1], $@))))])
+
+m4_define([_m4_set_add_all],
+[m4_if([$#], [2], [],
+       [m4_ifdef([_m4_set([$1],$3)], [],
+		 [m4_define([_m4_set([$1],$3)], [1])m4_pushdef([_m4_set([$1])],
+	   [$3])-])$0([$1], m4_shift2($@))])])
+
+m4_define([_m4_set_add_all_check],
+[m4_if([$#], [2], [],
+       [m4_set_add([$1], [$3])$0([$1], m4_shift2($@))])])
+
+# m4_set_contains(SET, VALUE, [IF-PRESENT], [IF-ABSENT])
+# ------------------------------------------------------
+# Expand IF-PRESENT if SET contains VALUE, otherwise expand IF-ABSENT.
+# This is always O(1).
+m4_define([m4_set_contains],
+[m4_ifdef([_m4_set_cleanup($1)],
+	  [m4_if(m4_ifdef([_m4_set([$1],$2)],
+		    [m4_indir([_m4_set([$1],$2)])], [0]), [1], [$3], [$4])],
+	  [m4_ifdef([_m4_set([$1],$2)], [$3], [$4])])])
+
+# m4_set_contents(SET, [SEP])
+# ---------------------------
+# Expand to a single string containing all the elements in SET,
+# separated by SEP, without modifying SET.  No provision is made for
+# disambiguating set elements that contain non-empty SEP as a
+# sub-string, or for recognizing a set that contains only the empty
+# string.  Order of the output is not guaranteed.  If any elements
+# have been previously removed from the set, this action will prune
+# the unused memory.  This is O(n) in the size of the set before
+# pruning.
+#
+# Use _m4_popdef for speed.  The existence of _m4_set_cleanup($1)
+# determines which version of _1 helper we use.
+m4_define([m4_set_contents],
+[m4_set_map_sep([$1], [], [], [[$2]])])
+
+# _m4_set_contents_1(SET)
+# _m4_set_contents_1c(SET)
+# _m4_set_contents_2(SET, [PRE], [POST], [SEP])
+# ---------------------------------------------
+# Expand to a list of quoted elements currently in the set, each
+# surrounded by PRE and POST, and moving SEP in front of PRE on
+# recursion.  To avoid nesting limit restrictions, the algorithm must
+# be broken into two parts; _1 destructively copies the stack in
+# reverse into _m4_set_($1), producing no output; then _2
+# destructively copies _m4_set_($1) back into the stack in reverse.
+# If no elements were deleted, then this visits the set in the order
+# that elements were inserted.  Behavior is undefined if PRE/POST/SEP
+# tries to recursively list or modify SET in any way other than
+# calling m4_set_remove on the current element.  Use _1 if all entries
+# in the stack are guaranteed to be in the set, and _1c to prune
+# removed entries.  Uses _m4_defn and _m4_popdef for speed.
+m4_define([_m4_set_contents_1],
+[_m4_stack_reverse([_m4_set([$1])], [_m4_set_($1)])])
+
+m4_define([_m4_set_contents_1c],
+[m4_ifdef([_m4_set([$1])],
+	  [m4_set_contains([$1], _m4_defn([_m4_set([$1])]),
+		   [m4_pushdef([_m4_set_($1)], _m4_defn([_m4_set([$1])]))],
+		   [_m4_popdef([_m4_set([$1],]_m4_defn(
+      [_m4_set([$1])])[)])])_m4_popdef([_m4_set([$1])])$0([$1])],
+	  [_m4_popdef([_m4_set_cleanup($1)])])])
+
+m4_define([_m4_set_contents_2],
+[_m4_stack_reverse([_m4_set_($1)], [_m4_set([$1])],
+  [$2[]_m4_defn([_m4_set_($1)])$3], [$4[]])])
+
+# m4_set_delete(SET)
+# ------------------
+# Delete all elements in SET, and reclaim any memory occupied by the
+# set.  This is O(n) in the set size.
+#
+# Use _m4_defn and _m4_popdef for speed.
+m4_define([m4_set_delete],
+[m4_ifdef([_m4_set([$1])],
+	  [_m4_popdef([_m4_set([$1],]_m4_defn([_m4_set([$1])])[)],
+		      [_m4_set([$1])])$0([$1])],
+	  [m4_ifdef([_m4_set_cleanup($1)],
+		    [_m4_popdef([_m4_set_cleanup($1)])])m4_ifdef(
+		    [_m4_set_size($1)],
+		    [_m4_popdef([_m4_set_size($1)])])])])
+
+# m4_set_difference(SET1, SET2)
+# -----------------------------
+# Produce a LIST of quoted elements that occur in SET1 but not SET2.
+# Output a comma prior to any elements, to distinguish the empty
+# string from no elements.  This can be directly used as a series of
+# arguments, such as for m4_join, or wrapped inside quotes for use in
+# m4_foreach.  Order of the output is not guaranteed.
+#
+# Short-circuit the idempotence relation.
+m4_define([m4_set_difference],
+[m4_if([$1], [$2], [], [m4_set_map_sep([$1], [_$0([$2],], [)])])])
+
+m4_define([_m4_set_difference],
+[m4_set_contains([$1], [$2], [], [,[$2]])])
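+
+# For example (hypothetical set names; element order is not guaranteed):
+#   m4_set_add_all([s1], [1], [2], [3])
+#   m4_set_add_all([s2], [3], [4])
+#   m4_set_difference([s1], [s2])   => ,1,2
+#   m4_set_intersection([s1], [s2]) => ,3
+#   m4_set_union([s1], [s2])        => ,1,2,3,4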
+
+# m4_set_dump(SET, [SEP])
+# -----------------------
+# Expand to a single string containing all the elements in SET,
+# separated by SEP, then delete SET.  In general, if you only need to
+# list the contents once, this is faster than m4_set_contents.  No
+# provision is made for disambiguating set elements that contain
+# non-empty SEP as a sub-string.  Order of the output is not
+# guaranteed.  This is O(n) in the size of the set before pruning.
+#
+# Use _m4_popdef for speed.  Use existence of _m4_set_cleanup($1) to
+# decide if more expensive recursion is needed.
+m4_define([m4_set_dump],
+[m4_ifdef([_m4_set_size($1)],
+	  [_m4_popdef([_m4_set_size($1)])])m4_ifdef([_m4_set_cleanup($1)],
+    [_$0_check], [_$0])([$1], [], [$2])])
+
+# _m4_set_dump(SET, [SEP], [PREP])
+# _m4_set_dump_check(SET, [SEP], [PREP])
+# --------------------------------------
+# Print SEP and the current element, then delete the element and
+# recurse with empty SEP changed to PREP.  The check variant checks
+# whether the element has been previously removed.  Use _m4_defn and
+# _m4_popdef for speed.
+m4_define([_m4_set_dump],
+[m4_ifdef([_m4_set([$1])],
+	  [[$2]_m4_defn([_m4_set([$1])])_m4_popdef([_m4_set([$1],]_m4_defn(
+		[_m4_set([$1])])[)], [_m4_set([$1])])$0([$1], [$2$3])])])
+
+m4_define([_m4_set_dump_check],
+[m4_ifdef([_m4_set([$1])],
+	  [m4_set_contains([$1], _m4_defn([_m4_set([$1])]),
+			   [[$2]_m4_defn([_m4_set([$1])])])_m4_popdef(
+    [_m4_set([$1],]_m4_defn([_m4_set([$1])])[)],
+    [_m4_set([$1])])$0([$1], [$2$3])],
+	  [_m4_popdef([_m4_set_cleanup($1)])])])
+
+# m4_set_empty(SET, [IF-EMPTY], [IF-ELEMENTS])
+# --------------------------------------------
+# Expand IF-EMPTY if SET has no elements, otherwise IF-ELEMENTS.
+m4_define([m4_set_empty],
+[m4_ifdef([_m4_set_size($1)],
+	  [m4_if(m4_indir([_m4_set_size($1)]), [0], [$2], [$3])], [$2])])
+
+# m4_set_foreach(SET, VAR, ACTION)
+# --------------------------------
+# For each element of SET, define VAR to the element and expand
+# ACTION.  ACTION should not recursively list SET's contents, add
+# elements to SET, nor delete any element from SET except the one
+# currently in VAR.  The order that the elements are visited in is not
+# guaranteed.  This is faster than the corresponding m4_foreach([VAR],
+#   m4_indir([m4_dquote]m4_set_listc([SET])), [ACTION])
+m4_define([m4_set_foreach],
+[m4_pushdef([$2])m4_set_map_sep([$1], [m4_define([$2],], [)$3])])
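+
+# For example (hypothetical set name; with no deletions, elements are
+# typically visited in insertion order, though order is not guaranteed):
+#   m4_set_add_all([nums], [1], [2], [3])
+#   m4_set_foreach([nums], [n], [(n)]) => (1)(2)(3)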
+
+# m4_set_intersection(SET1, SET2)
+# -------------------------------
+# Produce a LIST of quoted elements that occur in both SET1 and SET2.
+# Output a comma prior to any elements, to distinguish the empty
+# string from no elements.  This can be directly used as a series of
+# arguments, such as for m4_join, or wrapped inside quotes for use in
+# m4_foreach.  Order of the output is not guaranteed.
+#
+# Iterate over the smaller set, and short-circuit the idempotence
+# relation.
+m4_define([m4_set_intersection],
+[m4_if([$1], [$2], [m4_set_listc([$1])],
+       m4_eval(m4_set_size([$2]) < m4_set_size([$1])), [1], [$0([$2], [$1])],
+       [m4_set_map_sep([$1], [_$0([$2],], [)])])])
+
+m4_define([_m4_set_intersection],
+[m4_set_contains([$1], [$2], [,[$2]])])
+
+# m4_set_list(SET)
+# m4_set_listc(SET)
+# -----------------
+# Produce a LIST of quoted elements of SET.  This can be directly used
+# as a series of arguments, such as for m4_join or m4_set_add_all, or
+# wrapped inside quotes for use in m4_foreach or m4_map.  With
+# m4_set_list, there is no way to distinguish an empty set from a set
+# containing only the empty string; with m4_set_listc, a leading comma
+# is output if there are any elements.
+m4_define([m4_set_list],
+[m4_set_map_sep([$1], [], [], [,])])
+
+m4_define([m4_set_listc],
+[m4_set_map_sep([$1], [,])])
+
+# m4_set_map(SET, ACTION)
+# -----------------------
+# For each element of SET, expand ACTION with a single argument of the
+# current element.  ACTION should not recursively list SET's contents,
+# add elements to SET, nor delete any element from SET except the one
+# passed as an argument.  The order that the elements are visited in
+# is not guaranteed.  This is faster than either of the corresponding
+#   m4_map_args([ACTION]m4_set_listc([SET]))
+#   m4_set_foreach([SET], [VAR], [ACTION(m4_defn([VAR]))])
+m4_define([m4_set_map],
+[m4_set_map_sep([$1], [$2(], [)])])
+
+# m4_set_map_sep(SET, [PRE], [POST], [SEP])
+# -----------------------------------------
+# For each element of SET, expand PRE[value]POST[], and expand SEP
+# between elements.
+m4_define([m4_set_map_sep],
+[m4_ifdef([_m4_set_cleanup($1)], [_m4_set_contents_1c],
+	  [_m4_set_contents_1])([$1])_m4_set_contents_2($@)])
+
+# m4_set_remove(SET, VALUE, [IF-PRESENT], [IF-ABSENT])
+# ----------------------------------------------------
+# If VALUE is an element of SET, delete it and expand IF-PRESENT.
+# Otherwise expand IF-ABSENT.  Deleting a single value is O(1),
+# although it leaves memory occupied until the next O(n) traversal of
+# the set which will compact the set.
+#
+# Optimize if the element being removed is the most recently added,
+# since defining _m4_set_cleanup($1) slows down so many other macros.
+# In particular, this plays well with m4_set_foreach and m4_set_map.
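+# An illustrative sketch (not part of the original documentation):
+#   m4_set_remove([s], [a], [removed a], [a was absent])
+# deletes a from the set and expands to "removed a" if a was present,
+# and expands to "a was absent" otherwise.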
+m4_define([m4_set_remove],
+[m4_set_contains([$1], [$2], [_m4_set_size([$1],
+    [m4_decr])m4_if(_m4_defn([_m4_set([$1])]), [$2],
+		    [_m4_popdef([_m4_set([$1],$2)], [_m4_set([$1])])],
+		    [m4_define([_m4_set_cleanup($1)])m4_define(
+		      [_m4_set([$1],$2)], [0])])$3], [$4])])
+
+# m4_set_size(SET)
+# ----------------
+# Expand to the number of elements currently in SET.  This operation
+# is O(1), and thus more efficient than m4_count(m4_set_list([SET])).
+m4_define([m4_set_size],
+[m4_ifdef([_m4_set_size($1)], [m4_indir([_m4_set_size($1)])], [0])])
+
+# _m4_set_size(SET, ACTION)
+# -------------------------
+# ACTION must be either m4_incr or m4_decr, and the size of SET is
+# changed accordingly.  If the set is empty, ACTION must not be
+# m4_decr.
+m4_define([_m4_set_size],
+[m4_define([_m4_set_size($1)],
+	   m4_ifdef([_m4_set_size($1)], [$2(m4_indir([_m4_set_size($1)]))],
+		    [1]))])
+
+# m4_set_union(SET1, SET2)
+# ------------------------
+# Produce a LIST of double quoted elements that occur in either SET1
+# or SET2, without duplicates.  Output a comma prior to any elements,
+# to distinguish the empty string from no elements.  This can be
+# directly used as a series of arguments, such as for m4_join, or
+# wrapped inside quotes for use in m4_foreach.  Order of the output is
+# not guaranteed.
+#
+# We can rely on the fact that m4_set_listc prunes SET1, so we don't
+# need to check _m4_set([$1],element) for 0.  Short-circuit the
+# idempotence relation.
+m4_define([m4_set_union],
+[m4_set_listc([$1])m4_if([$1], [$2], [],
+  [m4_set_map_sep([$2], [_$0([$1],], [)])])])
+
+m4_define([_m4_set_union],
+[m4_ifdef([_m4_set([$1],$2)], [], [,[$2]])])
+
+
+## ------------------- ##
+## 16. File handling.  ##
+## ------------------- ##
+
+
+# It is a real pity that M4 comes with no macros to bind a diversion
+# to a file.  So we have to deal without, which makes us a lot more
+# fragile than we should be.
+
+
+# m4_file_append(FILE-NAME, CONTENT)
+# ----------------------------------
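+# An illustrative sketch (not part of the original documentation):
+#   m4_file_append([notes.txt], [hello world])
+# appends the line "hello world" to notes.txt via the shell, and calls
+# m4_fatal if the write fails.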
+m4_define([m4_file_append],
+[m4_syscmd([cat >>$1 <<_m4eof
+$2
+_m4eof
+])
+m4_if(m4_sysval, [0], [],
+      [m4_fatal([$0: cannot write: $1])])])
+
+
+
+## ------------------------ ##
+## 17. Setting M4sugar up.  ##
+## ------------------------ ##
+
+# _m4_divert_diversion should be defined.
+m4_divert_push([KILL])
+
+# m4_init
+# -------
+# Initialize the m4sugar language.
+m4_define([m4_init],
+[# All the M4sugar macros start with `m4_', except `dnl' kept as is
+# for sake of simplicity.
+m4_pattern_forbid([^_?m4_])
+m4_pattern_forbid([^dnl$])
+
+# If __m4_version__ is defined, we assume that we are being run by M4
+# 1.6 or newer, thus $@ recursion is linear, and debugmode(+do)
+# is available for faster checks of dereferencing undefined macros
+# and forcing dumpdef to print to stderr regardless of debugfile.
+# But if it is missing, we assume we are being run by M4 1.4.x, that
+# $@ recursion is quadratic, and that we need foreach-based
+# replacement macros.  Also, m4 prior to 1.4.8 loses track of location
+# during m4wrap text; __line__ should never be 0.
+#
+# Use the raw builtin to avoid tripping up include tracing.
+# Meanwhile, avoid m4_copy, since it temporarily undefines m4_defn.
+m4_ifdef([__m4_version__],
+[m4_debugmode([+do])
+m4_define([m4_defn], _m4_defn([_m4_defn]))
+m4_define([m4_dumpdef], _m4_defn([_m4_dumpdef]))
+m4_define([m4_popdef], _m4_defn([_m4_popdef]))
+m4_define([m4_undefine], _m4_defn([_m4_undefine]))],
+[m4_builtin([include], [m4sugar/foreach.m4])
+m4_wrap_lifo([m4_if(__line__, [0], [m4_pushdef([m4_location],
+]]m4_dquote(m4_dquote(m4_dquote(__file__:__line__)))[[)])])])
+
+# Rewrite the first entry of the diversion stack.
+m4_divert([KILL])
+
+# Check the divert push/pop perfect balance.
+# Some users are prone to also use m4_wrap to register last-minute
+# m4_divert_text; so after our diversion cleanups, we restore
+# KILL as the bottom of the diversion stack.
+m4_wrap([m4_popdef([_m4_divert_diversion])m4_ifdef(
+  [_m4_divert_diversion], [m4_fatal([$0: unbalanced m4_divert_push:
+]m4_divert_stack)])_m4_popdef([_m4_divert_stack])m4_divert_push([KILL])])
+])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sugar.m4f b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sugar.m4f
new file mode 100644
index 0000000..75e9121
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/m4sugar.m4f
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/version.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/version.m4
new file mode 100644
index 0000000..4f448aa
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/autoconf/m4sugar/version.m4
@@ -0,0 +1,12 @@
+# This file is part of -*- Autoconf -*-.
+# Version of Autoconf.
+# Copyright (C) 1999, 2000, 2001, 2002, 2006, 2007, 2009
+# Free Software Foundation, Inc.
+
+m4_define([m4_PACKAGE_NAME],      [GNU Autoconf])
+m4_define([m4_PACKAGE_TARNAME],   [autoconf])
+m4_define([m4_PACKAGE_VERSION],   [2.68])
+m4_define([m4_PACKAGE_STRING],    [GNU Autoconf 2.68])
+m4_define([m4_PACKAGE_BUGREPORT], [bug-autoconf@gnu.org])
+m4_define([m4_PACKAGE_URL],       [http://www.gnu.org/software/autoconf/])
+m4_define([m4_PACKAGE_YEAR],      [2010])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/ChannelDefs.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/ChannelDefs.pm
new file mode 100644
index 0000000..0fe6197
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/ChannelDefs.pm
@@ -0,0 +1,444 @@
+# Copyright (C) 2002-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::ChannelDefs;
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+    }
+}
+use Automake::Channels;
+
+=head1 NAME
+
+Automake::ChannelDefs - channel definitions for Automake and helper functions
+
+=head1 SYNOPSIS
+
+  use Automake::ChannelDefs;
+
+  Automake::ChannelDefs::usage ();
+  prog_error ($MESSAGE, [%OPTIONS]);
+  error ($WHERE, $MESSAGE, [%OPTIONS]);
+  error ($MESSAGE);
+  fatal ($WHERE, $MESSAGE, [%OPTIONS]);
+  fatal ($MESSAGE);
+  verb ($MESSAGE, [%OPTIONS]);
+  switch_warning ($CATEGORY);
+  parse_WARNINGS ();
+  parse_warnings ($OPTION, $ARGUMENT);
+  Automake::ChannelDefs::set_strictness ($STRICTNESS_NAME);
+
+=head1 DESCRIPTION
+
+This package defines channels that can be used in Automake to
+output diagnostics and other messages (via C<msg()>).  It also defines
+some helper functions to enable or disable these channels, and some
+shorthand functions to output on specific channels.
+
+=cut
+
+use 5.006;
+use strict;
+use Exporter;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (&prog_error &error &fatal &verb
+	      &switch_warning &parse_WARNINGS &parse_warnings);
+
+=head2 CHANNELS
+
+The following channels can be used as the first argument of
+C<Automake::Channel::msg>.  For some of them we list a shorthand
+function that makes the code more readable.
+
+=over 4
+
+=item C<fatal>
+
+Fatal errors.  Use C<&fatal> to send messages over this channel.
+
+=item C<error>
+
+Common errors.  Use C<&error> to send messages over this channel.
+
+=item C<error-gnu>
+
+Errors related to GNU Standards.
+
+=item C<error-gnu/warn>
+
+Errors related to GNU Standards that should be warnings in 'foreign' mode.
+
+=item C<error-gnits>
+
+Errors related to GNITS Standards (silent by default).
+
+=item C<automake>
+
+Internal errors.  Use C<&prog_error> to send messages over this channel.
+
+=item C<gnu>
+
+Warnings related to GNU Coding Standards.
+
+=item C<obsolete>
+
+Warnings about obsolete features (silent by default).
+
+=item C<override>
+
+Warnings about user redefinitions of Automake rules or
+variables (silent by default).
+
+=item C<portability>
+
+Warnings about non-portable constructs.
+
+=item C<extra-portability>
+
+Extra warnings about non-portable constructs covering obscure tools.
+
+=item C<syntax>
+
+Warnings about weird syntax, unused variables, typos...
+
+=item C<unsupported>
+
+Warnings about unsupported (or mis-supported) features.
+
+=item C<verb>
+
+Messages output in C<--verbose> mode.  Use C<&verb> to send such messages.
+
+=item C<note>
+
+Informative messages.
+
+=back
+
+=cut
+
+# Initialize our list of error/warning channels.
+# Do not forget to update &usage and the manual
+# if you add or change a warning channel.
+
+register_channel 'fatal', type => 'fatal', uniq_part => UP_NONE, ordered => 0;
+register_channel 'error', type => 'error';
+register_channel 'error-gnu', type => 'error';
+register_channel 'error-gnu/warn', type => 'error';
+register_channel 'error-gnits', type => 'error', silent => 1;
+register_channel 'automake', type => 'fatal', backtrace => 1,
+  header => ("####################\n" .
+	     "## Internal Error ##\n" .
+	     "####################\n"),
+  footer => "\nPlease contact <$PACKAGE_BUGREPORT>.",
+  uniq_part => UP_NONE, ordered => 0;
+
+register_channel 'extra-portability', type => 'warning', silent => 1;
+register_channel 'gnu', type => 'warning';
+register_channel 'obsolete', type => 'warning';
+register_channel 'override', type => 'warning', silent => 1;
+register_channel 'portability', type => 'warning', silent => 1;
+register_channel 'portability-recursive', type => 'warning', silent => 1;
+register_channel 'syntax', type => 'warning';
+register_channel 'unsupported', type => 'warning';
+
+register_channel 'verb', type => 'debug', silent => 1, uniq_part => UP_NONE,
+  ordered => 0;
+register_channel 'note', type => 'debug', silent => 0;
+
+setup_channel_type 'warning', header => 'warning: ';
+setup_channel_type 'error', header => 'error: ';
+setup_channel_type 'fatal', header => 'error: ';
+
+=head2 FUNCTIONS
+
+=over 4
+
+=item C<usage ()>
+
+Display warning categories.
+
+=cut
+
+sub usage ()
+{
+  print <<EOF;
+Warning categories include:
+  gnu                GNU coding standards (default in gnu and gnits modes)
+  obsolete           obsolete features or constructions
+  override           user redefinitions of Automake rules or variables
+  portability        portability issues (default in gnu and gnits modes)
+  extra-portability  extra portability issues related to obscure tools
+  syntax             dubious syntactic constructs (default)
+  unsupported        unsupported or incomplete features (default)
+  all                all the warnings
+  no-CATEGORY        turn off warnings in CATEGORY
+  none               turn off all the warnings
+  error              treat warnings as errors
+EOF
+}
+
+=item C<prog_error ($MESSAGE, [%OPTIONS])>
+
+Signal a programming error (on channel C<automake>),
+display C<$MESSAGE>, and exit 1.
+
+=cut
+
+sub prog_error ($;%)
+{
+  my ($msg, %opts) = @_;
+  msg 'automake', '', $msg, %opts;
+}
+
+=item C<error ($WHERE, $MESSAGE, [%OPTIONS])>
+
+=item C<error ($MESSAGE)>
+
+Uncategorized errors.
+
+=cut
+
+sub error ($;$%)
+{
+  my ($where, $msg, %opts) = @_;
+  msg ('error', $where, $msg, %opts);
+}
+
+=item C<fatal ($WHERE, $MESSAGE, [%OPTIONS])>
+
+=item C<fatal ($MESSAGE)>
+
+Fatal errors.
+
+=cut
+
+sub fatal ($;$%)
+{
+  my ($where, $msg, %opts) = @_;
+  msg ('fatal', $where, $msg, %opts);
+}
+
+=item C<verb ($MESSAGE, [%OPTIONS])>
+
+C<--verbose> messages.
+
+=cut
+
+sub verb ($;%)
+{
+  my ($msg, %opts) = @_;
+  $msg = "thread " . threads->tid . ": " . $msg
+    if $perl_threads;
+  msg 'verb', '', $msg, %opts;
+}
+
+=item C<switch_warning ($CATEGORY)>
+
+If C<$CATEGORY> is C<mumble>, turn on channel C<mumble>.
+If it's C<no-mumble>, turn C<mumble> off.
+Else handle C<all> and C<none> for completeness.
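+
+A minimal sketch (illustrative only, based on the description above):
+
+  switch_warning 'obsolete';      # enable the 'obsolete' channel
+  switch_warning 'no-obsolete';   # silence it again
+  switch_warning 'all';           # enable every warning channel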
+
+=cut
+
+sub switch_warning ($)
+{
+  my ($cat) = @_;
+  my $has_no = 0;
+
+  if ($cat =~ /^no-(.*)$/)
+    {
+      $cat = $1;
+      $has_no = 1;
+    }
+
+  if ($cat eq 'all')
+    {
+      setup_channel_type 'warning', silent => $has_no;
+    }
+  elsif ($cat eq 'none')
+    {
+      setup_channel_type 'warning', silent => ! $has_no;
+    }
+  elsif ($cat eq 'error')
+    {
+      $warnings_are_errors = ! $has_no;
+      # Set exit code if Perl warns about something
+      # (like uninitialized variables).
+      $SIG{"__WARN__"} =
+	$has_no ? 'DEFAULT' : sub { print STDERR @_; $exit_code = 1; };
+    }
+  elsif (channel_type ($cat) eq 'warning')
+    {
+      setup_channel $cat, silent => $has_no;
+      #
+      # Handling of portability warnings is trickier.  For relevant tests,
+      # see 'dollarvar2', 'extra-portability' and 'extra-portability3'.
+      #
+      # -Wportability-recursive and -Wno-portability-recursive should not
+      # have any effect on other 'portability' or 'extra-portability'
+      # warnings, so there's no need to handle them separately or ad-hoc.
+      #
+      if ($cat eq 'extra-portability' && ! $has_no) # -Wextra-portability
+        {
+          # -Wextra-portability must enable 'portability' and
+          # 'portability-recursive' warnings.
+          setup_channel 'portability', silent => 0;
+          setup_channel 'portability-recursive', silent => 0;
+        }
+      if ($cat eq 'portability') # -Wportability or -Wno-portability
+        {
+          if ($has_no) # -Wno-portability
+            {
+              # -Wno-portability must disable 'extra-portability' and
+              # 'portability-recursive' warnings.
+              setup_channel 'portability-recursive', silent => 1;
+              setup_channel 'extra-portability', silent => 1;
+            }
+          else # -Wportability
+            {
+              # -Wportability must enable 'portability-recursive'
+              # warnings.  But it should have no influence over the
+              # 'extra-portability' warnings.
+              setup_channel 'portability-recursive', silent => 0;
+            }
+        }
+    }
+  else
+    {
+      return 1;
+    }
+  return 0;
+}
+
+=item C<parse_WARNINGS ()>
+
+Parse the WARNINGS environment variable.
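+
+For instance (an illustrative sketch): running with
+C<WARNINGS=portability,no-obsolete> in the environment enables the
+C<portability> channel and silences C<obsolete>; unknown categories are
+silently ignored.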
+
+=cut
+
+sub parse_WARNINGS ()
+{
+  if (exists $ENV{'WARNINGS'})
+    {
+      # Ignore unknown categories.  This is required because WARNINGS
+      # should be honored by many tools.
+      switch_warning $_ foreach (split (',', $ENV{'WARNINGS'}));
+    }
+}
+
+=item C<parse_warnings ($OPTION, $ARGUMENT)>
+
+Parse the argument of C<--warning=CATEGORY> or C<-WCATEGORY>.
+
+C<$OPTION> is C<"--warning"> or C<"-W">, C<$ARGUMENT> is C<CATEGORY>.
+
+This is meant to be used as an argument to C<Getopt>.
+
+=cut
+
+sub parse_warnings ($$)
+{
+  my ($opt, $categories) = @_;
+
+  foreach my $cat (split (',', $categories))
+    {
+      msg 'unsupported', "unknown warning category '$cat'"
+	if switch_warning $cat;
+    }
+}
+
+=item C<set_strictness ($STRICTNESS_NAME)>
+
+Configure channels for strictness C<$STRICTNESS_NAME>.
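+
+For example (illustrative only), C<set_strictness ('foreign')> silences
+the GNU- and GNITS-specific error channels and downgrades
+C<error-gnu/warn> to a warning, as implemented below.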
+
+=cut
+
+sub set_strictness ($)
+{
+  my ($name) = @_;
+
+  if ($name eq 'gnu')
+    {
+      setup_channel 'error-gnu', silent => 0;
+      setup_channel 'error-gnu/warn', silent => 0, type => 'error';
+      setup_channel 'error-gnits', silent => 1;
+      setup_channel 'portability', silent => 0;
+      setup_channel 'extra-portability', silent => 1;
+      setup_channel 'gnu', silent => 0;
+    }
+  elsif ($name eq 'gnits')
+    {
+      setup_channel 'error-gnu', silent => 0;
+      setup_channel 'error-gnu/warn', silent => 0, type => 'error';
+      setup_channel 'error-gnits', silent => 0;
+      setup_channel 'portability', silent => 0;
+      setup_channel 'extra-portability', silent => 1;
+      setup_channel 'gnu', silent => 0;
+    }
+  elsif ($name eq 'foreign')
+    {
+      setup_channel 'error-gnu', silent => 1;
+      setup_channel 'error-gnu/warn', silent => 0, type => 'warning';
+      setup_channel 'error-gnits', silent => 1;
+      setup_channel 'portability', silent => 1;
+      setup_channel 'extra-portability', silent => 1;
+      setup_channel 'gnu', silent => 1;
+    }
+  else
+    {
+      prog_error "level '$name' not recognized";
+    }
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::Channels>
+
+=head1 HISTORY
+
+Written by Alexandre Duret-Lutz E<lt>F<adl@gnu.org>E<gt>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Channels.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Channels.pm
new file mode 100644
index 0000000..fe843ab
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Channels.pm
@@ -0,0 +1,836 @@
+# Copyright (C) 2002-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+###############################################################
+# The main copy of this file is in Automake's git repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Automake::Channels;
+
+=head1 NAME
+
+Automake::Channels - support functions for error and warning management
+
+=head1 SYNOPSIS
+
+  use Automake::Channels;
+
+  # Register a channel to output warnings about unused variables.
+  register_channel 'unused', type => 'warning';
+
+  # Register a channel for system errors.
+  register_channel 'system', type => 'error', exit_code => 4;
+
+  # Output a message on channel 'unused'.
+  msg 'unused', "$file:$line", "unused variable '$var'";
+
+  # Make the 'unused' channel silent.
+  setup_channel 'unused', silent => 1;
+
+  # Turn on all channels of type 'warning'.
+  setup_channel_type 'warning', silent => 0;
+
+  # Redirect all channels to push messages on a Thread::Queue using
+  # the specified serialization key.
+  setup_channel_queue $queue, $key;
+
+  # Output a message pending in a Thread::Queue.
+  pop_channel_queue $queue;
+
+  # Treat all warnings as errors.
+  $warnings_are_errors = 1;
+
+  # Exit with the greatest exit code encountered so far.
+  exit $exit_code;
+
+=head1 DESCRIPTION
+
+This perl module provides support functions for handling diagnostic
+channels in programs.  Channels can be registered to convey fatal,
+error, warning, or debug messages.  Each channel has various options
+(e.g. is the channel silent, should duplicate messages be removed,
+etc.) that can also be overridden on a per-message basis.
+
+=cut
+
+use 5.006;
+use strict;
+use Exporter;
+use Carp;
+use File::Basename;
+
+use vars qw (@ISA @EXPORT %channels $me);
+
+@ISA = qw (Exporter);
+@EXPORT = qw ($exit_code $warnings_are_errors
+	      &reset_local_duplicates &reset_global_duplicates
+	      &register_channel &msg &exists_channel &channel_type
+	      &setup_channel &setup_channel_type
+	      &dup_channel_setup &drop_channel_setup
+	      &buffer_messages &flush_messages
+	      &setup_channel_queue &pop_channel_queue
+	      US_GLOBAL US_LOCAL
+	      UP_NONE UP_TEXT UP_LOC_TEXT);
+
+$me = basename $0;
+
+=head2 Global Variables
+
+=over 4
+
+=item C<$exit_code>
+
+The greatest exit code seen so far. C<$exit_code> is updated from
+the C<exit_code> options of C<fatal> and C<error> channels.
+
+=cut
+
+use vars qw ($exit_code);
+$exit_code = 0;
+
+=item C<$warnings_are_errors>
+
+Set this variable to 1 if warning messages should be treated as
+errors (i.e. if they should update C<$exit_code>).
+
+=cut
+
+use vars qw ($warnings_are_errors);
+$warnings_are_errors = 0;
+
+=back
+
+=head2 Constants
+
+=over 4
+
+=item C<UP_NONE>, C<UP_TEXT>, C<UP_LOC_TEXT>
+
+Possible values for the C<uniq_part> options.  This selects the part
+of the message that should be considered when filtering out duplicates.
+If C<UP_LOC_TEXT> is used, the location and the explanation message
+are used for filtering.  If C<UP_TEXT> is used, only the explanation
+message is used (so the same message will be filtered out if it appears
+at different locations).  C<UP_NONE> means that duplicate messages
+should be output.
+
+=cut
+
+use constant UP_NONE => 0;
+use constant UP_TEXT => 1;
+use constant UP_LOC_TEXT => 2;
+
+=item C<US_LOCAL>, C<US_GLOBAL>
+
+Possible values for the C<uniq_scope> options.
+Use C<US_GLOBAL> for error messages that should be printed only
+once during the execution of the program, C<US_LOCAL> for messages that
+should be printed only once per file.  (Actually, C<Channels> does not
+do this automatically when files change; it relies on you calling
+C<reset_local_duplicates> when this happens.)
+
+=cut
+
+# possible values for uniq_scope
+use constant US_LOCAL => 0;
+use constant US_GLOBAL => 1;
+
+=back
+
+=head2 Options
+
+Channels accept the options described below.  These options can be
+passed as a hash to the C<register_channel>, C<setup_channel>, and C<msg>
+functions.  The possible keys, with their default value are:
+
+=over
+
+=item C<type =E<gt> 'warning'>
+
+The type of the channel.  One of C<'debug'>, C<'warning'>, C<'error'>, or
+C<'fatal'>.  Fatal messages abort the program when they are output.
+Error messages update the exit status.  Debug and warning messages are
+harmless, except that warnings are treated as errors if
+C<$warnings_are_errors> is set.
+
+=item C<exit_code =E<gt> 1>
+
+The value to update C<$exit_code> with when a fatal or error message
+is emitted.  C<$exit_code> is also updated for warnings output
+when C<$warnings_are_errors> is set.
+
+=item C<file =E<gt> \*STDERR>
+
+The file where the error should be output.
+
+=item C<silent =E<gt> 0>
+
+Whether the channel should be silent.  Use this to disable a
+category of warnings, for instance.
+
+=item C<ordered =E<gt> 1>
+
+Whether, with multi-threaded execution, the message should be queued
+for ordered output.
+
+=item C<uniq_part =E<gt> UP_LOC_TEXT>
+
+The part of the message subject to duplicate filtering.  See the
+documentation for the C<UP_NONE>, C<UP_TEXT>, and C<UP_LOC_TEXT>
+constants above.
+
+C<uniq_part> can also be set to an arbitrary string that will be used
+instead of the message when considering duplicates.
+
+=item C<uniq_scope =E<gt> US_LOCAL>
+
+The scope of duplicate filtering.  See the documentation for the
+C<US_LOCAL>, and C<US_GLOBAL> constants above.
+
+=item C<header =E<gt> ''>
+
+A string to prepend to each message emitted through this channel.
+With partial messages, only the first part will have C<header>
+prepended.
+
+=item C<footer =E<gt> ''>
+
+A string to append to each message emitted through this channel.
+With partial messages, only the final part will have C<footer>
+appended.
+
+=item C<backtrace =E<gt> 0>
+
+Die with a stack backtrace after displaying the message.
+
+=item C<partial =E<gt> 0>
+
+When set, indicates a partial message that should
+be output along with the next message with C<partial> unset.
+Several partial messages can be stacked this way.
+
+Duplicate filtering will apply to the I<global> message resulting from
+all I<partial> messages, using the options from the last (non-partial)
+message.  Linking associated messages is the main reason to use this
+option.
+
+For instance the following messages
+
+  msg 'channel', 'foo:2', 'redefinition of A ...';
+  msg 'channel', 'foo:1', '... A previously defined here';
+  msg 'channel', 'foo:3', 'redefinition of A ...';
+  msg 'channel', 'foo:1', '... A previously defined here';
+
+will result in
+
+ foo:2: redefinition of A ...
+ foo:1: ... A previously defined here
+ foo:3: redefinition of A ...
+
+where the duplicate "I<... A previously defined here>" has been
+filtered out.
+
+Linking these messages using C<partial> as follows will prevent the
+fourth message from disappearing.
+
+  msg 'channel', 'foo:2', 'redefinition of A ...', partial => 1;
+  msg 'channel', 'foo:1', '... A previously defined here';
+  msg 'channel', 'foo:3', 'redefinition of A ...', partial => 1;
+  msg 'channel', 'foo:1', '... A previously defined here';
+
+Note that because the stack of C<partial> messages is printed with the
+first non-C<partial> message, most options of C<partial> messages will
+be ignored.
+
+=back
+
+=cut
+
+use vars qw (%_default_options %_global_duplicate_messages
+	     %_local_duplicate_messages);
+
+# Default options for a channel.
+%_default_options =
+  (
+   type => 'warning',
+   exit_code => 1,
+   file => \*STDERR,
+   silent => 0,
+   ordered => 1,
+   queue => 0,
+   queue_key => undef,
+   uniq_scope => US_LOCAL,
+   uniq_part => UP_LOC_TEXT,
+   header => '',
+   footer => '',
+   backtrace => 0,
+   partial => 0,
+   );
+
+# Filled with output messages as keys, to detect duplicates.
+# The value associated with each key is the number of occurrences
+# filtered out.
+%_local_duplicate_messages = ();
+%_global_duplicate_messages = ();
+
+sub _reset_duplicates (\%)
+{
+  my ($ref) = @_;
+  my $dup = 0;
+  foreach my $k (keys %$ref)
+    {
+      $dup += $ref->{$k};
+    }
+  %$ref = ();
+  return $dup;
+}
+
+
+=head2 Functions
+
+=over 4
+
+=item C<reset_local_duplicates ()>
+
+Reset local duplicate messages (see C<US_LOCAL>), and
+return the number of messages that have been filtered out.
+
+=cut
+
+sub reset_local_duplicates ()
+{
+  return _reset_duplicates %_local_duplicate_messages;
+}
+
+=item C<reset_global_duplicates ()>
+
+Reset local duplicate messages (see C<US_GLOBAL>), and
+return the number of messages that have been filtered out.
+
+=cut
+
+sub reset_global_duplicates ()
+{
+  return _reset_duplicates %_global_duplicate_messages;
+}
+
+sub _merge_options (\%%)
+{
+  my ($hash, %options) = @_;
+  local $_;
+
+  foreach (keys %options)
+    {
+      if (exists $hash->{$_})
+	{
+	  $hash->{$_} = $options{$_}
+	}
+      else
+	{
+	  confess "unknown option '$_'";
+	}
+    }
+  if ($hash->{'ordered'})
+    {
+      confess "fatal messages cannot be ordered"
+	if $hash->{'type'} eq 'fatal';
+      confess "backtrace cannot be output on ordered messages"
+	if $hash->{'backtrace'};
+    }
+}
+
+=item C<register_channel ($name, [%options])>
+
+Declare channel C<$name>, and override the default options
+with those listed in C<%options>.
+
+=cut
+
+sub register_channel ($;%)
+{
+  my ($name, %options) = @_;
+  my %channel_opts = %_default_options;
+  _merge_options %channel_opts, %options;
+  $channels{$name} = \%channel_opts;
+}
+
+=item C<exists_channel ($name)>
+
+Returns true iff channel C<$name> has been registered.
+
+=cut
+
+sub exists_channel ($)
+{
+  my ($name) = @_;
+  return exists $channels{$name};
+}
+
+=item C<channel_type ($name)>
+
+Returns the type of channel C<$name> if it has been registered.
+Returns the empty string otherwise.
+
+=cut
+
+sub channel_type ($)
+{
+  my ($name) = @_;
+  return $channels{$name}{'type'} if exists_channel $name;
+  return '';
+}
+
+# _format_sub_message ($LEADER, $MESSAGE)
+# ---------------------------------------
+# Split $MESSAGE at new lines and add $LEADER to each line.
+sub _format_sub_message ($$)
+{
+  my ($leader, $message) = @_;
+  return $leader . join ("\n" . $leader, split ("\n", $message)) . "\n";
+}
+
+# Store partial messages here. (See the 'partial' option.)
+use vars qw ($partial);
+$partial = '';
+
+# _format_message ($LOCATION, $MESSAGE, %OPTIONS)
+# -----------------------------------------------
+# Format the message.  Return a string ready to print.
+sub _format_message ($$%)
+{
+  my ($location, $message, %opts) = @_;
+  my $msg = ($partial eq '' ? $opts{'header'} : '') . $message
+	    . ($opts{'partial'} ? '' : $opts{'footer'});
+  if (ref $location)
+    {
+      # If $LOCATION is a reference, assume it's an instance of the
+      # Automake::Location class and display contexts.
+      my $loc = $location->get || $me;
+      $msg = _format_sub_message ("$loc: ", $msg);
+      for my $pair ($location->get_contexts)
+	{
+	  $msg .= _format_sub_message ($pair->[0] . ":   ", $pair->[1]);
+	}
+    }
+  else
+    {
+      $location ||= $me;
+      $msg = _format_sub_message ("$location: ", $msg);
+    }
+  return $msg;
+}
+
+# _enqueue ($QUEUE, $KEY, $UNIQ_SCOPE, $TO_FILTER, $MSG, $FILE)
+# -------------------------------------------------------------
+# Push message on a queue, to be processed by another thread.
+sub _enqueue ($$$$$$)
+{
+  my ($queue, $key, $uniq_scope, $to_filter, $msg, $file) = @_;
+  $queue->enqueue ($key, $msg, $to_filter, $uniq_scope);
+  confess "message queuing works only for STDERR"
+    if $file ne \*STDERR;
+}
+
+# _dequeue ($QUEUE)
+# -----------------
+# Pop a message from a queue and print it, similarly to how
+# _print_message would do it.  Return 0 if the queue is
+# empty.  Note that the key has already been dequeued.
+sub _dequeue ($)
+{
+  my ($queue) = @_;
+  my $msg = $queue->dequeue || return 0;
+  my $to_filter = $queue->dequeue;
+  my $uniq_scope = $queue->dequeue;
+  my $file = \*STDERR;
+
+  if ($to_filter ne '')
+    {
+      # Do we want local or global uniqueness?
+      my $dups;
+      if ($uniq_scope == US_LOCAL)
+	{
+	  $dups = \%_local_duplicate_messages;
+	}
+      elsif ($uniq_scope == US_GLOBAL)
+	{
+	  $dups = \%_global_duplicate_messages;
+	}
+      else
+	{
+	  confess "unknown value for uniq_scope: " . $uniq_scope;
+	}
+
+      # Update the hash of messages.
+      if (exists $dups->{$to_filter})
+	{
+	  ++$dups->{$to_filter};
+	  return 1;
+	}
+      else
+	{
+	  $dups->{$to_filter} = 0;
+	}
+    }
+  print $file $msg;
+  return 1;
+}
+
+
+# _print_message ($LOCATION, $MESSAGE, %OPTIONS)
+# ----------------------------------------------
+# Format the message, check duplicates, and print it.
+sub _print_message ($$%)
+{
+  my ($location, $message, %opts) = @_;
+
+  return 0 if ($opts{'silent'});
+
+  my $msg = _format_message ($location, $message, %opts);
+  if ($opts{'partial'})
+    {
+      # Incomplete message.  Store, don't print.
+      $partial .= $msg;
+      return;
+    }
+  else
+    {
+      # Prefix with any partial message sent so far.
+      $msg = $partial . $msg;
+      $partial = '';
+    }
+
+  msg ('note', '', 'warnings are treated as errors', uniq_scope => US_GLOBAL)
+    if ($opts{'type'} eq 'warning' && $warnings_are_errors);
+
+  # Check for duplicate message if requested.
+  my $to_filter;
+  if ($opts{'uniq_part'} ne UP_NONE)
+    {
+      # Which part of the error should we match?
+      if ($opts{'uniq_part'} eq UP_TEXT)
+	{
+	  $to_filter = $message;
+	}
+      elsif ($opts{'uniq_part'} eq UP_LOC_TEXT)
+	{
+	  $to_filter = $msg;
+	}
+      else
+	{
+	  $to_filter = $opts{'uniq_part'};
+	}
+
+      # Do we want local or global uniqueness?
+      my $dups;
+      if ($opts{'uniq_scope'} == US_LOCAL)
+	{
+	  $dups = \%_local_duplicate_messages;
+	}
+      elsif ($opts{'uniq_scope'} == US_GLOBAL)
+	{
+	  $dups = \%_global_duplicate_messages;
+	}
+      else
+	{
+	  confess "unknown value for uniq_scope: " . $opts{'uniq_scope'};
+	}
+
+      # Update the hash of messages.
+      if (exists $dups->{$to_filter})
+	{
+	  ++$dups->{$to_filter};
+	  return 0;
+	}
+      else
+	{
+	  $dups->{$to_filter} = 0;
+	}
+    }
+  my $file = $opts{'file'};
+  if ($opts{'ordered'} && $opts{'queue'})
+    {
+      _enqueue ($opts{'queue'}, $opts{'queue_key'}, $opts{'uniq_scope'},
+		$to_filter, $msg, $file);
+    }
+  else
+    {
+      print $file $msg;
+    }
+  return 1;
+}
+
+=item C<msg ($channel, $location, $message, [%options])>
+
+Emit a message on C<$channel>, overriding some options of the channel with
+those specified in C<%options>.  Obviously C<$channel> must have been
+registered with C<register_channel>.
+
+C<$message> is the text of the message, and C<$location> is a location
+associated to the message.
+
+For instance to complain about some unused variable C<mumble>
+declared at line 10 in F<foo.c>, one could do:
+
+  msg 'unused', 'foo.c:10', "unused variable 'mumble'";
+
+If channel C<unused> is not silent (and if this message is not a duplicate),
+the following would be output:
+
+  foo.c:10: unused variable 'mumble'
+
+C<$location> can also be an instance of C<Automake::Location>.  In this
+case, the stack of contexts will be displayed in addition.
+
+If C<$message> contains newline characters, C<$location> is prepended
+to each line.  For instance,
+
+  msg 'error', 'somewhere', "1st line\n2nd line";
+
+becomes
+
+  somewhere: 1st line
+  somewhere: 2nd line
+
+If C<$location> is an empty string, it is replaced by the name of the
+program.  Actually, if you don't use C<%options>, you can even
+elide the empty C<$location>.  Thus
+
+  msg 'fatal', '', 'fatal error';
+  msg 'fatal', 'fatal error';
+
+both print
+
+  progname: fatal error
+
+=cut
+
+
+use vars qw (@backlog %buffering);
+
+# See buffer_messages() and flush_messages() below.
+%buffering = ();	# The map of channel types to buffer.
+@backlog = ();		# The buffer of messages.
+
+sub msg ($$;$%)
+{
+  my ($channel, $location, $message, %options) = @_;
+
+  if (! defined $message)
+    {
+      $message = $location;
+      $location = '';
+    }
+
+  confess "unknown channel $channel" unless exists $channels{$channel};
+
+  my %opts = %{$channels{$channel}};
+  _merge_options (%opts, %options);
+
+  if (exists $buffering{$opts{'type'}})
+    {
+      push @backlog, [$channel, $location->clone, $message, %options];
+      return;
+    }
+
+  # Print the message if needed.
+  if (_print_message ($location, $message, %opts))
+    {
+      # Adjust exit status.
+      if ($opts{'type'} eq 'error'
+	  || $opts{'type'} eq 'fatal'
+	  || ($opts{'type'} eq 'warning' && $warnings_are_errors))
+	{
+	  my $es = $opts{'exit_code'};
+	  $exit_code = $es if $es > $exit_code;
+	}
+
+      # Die on fatal messages.
+      confess if $opts{'backtrace'};
+      if ($opts{'type'} eq 'fatal')
+        {
+	  # flush messages explicitly here, needed in worker threads.
+	  STDERR->flush;
+	  exit $exit_code;
+	}
+    }
+}
+
+
+=item C<setup_channel ($channel, %options)>
+
+Override the options of C<$channel> with those specified by C<%options>.
+
+=cut
+
+sub setup_channel ($%)
+{
+  my ($name, %opts) = @_;
+  confess "unknown channel $name" unless exists $channels{$name};
+  _merge_options %{$channels{$name}}, %opts;
+}
+
+=item C<setup_channel_type ($type, %options)>
+
+Override the options of any channel of type C<$type>
+with those specified by C<%options>.
+
+=cut
+
+sub setup_channel_type ($%)
+{
+  my ($type, %opts) = @_;
+  foreach my $channel (keys %channels)
+    {
+      setup_channel $channel, %opts
+	if $channels{$channel}{'type'} eq $type;
+    }
+}
+
+=item C<dup_channel_setup ()>, C<drop_channel_setup ()>
+
+Sometimes it is necessary to make temporary modifications to channels.
+For instance one may want to disable a warning while processing a
+particular file, and then restore the initial setup.  These two
+functions make it easy: C<dup_channel_setup ()> saves a copy of the
+current configuration for later restoration by
+C<drop_channel_setup ()>.
+
+You can think of this as a stack of configurations whose first entry
+is the active one.  C<dup_channel_setup ()> duplicates the first
+entry, while C<drop_channel_setup ()> just deletes it.
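+
+A minimal sketch (illustrative only, using the C<unused> channel from
+the synopsis):
+
+  dup_channel_setup ();                  # save the current configuration
+  setup_channel 'unused', silent => 1;   # temporarily silence a channel
+  # ... process one file ...
+  drop_channel_setup ();                 # restore the saved configuration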
+
+=cut
+
+use vars qw (@_saved_channels @_saved_werrors);
+@_saved_channels = ();
+@_saved_werrors = ();
+
+sub dup_channel_setup ()
+{
+  my %channels_copy;
+  foreach my $k1 (keys %channels)
+    {
+      $channels_copy{$k1} = {%{$channels{$k1}}};
+    }
+  push @_saved_channels, \%channels_copy;
+  push @_saved_werrors, $warnings_are_errors;
+}
+
+sub drop_channel_setup ()
+{
+  my $saved = pop @_saved_channels;
+  %channels = %$saved;
+  $warnings_are_errors = pop @_saved_werrors;
+}
+
+=item C<buffer_messages (@types)>, C<flush_messages ()>
+
+By default, when C<msg> is called, messages are processed immediately.
+
+Sometimes it is necessary to delay the output of messages.
+For instance you might want to make diagnostics before
+channels have been completely configured.
+
+After C<buffer_messages(@types)> has been called, messages sent with
+C<msg> to a channel whose type is listed in C<@types> will be stored in a
+list for later processing.
+
+This backlog of messages is processed when C<flush_messages> is
+called, with the current channel options (not the options in effect
+at the time of C<msg>).  So for instance, if some channel was silenced
+in the meantime, messages to this channel will not be printed.
+
+C<flush_messages> cancels the effect of C<buffer_messages>.  Following
+calls to C<msg> are processed immediately as usual.
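+
+A minimal sketch (illustrative only, using the C<unused> channel from
+the synopsis):
+
+  register_channel 'unused', type => 'warning';
+  buffer_messages ('warning');                      # hold warnings back
+  msg 'unused', 'foo.c:10', "unused variable 'x'";  # buffered for now
+  flush_messages ();                                # printed here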
+
+=cut
+
+sub buffer_messages (@)
+{
+  foreach my $type (@_)
+    {
+      $buffering{$type} = 1;
+    }
+}
+
+sub flush_messages ()
+{
+  %buffering = ();
+  foreach my $args (@backlog)
+    {
+      &msg (@$args);
+    }
+  @backlog = ();
+}
+
+=item C<setup_channel_queue ($queue, $key)>
+
+Set the queue to fill for each channel that is ordered,
+and the key to use for serialization.
+
+=cut
+sub setup_channel_queue ($$)
+{
+  my ($queue, $key) = @_;
+  foreach my $channel (keys %channels)
+    {
+      setup_channel $channel, queue => $queue, queue_key => $key
+        if $channels{$channel}{'ordered'};
+    }
+}
+
+=item C<pop_channel_queue ($queue)>
+
+Pop a message off C<$queue>; the key has already been popped.
+
+=cut
+sub pop_channel_queue ($)
+{
+  my ($queue) = @_;
+  return _dequeue ($queue);
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::Location>
+
+=head1 HISTORY
+
+Written by Alexandre Duret-Lutz E<lt>F<adl@gnu.org>E<gt>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Condition.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Condition.pm
new file mode 100644
index 0000000..3231d71
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Condition.pm
@@ -0,0 +1,657 @@
+# Copyright (C) 1997-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Condition;
+
+use 5.006;
+use strict;
+use Carp;
+
+require Exporter;
+use vars '@ISA', '@EXPORT_OK';
+@ISA = qw/Exporter/;
+@EXPORT_OK = qw/TRUE FALSE reduce_and reduce_or/;
+
+=head1 NAME
+
+Automake::Condition - record a conjunction of conditionals
+
+=head1 SYNOPSIS
+
+  use Automake::Condition;
+
+  # Create a condition to represent "COND1 and not COND2".
+  my $cond = new Automake::Condition "COND1_TRUE", "COND2_FALSE";
+  # Create a condition to represent "not COND3".
+  my $other = new Automake::Condition "COND3_FALSE";
+
+  # Create a condition to represent
+  #   "COND1 and not COND2 and not COND3".
+  my $both = $cond->merge ($other);
+
+  # Likewise, but using a list of conditional strings
+  my $both2 = $cond->merge_conds ("COND3_FALSE");
+
+  # Strip from $both any subconditions which are in $other.
+  # This is the opposite of merge.
+  $cond = $both->strip ($other);
+
+  # Return the list of conditions ("COND1_TRUE", "COND2_FALSE"):
+  my @conds = $cond->conds;
+
+  # Is $cond always true?  (Not in this example)
+  if ($cond->true) { ... }
+
+  # Is $cond always false? (Not in this example)
+  if ($cond->false) { ... }
+
+  # Return the list of conditionals as a string:
+  #  "COND1_TRUE COND2_FALSE"
+  my $str = $cond->string;
+
+  # Return the list of conditionals as a human readable string:
+  #  "COND1 and !COND2"
+  my $str = $cond->human;
+
+  # Return the list of conditionals as a AC_SUBST-style string:
+  #  "@COND1_TRUE@@COND2_FALSE@"
+  my $subst = $cond->subst_string;
+
+  # Is $cond true when $both is true?  (Yes in this example)
+  if ($cond->true_when ($both)) { ... }
+
+  # Is $cond redundant w.r.t. {$other, $both}?
+  # (Yes in this example)
+  if ($cond->redundant_wrt ($other, $both)) { ... }
+
+  # Does $cond imply any of {$other, $both}?
+  # (Not in this example)
+  if ($cond->implies_any ($other, $both)) { ... }
+
+  # Remove superfluous conditionals assuming they will eventually
+  # be multiplied together.
+  # (Returns @conds = ($both) in this example, because
+  # $other and $cond are implied by $both.)
+  @conds = Automake::Condition::reduce_and ($other, $both, $cond);
+
+  # Remove superfluous conditionals assuming they will eventually
+  # be summed together.
+  # (Returns @conds = ($cond, $other) in this example, because
+  # $both is a subset condition of $cond: $cond is true whenever $both
+  # is true.)
+  @conds = Automake::Condition::reduce_or ($other, $both, $cond);
+
+  # Invert a Condition.  This returns a list of Conditions.
+  @conds = $both->not;
+
+=head1 DESCRIPTION
+
+A C<Condition> is a conjunction of conditionals (i.e., atomic conditions
+defined in F<configure.ac> by C<AM_CONDITIONAL>).  In Automake they
+are used to represent the conditions under which F<Makefile> variables and
+F<Makefile> rules are defined.
+
+If the variable C<VAR> is defined as
+
+  if COND1
+    if COND2
+      VAR = value
+    endif
+  endif
+
+then it will be associated with a C<Condition> created with
+the following statement.
+
+  new Automake::Condition "COND1_TRUE", "COND2_TRUE";
+
+Remember that a C<Condition> is a I<conjunction> of conditionals, so
+the above C<Condition> means C<VAR> is defined when C<COND1>
+B<and> C<COND2> are true. There is no way to express disjunctions
+(i.e., I<or>s) with this class (but see L<DisjConditions>).
+
+Another point worth mentioning is that each C<Condition> object is
+unique with respect to its conditionals.  Two C<Condition> objects
+created for the same set of conditionals will have the same address.
+This makes it easy to compare C<Condition>s: just compare the
+references.
+
+  my $c1 = new Automake::Condition "COND1_TRUE", "COND2_TRUE";
+  my $c2 = new Automake::Condition "COND1_TRUE", "COND2_TRUE";
+  $c1 == $c2;  # True!
+
+=head2 Methods
+
+=over 4
+
+=item C<$cond = new Automake::Condition [@conds]>
+
+Return a C<Condition> object for the conjunction of conditionals
+listed in C<@conds> as strings.
+
+An item in C<@conds> should be either C<"FALSE">, C<"TRUE">, or have
+the form C<"NAME_FALSE"> or C<"NAME_TRUE"> where C<NAME> can be
+anything (in practice C<NAME> should be the name of a conditional
+declared in F<configure.ac> with C<AM_CONDITIONAL>, but it's not
+C<Automake::Condition>'s responsibility to ensure this).
+
+An empty C<@conds> means C<"TRUE">.
+
+As explained previously, the reference (object) returned is unique
+with respect to C<@conds>.  For this purpose, duplicate elements are
+ignored, and C<@conds> is rewritten as C<("FALSE")> if it contains
+C<"FALSE"> or two contradictory conditionals (such as C<"NAME_FALSE">
+and C<"NAME_TRUE">.)
+
+Therefore the following two statements create the same object (they
+both create the C<"FALSE"> condition).
+
+  my $c3 = new Automake::Condition "COND1_TRUE", "COND1_FALSE";
+  my $c4 = new Automake::Condition "COND2_TRUE", "FALSE";
+  $c3 == $c4;   # True!
+  $c3 == FALSE; # True!
+
+=cut
+
+# Keys in this hash are conditional strings. Values are the
+# associated object conditions.  This is used by 'new' to reuse
+# Condition objects with identical conditionals.
+use vars '%_condition_singletons';
+# Do NOT reset this hash here.  It's already empty by default,
+# and any setting would otherwise occur AFTER the 'TRUE' and 'FALSE'
+# constants definitions.
+#   %_condition_singletons = ();
+
+sub new ($;@)
+{
+  my ($class, @conds) = @_;
+  my $self = {
+    hash => {},
+  };
+  bless $self, $class;
+
+  for my $cond (@conds)
+    {
+      # Catch some common programming errors:
+      # - A Condition passed to new
+      confess "'$cond' is a reference, expected a string" if ref $cond;
+      # - A Condition passed as a string to new
+      confess "'$cond' does not look like a condition" if $cond =~ /::/;
+    }
+
+  # Accept strings like "FOO BAR" as shorthand for ("FOO", "BAR").
+  @conds = map { split (' ', $_) } @conds;
+
+  for my $cond (@conds)
+    {
+      next if $cond eq 'TRUE';
+
+      # Detect cases when @conds can be simplified to FALSE.
+      if (($cond eq 'FALSE' && $#conds > 0)
+	  || ($cond =~ /^(.*)_TRUE$/ && exists $self->{'hash'}{"${1}_FALSE"})
+	  || ($cond =~ /^(.*)_FALSE$/ && exists $self->{'hash'}{"${1}_TRUE"}))
+	{
+	  return &FALSE;
+	}
+
+      $self->{'hash'}{$cond} = 1;
+    }
+
+  my $key = $self->string;
+  if (exists $_condition_singletons{$key})
+    {
+      return $_condition_singletons{$key};
+    }
+  $_condition_singletons{$key} = $self;
+  return $self;
+}
+
+=item C<$newcond = $cond-E<gt>merge (@otherconds)>
+
+Return a new condition which is the conjunction of
+C<$cond> and C<@otherconds>.
+
+=cut
+
+sub merge ($@)
+{
+  my ($self, @otherconds) = @_;
+  new Automake::Condition (map { $_->conds } ($self, @otherconds));
+}
+
+=item C<$newcond = $cond-E<gt>merge_conds (@conds)>
+
+Return a new condition which is the conjunction of C<$cond> and
+C<@conds>, where C<@conds> is a list of conditional strings, as
+passed to C<new>.
+
+=cut
+
+sub merge_conds ($@)
+{
+  my ($self, @conds) = @_;
+  new Automake::Condition $self->conds, @conds;
+}
+
+=item C<$newcond = $cond-E<gt>strip ($minuscond)>
+
+Return a new condition which has all the conditionals of C<$cond>
+except those of C<$minuscond>.  This is the opposite of C<merge>.
+
+=cut
+
+sub strip ($$)
+{
+  my ($self, $minus) = @_;
+  my @res = grep { not $minus->_has ($_) } $self->conds;
+  return new Automake::Condition @res;
+}
+
+=item C<@list = $cond-E<gt>conds>
+
+Return the set of conditionals defining C<$cond>, as strings.  Note that
+this might not be exactly the list passed to C<new> (or a
+concatenation of such lists if C<merge> was used), because of the
+cleanup mentioned in C<new>'s description.
+
+For instance C<$c3-E<gt>conds> will simply return C<("FALSE")>.
+
+=cut
+
+sub conds ($ )
+{
+  my ($self) = @_;
+  my @conds = keys %{$self->{'hash'}};
+  return ("TRUE") unless @conds;
+  return sort @conds;
+}
+
+# Undocumented, shouldn't be needed outside of this class.
+sub _has ($$)
+{
+  my ($self, $cond) = @_;
+  return exists $self->{'hash'}{$cond};
+}
+
+=item C<$cond-E<gt>false>
+
+Return 1 iff this condition is always false.
+
+=cut
+
+sub false ($ )
+{
+  my ($self) = @_;
+  return $self->_has ('FALSE');
+}
+
+=item C<$cond-E<gt>true>
+
+Return 1 iff this condition is always true.
+
+=cut
+
+sub true ($ )
+{
+  my ($self) = @_;
+  return 0 == keys %{$self->{'hash'}};
+}
+
+=item C<$cond-E<gt>string>
+
+Build a string which denotes the condition.
+
+For instance using the C<$cond> definition from L<SYNOPSIS>,
+C<$cond-E<gt>string> will return C<"COND1_TRUE COND2_FALSE">.
+
+=cut
+
+sub string ($ )
+{
+  my ($self) = @_;
+
+  return $self->{'string'} if defined $self->{'string'};
+
+  my $res = '';
+  if ($self->false)
+    {
+      $res = 'FALSE';
+    }
+  else
+    {
+      $res = join (' ', $self->conds);
+    }
+  $self->{'string'} = $res;
+  return $res;
+}
+
+=item C<$cond-E<gt>human>
+
+Build a human readable string which denotes the condition.
+
+For instance using the C<$cond> definition from L<SYNOPSIS>,
+C<$cond-E<gt>human> will return C<"COND1 and !COND2">.
+
+=cut
+
+sub _to_human ($ )
+{
+  my ($s) = @_;
+  if ($s =~ /^(.*)_(TRUE|FALSE)$/)
+    {
+      return (($2 eq 'FALSE') ? '!' : '') . $1;
+    }
+  else
+    {
+      return $s;
+    }
+}
+
+sub human ($ )
+{
+  my ($self) = @_;
+
+  return $self->{'human'} if defined $self->{'human'};
+
+  my $res = '';
+  if ($self->false)
+    {
+      $res = 'FALSE';
+    }
+  else
+    {
+      $res = join (' and ', map { _to_human $_ } $self->conds);
+    }
+  $self->{'human'} = $res;
+  return $res;
+}
+
+=item C<$cond-E<gt>subst_string>
+
+Build a C<AC_SUBST>-style string for output in F<Makefile.in>.
+
+For instance using the C<$cond> definition from L<SYNOPSIS>,
+C<$cond-E<gt>subst_string> will return C<"@COND1_TRUE@@COND2_FALSE@">.
+
+=cut
+
+sub subst_string ($ )
+{
+  my ($self) = @_;
+
+  return $self->{'subst_string'} if defined $self->{'subst_string'};
+
+  my $res = '';
+  if ($self->false)
+    {
+      $res = '#';
+    }
+  elsif (! $self->true)
+    {
+      $res = '@' . join ('@@', sort $self->conds) . '@';
+    }
+  $self->{'subst_string'} = $res;
+  return $res;
+}
+
+=item C<$cond-E<gt>true_when ($when)>
+
+Return 1 iff C<$cond> is true when C<$when> is true.
+Return 0 otherwise.
+
+Using the definitions from L<SYNOPSIS>, C<$cond> is true
+when C<$both> is true, but the converse is wrong.
+
+=cut
+
+sub true_when ($$)
+{
+  my ($self, $when) = @_;
+
+  # Nothing is true when FALSE (not even FALSE itself, but it
+  # shouldn't hurt if you decide to change that).
+  return 0 if $self->false || $when->false;
+
+  # If we are true, we stay true when $when is true :)
+  return 1 if $self->true;
+
+  # $SELF is true under $WHEN if each conditional component of $SELF
+  # exists in $WHEN.
+  foreach my $cond ($self->conds)
+    {
+      return 0 unless $when->_has ($cond);
+    }
+  return 1;
+}
+
+=item C<$cond-E<gt>redundant_wrt (@conds)>
+
+Return 1 iff C<$cond> is true for any condition in C<@conds>.
+If @conds is empty, return 1 iff C<$cond> is C<FALSE>.
+Return 0 otherwise.
+
+=cut
+
+sub redundant_wrt ($@)
+{
+  my ($self, @conds) = @_;
+
+  foreach my $cond (@conds)
+    {
+      return 1 if $self->true_when ($cond);
+    }
+  return $self->false;
+}
+
+=item C<$cond-E<gt>implies_any (@conds)>
+
+Return 1 iff C<$cond> implies any of the conditions in C<@conds>.
+Return 0 otherwise.
+
+=cut
+
+sub implies_any ($@)
+{
+  my ($self, @conds) = @_;
+
+  foreach my $cond (@conds)
+    {
+      return 1 if $cond->true_when ($self);
+    }
+  return 0;
+}
+
+=item C<$cond-E<gt>not>
+
+Return a negation of C<$cond> as a list of C<Condition>s.
+This list should be used to construct a C<DisjConditions>
+(we cannot return a C<DisjConditions> from C<Automake::Condition>,
+because that would make these two packages interdependent).
+
+=cut
+
+sub not ($ )
+{
+  my ($self) = @_;
+  return @{$self->{'not'}} if defined $self->{'not'};
+  my @res =
+    map { new Automake::Condition &conditional_negate ($_) } $self->conds;
+  $self->{'not'} = [@res];
+  return @res;
+}
+
+=item C<$cond-E<gt>multiply (@conds)>
+
+Assumption: C<@conds> represent a disjunction of conditions.
+
+Return the result of multiplying C<$cond> with that disjunction.
+The result will be a list of conditions suitable to construct a
+C<DisjConditions>.
+
+=cut
+
+sub multiply ($@)
+{
+  my ($self, @set) = @_;
+  my %res = ();
+  for my $cond (@set)
+    {
+      my $ans = $self->merge ($cond);
+      $res{$ans} = $ans;
+    }
+
+  # FALSE can always be removed from a disjunction.
+  delete $res{FALSE};
+
+  # Now, $self is a common factor of the remaining conditions.
+  # If one of the conditions is $self, we can discard the rest.
+  return ($self, ())
+    if exists $res{$self};
+
+  return (values %res);
+}
+
+=back
+
+=head2 Other helper functions
+
+=over 4
+
+=item C<TRUE>
+
+The C<"TRUE"> conditional.
+
+=item C<FALSE>
+
+The C<"FALSE"> conditional.
+
+=cut
+
+use constant TRUE => new Automake::Condition "TRUE";
+use constant FALSE => new Automake::Condition "FALSE";
+
+=item C<reduce_and (@conds)>
+
+Return a subset of @conds with the property that the conjunction of
+the subset is the same as the conjunction of @conds.  For example, if
+both C<COND1_TRUE COND2_TRUE> and C<COND1_TRUE> are in the list,
+discard the latter.  If the input list is empty, return C<(TRUE)>.
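+
+A minimal sketch (illustrative only):
+
+  my $c12 = new Automake::Condition "COND1_TRUE", "COND2_TRUE";
+  my $c1  = new Automake::Condition "COND1_TRUE";
+  # $c1 is true whenever $c12 is true, so it is redundant here:
+  my @and = Automake::Condition::reduce_and ($c12, $c1);  # ($c12)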
+
+=cut
+
+sub reduce_and (@)
+{
+  my (@conds) = @_;
+  my @ret = ();
+  my $cond;
+  while (@conds > 0)
+    {
+      $cond = shift @conds;
+
+      # FALSE is absorbent.
+      return FALSE
+	if $cond == FALSE;
+
+      if (! $cond->redundant_wrt (@ret, @conds))
+	{
+	  push (@ret, $cond);
+	}
+    }
+
+  return TRUE if @ret == 0;
+  return @ret;
+}
+
+=item C<reduce_or (@conds)>
+
+Return a subset of @conds with the property that the disjunction of
+the subset is equivalent to the disjunction of @conds.  For example,
+if both C<COND1_TRUE COND2_TRUE> and C<COND1_TRUE> are in the list,
+discard the former.  If the input list is empty, return C<(FALSE)>.
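+
+A minimal sketch (illustrative only):
+
+  my $c12 = new Automake::Condition "COND1_TRUE", "COND2_TRUE";
+  my $c1  = new Automake::Condition "COND1_TRUE";
+  # $c12 implies $c1, so the disjunction collapses to $c1:
+  my @or = Automake::Condition::reduce_or ($c12, $c1);    # ($c1)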
+
+=cut
+
+sub reduce_or (@)
+{
+  my (@conds) = @_;
+  my @ret = ();
+  my $cond;
+  while (@conds > 0)
+    {
+      $cond = shift @conds;
+
+      next
+       if $cond == FALSE;
+      return TRUE
+       if $cond == TRUE;
+
+      push (@ret, $cond)
+       unless $cond->implies_any (@ret, @conds);
+    }
+
+  return FALSE if @ret == 0;
+  return @ret;
+}
+
+=item C<conditional_negate ($condstr)>
+
+Negate a conditional string.
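+
+For example (illustrative): C<conditional_negate ('COND1_TRUE')> returns
+C<'COND1_FALSE'>, and C<conditional_negate ('COND1_FALSE')> returns
+C<'COND1_TRUE'>.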
+
+=cut
+
+sub conditional_negate ($)
+{
+  my ($cond) = @_;
+
+  $cond =~ s/TRUE$/TRUEO/;
+  $cond =~ s/FALSE$/TRUE/;
+  $cond =~ s/TRUEO$/FALSE/;
+
+  return $cond;
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::DisjConditions>.
+
+=head1 HISTORY
+
+C<AM_CONDITIONAL>s and supporting code were added to Automake 1.1o by
+Ian Lance Taylor <ian@cygnus.org> in 1997.  Since then it has been
+improved by Tom Tromey <tromey@redhat.com>, Richard Boulton
+<richard@tartarus.org>, Raja R Harinath <harinath@cs.umn.edu>,
+Akim Demaille <akim@epita.fr>, and  Alexandre Duret-Lutz <adl@gnu.org>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Config.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Config.pm
new file mode 100644
index 0000000..6b0705e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Config.pm
@@ -0,0 +1,62 @@
+#  -*- Perl -*-
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+# Generated from Config.in; do not edit by hand.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Config;
+use strict;
+
+use 5.006;
+require Exporter;
+
+our @ISA = qw (Exporter);
+our @EXPORT = qw ($APIVERSION $PACKAGE $PACKAGE_BUGREPORT $VERSION
+                  $RELEASE_YEAR $libdir $perl_threads);
+
+# Parameters set by configure.  Not to be changed.  NOTE: assign
+# VERSION as string so that e.g. version 0.30 will print correctly.
+our $APIVERSION = '1.14';
+our $PACKAGE = 'automake';
+our $PACKAGE_BUGREPORT = 'bug-automake@gnu.org';
+our $VERSION = '1.14.1';
+our $RELEASE_YEAR = '2013';
+our $libdir = '//share/automake-1.14';
+
+our $perl_threads = 0;
+# We need at least this version for CLONE support.
+if (eval { require 5.007_002; })
+  {
+    use Config;
+    $perl_threads = $Config{useithreads};
+  }
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Configure_ac.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Configure_ac.pm
new file mode 100644
index 0000000..8e48991
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Configure_ac.pm
@@ -0,0 +1,128 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+###############################################################
+# The main copy of this file is in Automake's git repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Automake::Configure_ac;
+
+use 5.006;
+use strict;
+use Exporter;
+use Automake::Channels;
+use Automake::ChannelDefs;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (&find_configure_ac &require_configure_ac);
+
+=head1 NAME
+
+Automake::Configure_ac - Locate configure.ac or configure.in.
+
+=head1 SYNOPSIS
+
+  use Automake::Configure_ac;
+
+  # Try to locate configure.in or configure.ac in the current
+  # directory.  It may be absent.  Complain if both files exist.
+  my $file_name = find_configure_ac;
+
+  # Likewise, but bomb out if the file does not exist.
+  my $file_name = require_configure_ac;
+
+  # Likewise, but in $dir.
+  my $file_name = find_configure_ac ($dir);
+  my $file_name = require_configure_ac ($dir);
+
+=over 4
+
+=back
+
+=head2 Functions
+
+=over 4
+
+=item C<$configure_ac = find_configure_ac ([$directory])>
+
+Find a F<configure.ac> or F<configure.in> file in C<$directory>,
+defaulting to the current directory.  Complain if both files are present.
+Return the name of the file found, or the former if neither is present.
+
+=cut
+
+sub find_configure_ac (;@)
+{
+  my ($directory) = @_;
+  $directory ||= '.';
+  my $configure_ac =
+    File::Spec->canonpath (File::Spec->catfile ($directory, 'configure.ac'));
+  my $configure_in =
+    File::Spec->canonpath (File::Spec->catfile ($directory, 'configure.in'));
+
+  if (-f $configure_in)
+    {
+      msg ('obsolete', "autoconf input should be named 'configure.ac'," .
+                       " not 'configure.in'");
+      if (-f $configure_ac)
+	{
+	  msg ('unsupported',
+	       "'$configure_ac' and '$configure_in' both present.\n"
+	       . "proceeding with '$configure_ac'");
+          return $configure_ac
+	}
+      else
+        {
+          return $configure_in;
+        }
+    }
+  return $configure_ac;
+}
+
+
+=item C<$configure_ac = require_configure_ac ([$directory])>
+
+Like C<find_configure_ac>, but fail if neither is present.
+
+=cut
+
+sub require_configure_ac (;$)
+{
+  my $res = find_configure_ac (@_);
+  fatal "'configure.ac' is required" unless -f $res;
+  return $res
+}
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/DisjConditions.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/DisjConditions.pm
new file mode 100644
index 0000000..c68d4cb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/DisjConditions.pm
@@ -0,0 +1,557 @@
+# Copyright (C) 1997-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::DisjConditions;
+
+use 5.006;
+use strict;
+use Carp;
+use Automake::Condition qw/TRUE FALSE/;
+
+=head1 NAME
+
+Automake::DisjConditions - record a disjunction of Conditions
+
+=head1 SYNOPSIS
+
+  use Automake::Condition;
+  use Automake::DisjConditions;
+
+  # Create a Condition to represent "COND1 and not COND2".
+  my $cond = new Automake::Condition "COND1_TRUE", "COND2_FALSE";
+  # Create a Condition to represent "not COND3".
+  my $other = new Automake::Condition "COND3_FALSE";
+
+  # Create a DisjConditions to represent
+  #   "(COND1 and not COND2) or (not COND3)"
+  my $set = new Automake::DisjConditions $cond, $other;
+
+  # Return the list of Conditions involved in $set.
+  my @conds = $set->conds;
+
+  # Return one of the Conditions involved in $set.
+  my $cond = $set->one_cond;
+
+  # Return true iff $set is always true (i.e. its subconditions
+  # cover all cases).
+  if ($set->true) { ... }
+
+  # Return true iff $set is always false (i.e. is empty, or contains
+  # only false conditions).
+  if ($set->false) { ... }
+
+  # Return a string representing the DisjConditions.
+  #   "COND1_TRUE COND2_FALSE | COND3_FALSE"
+  my $str = $set->string;
+
+  # Return a human readable string representing the DisjConditions.
+  #   "(COND1 and !COND2) or (!COND3)"
+  my $str = $set->human;
+
+  # Merge (OR) several DisjConditions.
+  my $all = $set->merge($set2, $set3, ...)
+
+  # Invert a DisjConditions, i.e., create a new DisjConditions
+  # that complements $set.
+  my $inv = $set->invert;
+
+  # Multiply two DisjConditions.
+  my $prod = $set1->multiply ($set2);
+
+  # Return the subconditions of a DisjConditions with respect to
+  # a Condition.  See the description for a real example.
+  my $subconds = $set->sub_conditions ($cond);
+
+  # Check whether a new definition in condition $cond would be
+  # ambiguous w.r.t. existing definitions in $set.
+  ($msg, $ambig_cond) = $set->ambiguous_p ($what, $cond);
+
+=head1 DESCRIPTION
+
+A C<DisjConditions> is a disjunction of C<Condition>s.  In Automake
+they are used to represent the conditions into which Makefile
+variables and Makefile rules are defined.
+
+If the variable C<VAR> is defined as
+
+  if COND1
+    if COND2
+      VAR = value1
+    endif
+  endif
+  if !COND3
+    if COND4
+      VAR = value2
+    endif
+  endif
+
+then it will be associated with a C<DisjConditions> created with
+the following statement.
+
+  new Automake::DisjConditions
+    (new Automake::Condition ("COND1_TRUE", "COND2_TRUE"),
+     new Automake::Condition ("COND3_FALSE", "COND4_TRUE"));
+
+As you can see, a C<DisjConditions> is made from a list of
+C<Condition>s.  Since C<DisjConditions> is a disjunction, and
+C<Condition> is a conjunction, the above can be read as
+follows.
+
+  (COND1 and COND2) or ((not COND3) and COND4)
+
+That's indeed the condition in which C<VAR> has a value.
+
+Like C<Condition> objects, a C<DisjConditions> object is unique
+with respect to its conditions.  Two C<DisjConditions> objects created
+for the same set of conditions will have the same address.  This makes
+it easy to compare C<DisjConditions>s: just compare the references.
+
+=head2 Methods
+
+=over 4
+
+=item C<$set = new Automake::DisjConditions [@conds]>
+
+Create a C<DisjConditions> object from the list of C<Condition>
+objects passed in arguments.
+
+If the C<@conds> list is empty, the C<DisjConditions> is assumed to be
+false.
+
+As explained previously, the reference (object) returned is unique
+with respect to C<@conds>.  For this purpose, duplicate elements are
+ignored.
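+
+For instance, false conditions are dropped, and identical sets of
+conditions share one object:
+
+  use Automake::Condition qw/TRUE FALSE/;
+
+  my $s1 = new Automake::DisjConditions (TRUE);
+  my $s2 = new Automake::DisjConditions (TRUE, FALSE);
+  # $s1 and $s2 are the very same reference.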
+
+=cut
+
+# Keys in this hash are DisjConditions strings. Values are the
+# associated object DisjConditions.  This is used by 'new' to reuse
+# DisjConditions objects with identical conditions.
+use vars '%_disjcondition_singletons';
+
+sub new ($;@)
+{
+  my ($class, @conds) = @_;
+  my @filtered_conds = ();
+  for my $cond (@conds)
+    {
+      confess "'$cond' isn't a reference" unless ref $cond;
+      confess "'$cond' isn't an Automake::Condition"
+	unless $cond->isa ("Automake::Condition");
+
+      # This is a disjunction of conditions, so we drop
+      # false conditions.  We'll always treat an "empty"
+      # DisjConditions as false for this reason.
+      next if $cond->false;
+
+      push @filtered_conds, $cond;
+    }
+
+  my $string;
+  if (@filtered_conds)
+    {
+      @filtered_conds = sort { $a->string cmp $b->string } @filtered_conds;
+      $string = join (' | ', map { $_->string } @filtered_conds);
+    }
+  else
+    {
+      $string = 'FALSE';
+    }
+
+  # Return any existing identical DisjConditions.
+  my $me = $_disjcondition_singletons{$string};
+  return $me if $me;
+
+  # Else, create a new DisjConditions.
+
+  # Store conditions as keys AND as values, because blessed
+  # objects are converted to strings when used as keys (so
+  # at least we still have the value when we need to call
+  # a method).
+  my %h = map {$_ => $_} @filtered_conds;
+
+  my $self = {
+    hash => \%h,
+    string => $string,
+    conds => \@filtered_conds,
+  };
+  bless $self, $class;
+
+  $_disjcondition_singletons{$string} = $self;
+  return $self;
+}
+
+
+=item C<CLONE>
+
+Internal special subroutine to fix up the self hashes in
+C<%_disjcondition_singletons> upon thread creation.  C<CLONE> is invoked
+automatically with ithreads from Perl 5.7.2 or later, so if you use this
+module with earlier versions of Perl, it is not thread-safe.
+
+=cut
+
+sub CLONE
+{
+  foreach my $self (values %_disjcondition_singletons)
+    {
+      my %h = map { $_ => $_ } @{$self->{'conds'}};
+      $self->{'hash'} = \%h;
+    }
+}
+
+
+=item C<@conds = $set-E<gt>conds>
+
+Return the list of C<Condition> objects involved in C<$set>.
+
+=cut
+
+sub conds ($ )
+{
+  my ($self) = @_;
+  return @{$self->{'conds'}};
+}
+
+=item C<$cond = $set-E<gt>one_cond>
+
+Return one C<Condition> object involved in C<$set>.
+
+=cut
+
+sub one_cond ($)
+{
+  my ($self) = @_;
+  return (%{$self->{'hash'}},)[1];
+}
+
+=item C<$ret = $set-E<gt>false>
+
+Return 1 iff the C<DisjConditions> object is always false (i.e., if it
+is empty, or if it contains only false C<Condition>s). Return 0
+otherwise.
+
+=cut
+
+sub false ($ )
+{
+  my ($self) = @_;
+  return 0 == keys %{$self->{'hash'}};
+}
+
+=item C<$ret = $set-E<gt>true>
+
+Return 1 iff the C<DisjConditions> object is always true (i.e. covers all
+conditions). Return 0 otherwise.
+
+=cut
+
+sub true ($ )
+{
+  my ($self) = @_;
+  return $self->invert->false;
+}
+
+=item C<$str = $set-E<gt>string>
+
+Build a string which denotes the C<DisjConditions>.
+
+=cut
+
+sub string ($ )
+{
+  my ($self) = @_;
+  return $self->{'string'};
+}
+
+=item C<$cond-E<gt>human>
+
+Build a human readable string which denotes the C<DisjConditions>.
+
+=cut
+
+sub human ($ )
+{
+  my ($self) = @_;
+
+  return $self->{'human'} if defined $self->{'human'};
+
+  my $res = '';
+  if ($self->false)
+    {
+      $res = 'FALSE';
+    }
+  else
+    {
+      my @c = $self->conds;
+      if (1 == @c)
+	{
+	  $res = $c[0]->human;
+	}
+      else
+	{
+	  $res = '(' . join (') or (', map { $_->human } $self->conds) . ')';
+	}
+    }
+  $self->{'human'} = $res;
+  return $res;
+}
+
+
+=item C<$newcond = $cond-E<gt>merge (@otherconds)>
+
+Return a new C<DisjConditions> which is the disjunction of
+C<$cond> and C<@otherconds>.  Items in C<@otherconds> can be
+C<Condition>s or C<DisjConditions>.
+
+=cut
+
+sub merge ($@)
+{
+  my ($self, @otherconds) = @_;
+  new Automake::DisjConditions (
+    map { $_->isa ("Automake::DisjConditions") ? $_->conds : $_ }
+        ($self, @otherconds));
+}
+
+
+=item C<$prod = $set1-E<gt>multiply ($set2)>
+
+Multiply two conditional sets.
+
+  my $set1 = new Automake::DisjConditions
+    (new Automake::Condition ("A_TRUE"),
+     new Automake::Condition ("B_TRUE"));
+  my $set2 = new Automake::DisjConditions
+    (new Automake::Condition ("C_FALSE"),
+     new Automake::Condition ("D_FALSE"));
+
+C<$set1-E<gt>multiply ($set2)> will return
+
+  new Automake::DisjConditions
+    (new Automake::Condition ("A_TRUE", "C_FALSE"),
+     new Automake::Condition ("B_TRUE", "C_FALSE"),;
+     new Automake::Condition ("A_TRUE", "D_FALSE"),
+     new Automake::Condition ("B_TRUE", "D_FALSE"));
+
+The argument can also be a C<Condition>.
+
+=cut
+
+# Same as multiply() but take a list of Conditionals as second argument.
+# We use this in invert().
+sub _multiply ($@)
+{
+  my ($self, @set) = @_;
+  my @res = map { $_->multiply (@set) } $self->conds;
+  return new Automake::DisjConditions (Automake::Condition::reduce_or @res);
+}
+
+sub multiply ($$)
+{
+  my ($self, $set) = @_;
+  return $self->_multiply ($set) if $set->isa('Automake::Condition');
+  return $self->_multiply ($set->conds);
+}
+
+=item C<$inv = $set-E<gt>invert>
+
+Invert a C<DisjConditions>.  Return a C<DisjConditions> which is true
+when C<$set> is false, and vice-versa.
+
+  my $set = new Automake::DisjConditions
+    (new Automake::Condition ("A_TRUE", "B_TRUE"),
+     new Automake::Condition ("A_FALSE", "B_FALSE"));
+
+Calling C<$set-E<gt>invert> will return the following C<DisjConditions>.
+
+  new Automake::DisjConditions
+    (new Automake::Condition ("A_TRUE", "B_FALSE"),
+     new Automake::Condition ("A_FALSE", "B_TRUE"));
+
+We implement the inversion by a product-of-sums to sum-of-products
+conversion using repeated multiplications.  Because of the way we
+implement multiplication, the result of inversion is in canonical
+prime implicant form.
+
+=cut
+
+sub invert($ )
+{
+  my ($self) = @_;
+
+  return $self->{'invert'} if defined $self->{'invert'};
+
+  # The invert of an empty DisjConditions is TRUE.
+  my $res = new Automake::DisjConditions TRUE;
+
+  #   !((a.b)+(c.d)+(e.f))
+  # = (!a+!b).(!c+!d).(!e+!f)
+  # We develop this into a sum of product iteratively, starting from TRUE:
+  # 1) TRUE
+  # 2) TRUE.!a + TRUE.!b
+  # 3) TRUE.!a.!c + TRUE.!b.!c + TRUE.!a.!d + TRUE.!b.!d
+  # 4) TRUE.!a.!c.!e + TRUE.!b.!c.!e + TRUE.!a.!d.!e + TRUE.!b.!d.!e
+  #    + TRUE.!a.!c.!f + TRUE.!b.!c.!f + TRUE.!a.!d.!f + TRUE.!b.!d.!f
+  foreach my $cond ($self->conds)
+    {
+      $res = $res->_multiply ($cond->not);
+    }
+
+  # Cache result.
+  $self->{'invert'} = $res;
+  # It's tempting to also set $res->{'invert'} to $self, but that
+  # is a bad idea as $self hasn't been normalized in any way.
+  # (Different inputs can produce the same inverted set.)
+  return $res;
+}
+
+=item C<$self-E<gt>simplify>
+
+Return a C<DisjConditions> which is a simplified canonical form of C<$self>.
+This canonical form contains only prime implicants, but it can contain
+non-essential prime implicants.
+
+=cut
+
+sub simplify ($)
+{
+  my ($self) = @_;
+  return $self->invert->invert;
+}
+
+=item C<$self-E<gt>sub_conditions ($cond)>
+
+Return the subconditions of C<$self> that contains C<$cond>, with
+C<$cond> stripped.  More formally, return C<$res> such that
+C<$res-E<gt>multiply ($cond) == $self-E<gt>multiply ($cond)> and
+C<$res> does not mention any of the variables in C<$cond>.
+
+For instance, consider:
+
+  my $a = new Automake::DisjConditions
+    (new Automake::Condition ("A_TRUE", "B_TRUE"),
+     new Automake::Condition ("A_TRUE", "C_FALSE"),
+     new Automake::Condition ("A_TRUE", "B_FALSE", "C_TRUE"),
+     new Automake::Condition ("A_FALSE"));
+  my $b = new Automake::DisjConditions
+    (new Automake::Condition ("A_TRUE", "B_FALSE"));
+
+Calling C<$a-E<gt>sub_conditions ($b)> will return the following
+C<DisjConditions>.
+
+  new Automake::DisjConditions
+    (new Automake::Condition ("C_FALSE"), # From A_TRUE C_FALSE
+     new Automake::Condition ("C_TRUE")); # From A_TRUE B_FALSE C_TRUE"
+
+=cut
+
+sub sub_conditions ($$)
+{
+  my ($self, $subcond) = @_;
+
+  # Make $subcond blindingly apparent in the DisjConditions.
+  # For instance '$b->multiply($a->conds)' (from the POD example) is:
+  # 	(new Automake::Condition ("FALSE"),
+  # 	 new Automake::Condition ("A_TRUE", "B_FALSE", "C_FALSE"),
+  # 	 new Automake::Condition ("A_TRUE", "B_FALSE", "C_TRUE"),
+  # 	 new Automake::Condition ("FALSE"))
+  my @prodconds = $subcond->multiply ($self->conds);
+
+  # Now, strip $subcond from the remaining (i.e., non-false) Conditions.
+  my @res = map { $_->false ? () : $_->strip ($subcond) } @prodconds;
+
+  return new Automake::DisjConditions @res;
+}
+
+=item C<($string, $ambig_cond) = $condset-E<gt>ambiguous_p ($what, $cond)>
+
+Check for an ambiguous condition.  Return an error message and the
+other condition involved if we have an ambiguity.  Return an empty
+string and FALSE otherwise.
+
+C<$what> is the name of the thing being defined, to use in the error
+message.  C<$cond> is the C<Condition> under which it is being
+defined.  C<$condset> is the C<DisjConditions> under which it had
+already been defined.
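+
+A typical use, with C<$what>, C<$cond>, and C<$condset> as described
+above, might look like this:
+
+  my ($msg, $ambig_cond) = $condset->ambiguous_p ('FOO', $cond);
+  if ($msg)
+    {
+      # Report the ambiguity, pointing at $ambig_cond's definition.
+    }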
+
+=cut
+
+sub ambiguous_p ($$$)
+{
+  my ($self, $var, $cond) = @_;
+
+  # Note that these rules don't consider the following
+  # example as ambiguous.
+  #
+  #   if COND1
+  #     FOO = foo
+  #   endif
+  #   if COND2
+  #     FOO = bar
+  #   endif
+  #
+  # It's up to the user to not define COND1 and COND2
+  # simultaneously.
+
+  return ("$var multiply defined in condition " . $cond->human, $cond)
+    if exists $self->{'hash'}{$cond};
+
+  foreach my $vcond ($self->conds)
+    {
+      return ("$var was already defined in condition " . $vcond->human
+	      . ", which includes condition ". $cond->human, $vcond)
+	if $vcond->true_when ($cond);
+
+      return ("$var was already defined in condition " . $vcond->human
+	      . ", which is included in condition " . $cond->human, $vcond)
+	if $cond->true_when ($vcond);
+    }
+  return ('', FALSE);
+}
+
+=head1 SEE ALSO
+
+L<Automake::Condition>.
+
+=head1 HISTORY
+
+C<AM_CONDITIONAL>s and supporting code were added to Automake 1.1o by
+Ian Lance Taylor <ian@cygnus.org> in 1997.  Since then it has been
+improved by Tom Tromey <tromey@redhat.com>, Richard Boulton
+<richard@tartarus.org>, Raja R Harinath <harinath@cs.umn.edu>, Akim
+Demaille <akim@epita.fr>, Pavel Roskin <proski@gnu.org>, and
+Alexandre Duret-Lutz <adl@gnu.org>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/FileUtils.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/FileUtils.pm
new file mode 100644
index 0000000..d650aef
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/FileUtils.pm
@@ -0,0 +1,425 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+###############################################################
+# The main copy of this file is in Automake's git repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Automake::FileUtils;
+
+=head1 NAME
+
+Automake::FileUtils - handling files
+
+=head1 SYNOPSIS
+
+  use Automake::FileUtils;
+
+=head1 DESCRIPTION
+
+This perl module provides various general purpose file handling functions.
+
+=cut
+
+use 5.006;
+use strict;
+use Exporter;
+use File::stat;
+use IO::File;
+use Automake::Channels;
+use Automake::ChannelDefs;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (&contents
+	      &find_file &mtime
+	      &update_file &up_to_date_p
+	      &xsystem &xsystem_hint &xqx
+	      &dir_has_case_matching_file &reset_dir_cache
+	      &set_dir_cache_file);
+
+=item C<find_file ($file_name, @include)>
+
+Return the first path for a C<$file_name> in the C<include>s.
+
+We match exactly the behavior of GNU M4: first look in the current
+directory (which includes the case of absolute file names), and then,
+if the file name is not absolute, look in C<@include>.
+
+If the file is flagged as optional (ends with C<?>), then return undef
+if absent, otherwise exit with error.
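+
+For example (the paths below are hypothetical):
+
+  # Look for 'foo.m4' in the current directory, then in 'm4' and
+  # '/usr/share/aclocal'; the trailing '?' makes the file optional,
+  # so undef is returned instead of aborting when it is absent.
+  my $path = find_file ('foo.m4?', 'm4', '/usr/share/aclocal');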
+
+=cut
+
+# $FILE_NAME
+# find_file ($FILE_NAME, @INCLUDE)
+# --------------------------------
+sub find_file ($@)
+{
+  use File::Spec;
+
+  my ($file_name, @include) = @_;
+  my $optional = 0;
+
+  $optional = 1
+    if $file_name =~ s/\?$//;
+
+  return File::Spec->canonpath ($file_name)
+    if -e $file_name;
+
+  if (!File::Spec->file_name_is_absolute ($file_name))
+    {
+      foreach my $path (@include)
+	{
+	  return File::Spec->canonpath (File::Spec->catfile ($path, $file_name))
+	    if -e File::Spec->catfile ($path, $file_name)
+	}
+    }
+
+  fatal "$file_name: no such file or directory"
+    unless $optional;
+  return undef;
+}
+
+=item C<mtime ($file)>
+
+Return the mtime of C<$file>.  Missing files, or C<-> standing for
+C<STDIN> or C<STDOUT> are "obsolete", i.e., as old as possible.
+
+=cut
+
+# $MTIME
+# MTIME ($FILE)
+# -------------
+sub mtime ($)
+{
+  my ($file) = @_;
+
+  return 0
+    if $file eq '-' || ! -f $file;
+
+  my $stat = stat ($file)
+    or fatal "cannot stat $file: $!";
+
+  return $stat->mtime;
+}
+
+
+=item C<update_file ($from, $to, [$force])>
+
+Rename C<$from> as C<$to>, preserving C<$to> timestamp if it has not
+changed, unless C<$force> is true (defaults to false).  Recognize
+C<$to> = C<-> standing for C<STDIN>.  C<$from> is always
+removed/renamed.
+
+=cut
+
+# &update_file ($FROM, $TO; $FORCE)
+# ---------------------------------
+sub update_file ($$;$)
+{
+  my ($from, $to, $force) = @_;
+  $force = 0
+    unless defined $force;
+  my $SIMPLE_BACKUP_SUFFIX = $ENV{'SIMPLE_BACKUP_SUFFIX'} || '~';
+  use File::Compare;
+  use File::Copy;
+
+  if ($to eq '-')
+    {
+      my $in = new IO::File $from, "<";
+      my $out = new IO::File (">-");
+      while ($_ = $in->getline)
+	{
+	  print $out $_;
+	}
+      $in->close;
+      unlink ($from) || fatal "cannot remove $from: $!";
+      return;
+    }
+
+  if (!$force && -f "$to" && compare ("$from", "$to") == 0)
+    {
+      # File didn't change, so don't update its mod time.
+      msg 'note', "'$to' is unchanged";
+      unlink ($from)
+        or fatal "cannot remove $from: $!";
+      return
+    }
+
+  if (-f "$to")
+    {
+      # Back up and install the new one.
+      move ("$to",  "$to$SIMPLE_BACKUP_SUFFIX")
+	or fatal "cannot backup $to: $!";
+      move ("$from", "$to")
+	or fatal "cannot rename $from as $to: $!";
+      msg 'note', "'$to' is updated";
+    }
+  else
+    {
+      move ("$from", "$to")
+	or fatal "cannot rename $from as $to: $!";
+      msg 'note', "'$to' is created";
+    }
+}
+
+
+=item C<up_to_date_p ($file, @dep)>
+
+Is C<$file> more recent than C<@dep>?
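+
+For example (the file names are hypothetical):
+
+  if (! up_to_date_p ('Makefile.in', 'Makefile.am', 'configure.ac'))
+    {
+      # 'Makefile.in' is older than one of its inputs; regenerate it.
+    }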
+
+=cut
+
+# $BOOLEAN
+# &up_to_date_p ($FILE, @DEP)
+# ---------------------------
+sub up_to_date_p ($@)
+{
+  my ($file, @dep) = @_;
+  my $mtime = mtime ($file);
+
+  foreach my $dep (@dep)
+    {
+      if ($mtime < mtime ($dep))
+	{
+	  verb "up_to_date ($file): outdated: $dep";
+	  return 0;
+	}
+    }
+
+  verb "up_to_date ($file): up to date";
+  return 1;
+}
+
+
+=item C<handle_exec_errors ($command, [$expected_exit_code = 0], [$hint])>
+
+Display an error message for C<$command>, based on the content of
+C<$?> and C<$!>.  Be quiet if the command exited normally
+with C<$expected_exit_code>.  If C<$hint> is given, display that as well
+if the command failed to run at all.
+
+=cut
+
+sub handle_exec_errors ($;$$)
+{
+  my ($command, $expected, $hint) = @_;
+  $expected = 0 unless defined $expected;
+  if (defined $hint)
+    {
+      $hint = "\n" . $hint;
+    }
+  else
+    {
+      $hint = '';
+    }
+
+  $command = (split (' ', $command))[0];
+  if ($!)
+    {
+      fatal "failed to run $command: $!" . $hint;
+    }
+  else
+    {
+      use POSIX qw (WIFEXITED WEXITSTATUS WIFSIGNALED WTERMSIG);
+
+      if (WIFEXITED ($?))
+	{
+	  my $status = WEXITSTATUS ($?);
+	  # Propagate exit codes.
+	  fatal ('',
+		 "$command failed with exit status: $status",
+		 exit_code => $status)
+	    unless $status == $expected;
+	}
+      elsif (WIFSIGNALED ($?))
+	{
+	  my $signal = WTERMSIG ($?);
+	  fatal "$command terminated by signal: $signal";
+	}
+      else
+	{
+	  fatal "$command exited abnormally";
+	}
+    }
+}
+
+=item C<xqx ($command)>
+
+Same as C<qx> (but in scalar context), but fails on errors.
+
+=cut
+
+# xqx ($COMMAND)
+# --------------
+sub xqx ($)
+{
+  my ($command) = @_;
+
+  verb "running: $command";
+
+  $! = 0;
+  my $res = `$command`;
+  handle_exec_errors $command
+    if $?;
+
+  return $res;
+}
+
+
+=item C<xsystem (@argv)>
+
+Same as C<system>, but fails on errors, and reports the C<@argv>
+in verbose mode.
+
+=cut
+
+sub xsystem (@)
+{
+  my (@command) = @_;
+
+  verb "running: @command";
+
+  $! = 0;
+  handle_exec_errors "@command"
+    if system @command;
+}
+
+
+=item C<xsystem_hint ($msg, @argv)>
+
+Same as C<xsystem>, but allows passing a hint that will be displayed
+in case the command failed to run at all.
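+
+For example (the command and hint are hypothetical):
+
+  xsystem_hint ("autoconf is needed because this package uses m4 macros",
+                'autoconf', '--version');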
+
+=cut
+
+sub xsystem_hint (@)
+{
+  my ($hint, @command) = @_;
+
+  verb "running: @command";
+
+  $! = 0;
+  handle_exec_errors "@command", 0, $hint
+    if system @command;
+}
+
+
+=item C<contents ($file_name)>
+
+Return the contents of C<$file_name>.
+
+=cut
+
+# contents ($FILE_NAME)
+# ---------------------
+sub contents ($)
+{
+  my ($file) = @_;
+  verb "reading $file";
+  local $/;			# Turn on slurp-mode.
+  my $f = new Automake::XFile $file, "<";
+  my $contents = $f->getline;
+  $f->close;
+  return $contents;
+}
+
+
+=item C<dir_has_case_matching_file ($DIRNAME, $FILE_NAME)>
+
+Return true iff $DIRNAME contains a file name that matches $FILE_NAME case
+insensitively.
+
+We need to be cautious on case-insensitive case-preserving file
+systems (e.g. Mac OS X's HFS+).  On such systems C<-f 'Foo'> and C<-f
+'foO'> answer the same thing.  Hence if a package distributes its own
+F<CHANGELOG> file, but has no F<ChangeLog> file, automake would still
+try to distribute F<ChangeLog> (because it thinks it exists) in
+addition to F<CHANGELOG>, although it is impossible for these two
+files to be in the same directory (the two file names designate the
+same file).
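+
+For example (the file list below is hypothetical):
+
+  my @dist_files = ();
+  # Only distribute 'ChangeLog' if a file by exactly that name exists.
+  push @dist_files, 'ChangeLog'
+    if dir_has_case_matching_file ('.', 'ChangeLog');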
+
+=cut
+
+use vars '%_directory_cache';
+sub dir_has_case_matching_file ($$)
+{
+  # Note that print File::Spec->case_tolerant returns 0 even on MacOS
+  # X (with Perl v5.8.1-RC3 at least), so do not try to shortcut this
+  # function using that.
+
+  my ($dirname, $file_name) = @_;
+  return 0 unless -f "$dirname/$file_name";
+
+  # The file appears to exist, however it might be a mirage if the
+  # system is case insensitive.  Let's browse the directory and check
+  # whether the file is really in.  We maintain a cache of directories
+  # so Automake doesn't spend all its time reading the same directory
+  # again and again.
+  if (!exists $_directory_cache{$dirname})
+    {
+      error "failed to open directory '$dirname'"
+	unless opendir (DIR, $dirname);
+      $_directory_cache{$dirname} = { map { $_ => 1 } readdir (DIR) };
+      closedir (DIR);
+    }
+  return exists $_directory_cache{$dirname}{$file_name};
+}
+
+=item C<reset_dir_cache ($dirname)>
+
+Clear C<dir_has_case_matching_file>'s cache for C<$dirname>.
+
+=cut
+
+sub reset_dir_cache ($)
+{
+  delete $_directory_cache{$_[0]};
+}
+
+=item C<set_dir_cache_file ($dirname, $file_name)>
+
+State that C<$dirname> contains C<$file_name> now.
+
+=cut
+
+sub set_dir_cache_file ($$)
+{
+  my ($dirname, $file_name) = @_;
+  $_directory_cache{$dirname}{$file_name} = 1
+    if exists $_directory_cache{$dirname};
+}
+
+1; # for require
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/General.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/General.pm
new file mode 100644
index 0000000..86a367b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/General.pm
@@ -0,0 +1,87 @@
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::General;
+
+use 5.006;
+use strict;
+use Exporter;
+use File::Basename;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (&uniq $me);
+
+# Variable we share with the main package.  Be sure to have a single
+# copy of it: using 'my' together with multiple inclusion of this
+# package would introduce several copies.
+use vars qw ($me);
+$me = basename ($0);
+
+# END
+# ---
+# Exit nonzero whenever closing STDOUT fails.
+sub END
+{
+  # This is required if the code might send any output to stdout,
+  # e.g., even for --version or --help.  So it's best to do it unconditionally.
+  if (! close STDOUT)
+    {
+      print STDERR "$me: closing standard output: $!\n";
+      $? = 74; # EX_IOERR
+      return;
+    }
+}
+
+
+# @RES
+# uniq (@LIST)
+# ------------
+# Return LIST with no duplicates.
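+# For instance (illustration only):
+#   uniq ('a', 'b', 'a', 'c', 'b')   returns   ('a', 'b', 'c')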
+sub uniq (@)
+{
+   my @res = ();
+   my %seen = ();
+   foreach my $item (@_)
+     {
+       if (! exists $seen{$item})
+	 {
+	   $seen{$item} = 1;
+	   push (@res, $item);
+	 }
+     }
+   return wantarray ? @res : "@res";
+}
+
+
+1; # for require
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Getopt.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Getopt.pm
new file mode 100644
index 0000000..c03b9cf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Getopt.pm
@@ -0,0 +1,115 @@
+# Copyright (C) 2012-2013 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Getopt;
+
+=head1 NAME
+
+Automake::Getopt - GCS conforming parser for command line options
+
+=head1 SYNOPSIS
+
+  use Automake::Getopt;
+
+=head1 DESCRIPTION
+
+Export a function C<parse_options>, performing parsing of command
+line options in conformance to the GNU Coding standards.
+
+=cut
+
+use 5.006;
+use strict;
+use warnings FATAL => 'all';
+use Exporter ();
+use Getopt::Long ();
+use Automake::ChannelDefs qw/fatal/;
+use Carp qw/croak confess/;
+
+use vars qw (@ISA @EXPORT);
+@ISA = qw (Exporter);
+@EXPORT= qw/getopt/;
+
+=item C<parse_options (%option)>
+
+Wrapper around C<Getopt::Long>, trying to conform to the GNU
+Coding Standards for error messages.
+
+=cut
+
+sub parse_options (%)
+{
+  my %option = @_;
+
+  Getopt::Long::Configure ("bundling", "pass_through");
+  # Unrecognized options are passed through, so GetOptions can only fail
+  # due to internal errors or misuse of options specification.
+  Getopt::Long::GetOptions (%option)
+    or confess "error in options specification (likely)";
+
+  if (@ARGV && $ARGV[0] =~ /^-./)
+    {
+      my %argopts;
+      for my $k (keys %option)
+	{
+	  if ($k =~ /(.*)=s$/)
+	    {
+	      map { $argopts{(length ($_) == 1)
+			     ? "-$_" : "--$_" } = 1; } (split (/\|/, $1));
+	    }
+	}
+      if ($ARGV[0] eq '--')
+	{
+	  shift @ARGV;
+	}
+      elsif (exists $argopts{$ARGV[0]})
+	{
+	  fatal ("option '$ARGV[0]' requires an argument\n"
+		 . "Try '$0 --help' for more information.");
+	}
+      else
+	{
+	  fatal ("unrecognized option '$ARGV[0]'.\n"
+		 . "Try '$0 --help' for more information.");
+	}
+    }
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Getopt::Long>
+
+=cut
+
+1; # for require
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Item.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Item.pm
new file mode 100644
index 0000000..6b699be
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Item.pm
@@ -0,0 +1,206 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Item;
+
+use 5.006;
+use strict;
+use Carp;
+use Automake::ChannelDefs;
+use Automake::DisjConditions;
+
+=head1 NAME
+
+Automake::Item - base class for Automake::Variable and Automake::Rule
+
+=head1 DESCRIPTION
+
+=head2 Methods
+
+=over 4
+
+=item C<new Automake::Item $name>
+
+Create and return an empty Item called C<$name>.
+
+=cut
+
+sub new ($$)
+{
+  my ($class, $name) = @_;
+  my $self = {
+    name => $name,
+    defs => {},
+    conds => {},
+  };
+  bless $self, $class;
+  return $self;
+}
+
+=item C<$item-E<gt>name>
+
+Return the name of C<$item>.
+
+=cut
+
+sub name ($)
+{
+  my ($self) = @_;
+  return $self->{'name'};
+}
+
+=item C<$item-E<gt>def ($cond)>
+
+Return the definition for this item in condition C<$cond>, if it
+exists.  Return 0 otherwise.
+
+=cut
+
+sub def ($$)
+{
+  # This method is called very often, so keep it small and fast.  We
+  # don't mind the extra undefined items introduced by lookup failure;
+  # avoiding this with 'exists' means doing two hash lookups on
+  # success, and proved worse in benchmarks.
+  my $def = $_[0]->{'defs'}{$_[1]};
+  return defined $def && $def;
+}
+
+=item C<$item-E<gt>rdef ($cond)>
+
+Return the definition for this item in condition C<$cond>.  Abort with
+an internal error if the item was not defined under this condition.
+
+The I<r> in front of C<def> stands for I<required>.  One
+should call C<rdef> to assert the conditional definition's existence.
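+
+For illustration, with an existing item C<$item> and condition C<$cond>:
+
+  my $maybe = $item->def ($cond);    # false if not defined in $cond
+  my $def   = $item->rdef ($cond);   # internal error if not defined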
+
+=cut
+
+sub rdef ($$)
+{
+  my ($self, $cond) = @_;
+  my $d = $self->def ($cond);
+  prog_error ("undefined condition '" . $cond->human . "' for '"
+	      . $self->name . "'\n" . $self->dump)
+    unless $d;
+  return $d;
+}
+
+=item C<$item-E<gt>set ($cond, $def)>
+
+Add a new definition to an existing item.
+
+=cut
+
+sub set ($$$)
+{
+  my ($self, $cond, $def) = @_;
+  $self->{'defs'}{$cond} = $def;
+  $self->{'conds'}{$cond} = $cond;
+}
+
+=item C<$var-E<gt>conditions>
+
+Return an L<Automake::DisjConditions> describing the conditions that
+an item is defined in.
+
+These are all the conditions for which it would be safe to call
+C<rdef>.
+
+=cut
+
+sub conditions ($)
+{
+  my ($self) = @_;
+  prog_error ("self is not a reference")
+    unless ref $self;
+  return new Automake::DisjConditions (values %{$self->{'conds'}});
+}
+
+=item C<@missing_conds = $var-E<gt>not_always_defined_in_cond ($cond)>
+
+Check whether C<$var> is always defined for condition C<$cond>.
+Return a list of conditions where the definition is missing.
+
+For instance, given
+
+  if COND1
+    if COND2
+      A = foo
+      D = d1
+    else
+      A = bar
+      D = d2
+    endif
+  else
+    D = d3
+  endif
+  if COND3
+    A = baz
+    B = mumble
+  endif
+  C = mumble
+
+we should have (we display result as conditional strings in this
+illustration, but we really return DisjConditions objects):
+
+  var ('A')->not_always_defined_in_cond ('COND1_TRUE COND2_TRUE')
+    => ()
+  var ('A')->not_always_defined_in_cond ('COND1_TRUE')
+    => ()
+  var ('A')->not_always_defined_in_cond ('TRUE')
+    => ("COND1_FALSE COND3_FALSE")
+  var ('B')->not_always_defined_in_cond ('COND1_TRUE')
+    => ("COND1_TRUE COND3_FALSE")
+  var ('C')->not_always_defined_in_cond ('COND1_TRUE')
+    => ()
+  var ('D')->not_always_defined_in_cond ('TRUE')
+    => ()
+  var ('Z')->not_always_defined_in_cond ('TRUE')
+    => ("TRUE")
+
+=cut
+
+sub not_always_defined_in_cond ($$)
+{
+  my ($self, $cond) = @_;
+
+  # Compute the subconditions where $var isn't defined.
+  return
+    $self->conditions
+      ->sub_conditions ($cond)
+	->invert
+	  ->multiply ($cond);
+}
+
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/ItemDef.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/ItemDef.pm
new file mode 100644
index 0000000..416ae12
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/ItemDef.pm
@@ -0,0 +1,113 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::ItemDef;
+
+use 5.006;
+use strict;
+use Carp;
+
+=head1 NAME
+
+Automake::ItemDef - base class for Automake::VarDef and Automake::RuleDef
+
+=head1 DESCRIPTION
+
+=head2 Methods
+
+=over 4
+
+=item C<my $def = new Automake::ItemDef ($comment, $location, $owner)>
+
+Create a new Makefile-item definition.
+
+C<$comment> is any comment preceding the definition.  (Because
+Automake reorders items in the output, it also tries to carry comments
+around.)
+
+C<$location> is the place where the definition occurred; it should be
+an instance of L<Automake::Location>.
+
+C<$owner> specifies who owns the rule.
+
+=cut
+
+sub new ($$$$)
+{
+  my ($class, $comment, $location, $owner) = @_;
+
+  my $self = {
+    comment => $comment,
+    location => $location,
+    owner => $owner,
+  };
+  bless $self, $class;
+
+  return $self;
+}
+
+=item C<$def-E<gt>comment>
+
+=item C<$def-E<gt>location>
+
+=item C<$def-E<gt>owner>
+
+Accessors to the various constituents of an C<ItemDef>.  See the
+documentation of C<new>'s arguments for a description of these.
+
+=cut
+
+sub comment ($)
+{
+  my ($self) = @_;
+  return $self->{'comment'};
+}
+
+sub location ($)
+{
+  my ($self) = @_;
+  return $self->{'location'};
+}
+
+sub owner ($)
+{
+  my ($self) = @_;
+  return $self->{'owner'};
+}
+
+=head1 SEE ALSO
+
+L<Automake::VarDef>, and L<Automake::RuleDef>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Language.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Language.pm
new file mode 100644
index 0000000..a678e1e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Language.pm
@@ -0,0 +1,122 @@
+# Copyright (C) 2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Language;
+
+use 5.006;
+use strict;
+
+use Class::Struct ();
+Class::Struct::struct (
+	# Short name of the language (c, f77...).
+	'name' => "\$",
+	# Nice name of the language (C, Fortran 77...).
+	'Name' => "\$",
+
+	# List of configure variables which must be defined.
+	'config_vars' => '@',
+
+	# 'pure' is '1' or ''.  A 'pure' language is one where, if
+	# all the files in a directory are of that language, then we
+	# do not require the C compiler or any code to call it.
+	'pure'   => "\$",
+
+	'autodep' => "\$",
+
+	# Name of the compiling variable (COMPILE).
+	'compiler'  => "\$",
+	# Content of the compiling variable.
+	'compile'  => "\$",
+	# Flag to require compilation without linking (-c).
+	'compile_flag' => "\$",
+	'extensions' => '@',
+	# A subroutine to compute a list of possible extensions of
+	# the product given the input extensions.
+	# (defaults to a subroutine which returns ('.$(OBJEXT)', '.lo'))
+	'output_extensions' => "\$",
+	# A list of flag variables used in 'compile'.
+	# (defaults to [])
+	'flags' => "@",
+
+	# Any tag to pass to libtool while compiling.
+	'libtool_tag' => "\$",
+
+	# The file to use when generating rules for this language.
+	# The default is 'depend2'.
+	'rule_file' => "\$",
+
+	# Name of the linking variable (LINK).
+	'linker' => "\$",
+	# Content of the linking variable.
+	'link' => "\$",
+
+	# Name of the compiler variable (CC).
+	'ccer' => "\$",
+
+	# Name of the linker variable (LD).
+	'lder' => "\$",
+	# Content of the linker variable ($(CC)).
+	'ld' => "\$",
+
+	# Flag to specify the output file (-o).
+	'output_flag' => "\$",
+	'_finish' => "\$",
+
+	# This is a subroutine which is called whenever we finally
+	# determine the context in which a source file will be
+	# compiled.
+	'_target_hook' => "\$",
+
+	# If TRUE, nodist_ sources will be compiled using specific rules
+	# (i.e. not inference rules).  The default is FALSE.
+	'nodist_specific' => "\$");
+
+
+sub finish ($)
+{
+  my ($self) = @_;
+  if (defined $self->_finish)
+    {
+      &{$self->_finish} (@_);
+    }
+}
+
+sub target_hook ($$$$%)
+{
+    my ($self) = @_;
+    if (defined $self->_target_hook)
+    {
+	$self->_target_hook->(@_);
+    }
+}
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Location.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Location.pm
new file mode 100644
index 0000000..1a9bc6f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Location.pm
@@ -0,0 +1,279 @@
+# Copyright (C) 2002-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Location;
+
+use 5.006;
+
+=head1 NAME
+
+Automake::Location - a class for location tracking, with a stack of contexts
+
+=head1 SYNOPSIS
+
+  use Automake::Location;
+
+  # Create a new Location object
+  my $where = new Automake::Location "foo.c:13";
+
+  # Change the location
+  $where->set ("foo.c:14");
+
+  # Get the location (without context).
+  # Here this should print "foo.c:14"
+  print $where->get, "\n";
+
+  # Push a context, and change the location
+  $where->push_context ("included from here");
+  $where->set ("bar.h:1");
+
+  # Print the location and the stack of context (for debugging)
+  print $where->dump;
+  # This should display
+  #   bar.h:1:
+  #   foo.c:14:   included from here
+
+  # Get the contexts (list of [$location_string, $description])
+  for my $pair (reverse $where->contexts)
+    {
+      my ($loc, $descr) = @{$pair};
+      ...
+    }
+
+  # Pop a context, and reset the location to the previous context.
+  $where->pop_context;
+
+  # Clone a Location.  Use this when storing the state of a location
+  # that would otherwise be modified.
+  my $where_copy = $where->clone;
+
+  # Serialize a Location object (for passing through a thread queue,
+  # for example)
+  my @array = $where->serialize ();
+
+  # De-serialize: recreate a Location object from a queue.
+  my $where = new Automake::Location::deserialize ($queue);
+
+=head1 DESCRIPTION
+
+C<Location> objects are used to keep track of locations in Automake,
+and used to produce diagnostics.
+
+A C<Location> object is made of two parts: a location string, and
+a stack of contexts.
+
+For instance if C<VAR> is defined at line 1 in F<bar.h> which was
+included at line 14 in F<foo.c>, then the location string should be
+C<"bar.h:10"> and the context should be the pair (C<"foo.c:14">,
+C<"included from here">).
+
+Section I<SYNOPSIS> shows how to setup such a C<Location>, and access
+the location string or the stack of contexts.
+
+You can pass a C<Location> to C<Automake::Channels::msg>.
+
+=cut
+
+=head2 Methods
+
+=over
+
+=item C<$where = new Automake::Location ([$position])>
+
+Create and return a new Location object.
+
+=cut
+
+sub new ($;$)
+{
+  my ($class, $position) = @_;
+  my $self = {
+    position => $position,
+    contexts => [],
+  };
+  bless $self, $class;
+  return $self;
+}
+
+=item C<$location-E<gt>set ($position)>
+
+Change the location to be C<$position>.
+
+=cut
+
+sub set ($$)
+{
+  my ($self, $position) = @_;
+  $self->{'position'} = $position;
+}
+
+=item C<$location-E<gt>get>
+
+Get the location (without context).
+
+=cut
+
+sub get ($)
+{
+  my ($self) = @_;
+  return $self->{'position'};
+}
+
+=item C<$location-E<gt>push_context ($context)>
+
+Push a context to the location.
+
+=cut
+
+sub push_context ($$)
+{
+  my ($self, $context) = @_;
+  push @{$self->{'contexts'}}, [$self->get, $context];
+  $self->set (undef);
+}
+
+=item C<$where = $location-E<gt>pop_context ($context)>
+
+Pop a context, and reset the location to the previous context.
+
+=cut
+
+sub pop_context ($)
+{
+  my ($self) = @_;
+  my $pair = pop @{$self->{'contexts'}};
+  $self->set ($pair->[0]);
+  return @{$pair};
+}
+
+=item C<@contexts = $location-E<gt>get_contexts>
+
+Return the array of contexts.
+
+=cut
+
+sub get_contexts ($)
+{
+  my ($self) = @_;
+  return @{$self->{'contexts'}};
+}
+
+=item C<$location = $location-E<gt>clone>
+
+Clone a Location.  Use this when storing the state of a location
+that would otherwise be modified.
+
+=cut
+
+sub clone ($)
+{
+  my ($self) = @_;
+  my $other = new Automake::Location ($self->get);
+  my @contexts = $self->get_contexts;
+  for my $pair (@contexts)
+    {
+      push @{$other->{'contexts'}}, [@{$pair}];
+    }
+  return $other;
+}
+
+=item C<$res = $location-E<gt>dump>
+
+Print the location and the stack of context (for debugging).
+
+=cut
+
+sub dump ($)
+{
+  my ($self) = @_;
+  my $res = ($self->get || 'INTERNAL') . ":\n";
+  for my $pair (reverse $self->get_contexts)
+    {
+      $res .= $pair->[0] || 'INTERNAL';
+      $res .= ": $pair->[1]\n";
+    }
+  return $res;
+}
+
+=item C<@array = $location-E<gt>serialize>
+
+Serialize a Location object (for passing through a thread queue,
+for example).
+
+=cut
+
+sub serialize ($)
+{
+  my ($self) = @_;
+  my @serial = ();
+  push @serial, $self->get;
+  my @contexts = $self->get_contexts;
+  for my $pair (@contexts)
+    {
+      push @serial, @{$pair};
+    }
+  push @serial, undef;
+  return @serial;
+}
+
+=item C<new Automake::Location::deserialize ($queue)>
+
+De-serialize: recreate a Location object from a queue.
+
+=cut
+
+sub deserialize ($)
+{
+  my ($queue) = @_;
+  my $position = $queue->dequeue ();
+  my $self = new Automake::Location $position;
+  while (my $position = $queue->dequeue ())
+    {
+      my $context = $queue->dequeue ();
+      push @{$self->{'contexts'}}, [$position, $context];
+    }
+  return $self;
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::Channels>
+
+=head1 HISTORY
+
+Written by Alexandre Duret-Lutz E<lt>F<adl@gnu.org>E<gt>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Options.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Options.pm
new file mode 100644
index 0000000..fab33f3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Options.pm
@@ -0,0 +1,476 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Options;
+
+use 5.006;
+use strict;
+use Exporter;
+use Automake::Config;
+use Automake::ChannelDefs;
+use Automake::Channels;
+use Automake::Version;
+
+use vars qw (@ISA @EXPORT);
+
+@ISA = qw (Exporter);
+@EXPORT = qw (option global_option
+              set_option set_global_option
+              unset_option unset_global_option
+              process_option_list process_global_option_list
+              set_strictness $strictness $strictness_name
+              &FOREIGN &GNU &GNITS);
+
+=head1 NAME
+
+Automake::Options - keep track of Automake options
+
+=head1 SYNOPSIS
+
+  use Automake::Options;
+
+  # Option lookup and setting.
+  $opt = option 'name';
+  $opt = global_option 'name';
+  set_option 'name', 'value';
+  set_global_option 'name', 'value';
+  unset_option 'name';
+  unset_global_option 'name';
+
+  # Batch option setting.
+  process_option_list $location, @names;
+  process_global_option_list $location, @names;
+
+  # Strictness lookup and setting.
+  set_strictness 'foreign';
+  set_strictness 'gnu';
+  set_strictness 'gnits';
+  if ($strictness >= GNU) { ... }
+  print "$strictness_name\n";
+
+=head1 DESCRIPTION
+
+This package manages Automake's options and strictness settings.
+Options can be either local or global.  Local options are set using an
+C<AUTOMAKE_OPTIONS> variable in a F<Makefile.am> and apply only to
+that F<Makefile.am>.  Global options are set from the command line or
+passed as an argument to C<AM_INIT_AUTOMAKE>; they apply to all
+F<Makefile.am>s.
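+
+A minimal, hypothetical sketch of the resulting precedence
+(C<$cmdline_where> and C<$makefile_where> stand for
+C<Automake::Location> objects; the option names are only examples):
+
+  set_global_option 'no-dependencies', $cmdline_where;
+  set_option 'subdir-objects', $makefile_where;
+
+  Automake::Options::reset;    # start processing a new Makefile.am
+  option 'no-dependencies';    # still set: global options survive
+  option 'subdir-objects';     # unset: local options were reset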
+
+=cut
+
+# Values are the Automake::Location of the definition.
+use vars '%_options';        # From AUTOMAKE_OPTIONS
+use vars '%_global_options'; # From AM_INIT_AUTOMAKE or the command line.
+
+# Whether process_option_list has already been called for the current
+# Makefile.am.
+use vars '$_options_processed';
+# Whether process_global_option_list has already been called.
+use vars '$_global_options_processed';
+
+=head2 Constants
+
+=over 4
+
+=item FOREIGN
+
+=item GNU
+
+=item GNITS
+
+Strictness constants used as values for C<$strictness>.
+
+=back
+
+=cut
+
+# Constants to define the "strictness" level.
+use constant FOREIGN => 0;
+use constant GNU     => 1;
+use constant GNITS   => 2;
+
+=head2 Variables
+
+=over 4
+
+=item C<$strictness>
+
+The current strictness.  One of C<FOREIGN>, C<GNU>, or C<GNITS>.
+
+=item C<$strictness_name>
+
+The current strictness name.  One of C<'foreign'>, C<'gnu'>, or C<'gnits'>.
+
+=back
+
+=cut
+
+# Strictness levels.
+use vars qw ($strictness $strictness_name);
+
+# Strictness level as set on command line.
+use vars qw ($_default_strictness $_default_strictness_name);
+
+
+=head2 Functions
+
+=over 4
+
+=item C<Automake::Options::reset>
+
+Reset the options variables for the next F<Makefile.am>.
+
+In other words, this gets rid of all local options in use by the
+previous F<Makefile.am>.
+
+=cut
+
+sub reset ()
+{
+  $_options_processed = 0;
+  %_options = %_global_options;
+  # The first time we are run,
+  # remember the current setting as the default.
+  if (defined $_default_strictness)
+    {
+      $strictness = $_default_strictness;
+      $strictness_name = $_default_strictness_name;
+    }
+  else
+    {
+      $_default_strictness = $strictness;
+      $_default_strictness_name = $strictness_name;
+    }
+}
+
+=item C<$value = option ($name)>
+
+=item C<$value = global_option ($name)>
+
+Query the state of an option.  If the option is unset, this
+returns the empty list.  Otherwise it returns the option's value,
+as set by C<set_option> or C<set_global_option>.
+
+Note that C<global_option> should be used only when it is
+important to make sure an option hasn't been set locally.
+Otherwise C<option> should be the standard function to
+check for options (be they global or local).
+
+=cut
+
+sub option ($)
+{
+  my ($name) = @_;
+  return () unless defined $_options{$name};
+  return $_options{$name};
+}
+
+sub global_option ($)
+{
+  my ($name) = @_;
+  return () unless defined $_global_options{$name};
+  return $_global_options{$name};
+}
+
+=item C<set_option ($name, $value)>
+
+=item C<set_global_option ($name, $value)>
+
+Set an option.  By convention, C<$value> is usually the location
+of the option definition.
+
+=cut
+
+sub set_option ($$)
+{
+  my ($name, $value) = @_;
+  $_options{$name} = $value;
+}
+
+sub set_global_option ($$)
+{
+  my ($name, $value) = @_;
+  $_global_options{$name} = $value;
+}
+
+
+=item C<unset_option ($name)>
+
+=item C<unset_global_option ($name)>
+
+Unset an option.
+
+=cut
+
+sub unset_option ($)
+{
+  my ($name) = @_;
+  delete $_options{$name};
+}
+
+sub unset_global_option ($)
+{
+  my ($name) = @_;
+  delete $_global_options{$name};
+}
+
+
+=item C<process_option_list (@list)>
+
+=item C<process_global_option_list (@list)>
+
+Process Automake's option lists.  C<@list> should be a list of hash
+references with keys C<option> and C<where>, where C<option> is an
+option as it occurs in C<AUTOMAKE_OPTIONS> or C<AM_INIT_AUTOMAKE>,
+and C<where> is the location where that option occurred.
+
+These functions should be called at most once for each set of options
+having the same precedence; i.e., do not call either of them twice for
+options from C<AM_INIT_AUTOMAKE>.
+
+Return 0 on error, 1 otherwise.
+
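+A hypothetical call, for options coming from C<AM_INIT_AUTOMAKE>
+(C<$where> stands for the C<Automake::Location> of that macro call):
+
+  my $ok = process_global_option_list (
+    { option => 'foreign',        where => $where },
+    { option => 'subdir-objects', where => $where });
+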
+=cut
+
+# $BOOL
+# _option_is_from_configure ($OPTION, $WHERE)
+# ----------------------------------------------
+# Check that the $OPTION given in location $WHERE is specified with
+# AM_INIT_AUTOMAKE, not with AUTOMAKE_OPTIONS.
+sub _option_is_from_configure ($$)
+{
+  my ($opt, $where)= @_;
+  return 1
+    if $where->get =~ /^configure\./;
+  error $where,
+        "option '$opt' can only be used as argument to AM_INIT_AUTOMAKE\n" .
+        "but not in AUTOMAKE_OPTIONS makefile statements";
+  return 0;
+}
+
+# $BOOL
+# _is_valid_easy_option ($OPTION)
+# -------------------------------
+# Explicitly recognize valid automake options that require no
+# special handling by '_process_option_list' below.
+sub _is_valid_easy_option ($)
+{
+  my $opt = shift;
+  return scalar grep { $opt eq $_ } qw(
+    check-news
+    color-tests
+    dejagnu
+    dist-bzip2
+    dist-lzip
+    dist-xz
+    dist-zip
+    info-in-builddir
+    no-define
+    no-dependencies
+    no-dist
+    no-dist-gzip
+    no-exeext
+    no-installinfo
+    no-installman
+    no-texinfo.tex
+    nostdinc
+    readme-alpha
+    serial-tests
+    parallel-tests
+    silent-rules
+    std-options
+    subdir-objects
+  );
+}
+
+# $BOOL
+# _process_option_list (\%OPTIONS, @LIST)
+# ------------------------------------------
+# Process a list of options.  \%OPTIONS is the hash to fill with options
+# data.  @LIST is a list of options as get passed to public subroutines
+# process_option_list() and process_global_option_list() (see POD
+# documentation above).
+sub _process_option_list (\%@)
+{
+  my ($options, @list) = @_;
+  my @warnings = ();
+  my $ret = 1;
+
+  foreach my $h (@list)
+    {
+      local $_ = $h->{'option'};
+      my $where = $h->{'where'};
+      $options->{$_} = $where;
+      if ($_ eq 'gnits' || $_ eq 'gnu' || $_ eq 'foreign')
+        {
+          set_strictness ($_);
+        }
+      # TODO: Remove this special check in Automake 3.0.
+      elsif (/^(.*\/)?ansi2knr$/)
+        {
+          # Obsolete (and now removed) de-ANSI-fication support.
+          error ($where,
+                 "automatic de-ANSI-fication support has been removed");
+          $ret = 0;
+        }
+      # TODO: Remove this special check in Automake 3.0.
+      elsif ($_ eq 'cygnus')
+        {
+          error $where, "support for Cygnus-style trees has been removed";
+          $ret = 0;
+        }
+      # TODO: Remove this special check in Automake 3.0.
+      elsif ($_ eq 'dist-lzma')
+        {
+          error ($where, "support for lzma-compressed distribution " .
+                         "archives has been removed");
+          $ret = 0;
+        }
+      # TODO: Make this a fatal error in Automake 2.0.
+      elsif ($_ eq 'dist-shar')
+        {
+          msg ('obsolete', $where,
+               "support for shar distribution archives is deprecated.\n" .
+               "  It will be removed in Automake 2.0");
+        }
+      # TODO: Make this a fatal error in Automake 2.0.
+      elsif ($_ eq 'dist-tarZ')
+        {
+          msg ('obsolete', $where,
+               "support for distribution archives compressed with " .
+               "legacy program 'compress' is deprecated.\n" .
+               "  It will be removed in Automake 2.0");
+        }
+      elsif (/^filename-length-max=(\d+)$/)
+        {
+          delete $options->{$_};
+          $options->{'filename-length-max'} = [$_, $1];
+        }
+      elsif ($_ eq 'tar-v7' || $_ eq 'tar-ustar' || $_ eq 'tar-pax')
+        {
+          if (not _option_is_from_configure ($_, $where))
+            {
+              $ret = 0;
+            }
+          for my $opt ('tar-v7', 'tar-ustar', 'tar-pax')
+            {
+              next
+                if $opt eq $_ or ! exists $options->{$opt};
+              error ($where,
+                     "options '$_' and '$opt' are mutually exclusive");
+              $ret = 0;
+            }
+        }
+      elsif (/^\d+\.\d+(?:\.\d+)?[a-z]?(?:-[A-Za-z0-9]+)?$/)
+        {
+          # Got a version number.
+          if (Automake::Version::check ($VERSION, $&))
+            {
+              error ($where, "require Automake $_, but have $VERSION");
+              $ret = 0;
+            }
+        }
+      elsif (/^(?:--warnings=|-W)(.*)$/)
+        {
+          my @w = map { { cat => $_, loc => $where} } split (',', $1);
+          push @warnings, @w;
+        }
+      elsif (! _is_valid_easy_option $_)
+        {
+          error ($where, "option '$_' not recognized");
+          $ret = 0;
+        }
+    }
+
+  # We process warnings here, so that any explicitly-given warning setting
+  # will take precedence over warning settings defined implicitly by the
+  # strictness.
+  foreach my $w (@warnings)
+    {
+      msg 'unsupported', $w->{'loc'},
+          "unknown warning category '$w->{'cat'}'"
+        if switch_warning $w->{cat};
+    }
+
+  return $ret;
+}
+
+sub process_option_list (@)
+{
+  prog_error "local options already processed"
+    if $_options_processed;
+  $_options_processed = 1;
+  _process_option_list (%_options, @_);
+}
+
+sub process_global_option_list (@)
+{
+  prog_error "global options already processed"
+    if $_global_options_processed;
+  $_global_options_processed = 1;
+  _process_option_list (%_global_options, @_);
+}
+
+=item C<set_strictness ($name)>
+
+Set the current strictness level.
+C<$name> should be one of C<'foreign'>, C<'gnu'>, or C<'gnits'>.
+
+=cut
+
+# Set strictness.
+sub set_strictness ($)
+{
+  $strictness_name = $_[0];
+
+  Automake::ChannelDefs::set_strictness ($strictness_name);
+
+  if ($strictness_name eq 'gnu')
+    {
+      $strictness = GNU;
+    }
+  elsif ($strictness_name eq 'gnits')
+    {
+      $strictness = GNITS;
+    }
+  elsif ($strictness_name eq 'foreign')
+    {
+      $strictness = FOREIGN;
+    }
+  else
+    {
+      prog_error "level '$strictness_name' not recognized";
+    }
+}
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Rule.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Rule.pm
new file mode 100644
index 0000000..a28a78d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Rule.pm
@@ -0,0 +1,879 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Rule;
+
+use 5.006;
+use strict;
+use Carp;
+
+use Automake::Item;
+use Automake::RuleDef;
+use Automake::ChannelDefs;
+use Automake::Channels;
+use Automake::Options;
+use Automake::Condition qw (TRUE FALSE);
+use Automake::DisjConditions;
+require Exporter;
+use vars '@ISA', '@EXPORT', '@EXPORT_OK';
+@ISA = qw/Automake::Item Exporter/;
+@EXPORT = qw (reset register_suffix_rule next_in_suffix_chain
+	      suffixes rules $KNOWN_EXTENSIONS_PATTERN
+	      depend %dependencies %actions register_action
+	      accept_extensions
+	      reject_rule msg_rule msg_cond_rule err_rule err_cond_rule
+	      rule rrule ruledef rruledef);
+
+=head1 NAME
+
+Automake::Rule - support for rules definitions
+
+=head1 SYNOPSIS
+
+  use Automake::Rule;
+  use Automake::RuleDef;
+
+
+=head1 DESCRIPTION
+
+This package provides support for Makefile rule definitions.
+
+An C<Automake::Rule> is a rule name associated with possibly
+many conditional definitions.  These definitions are instances
+of C<Automake::RuleDef>.
+
+Therefore obtaining the value of a rule under a given
+condition involves two lookups.  One to look up the rule,
+and one to look up the conditional definition:
+
+  my $rule = rule $name;
+  if ($rule)
+    {
+      my $def = $rule->def ($cond);
+      if ($def)
+	{
+	  return $def->location;
+	}
+      ...
+    }
+  ...
+
+When it is known that the rule and the definition
+being looked up exist, the above can be simplified to
+
+  return rule ($name)->def ($cond)->location; # do not write this.
+
+but is better written
+
+  return rrule ($name)->rdef ($cond)->location;
+
+or even
+
+  return rruledef ($name, $cond)->location;
+
+The I<r> variants of the C<rule>, C<def>, and C<ruledef> methods add
+an extra test to ensure that the lookup succeeded, and will diagnose
+failures as internal errors (with a message which is much more
+informative than Perl's warning about calling a method on a
+non-object).
+
+=head2 Global variables
+
+=over 4
+
+=cut
+
+my $_SUFFIX_RULE_PATTERN =
+  '^(\.[a-zA-Z0-9_(){}$+@\-]+)(\.[a-zA-Z0-9_(){}$+@\-]+)' . "\$";
+
+my @_suffixes = ();
+my @_known_extensions_list = ();
+my %_rule_dict = ();
+
+# See comments in the implementation of the 'next_in_suffix_chain()'
+# variable for details.
+my %_suffix_rules;
+
+# Same as $suffix_rules, but records only the default rules
+# supplied by the languages Automake supports.
+my %_suffix_rules_builtin;
+
+=item C<%dependencies>
+
+Holds the dependencies of targets whose dependencies are factored.
+Typically, C<.PHONY> will appear in plenty of F<*.am> files, but must
+be output only once.  Arguably all pure dependencies could be subject
+to this factoring, but it is also convenient to have paragraphs in the
+output Makefile that keep related rules together.
+
+=cut
+
+use vars '%dependencies';
+
+=item C<%actions>
+
+Holds the factored actions.  Tied to C<%dependencies>, i.e., filled
+only for keys that exist in C<%dependencies>.
+
+=cut
+
+use vars '%actions';
+
+=item C<$KNOWN_EXTENSIONS_PATTERN>
+
+Pattern that matches all known input extensions (i.e., extensions used
+by the languages supported by Automake).  Using this pattern (instead
+of '\..*$') to match extensions allows Automake to support dot-less
+extensions.
+
+New extensions should be registered with C<accept_extensions>.
+
+=cut
+
+use vars qw ($KNOWN_EXTENSIONS_PATTERN);
+$KNOWN_EXTENSIONS_PATTERN = "";
+
+=back
+
+=head2 Error reporting functions
+
+In these functions, C<$rule> can be either a rule name, or
+an instance of C<Automake::Rule>.
+
+=over 4
+
+=item C<err_rule ($rule, $message, [%options])>
+
+Uncategorized errors about rules.
+
+=cut
+
+sub err_rule ($$;%)
+{
+  msg_rule ('error', @_);
+}
+
+=item C<err_cond_rule ($cond, $rule, $message, [%options])>
+
+Uncategorized errors about conditional rules.
+
+=cut
+
+sub err_cond_rule ($$$;%)
+{
+  msg_cond_rule ('error', @_);
+}
+
+=item C<msg_cond_rule ($channel, $cond, $rule, $message, [%options])>
+
+Messages about conditional rules.
+
+=cut
+
+sub msg_cond_rule ($$$$;%)
+{
+  my ($channel, $cond, $rule, $msg, %opts) = @_;
+  my $r = ref ($rule) ? $rule : rrule ($rule);
+  msg $channel, $r->rdef ($cond)->location, $msg, %opts;
+}
+
+=item C<msg_rule ($channel, $targetname, $message, [%options])>
+
+Messages about rules.
+
+=cut
+
+sub msg_rule ($$$;%)
+{
+  my ($channel, $rule, $msg, %opts) = @_;
+  my $r = ref ($rule) ? $rule : rrule ($rule);
+  # Don't know which condition is concerned.  Pick any.
+  my $cond = $r->conditions->one_cond;
+  msg_cond_rule ($channel, $cond, $r, $msg, %opts);
+}
+
+
+=item C<$bool = reject_rule ($rule, $error_msg)>
+
+Bail out with C<$error_msg> if a rule with name C<$rule> has been
+defined.
+
+Return true iff C<$rule> is defined.
+
+=cut
+
+sub reject_rule ($$)
+{
+  my ($rule, $msg) = @_;
+  if (rule ($rule))
+    {
+      err_rule $rule, $msg;
+      return 1;
+    }
+  return 0;
+}
+
+=back
+
+=head2 Administrative functions
+
+=over 4
+
+=item C<accept_extensions (@exts)>
+
+Update C<$KNOWN_EXTENSIONS_PATTERN> to recognize the extensions
+listed in C<@exts>.  Extensions should contain a dot if needed.
+
+=cut
+
+sub accept_extensions (@)
+{
+    push @_known_extensions_list, @_;
+    $KNOWN_EXTENSIONS_PATTERN =
+	'(?:' . join ('|', map (quotemeta, @_known_extensions_list)) . ')';
+}
+
+=item C<rules>
+
+Return the list of all L<Automake::Rule> instances.  (I.e., all
+rules defined so far.)
+
+=cut
+
+sub rules ()
+{
+  return values %_rule_dict;
+}
+
+
+=item C<register_action($target, $action)>
+
+Append C<$action> to C<$actions{$target}>, taking care of special
+cases.
+
+=cut
+
+sub register_action ($$)
+{
+  my ($target, $action) = @_;
+  if ($actions{$target})
+    {
+      $actions{$target} .= "\n$action" if $action;
+    }
+  else
+    {
+      $actions{$target} = $action;
+    }
+}
+
+
+=item C<Automake::Rule::reset>
+
+The I<forget all> function.  Clears all known rules and resets some
+other internal data.
+
+=cut
+
+sub reset()
+{
+  %_rule_dict = ();
+  @_suffixes = ();
+  %_suffix_rules = %_suffix_rules_builtin;
+
+  %dependencies =
+    (
+     # Texinfoing.
+     'dvi'      => [],
+     'dvi-am'   => [],
+     'pdf'      => [],
+     'pdf-am'   => [],
+     'ps'       => [],
+     'ps-am'    => [],
+     'info'     => [],
+     'info-am'  => [],
+     'html'     => [],
+     'html-am'  => [],
+
+     # Installing/uninstalling.
+     'install-data-am'      => [],
+     'install-exec-am'      => [],
+     'uninstall-am'         => [],
+
+     'install-man'	    => [],
+     'uninstall-man'	    => [],
+
+     'install-dvi'          => [],
+     'install-dvi-am'       => [],
+     'install-html'         => [],
+     'install-html-am'      => [],
+     'install-info'         => [],
+     'install-info-am'      => [],
+     'install-pdf'          => [],
+     'install-pdf-am'       => [],
+     'install-ps'           => [],
+     'install-ps-am'        => [],
+
+     'installcheck-am'      => [],
+
+     # Cleaning.
+     'clean-am'             => [],
+     'mostlyclean-am'       => [],
+     'maintainer-clean-am'  => [],
+     'distclean-am'         => [],
+     'clean'                => [],
+     'mostlyclean'          => [],
+     'maintainer-clean'     => [],
+     'distclean'            => [],
+
+     # Tarballing.
+     'dist-all'             => [],
+
+     # Phonying.
+     '.PHONY'               => [],
+     # Recursive install targets (so "make -n install" works for BSD Make).
+     '.MAKE'		    => [],
+     );
+  %actions = ();
+}
+
+=item C<next_in_suffix_chain ($ext1, $ext2)>
+
+Return the target suffix for the next rule to use to reach C<$ext2>
+from C<$ext1>, or C<undef> if no such rule exists.
+
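+For instance, with a hypothetical chain of suffix rules
+C<.y> -E<gt> C<.c> -E<gt> C<.$(OBJEXT)>:
+
+  next_in_suffix_chain ('.y', '.$(OBJEXT)');  # returns '.c'
+  next_in_suffix_chain ('.y', '.h');          # returns undef
+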
+=cut
+
+sub next_in_suffix_chain ($$)
+{
+  my ($ext1, $ext2) = @_;
+  return undef unless (exists $_suffix_rules{$ext1} and
+                       exists $_suffix_rules{$ext1}{$ext2});
+  return $_suffix_rules{$ext1}{$ext2}[0];
+}
+
+=item C<register_suffix_rule ($where, $src, $dest)>
+
+Register a suffix rule defined on C<$where> that transforms
+files ending in C<$src> into files ending in C<$dest>.
+
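+A hypothetical sketch of how a chain builds up (C<$where> stands for
+an C<Automake::Location>; the suffixes are only examples):
+
+  register_suffix_rule ($where, '.y', '.c');
+  register_suffix_rule ($where, '.c', '.o');  # stored as '.$(OBJEXT)'
+  # The table is kept transitively closed, so '.y' now also reaches
+  # '.$(OBJEXT)', with '.c' recorded as the next step in the chain.
+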
+=cut
+
+sub register_suffix_rule ($$$)
+{
+  my ($where, $src, $dest) = @_;
+  my $suffix_rules = $where->{'position'} ? \%_suffix_rules
+                                          : \%_suffix_rules_builtin;
+
+  verb "Sources ending in $src become $dest";
+  push @_suffixes, $src, $dest;
+
+  # When transforming sources to objects, Automake uses the
+  # %suffix_rules to move from each source extension to
+  # '.$(OBJEXT)', not to '.o' or '.obj'.  However some people
+  # define suffix rules for '.o' or '.obj', so internally we will
+  # consider these extensions equivalent to '.$(OBJEXT)'.  We
+  # CANNOT rewrite the target (i.e., automagically replace '.o'
+  # and '.obj' by '.$(OBJEXT)' in the output), or warn the user
+  # that (s)he'd better use '.$(OBJEXT)', because Automake itself
+  # outputs suffix rules for '.o' or '.obj' ...
+  $dest = '.$(OBJEXT)' if ($dest eq '.o' || $dest eq '.obj');
+
+  # ----------------------------------------------------------------------
+  # The $suffix_rules variable maps the source extension for all suffix
+  # rules seen to a hash whose keys are the possible output extensions.
+  #
+  # Note that this is transitively closed by construction:
+  # if we have
+  #
+  #       exists $suffix_rules{$ext1}{$ext2}
+  #    && exists $suffix_rules{$ext2}{$ext3}
+  #
+  # then we also have
+  #
+  #       exists $suffix_rules{$ext1}{$ext3}
+  #
+  # So it's easy to check whether '.foo' can be transformed to
+  # '.$(OBJEXT)' by checking whether $suffix_rules{'.foo'}{'.$(OBJEXT)'}
+  # exists.  This will work even if transforming '.foo' to '.$(OBJEXT)'
+  # involves a chain of several suffix rules.
+  #
+  # The value of $suffix_rules{$ext1}{$ext2} is a pair [$next_sfx, $dist]
+  # where $next_sfx is the target suffix for the next rule to use to
+  # reach $ext2, and $dist is the distance to $ext2.
+  # ----------------------------------------------------------------------
+
+  # Register $dest as a possible destination from $src.
+  # We might have to create the hash first.
+  if (exists $suffix_rules->{$src})
+    {
+      $suffix_rules->{$src}{$dest} = [ $dest, 1 ];
+    }
+  else
+    {
+      $suffix_rules->{$src} = { $dest => [ $dest, 1 ] };
+    }
+
+  # If we know how to transform $dest in something else, then
+  # we know how to transform $src in that "something else".
+  if (exists $suffix_rules->{$dest})
+    {
+      for my $dest2 (keys %{$suffix_rules->{$dest}})
+	{
+	  my $dist = $suffix_rules->{$dest}{$dest2}[1] + 1;
+	  # Overwrite an existing $src->$dest2 path only if
+	  # the path via $dest is shorter.
+	  if (! exists $suffix_rules->{$src}{$dest2}
+	      || $suffix_rules->{$src}{$dest2}[1] > $dist)
+	    {
+	      $suffix_rules->{$src}{$dest2} = [ $dest, $dist ];
+	    }
+	}
+    }
+
+  # Similarly, any extension that can be derived into $src
+  # can be derived into the same extensions as $src can.
+  my @dest2 = keys %{$suffix_rules->{$src}};
+  for my $src2 (keys %$suffix_rules)
+    {
+      if (exists $suffix_rules->{$src2}{$src})
+	{
+	  for my $dest2 (@dest2)
+	    {
+	      my $dist = $suffix_rules->{$src}{$dest2}[1] + 1;
+	      # Overwrite an existing $src2->$dest2 path only if
+	      # the path via $src is shorter.
+	      if (! exists $suffix_rules->{$src2}{$dest2}
+		  || $suffix_rules->{$src2}{$dest2}[1] > $dist)
+		{
+		  $suffix_rules->{$src2}{$dest2} = [ $src, $dist ];
+		}
+	    }
+	}
+    }
+}
+
+=item C<@list = suffixes>
+
+Return the list of known suffixes.
+
+=cut
+
+sub suffixes ()
+{
+  return @_suffixes;
+}
+
+=item C<rule ($rulename)>
+
+Return the C<Automake::Rule> object for the rule
+named C<$rulename> if defined.  Return 0 otherwise.
+
+=cut
+
+sub rule ($)
+{
+  my ($name) = @_;
+  # Strip $(EXEEXT) from $name, so we can diagnose
+  # a clash if 'ctags$(EXEEXT):' is redefined after 'ctags:'.
+  $name =~ s,\$\(EXEEXT\)$,,;
+  return $_rule_dict{$name} || 0;
+}
+
+=item C<ruledef ($rulename, $cond)>
+
+Return the C<Automake::RuleDef> object for the rule named
+C<$rulename> if defined in condition C<$cond>.  Return false
+if the condition or the rule does not exist.
+
+=cut
+
+sub ruledef ($$)
+{
+  my ($name, $cond) = @_;
+  my $rule = rule $name;
+  return $rule && $rule->def ($cond);
+}
+
+=item C<rrule ($rulename)>
+
+Return the C<Automake::Rule> object for the rule named
+C<$rulename>.  Abort with an internal error if the rule was not
+defined.
+
+The I<r> in front of C<rule> stands for I<required>.  One
+should call C<rrule> to assert the rule's existence.
+
+=cut
+
+sub rrule ($)
+{
+  my ($name) = @_;
+  my $r = rule $name;
+  prog_error ("undefined rule $name\n" . &rules_dump)
+    unless $r;
+  return $r;
+}
+
+=item C<rruledef ($rulename, $cond)>
+
+Return the C<Automake::RuleDef> object for the rule named
+C<$rulename> if defined in condition C<$cond>.  Abort with an internal
+error if the condition or the rule does not exist.
+
+=cut
+
+sub rruledef ($$)
+{
+  my ($name, $cond) = @_;
+  return rrule ($name)->rdef ($cond);
+}
+
+# Create the rule if it does not exist.
+# This is used only by other functions in this package.
+sub _crule ($)
+{
+  my ($name) = @_;
+  my $r = rule $name;
+  return $r if $r;
+  return _new Automake::Rule $name;
+}
+
+sub _new ($$)
+{
+  my ($class, $name) = @_;
+
+  # Strip $(EXEEXT) from $name, so we can diagnose
+  # a clash if 'ctags$(EXEEXT):' is redefined after 'ctags:'.
+  (my $keyname = $name) =~ s,\$\(EXEEXT\)$,,;
+
+  my $self = Automake::Item::new ($class, $name);
+  $_rule_dict{$keyname} = $self;
+  return $self;
+}
+
+sub _rule_defn_with_exeext_awareness ($$$)
+{
+  my ($target, $cond, $where) = @_;
+
+  # For now 'foo:' will override 'foo$(EXEEXT):'.  This is temporary,
+  # though, so we emit a warning.
+  (my $noexe = $target) =~ s/\$\(EXEEXT\)$//;
+  my $noexerule = rule $noexe;
+  my $tdef = $noexerule ? $noexerule->def ($cond) : undef;
+
+  if ($noexe ne $target
+      && $tdef
+      && $noexerule->name ne $target)
+    {
+      # The no-exeext option enables this feature.
+      if (! option 'no-exeext')
+	{
+	  msg ('obsolete', $tdef->location,
+	       "deprecated feature: target '$noexe' overrides "
+	       . "'$noexe\$(EXEEXT)'\n"
+	       . "change your target to read '$noexe\$(EXEEXT)'",
+	       partial => 1);
+	  msg ('obsolete', $where, "target '$target' was defined here");
+	}
+    }
+    return $tdef;
+}
+
+sub _maybe_warn_about_duplicated_target ($$$$$$)
+{
+  my ($target, $tdef, $source, $owner, $cond, $where) = @_;
+
+  my $oldowner  = $tdef->owner;
+  # OK, it's the same target, but the name may be different because
+  # 'foo$(EXEEXT)' and 'foo' have the same key in our table.
+  my $oldname = $tdef->name;
+
+  # Don't mention true conditions in diagnostics.
+  my $condmsg =
+    $cond == TRUE ? '' : (" in condition '" . $cond->human . "'");
+
+  if ($owner == RULE_USER)
+    {
+      if ($oldowner == RULE_USER)
+        {
+          # Ignore '%'-style pattern rules.  We'd need the
+          # dependencies to detect duplicates, and they are
+          # already diagnosed as unportable by -Wportability.
+          if ($target !~ /^[^%]*%[^%]*$/)
+            {
+              ## FIXME: Presently we can't diagnose duplicate user rules
+              ## because we don't distinguish rules with commands
+              ## from rules that only add dependencies.  E.g.,
+              ##   .PHONY: foo
+              ##   .PHONY: bar
+              ## is legitimate.  This is checked in the 'phony.sh' test.
+
+              # msg ('syntax', $where,
+              #      "redefinition of '$target'$condmsg ...", partial => 1);
+              # msg_cond_rule ('syntax', $cond, $target,
+              #                "... '$target' previously defined here");
+            }
+        }
+      else
+        {
+          # Since we parse the user Makefile.am before reading
+          # the Automake fragments, this condition should never happen.
+          prog_error ("user target '$target'$condmsg seen after Automake's"
+                      . " definition\nfrom " . $tdef->source);
+        }
+    }
+  else # $owner == RULE_AUTOMAKE
+    {
+      if ($oldowner == RULE_USER)
+        {
+          # -am targets listed in %dependencies support a -local
+          # variant.  If the user tries to override TARGET or
+          # TARGET-am for which there exists a -local variant,
+          # just tell the user to use it.
+          my $hint = 0;
+          my $noam = $target;
+          $noam =~ s/-am$//;
+          if (exists $dependencies{"$noam-am"})
+            {
+              $hint = "consider using $noam-local instead of $target";
+            }
+
+          msg_cond_rule ('override', $cond, $target,
+                         "user target '$target' defined here"
+                         . "$condmsg ...", partial => 1);
+          msg ('override', $where,
+               "... overrides Automake target '$oldname' defined here",
+               partial => $hint);
+          msg_cond_rule ('override', $cond, $target, $hint)
+            if $hint;
+        }
+      else # $oldowner == RULE_AUTOMAKE
+        {
+          # Automake should ignore redefinitions of its own
+          # rules if they came from the same file.  This makes
+          # it easier to process a Makefile fragment several times.
+          # However it's an error if the target is defined in many
+          # files.  E.g., the user might be using bin_PROGRAMS = ctags
+          # which clashes with our 'ctags' rule.
+          # (It would be more accurate if we had a way to compare
+          # the *content* of both rules.  Then $targets_source would
+          # be useless.)
+          my $oldsource = $tdef->source;
+          if (not ($source eq $oldsource && $target eq $oldname))
+            {
+               msg ('syntax',
+                    $where, "redefinition of '$target'$condmsg ...",
+                    partial => 1);
+               msg_cond_rule ('syntax', $cond, $target,
+                              "... '$oldname' previously defined here");
+            }
+        }
+    }
+}
+
+# Return the list of conditionals in which the rule was defined.  In case
+# an ambiguous conditional definition is detected, return the empty list.
+sub _conditionals_for_rule ($$$$)
+{
+  my ($rule, $owner, $cond, $where) = @_;
+  my $target = $rule->name;
+  my @conds;
+  my ($message, $ambig_cond) = $rule->conditions->ambiguous_p ($target, $cond);
+
+  return $cond if !$message; # No ambiguity.
+
+  if ($owner == RULE_USER)
+    {
+      # For user rules, just diagnose the ambiguity.
+      msg 'syntax', $where, "$message ...", partial => 1;
+      msg_cond_rule ('syntax', $ambig_cond, $target,
+                     "... '$target' previously defined here");
+      return ();
+    }
+
+  # FIXME: for Automake rules, we can't diagnose ambiguities yet.
+  # The point is that Automake doesn't propagate conditions
+  # everywhere.  For instance &handle_PROGRAMS doesn't care if
+  # bin_PROGRAMS was defined conditionally or not.
+  # On the following input
+  #   if COND1
+  #   foo:
+  #           ...
+  #   else
+  #   bin_PROGRAMS = foo
+  #   endif
+  # &handle_PROGRAMS will attempt to define a 'foo:' rule
+  # in condition TRUE (which conflicts with COND1).  Fixing
+  # this in &handle_PROGRAMS and siblings seems hard: you'd
+  # have to explain &file_contents what to do with a
+  # condition.  So for now we do our best *here*.  If 'foo:'
+  # was already defined in condition COND1 and we want to define
+  # it in condition TRUE, then define it only in condition !COND1.
+  # (See cond14.sh and cond15.sh for some test cases.)
+  @conds = $rule->not_always_defined_in_cond ($cond)->conds;
+
+  # No conditions left to define the rule.
+  # Warn, because our workaround is meaningless in this case.
+  if (scalar @conds == 0)
+    {
+      msg 'syntax', $where, "$message ...", partial => 1;
+      msg_cond_rule ('syntax', $ambig_cond, $target,
+                     "... '$target' previously defined here");
+      return ();
+    }
+  return @conds;
+}
+
+=item C<@conds = define ($rulename, $source, $owner, $cond, $where)>
+
+Define a new rule.  C<$rulename> is the list of targets.  C<$source>
+is the filename the rule comes from.  C<$owner> is the owner of the
+rule (C<RULE_AUTOMAKE> or C<RULE_USER>).  C<$cond> is the
+C<Automake::Condition> under which the rule is defined.  C<$where> is
+the C<Automake::Location> where the rule is defined.
+
+Returns a (possibly empty) list of C<Automake::Condition>s where the
+rule's definition should be output.
+
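+A hypothetical call defining an Automake-owned, unconditional rule
+(C<$where> stands for an C<Automake::Location>; the target and source
+fragment names are only examples):
+
+  use Automake::RuleDef;               # for RULE_AUTOMAKE
+  use Automake::Condition qw (TRUE);
+
+  my @conds = Automake::Rule::define ('.c.obj', 'depend2.am',
+                                      RULE_AUTOMAKE, TRUE, $where);
+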
+=cut
+
+sub define ($$$$$)
+{
+  my ($target, $source, $owner, $cond, $where) = @_;
+
+  prog_error "$where is not a reference"
+    unless ref $where;
+  prog_error "$cond is not a reference"
+    unless ref $cond;
+
+  # Don't even think about defining a rule in condition FALSE.
+  return () if $cond == FALSE;
+
+  my $tdef = _rule_defn_with_exeext_awareness ($target, $cond, $where);
+
+  # A GNU make-style pattern rule has a single "%" in the target name.
+  msg ('portability', $where,
+       "'%'-style pattern rules are a GNU make extension")
+    if $target =~ /^[^%]*%[^%]*$/;
+
+  # See whether this is a duplicated target declaration.
+  if ($tdef)
+    {
+      # Diagnose invalid target redefinitions, if any.  Note that some
+      # target redefinitions are valid (e.g., for multiple-targets
+      # pattern rules).
+      _maybe_warn_about_duplicated_target ($target, $tdef, $source,
+                                           $owner, $cond, $where);
+      # Return so we don't redefine the rule in our tables, don't check
+      # for ambiguous condition, etc.  The rule will be output anyway
+      # because '&read_am_file' ignores the return code.
+      return ();
+    }
+
+  my $rule = _crule $target;
+
+  # Conditions for which the rule should be defined.  Due to some
+  # complications in the automake internals, this aspect is not as
+  # obvious as it might be, and in some cases this list must contain
+  # other entries in addition to '$cond'.  See the comments in
+  # '_conditionals_for_rule' for a rationale.
+  my @conds = _conditionals_for_rule ($rule, $owner, $cond, $where);
+
+  # Stop if we had ambiguous conditional definitions.
+  return unless @conds;
+
+  # Finally define this rule.
+  for my $c (@conds)
+    {
+      my $def = new Automake::RuleDef ($target, '', $where->clone,
+				       $owner, $source);
+      $rule->set ($c, $def);
+    }
+
+  # We honor inference rules with multiple targets because many
+  # makes support this and people use it.  However this is disallowed
+  # by POSIX.  We'll print a warning later.
+  my $target_count = 0;
+  my $inference_rule_count = 0;
+
+  for my $t (split (' ', $target))
+    {
+      ++$target_count;
+      # Check if the rule is a suffix rule: either it's a rule for
+      # two known extensions...
+      if ($t =~ /^($KNOWN_EXTENSIONS_PATTERN)($KNOWN_EXTENSIONS_PATTERN)$/
+	  # ...or it's a rule with unknown extensions (i.e., the rule
+	  # looks like '.foo.bar:' but '.foo' or '.bar' are not
+	  # declared in SUFFIXES and are not known language
+	  # extensions).  Automake will complete SUFFIXES from
+	  # @suffixes automatically (see handle_footer).
+	  || ($t =~ /$_SUFFIX_RULE_PATTERN/o && accept_extensions($1)))
+	{
+	  ++$inference_rule_count;
+	  register_suffix_rule ($where, $1, $2);
+	}
+    }
+
+  # POSIX allows multiple targets before the colon, but disallows
+  # definitions of multiple inference rules.  It's also
+  # disallowed to mix plain targets with inference rules.
+  msg ('portability', $where,
+       "inference rules can have only one target before the colon (POSIX)")
+    if $inference_rule_count > 0 && $target_count > 1;
+
+  return @conds;
+}
+
+=item C<depend ($target, @deps)>
+
+Add C<@deps> to the dependencies of target C<$target>.  This should
+be used only with factored targets (those appearing in
+C<%dependencies>).
+
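+For example (hypothetical), to make the factored C<clean-am> target
+also depend on C<clean-generated>:
+
+  depend ('clean-am', 'clean-generated');
+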
+=cut
+
+sub depend ($@)
+{
+  my ($category, @dependees) = @_;
+  push (@{$dependencies{$category}}, @dependees);
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::RuleDef>, L<Automake::Condition>,
+L<Automake::DisjConditions>, L<Automake::Location>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/RuleDef.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/RuleDef.pm
new file mode 100644
index 0000000..35a0530
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/RuleDef.pm
@@ -0,0 +1,129 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::RuleDef;
+
+use 5.006;
+use strict;
+use Carp;
+use Automake::ChannelDefs;
+use Automake::ItemDef;
+
+require Exporter;
+use vars '@ISA', '@EXPORT';
+@ISA = qw/Automake::ItemDef Exporter/;
+@EXPORT = qw (&RULE_AUTOMAKE &RULE_USER);
+
+=head1 NAME
+
+Automake::RuleDef - a class for rule definitions
+
+=head1 SYNOPSIS
+
+  use Automake::RuleDef;
+  use Automake::Location;
+
+=head1 DESCRIPTION
+
+This class gathers data related to one Makefile-rule definition.
+It shouldn't be needed outside of F<Rule.pm>.
+
+=head2 Constants
+
+=over 4
+
+=item C<RULE_AUTOMAKE>, C<RULE_USER>
+
+Possible owners for rules.
+
+=cut
+
+use constant RULE_AUTOMAKE => 0; # Rule defined by Automake.
+use constant RULE_USER => 1;     # Rule defined in the user's Makefile.am.
+
+=back
+
+=head2 Methods
+
+=over 4
+
+=item C<new Automake::RuleDef ($name, $comment, $location, $owner, $source)>
+
+Create a new rule definition with target C<$name>, with associated comment
+C<$comment>, Location C<$location> and owner C<$owner>, defined in file
+C<$source>.
+
+=cut
+
+sub new ($$$$$)
+{
+  my ($class, $name, $comment, $location, $owner, $source) = @_;
+
+  my $self = Automake::ItemDef::new ($class, $comment, $location, $owner);
+  $self->{'source'} = $source;
+  $self->{'name'} = $name;
+  return $self;
+}
+
+=item C<$source = $rule-E<gt>source>
+
+Return the source of the rule.
+
+=cut
+
+sub source ($)
+{
+  my ($self) = @_;
+  return $self->{'source'};
+}
+
+=item C<$name = $rule-E<gt>name>
+
+Return the name of the rule.
+
+=cut
+
+sub name ($)
+{
+  my ($self) = @_;
+  return $self->{'name'};
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::Rule>, L<Automake::ItemDef>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/VarDef.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/VarDef.pm
new file mode 100644
index 0000000..279e17c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/VarDef.pm
@@ -0,0 +1,349 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::VarDef;
+
+use 5.006;
+use strict;
+use Carp;
+use Automake::ChannelDefs;
+use Automake::ItemDef;
+
+require Exporter;
+use vars '@ISA', '@EXPORT';
+@ISA = qw/Automake::ItemDef Exporter/;
+@EXPORT = qw (&VAR_AUTOMAKE &VAR_CONFIGURE &VAR_MAKEFILE
+	      &VAR_ASIS &VAR_PRETTY &VAR_SILENT &VAR_SORTED);
+
+=head1 NAME
+
+Automake::VarDef - a class for variable definitions
+
+=head1 SYNOPSIS
+
+  use Automake::VarDef;
+  use Automake::Location;
+
+  # Create a VarDef for a definition such as
+  # | # any comment
+  # | foo = bar # more comment
+  # in Makefile.am
+  my $loc = new Automake::Location 'Makefile.am:2';
+  my $def = new Automake::VarDef ('foo', 'bar # more comment',
+                                  '# any comment',
+                                  $loc, '', VAR_MAKEFILE, VAR_ASIS);
+
+  # Appending to a definition.
+  $def->append ('value to append', 'comment to append');
+
+  # Accessors.
+  my $value    = $def->value;  # with trailing '#' comments and
+                               # continuation ("\\\n") omitted.
+  my $value    = $def->raw_value; # the real value, as passed to new().
+  my $comment  = $def->comment;
+  my $location = $def->location;
+  my $type     = $def->type;
+  my $owner    = $def->owner;
+  my $pretty   = $def->pretty;
+
+  # Changing owner.
+  $def->set_owner (VAR_CONFIGURE,
+                   new Automake::Location 'configure.ac:15');
+
+  # Marking examined definitions.
+  $def->set_seen;
+  my $seen_p = $def->seen;
+
+  # Printing a variable for debugging.
+  print STDERR $def->dump;
+
+=head1 DESCRIPTION
+
+This class gathers data related to one Makefile-variable definition.
+
+=head2 Constants
+
+=over 4
+
+=item C<VAR_AUTOMAKE>, C<VAR_CONFIGURE>, C<VAR_MAKEFILE>
+
+Possible owners for variables.  A variable can be defined
+by Automake, in F<configure.ac> (using C<AC_SUBST>), or in
+the user's F<Makefile.am>.
+
+=cut
+
+# Defined so that the owner of a variable can only be increased (e.g.,
+# Automake should not override a configure or Makefile variable).
+use constant VAR_AUTOMAKE => 0; # Variable defined by Automake.
+use constant VAR_CONFIGURE => 1;# Variable defined in configure.ac.
+use constant VAR_MAKEFILE => 2; # Variable defined in Makefile.am.
+
+=item C<VAR_ASIS>, C<VAR_PRETTY>, C<VAR_SILENT>, C<VAR_SORTED>
+
+Possible print styles.  C<VAR_ASIS> variables should be output as-is.
+C<VAR_PRETTY> variables are wrapped on multiple lines if they cannot
+fit on one.  C<VAR_SILENT> variables are not output at all.  Finally,
+C<VAR_SORTED> variables should be sorted and then handled as
+C<VAR_PRETTY> variables.
+
+C<VAR_SILENT> variables can also be overridden silently (unlike the
+other kinds of variables whose overriding may sometimes produce
+warnings).
+
+=cut
+
+# Possible values for pretty.
+use constant VAR_ASIS => 0;	# Output as-is.
+use constant VAR_PRETTY => 1;	# Pretty printed on output.
+use constant VAR_SILENT => 2;	# Not output.  (Can also be
+				# overridden silently.)
+use constant VAR_SORTED => 3;	# Sorted and pretty-printed.
+
+=back
+
+=head2 Methods
+
+C<VarDef> defines the following methods in addition to those inherited
+from L<Automake::ItemDef>.
+
+=over 4
+
+=item C<my $def = new Automake::VarDef ($varname, $value, $comment, $location, $type, $owner, $pretty)>
+
+Create a new Makefile-variable definition.  C<$varname> is the name of
+the variable being defined and C<$value> its value.
+
+C<$comment> is any comment preceding the definition.  (Because
+Automake reorders variable definitions in the output, it also tries to
+carry comments around.)
+
+C<$location> is the place where the definition occurred; it should be
+an instance of L<Automake::Location>.
+
+C<$type> should be C<''> for definitions made with C<=>, and C<':'>
+for those made with C<:=>.
+
+C<$owner> specifies who owns the variable; it can be one of
+C<VAR_AUTOMAKE>, C<VAR_CONFIGURE>, or C<VAR_MAKEFILE> (see these
+definitions).
+
+Finally, C<$pretty> tells how the variable should be output, and can
+be one of C<VAR_ASIS>, C<VAR_PRETTY>, C<VAR_SILENT>, or C<VAR_SORTED>
+(see these definitions).
+
+=cut
+
+sub new ($$$$$$$$)
+{
+  my ($class, $var, $value, $comment, $location, $type, $owner, $pretty) = @_;
+
+  # A user variable must be set by either '=' or ':=', and later
+  # promoted to '+='.
+  if ($owner != VAR_AUTOMAKE && $type eq '+')
+    {
+      error $location, "$var must be set with '=' before using '+='";
+    }
+
+  my $self = Automake::ItemDef::new ($class, $comment, $location, $owner);
+  $self->{'value'} = $value;
+  $self->{'type'} = $type;
+  $self->{'pretty'} = $pretty;
+  $self->{'seen'} = 0;
+  return $self;
+}
+
+=item C<$def-E<gt>append ($value, $comment)>
+
+Append C<$value> and C<$comment> to the existing value and comment of
+C<$def>.  This is normally called on C<+=> definitions.
+
+=cut
+
+sub append ($$$)
+{
+  my ($self, $value, $comment) = @_;
+  $self->{'comment'} .= $comment;
+
+  my $val = $self->{'value'};
+
+  # Strip comments from augmented variables.  This is so that
+  #   VAR = foo # com
+  #   VAR += bar
+  # does not become
+  #   VAR = foo # com bar
+  # Furthermore keeping '#' would not be portable if the variable is
+  # output on multiple lines.
+  $val =~ s/ ?#.*//;
+  # Insert a separator, if required.
+  $val .= ' ' if $val;
+  $self->{'value'} = $val . $value;
+  # Turn ASIS appended variables into PRETTY variables.  This is to
+  # cope with 'make' implementations that cannot read very long lines.
+  $self->{'pretty'} = VAR_PRETTY if $self->{'pretty'} == VAR_ASIS;
+}
+
+=item C<$def-E<gt>value>
+
+=item C<$def-E<gt>raw_value>
+
+=item C<$def-E<gt>type>
+
+=item C<$def-E<gt>pretty>
+
+Accessors to the various constituents of a C<VarDef>.  See the
+documentation of C<new>'s arguments for a description of these.
+
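+For instance, for a hypothetical definition C<foo = bar # baz>:
+
+  $def->raw_value;  # 'bar # baz'  (as written in the Makefile.am)
+  $def->value;      # 'bar '       (trailing comment stripped)
+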
+=cut
+
+sub value ($)
+{
+  my ($self) = @_;
+  my $val = $self->raw_value;
+  # Strip anything past '#'.  '#' characters cannot be escaped
+  # in Makefiles, so we don't have to be smart.
+  $val =~ s/#.*$//s;
+  # Strip backslashes.
+  $val =~ s/\\$/ /mg;
+  return $val;
+}
+
+sub raw_value ($)
+{
+  my ($self) = @_;
+  return $self->{'value'};
+}
+
+sub type ($)
+{
+  my ($self) = @_;
+  return $self->{'type'};
+}
+
+sub pretty ($)
+{
+  my ($self) = @_;
+  return $self->{'pretty'};
+}
+
+=item C<$def-E<gt>set_owner ($owner, $location)>
+
+Change the owner of a definition.  This usually happens because
+the user used C<+=> on an Automake variable, so (s)he now owns
+the content.  C<$location> should be an instance of L<Automake::Location>
+indicating where the change took place.
+
+=cut
+
+sub set_owner ($$$)
+{
+  my ($self, $owner, $location) = @_;
+  # We always adjust the location when the owner changes (even for
+  # '+=' statements).  The risk otherwise is to warn about
+  # a VAR_MAKEFILE variable and locate it in configure.ac...
+  $self->{'owner'} = $owner;
+  $self->{'location'} = $location;
+}
+
+=item C<$def-E<gt>set_seen>
+
+=item C<$bool = $def-E<gt>seen>
+
+These functions allow Automake to mark (C<set_seen>) variables that
+it has examined in some way, and later check (using C<seen>) for
+unused variables.  Unused variables usually indicate typos.
+
+=cut
+
+sub set_seen ($)
+{
+  my ($self) = @_;
+  $self->{'seen'} = 1;
+}
+
+sub seen ($)
+{
+  my ($self) = @_;
+  return $self->{'seen'};
+}
+
+=item C<$str = $def-E<gt>dump>
+
+Format the contents of C<$def> as a human-readable string,
+for debugging.
+
+=cut
+
+sub dump ($)
+{
+  my ($self) = @_;
+  my $owner = $self->owner;
+
+  if ($owner == VAR_AUTOMAKE)
+    {
+      $owner = 'Automake';
+    }
+  elsif ($owner == VAR_CONFIGURE)
+    {
+      $owner = 'Configure';
+    }
+  elsif ($owner == VAR_MAKEFILE)
+    {
+      $owner = 'Makefile';
+    }
+  else
+    {
+      prog_error ("unexpected owner");
+    }
+
+  my $where = $self->location->dump;
+  my $comment = $self->comment;
+  my $value = $self->raw_value;
+  my $type = $self->type;
+
+  return "{
+      type: $type=
+      where: $where      comment: $comment
+      value: $value
+      owner: $owner
+    }\n";
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::Variable>, L<Automake::ItemDef>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Variable.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Variable.pm
new file mode 100644
index 0000000..4751563
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Variable.pm
@@ -0,0 +1,1693 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Variable;
+
+use 5.006;
+use strict;
+use Carp;
+
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::Item;
+use Automake::VarDef;
+use Automake::Condition qw (TRUE FALSE);
+use Automake::DisjConditions;
+use Automake::General 'uniq';
+use Automake::Wrap 'makefile_wrap';
+
+require Exporter;
+use vars '@ISA', '@EXPORT', '@EXPORT_OK';
+@ISA = qw/Automake::Item Exporter/;
+@EXPORT = qw (err_var msg_var msg_cond_var reject_var
+	      var rvar vardef rvardef
+	      variables
+	      scan_variable_expansions check_variable_expansions
+	      variable_delete
+	      variables_dump
+	      set_seen
+	      require_variables
+	      variable_value
+	      output_variables
+	      transform_variable_recursively);
+
+=head1 NAME
+
+Automake::Variable - support for variable definitions
+
+=head1 SYNOPSIS
+
+  use Automake::Variable;
+  use Automake::VarDef;
+
+  # Defining a variable.
+  Automake::Variable::define($varname, $owner, $type,
+                             $cond, $value, $comment,
+                             $where, $pretty)
+
+  # Looking up a variable.
+  my $var = var $varname;
+  if ($var)
+    {
+      ...
+    }
+
+  # Looking up a variable that is assumed to exist.
+  my $var = rvar $varname;
+
+  # The list of conditions where $var has been defined.
+  # ($var->conditions is an Automake::DisjConditions,
+  # $var->conditions->conds is a list of Automake::Condition.)
+  my @conds = $var->conditions->conds
+
+  # Access to the definition in Condition $cond.
+  # $def is an Automake::VarDef.
+  my $def = $var->def ($cond);
+  if ($def)
+    {
+      ...
+    }
+
+  # When the conditional definition is assumed to exist, use
+  my $def = $var->rdef ($cond);
+
+
+=head1 DESCRIPTION
+
+This package provides support for Makefile variable definitions.
+
+An C<Automake::Variable> is a variable name associated with possibly
+many conditional definitions.  These definitions are instances
+of C<Automake::VarDef>.
+
+Therefore obtaining the value of a variable under a given
+condition involves two lookups.  One to look up the variable,
+and one to look up the conditional definition:
+
+  my $var = var $name;
+  if ($var)
+    {
+      my $def = $var->def ($cond);
+      if ($def)
+        {
+          return $def->value;
+        }
+      ...
+    }
+  ...
+
+When it is known that the variable and the definition
+being looked up exist, the above can be simplified to
+
+  return var ($name)->def ($cond)->value; # Do not write this.
+
+but is better written
+
+  return rvar ($name)->rdef ($cond)->value;
+
+or even
+
+  return rvardef ($name, $cond)->value;
+
+The I<r> variants of the C<var>, C<def>, and C<vardef> methods add an
+extra test to ensure that the lookup succeeded, and will diagnose
+failures as internal errors (with a message which is much more
+informative than Perl's warning about calling a method on a
+non-object).
+
+=cut
+
+my $_VARIABLE_CHARACTERS = '[.A-Za-z0-9_@]+';
+my $_VARIABLE_PATTERN = '^' . $_VARIABLE_CHARACTERS . "\$";
+my $_VARIABLE_RECURSIVE_PATTERN =
+    '^([.A-Za-z0-9_@]|\$[({]' . $_VARIABLE_CHARACTERS . '[})]?)+' . "\$";
+
+# The order in which variables should be output.  (May contain
+# duplicates -- only the first occurrence matters.)
+my @_var_order;
+
+# This keeps track of all variables defined by &_gen_varname.
+# $_gen_varname{$base} is a hash for all variables defined with
+# prefix '$base'.  Values stored in this hash are the variable names.
+# Keys have the form "(COND1)VAL1(COND2)VAL2..." where VAL1 and VAL2
+# are the values of the variable for condition COND1 and COND2.
+my %_gen_varname = ();
+# $_gen_varname_n{$base} is the number of variables generated by
+# _gen_varname() for $base.  This is not the same as keys
+# %{$_gen_varname{$base}} because %_gen_varname may also contain
+# variables not generated by _gen_varname.
+my %_gen_varname_n = ();
+
+# Declare the macros that define known variables, so we can
+# hint the user if she tries to use one of these variables.
+
+# Macros accessible via aclocal.
+my %_am_macro_for_var =
+  (
+   CCAS => 'AM_PROG_AS',
+   CCASFLAGS => 'AM_PROG_AS',
+   EMACS => 'AM_PATH_LISPDIR',
+   GCJ => 'AM_PROG_GCJ',
+   LEX => 'AM_PROG_LEX',
+   LIBTOOL => 'LT_INIT',
+   lispdir => 'AM_PATH_LISPDIR',
+   pkgpyexecdir => 'AM_PATH_PYTHON',
+   pkgpythondir => 'AM_PATH_PYTHON',
+   pyexecdir => 'AM_PATH_PYTHON',
+   PYTHON => 'AM_PATH_PYTHON',
+   pythondir => 'AM_PATH_PYTHON',
+   );
+
+# Macros shipped with Autoconf.
+my %_ac_macro_for_var =
+  (
+   ALLOCA => 'AC_FUNC_ALLOCA',
+   CC => 'AC_PROG_CC',
+   CFLAGS => 'AC_PROG_CC',
+   CXX => 'AC_PROG_CXX',
+   CXXFLAGS => 'AC_PROG_CXX',
+   F77 => 'AC_PROG_F77',
+   FFLAGS => 'AC_PROG_F77',
+   FC => 'AC_PROG_FC',
+   FCFLAGS => 'AC_PROG_FC',
+   OBJC => 'AC_PROG_OBJC',
+   OBJCFLAGS => 'AC_PROG_OBJC',
+   OBJCXX => 'AC_PROG_OBJCXX',
+   OBJCXXFLAGS => 'AC_PROG_OBJCXX',
+   RANLIB => 'AC_PROG_RANLIB',
+   UPC => 'AM_PROG_UPC',
+   UPCFLAGS => 'AM_PROG_UPC',
+   YACC => 'AC_PROG_YACC',
+   );
+
+# The name of the configure.ac file.
+my $configure_ac;
+
+# Variables that can be overridden without complaint from -Woverride
+my %_silent_variable_override =
+  (AM_MAKEINFOHTMLFLAGS => 1,
+   AR => 1,
+   ARFLAGS => 1,
+   DEJATOOL => 1,
+   JAVAC => 1,
+   JAVAROOT => 1);
+
+# Count of helper variables used to implement conditional '+='.
+my $_appendvar;
+
+# Each call to C<Automake::Variable::traverse_recursively> gets a
+# unique label. This is used to detect recursively defined variables.
+my $_traversal = 0;
+
+
+=head2 Error reporting functions
+
+In these functions, C<$var> can be either a variable name, or
+an instance of C<Automake::Variable>.
+
+=over 4
+
+=item C<err_var ($var, $message, [%options])>
+
+Uncategorized errors about variables.
+
+=cut
+
+sub err_var ($$;%)
+{
+  msg_var ('error', @_);
+}
+
+=item C<msg_cond_var ($channel, $cond, $var, $message, [%options])>
+
+Messages about conditional variables.
+
+=cut
+
+sub msg_cond_var ($$$$;%)
+{
+  my ($channel, $cond, $var, $msg, %opts) = @_;
+  my $v = ref ($var) ? $var : rvar ($var);
+  msg $channel, $v->rdef ($cond)->location, $msg, %opts;
+}
+
+=item C<msg_var ($channel, $var, $message, [%options])>
+
+Messages about variables.
+
+=cut
+
+sub msg_var ($$$;%)
+{
+  my ($channel, $var, $msg, %opts) = @_;
+  my $v = ref ($var) ? $var : rvar ($var);
+  # Don't know which condition is concerned.  Pick any.
+  my $cond = $v->conditions->one_cond;
+  msg_cond_var $channel, $cond, $v, $msg, %opts;
+}
+
+=item C<$bool = reject_var ($varname, $error_msg)>
+
+Bail out with C<$error_msg> if a variable with name C<$varname> has
+been defined.
+
+Return true iff C<$varname> is defined.
+
+=cut
+
+sub reject_var ($$)
+{
+  my ($var, $msg) = @_;
+  my $v = var ($var);
+  if ($v)
+    {
+      err_var $v, $msg;
+      return 1;
+    }
+  return 0;
+}
+
+=back
+
+=head2 Administrative functions
+
+=over 4
+
+=item C<Automake::Variable::hook ($varname, $fun)>
+
+Declare a function to be called whenever a variable
+named C<$varname> is defined or redefined.
+
+C<$fun> should take two arguments: C<$type> and C<$value>.
+When C<$type> is C<''> or C<':'>, C<$value> is the value being
+assigned to C<$varname>.  When C<$type> is C<'+'>, C<$value>
+is the value being appended to C<$varname>.
+
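+For illustration only (C<@seen_suffixes> is a made-up array, not
+something automake defines), a hook recording every value assigned or
+appended to C<SUFFIXES> could look like:
+
+  my @seen_suffixes;
+  Automake::Variable::hook ('SUFFIXES',
+                            sub {
+                              my ($type, $value) = @_;
+                              push @seen_suffixes, split (' ', $value);
+                            });
+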
+=cut
+
+use vars '%_hooks';
+sub hook ($$)
+{
+  my ($var, $fun) = @_;
+  $_hooks{$var} = $fun;
+}
+
+=item C<variables ([$suffix])>
+
+Returns the list of all L<Automake::Variable> instances.  (I.e., all
+variables defined so far.)  If C<$suffix> is supplied, return only
+the L<Automake::Variable> instances that end with C<_$suffix>.
+
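+For instance, a minimal sketch that walks over every C<_SOURCES>
+variable defined so far:
+
+  foreach my $var (variables ('SOURCES'))
+    {
+      print $var->name, "\n";
+    }
+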
+=cut
+
+use vars '%_variable_dict', '%_primary_dict';
+sub variables (;$)
+{
+  my ($suffix) = @_;
+  my @vars = ();
+  if ($suffix)
+    {
+      if (exists $_primary_dict{$suffix})
+	{
+	  @vars = values %{$_primary_dict{$suffix}};
+	}
+    }
+  else
+    {
+      @vars = values %_variable_dict;
+    }
+  # The behaviour of the 'sort' built-in is undefined in scalar
+# context, hence we need ad-hoc handling for such a context.
+  return wantarray ? sort { $a->name cmp $b->name } @vars : scalar @vars;
+}
+
+=item C<Automake::Variable::reset>
+
+The I<forget all> function.  Clears all known variables and resets some
+other internal data.
+
+=cut
+
+sub reset ()
+{
+  %_variable_dict = ();
+  %_primary_dict = ();
+  $_appendvar = 0;
+  @_var_order = ();
+  %_gen_varname = ();
+  %_gen_varname_n = ();
+  $_traversal = 0;
+}
+
+=item C<var ($varname)>
+
+Return the C<Automake::Variable> object for the variable
+named C<$varname> if defined.  Return 0 otherwise.
+
+=cut
+
+sub var ($)
+{
+  my ($name) = @_;
+  return $_variable_dict{$name} if exists $_variable_dict{$name};
+  return 0;
+}
+
+=item C<vardef ($varname, $cond)>
+
+Return the C<Automake::VarDef> object for the variable named
+C<$varname> if defined in condition C<$cond>.  Return false
+if the condition or the variable does not exist.
+
+=cut
+
+sub vardef ($$)
+{
+  my ($name, $cond) = @_;
+  my $var = var $name;
+  return $var && $var->def ($cond);
+}
+
+# Create the variable if it does not exist.
+# This is used only by other functions in this package.
+sub _cvar ($)
+{
+  my ($name) = @_;
+  my $v = var $name;
+  return $v if $v;
+  return _new Automake::Variable $name;
+}
+
+=item C<rvar ($varname)>
+
+Return the C<Automake::Variable> object for the variable named
+C<$varname>.  Abort with an internal error if the variable was not
+defined.
+
+The I<r> in front of C<var> stands for I<required>.  One
+should call C<rvar> to assert the variable's existence.
+
+=cut
+
+sub rvar ($)
+{
+  my ($name) = @_;
+  my $v = var $name;
+  prog_error ("undefined variable $name\n" . &variables_dump)
+    unless $v;
+  return $v;
+}
+
+=item C<rvardef ($varname, $cond)>
+
+Return the C<Automake::VarDef> object for the variable named
+C<$varname> if defined in condition C<$cond>.  Abort with an internal
+error if the condition or the variable does not exist.
+
+=cut
+
+sub rvardef ($$)
+{
+  my ($name, $cond) = @_;
+  return rvar ($name)->rdef ($cond);
+}
+
+=back
+
+=head2 Methods
+
+C<Automake::Variable> is a subclass of C<Automake::Item>.  See
+that package for inherited methods.
+
+Here are the methods specific to the C<Automake::Variable> instances.
+Use the C<define> function, described later, to create such objects.
+
+=over 4
+
+=cut
+
+# Create Automake::Variable objects.  This is used
+# only in this file.  Other users should use
+# the "define" function.
+sub _new ($$)
+{
+  my ($class, $name) = @_;
+  my $self = Automake::Item::new ($class, $name);
+  $self->{'scanned'} = 0;
+  $self->{'last-append'} = []; # helper variable for last conditional append.
+  $_variable_dict{$name} = $self;
+  if ($name =~ /_([[:alnum:]]+)$/)
+    {
+      $_primary_dict{$1}{$name} = $self;
+    }
+  return $self;
+}
+
+# _check_ambiguous_condition ($SELF, $COND, $WHERE)
+# -------------------------------------------------
+# Check for an ambiguous conditional.  This is called when a variable
+# is being defined conditionally.  If we already know about a
+# definition that is true under the same conditions, then we have an
+# ambiguity.
+sub _check_ambiguous_condition ($$$)
+{
+  my ($self, $cond, $where) = @_;
+  my $var = $self->name;
+  my ($message, $ambig_cond) = $self->conditions->ambiguous_p ($var, $cond);
+
+  # We allow silent variables to be overridden silently,
+  # by either silent or non-silent variables.
+  my $def = $self->def ($ambig_cond);
+  if ($message && $def->pretty != VAR_SILENT)
+    {
+      msg 'syntax', $where, "$message ...", partial => 1;
+      msg_var ('syntax', $var, "... '$var' previously defined here");
+      verb ($self->dump);
+    }
+}
+
+=item C<$bool = $var-E<gt>check_defined_unconditionally ([$parent, $parent_cond])>
+
+Warn if the variable is conditionally defined.  C<$parent> is the name
+of the parent variable, and C<$parent_cond> the condition of the parent
+definition.  These two variables are used to display diagnostics.
+
+=cut
+
+sub check_defined_unconditionally ($;$$)
+{
+  my ($self, $parent, $parent_cond) = @_;
+
+  if (!$self->conditions->true)
+    {
+      if ($parent)
+	{
+	  msg_cond_var ('unsupported', $parent_cond, $parent,
+			"automake does not support conditional definition of "
+			. $self->name . " in $parent");
+	}
+      else
+	{
+	  msg_var ('unsupported', $self,
+		   "automake does not support " . $self->name
+		   . " being defined conditionally");
+	}
+    }
+}
+
+=item C<$str = $var-E<gt>output ([@conds])>
+
+Format all the definitions of C<$var> if C<@conds> is not specified,
+otherwise only those corresponding to C<@conds>.
+
+=cut
+
+sub output ($@)
+{
+  my ($self, @conds) = @_;
+
+  @conds = $self->conditions->conds
+    unless @conds;
+
+  my $res = '';
+  my $name = $self->name;
+
+  foreach my $cond (@conds)
+    {
+      my $def = $self->def ($cond);
+      prog_error ("unknown condition '" . $cond->human . "' for '"
+		  . $self->name . "'")
+	unless $def;
+
+      next
+	if $def->pretty == VAR_SILENT;
+
+      $res .= $def->comment;
+
+      my $val = $def->raw_value;
+      my $equals = $def->type eq ':' ? ':=' : '=';
+      my $str = $cond->subst_string;
+
+
+      if ($def->pretty == VAR_ASIS)
+	{
+	  my $output_var = "$name $equals $val";
+	  $output_var =~ s/^/$str/meg;
+	  $res .= "$output_var\n";
+	}
+      elsif ($def->pretty == VAR_PRETTY)
+	{
+	  # Suppress escaped new lines.  &makefile_wrap will
+	  # add them back, maybe at other places.
+	  $val =~ s/\\$//mg;
+	  my $wrap = makefile_wrap ("$str$name $equals", "$str\t",
+				    split (' ', $val));
+
+	  # If the last line of the definition is made only of
+	  # @substitutions@, append an empty variable to make sure it
+	  # cannot be substituted as a blank line (that would confuse
+	  # HP-UX Make).
+	  $wrap = makefile_wrap ("$str$name $equals", "$str\t",
+				 split (' ', $val), '$(am__empty)')
+	    if $wrap =~ /\n(\s*@\w+@)+\s*$/;
+
+	  $res .= $wrap;
+	}
+      else # ($def->pretty == VAR_SORTED)
+	{
+	  # Suppress escaped new lines.  &makefile_wrap will
+	  # add them back, maybe at other places.
+	  $val =~ s/\\$//mg;
+	  $res .= makefile_wrap ("$str$name $equals", "$str\t",
+				 sort (split (' ' , $val)));
+	}
+    }
+  return $res;
+}
+
+=item C<@values = $var-E<gt>value_as_list ($cond, [$parent, $parent_cond])>
+
+Get the value of C<$var> as a list, given a specified condition,
+without recursing through any subvariables.
+
+C<$cond> is the condition of interest.  C<$var> does not need
+to be defined for condition C<$cond> exactly, but it needs
+to be defined for at most one condition implied by C<$cond>.
+
+C<$parent> and C<$parent_cond> designate the name and the condition
+of the parent variable, i.e., the variable in which C<$var> is
+being expanded.  These are used in diagnostics.
+
+For example, if C<A> is defined as "C<foo $(B) bar>" in condition
+C<TRUE>, calling C<rvar ('A')-E<gt>value_as_list (TRUE)> will return
+C<("foo", "$(B)", "bar")>.
+
+=cut
+
+sub value_as_list ($$;$$)
+{
+  my ($self, $cond, $parent, $parent_cond) = @_;
+  my @result;
+
+  # Get value for given condition
+  my $onceflag;
+  foreach my $vcond ($self->conditions->conds)
+    {
+      if ($vcond->true_when ($cond))
+	{
+	  # If there is more than one definition of $var matching
+	  # $cond then we are in trouble: tell the user we need a
+	  # paddle.  Continue by merging results from all conditions,
+	  # although it doesn't make much sense.
+	  $self->check_defined_unconditionally ($parent, $parent_cond)
+	    if $onceflag;
+	  $onceflag = 1;
+
+	  my $val = $self->rdef ($vcond)->value;
+	  push @result, split (' ', $val);
+	}
+    }
+  return @result;
+}
+
+=item C<@values = $var-E<gt>value_as_list_recursive ([%options])>
+
+Return the contents of C<$var> as a list, split on whitespace.  This
+will recursively follow C<$(...)> and C<${...}> inclusions.  It
+preserves C<@...@> substitutions.
+
+C<%options> is a list of options for C<Variable::traverse_recursively>
+(see this method).  The most useful is C<cond_filter>:
+
+  $var->value_as_list_recursive (cond_filter => $cond)
+
+will return the contents of C<$var> and any subvariable in all
+conditions implied by C<$cond>.
+
+C<%options> can also carry options specific to C<value_as_list_recursive>.
+Presently, the only such option is C<location =E<gt> 1> which instructs
+C<value_as_list_recursive> to return a list of C<[$location, @values]> pairs.
+
+=cut
+
+sub value_as_list_recursive ($;%)
+{
+  my ($var, %options) = @_;
+
+  return $var->traverse_recursively
+    (# Construct [$location, $value] pairs if requested.
+     sub {
+       my ($var, $val, $cond, $full_cond) = @_;
+       return [$var->rdef ($cond)->location, $val] if $options{'location'};
+       return $val;
+     },
+     # Collect results.
+     sub {
+       my ($var, $parent_cond, @allresults) = @_;
+       return map { my ($cond, @vals) = @$_; @vals } @allresults;
+     },
+     %options);
+}
+
+
+=item C<$bool = $var-E<gt>has_conditional_contents>
+
+Return 1 if C<$var> or one of its subvariables was conditionally
+defined.  Return 0 otherwise.
+
+=cut
+
+sub has_conditional_contents ($)
+{
+  my ($self) = @_;
+
+  # Traverse the variable recursively until we
+  # find a variable defined conditionally.
+  # Use 'die' to abort the traversal, and pass it '$full_cond'
+  # so we can easily find whether the 'eval' block aborted
+  # because we found a condition, or for some other error.
+  eval
+    {
+      $self->traverse_recursively
+	(sub
+	 {
+	   my ($subvar, $val, $cond, $full_cond) = @_;
+	   die $full_cond if ! $full_cond->true;
+	   return ();
+	 },
+	 sub { return (); });
+    };
+  if ($@)
+    {
+      return 1 if ref ($@) && $@->isa ("Automake::Condition");
+      # Propagate other errors.
+      die;
+    }
+  return 0;
+}
+
+
+=item C<$string = $var-E<gt>dump>
+
+Return a string describing all we know about C<$var>.
+For debugging.
+
+=cut
+
+sub dump ($)
+{
+  my ($self) = @_;
+
+  my $text = $self->name . ": \n  {\n";
+  foreach my $vcond ($self->conditions->conds)
+    {
+      $text .= "    " . $vcond->human . " => " . $self->rdef ($vcond)->dump;
+    }
+  $text .= "  }\n";
+  return $text;
+}
+
+
+=back
+
+=head2 Utility functions
+
+=over 4
+
+=item C<@list = scan_variable_expansions ($text)>
+
+Return the list of variable names expanded in C<$text>.  Note that
+unlike some other functions, C<$text> is not split on spaces before we
+check for subvariables.
+
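+A small illustration (the input text is made up):
+
+  my @used =
+    scan_variable_expansions ('foo: $(FOO) ${BAR:.c=.o} # $(BAZ)');
+  # @used now contains ('FOO', 'BAR'): the comment is stripped and the
+  # substitution reference is reduced to its variable name.
+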
+=cut
+
+sub scan_variable_expansions ($)
+{
+  my ($text) = @_;
+  my @result = ();
+
+  # Strip comments.
+  $text =~ s/#.*$//;
+
+  # Record each use of ${stuff} or $(stuff) that does not follow a $.
+  while ($text =~ /(?<!\$)\$(?:\{([^\}]*)\}|\(([^\)]*)\))/g)
+    {
+      my $var = $1 || $2;
+      # The occurrence may look like $(string1[:subst1=[subst2]]) but
+      # we want only 'string1'.
+      $var =~ s/:[^:=]*=[^=]*$//;
+      push @result, $var;
+    }
+
+  return @result;
+}
+
+=item C<check_variable_expansions ($text, $where)>
+
+Check variable expansions in C<$text> and warn about any name that
+does not conform to POSIX.  C<$where> is the location of C<$text>
+for the error message.
+
+=cut
+
+sub check_variable_expansions ($$)
+{
+  my ($text, $where) = @_;
+  # Catch expansion of variables whose name does not conform to POSIX.
+  foreach my $var (scan_variable_expansions ($text))
+    {
+      if ($var !~ /$_VARIABLE_PATTERN/o)
+	{
+	  # If the variable name contains a space, it's likely
+	  # to be a GNU make extension (such as $(addsuffix ...)).
+	  # Mention this in the diagnostic.
+	  my $gnuext = "";
+	  $gnuext = "\n(probably a GNU make extension)" if $var =~ / /;
+	  # Accept recursive variable expansions if so desired
+	  # (we hope they are rather portable in practice).
+	  if ($var =~ /$_VARIABLE_RECURSIVE_PATTERN/o)
+	    {
+	      msg ('portability-recursive', $where,
+		   "$var: non-POSIX recursive variable expansion$gnuext");
+	    }
+	  else
+	    {
+	      msg ('portability', $where, "$var: non-POSIX variable name$gnuext");
+	    }
+	}
+    }
+}
+
+
+
+=item C<Automake::Variable::define($varname, $owner, $type, $cond, $value, $comment, $where, $pretty)>
+
+Define or append to a new variable.
+
+C<$varname>: the name of the variable being defined.
+
+C<$owner>: owner of the variable (one of C<VAR_MAKEFILE>,
+C<VAR_CONFIGURE>, or C<VAR_AUTOMAKE>, defined by L<Automake::VarDef>).
+Variables can be overridden, provided the new owner is not weaker
+(C<VAR_AUTOMAKE> < C<VAR_CONFIGURE> < C<VAR_MAKEFILE>).
+
+C<$type>: the type of the assignment (C<''> for C<FOO = bar>,
+C<':'> for C<FOO := bar>, and C<'+'> for C<'FOO += bar'>).
+
+C<$cond>: the C<Condition> in which C<$var> is being defined.
+
+C<$value>: the value assigned to C<$var> in condition C<$cond>.
+
+C<$comment>: any comment (C<'# bla.'>) associated with the assignment.
+Comments from C<+=> assignments stack with comments from the last C<=>
+assignment.
+
+C<$where>: the C<Location> of the assignment.
+
+C<$pretty>: whether C<$value> should be pretty printed (one of
+C<VAR_ASIS>, C<VAR_PRETTY>, C<VAR_SILENT>, or C<VAR_SORTED>, defined
+by L<Automake::VarDef>).  C<$pretty> applies only to real
+assignments.  I.e., it does not apply to a C<+=> assignment (except
+when part of it is being done as a conditional C<=> assignment).
+
+This function will also run any hook registered with the C<hook>
+function.
+
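+For illustration, a hypothetical call (assuming C<$where> holds an
+C<Automake::Location>; C<TRUE> comes from C<Automake::Condition> and
+the C<VAR_*> constants from C<Automake::VarDef>):
+
+  Automake::Variable::define ('AM_CPPFLAGS', VAR_MAKEFILE, '', TRUE,
+                              '-I$(top_srcdir)/include', '', $where,
+                              VAR_PRETTY);
+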
+=cut
+
+sub define ($$$$$$$$)
+{
+  my ($var, $owner, $type, $cond, $value, $comment, $where, $pretty) = @_;
+
+  prog_error "$cond is not a reference"
+    unless ref $cond;
+
+  prog_error "$where is not a reference"
+    unless ref $where;
+
+  prog_error "pretty argument missing"
+    unless defined $pretty && ($pretty == VAR_ASIS
+			       || $pretty == VAR_PRETTY
+			       || $pretty == VAR_SILENT
+			       || $pretty == VAR_SORTED);
+
+  error $where, "bad characters in variable name '$var'"
+    if $var !~ /$_VARIABLE_PATTERN/o;
+
+  # ':='-style assignments are not acknowledged by POSIX.  Moreover it
+  # has multiple meanings.  In GNU make or BSD make it means "assign
+  # with immediate expansion", while in OSF make it is used for
+  # conditional assignments.
+  msg ('portability', $where, "':='-style assignments are not portable")
+    if $type eq ':';
+
+  check_variable_expansions ($value, $where);
+
+  # If there's a comment, make sure it is \n-terminated.
+  if ($comment)
+    {
+      chomp $comment;
+      $comment .= "\n";
+    }
+  else
+    {
+      $comment = '';
+    }
+
+  my $self = _cvar $var;
+
+  my $def = $self->def ($cond);
+  my $new_var = $def ? 0 : 1;
+
+  # Additional checks for Automake definitions.
+  if ($owner == VAR_AUTOMAKE && ! $new_var)
+    {
+      # An Automake variable must be consistently defined with the same
+      # sign by Automake.
+      if ($def->type ne $type && $def->owner == VAR_AUTOMAKE)
+	{
+	  error ($def->location,
+		 "Automake variable '$var' was set with '"
+		 . $def->type . "=' here ...", partial => 1);
+	  error ($where, "... and is now set with '$type=' here.");
+	  prog_error ("Automake variable assignments should be consistently\n"
+		      . "defined with the same sign");
+	}
+
+      # If Automake tries to override a value specified by the user,
+      # just don't let it do.
+      if ($def->owner != VAR_AUTOMAKE)
+	{
+	  if (! exists $_silent_variable_override{$var})
+	    {
+	      my $condmsg = ($cond == TRUE
+			     ? '' : (" in condition '" . $cond->human . "'"));
+	      msg_cond_var ('override', $cond, $var,
+			    "user variable '$var' defined here$condmsg ...",
+			    partial => 1);
+	      msg ('override', $where,
+		   "... overrides Automake variable '$var' defined here");
+	    }
+	  verb ("refusing to override the user definition of:\n"
+		. $self->dump ."with '" . $cond->human . "' => '$value'");
+	  return;
+	}
+    }
+
+  # Differentiate assignment types.
+
+  # 1. append (+=) to a variable defined for current condition
+  if ($type eq '+' && ! $new_var)
+    {
+      $def->append ($value, $comment);
+      $self->{'last-append'} = [];
+
+      # Only increase owners.  A VAR_CONFIGURE variable augmented in a
+      # Makefile.am becomes a VAR_MAKEFILE variable.
+      $def->set_owner ($owner, $where->clone)
+	if $owner > $def->owner;
+    }
+  # 2. append (+=) to a variable defined for *another* condition
+  elsif ($type eq '+' && ! $self->conditions->false)
+    {
+      # * Generally, $cond is not TRUE.  For instance:
+      #     FOO = foo
+      #     if COND
+      #       FOO += bar
+      #     endif
+      #   In this case, we declare a helper variable conditionally,
+      #   and append it to FOO:
+      #     FOO = foo $(am__append_1)
+      #     @COND_TRUE@am__append_1 = bar
+      #   Of course if FOO is defined under several conditions, we add
+      #   $(am__append_1) to each definition.
+      #
+      # * If $cond is TRUE, we don't need the helper variable.  E.g., in
+      #     if COND1
+      #       FOO = foo1
+      #     else
+      #       FOO = foo2
+      #     endif
+      #     FOO += bar
+      #   we can add bar directly to all definitions of FOO, and output
+      #     @COND_TRUE@FOO = foo1 bar
+      #     @COND_FALSE@FOO = foo2 bar
+
+      my $lastappend = [];
+      # Do we need a helper variable?
+      if ($cond != TRUE)
+        {
+	  # Can we reuse the helper variable created for the previous
+	  # append?  (We cannot reuse older helper variables because
+	  # we must preserve the order of items appended to the
+	  # variable.)
+	  my $condstr = $cond->string;
+	  my $key = "$var:$condstr";
+	  my ($appendvar, $appendvarcond) = @{$self->{'last-append'}};
+	  if ($appendvar && $condstr eq $appendvarcond)
+	    {
+	      # Yes, let's simply append to it.
+	      $var = $appendvar;
+	      $owner = VAR_AUTOMAKE;
+	      $self = var ($var);
+	      $def = $self->rdef ($cond);
+	      $new_var = 0;
+	    }
+	  else
+	    {
+	      # No, create it.
+	      my $num = ++$_appendvar;
+	      my $hvar = "am__append_$num";
+	      $lastappend = [$hvar, $condstr];
+	      &define ($hvar, VAR_AUTOMAKE, '+',
+		       $cond, $value, $comment, $where, $pretty);
+
+	      # Now HVAR is to be added to VAR.
+	      $comment = '';
+	      $value = "\$($hvar)";
+	    }
+	}
+
+      # Add VALUE to all definitions of SELF.
+      foreach my $vcond ($self->conditions->conds)
+        {
+	  # We have a bit of error detection to do here.
+	  # This:
+	  #   if COND1
+	  #     X = Y
+	  #   endif
+	  #   X += Z
+	  # should be rejected because X is not defined for all conditions
+	  # where '+=' applies.
+	  my $undef_cond = $self->not_always_defined_in_cond ($cond);
+	  if (! $undef_cond->false)
+	    {
+	      error ($where,
+		     "cannot apply '+=' because '$var' is not defined "
+		     . "in\nthe following conditions:\n  "
+		     . join ("\n  ", map { $_->human } $undef_cond->conds)
+		     . "\neither define '$var' in these conditions,"
+		     . " or use\n'+=' in the same conditions as"
+		     . " the definitions.");
+	    }
+	  else
+	    {
+	      &define ($var, $owner, '+', $vcond, $value, $comment,
+		       $where, $pretty);
+	    }
+	}
+      $self->{'last-append'} = $lastappend;
+    }
+  # 3. first assignment (=, :=, or +=)
+  else
+    {
+      # There must be no previous value unless the user is redefining
+      # an Automake variable or an AC_SUBST variable for an existing
+      # condition.
+      _check_ambiguous_condition ($self, $cond, $where)
+	unless (!$new_var
+		&& (($def->owner == VAR_AUTOMAKE && $owner != VAR_AUTOMAKE)
+		    || $def->owner == VAR_CONFIGURE));
+
+      # Never decrease an owner.
+      $owner = $def->owner
+	if ! $new_var && $owner < $def->owner;
+
+      # Assignments to a macro set its location.  We don't adjust
+      # locations for '+='.  Ideally I suppose we would associate
+      # line numbers with random bits of text.
+      $def = new Automake::VarDef ($var, $value, $comment, $where->clone,
+				   $type, $owner, $pretty);
+      $self->set ($cond, $def);
+      push @_var_order, $var;
+    }
+
+  # Call any defined hook.  This helps to update some internal state
+  # *while* parsing the file.  For instance the handling of SUFFIXES
+  # requires this (see var_SUFFIXES_trigger).
+  &{$_hooks{$var}}($type, $value) if exists $_hooks{$var};
+}
+
+=item C<variable_delete ($varname, [@conds])>
+
+Forget about C<$varname> under the conditions C<@conds>, or completely
+if C<@conds> is empty.
+
+=cut
+
+sub variable_delete ($@)
+{
+  my ($var, @conds) = @_;
+
+  if (!@conds)
+    {
+      delete $_variable_dict{$var};
+    }
+  else
+    {
+      for my $cond (@conds)
+	{
+	  delete $_variable_dict{$var}{'defs'}{$cond};
+	}
+    }
+  if ($var =~ /_([[:alnum:]]+)$/)
+    {
+      delete $_primary_dict{$1}{$var};
+    }
+}
+
+=item C<$str = variables_dump>
+
+Return a string describing all we know about all variables.
+For debugging.
+
+=cut
+
+sub variables_dump ()
+{
+  my $text = "all variables:\n{\n";
+  foreach my $var (variables())
+    {
+      $text .= $var->dump;
+    }
+  $text .= "}\n";
+  return $text;
+}
+
+
+=item C<$var = set_seen ($varname)>
+
+=item C<$var = $var-E<gt>set_seen>
+
+Mark all definitions of this variable as examined, if the variable
+exists.  See L<Automake::VarDef::set_seen>.
+
+Return the C<Variable> object if the variable exists, or 0
+otherwise (i.e., like the C<var> function).
+
+=cut
+
+sub set_seen ($)
+{
+  my ($self) = @_;
+  $self = ref $self ? $self : var $self;
+
+  return 0 unless $self;
+
+  for my $c ($self->conditions->conds)
+    {
+      $self->rdef ($c)->set_seen;
+    }
+
+  return $self;
+}
+
+
+=item C<$count = require_variables ($where, $reason, $cond, @variables)>
+
+Make sure that each supplied variable is defined in C<$cond>.
+Otherwise, issue a warning showing C<$reason> (C<$reason> should be
+the reason why these variables are required, for instance C<'option foo
+used'>).  If we know which macro can define this variable, hint the
+user.  Return the number of undefined variables.
+
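+A hypothetical use, assuming C<$where> and C<$cond> are in scope:
+
+  my $missing = require_variables ($where, "Lisp sources seen",
+                                   $cond, 'EMACS', 'lispdir');
+  # Diagnoses e.g. "Lisp sources seen but 'EMACS' is undefined".
+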
+=cut
+
+sub require_variables ($$$@)
+{
+  my ($where, $reason, $cond, @vars) = @_;
+  my $res = 0;
+  $reason .= ' but ' unless $reason eq '';
+
+  $configure_ac = find_configure_ac
+    unless defined $configure_ac;
+
+ VARIABLE:
+  foreach my $var (@vars)
+    {
+      # Nothing to do if the variable exists.
+      next VARIABLE
+	if vardef ($var, $cond);
+
+      my $text = "$reason'$var' is undefined\n";
+      my $v = var $var;
+      if ($v)
+	{
+	  my $undef_cond = $v->not_always_defined_in_cond ($cond);
+	  next VARIABLE
+	    if $undef_cond->false;
+	  $text .= ("in the following conditions:\n  "
+		    . join ("\n  ", map { $_->human } $undef_cond->conds)
+		    . "\n");
+	}
+
+      ++$res;
+
+      if (exists $_am_macro_for_var{$var})
+	{
+	  my $mac = $_am_macro_for_var{$var};
+	  $text .= "  The usual way to define '$var' is to add "
+	    . "'$mac'\n  to '$configure_ac' and run 'aclocal' and "
+	    . "'autoconf' again.";
+	  # aclocal will not warn about undefined macros unless they
+	  # start with AM_.
+	  $text .= "\n  If '$mac' is in '$configure_ac', make sure\n"
+	    . "  its definition is in aclocal's search path."
+	    unless $mac =~ /^AM_/;
+	}
+      elsif (exists $_ac_macro_for_var{$var})
+	{
+	  $text .= "  The usual way to define '$var' is to add "
+	    . "'$_ac_macro_for_var{$var}'\n  to '$configure_ac' and "
+	    . "run 'autoconf' again.";
+	}
+
+      error $where, $text, uniq_scope => US_GLOBAL;
+    }
+  return $res;
+}
+
+=item C<$count = $var->requires_variables ($reason, @variables)>
+
+Same as C<require_variables>, but as a method of C<Automake::Variable>.
+C<@variables> should be defined in the same conditions as C<$var> is
+defined.
+
+=cut
+
+sub requires_variables ($$@)
+{
+  my ($var, $reason, @args) = @_;
+  my $res = 0;
+  for my $cond ($var->conditions->conds)
+    {
+      $res += require_variables ($var->rdef ($cond)->location, $reason,
+				 $cond, @args);
+    }
+  return $res;
+}
+
+
+=item C<variable_value ($var)>
+
+Get the C<TRUE> value of a variable, warn if the variable is
+conditionally defined.  C<$var> can be either a variable name
+or a C<Automake::Variable> instance (this allows calls such
+as C<$var-E<gt>variable_value>).
+
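+For example (illustrative only):
+
+  my $flags = variable_value ('AM_CFLAGS');
+  # $flags is '' when AM_CFLAGS has no definition in condition TRUE,
+  # and undef when the variable does not exist at all.
+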
+=cut
+
+sub variable_value ($)
+{
+    my ($var) = @_;
+    my $v = ref ($var) ? $var : var ($var);
+    return () unless $v;
+    $v->check_defined_unconditionally;
+    my $d = $v->def (TRUE);
+    return $d ? $d->value : "";
+}
+
+=item C<$str = output_variables>
+
+Format definitions for all variables.
+
+=cut
+
+sub output_variables ()
+{
+  my $res = '';
+  # We output variables in the same order in which they were
+  # defined (skipping duplicates).
+  my @vars = uniq @_var_order;
+
+  # Output all the Automake variables.  If the user changed one,
+  # then it is now marked as VAR_CONFIGURE or VAR_MAKEFILE.
+  foreach my $var (@vars)
+    {
+      my $v = rvar $var;
+      foreach my $cond ($v->conditions->conds)
+	{
+	  $res .= $v->output ($cond)
+	    if $v->rdef ($cond)->owner == VAR_AUTOMAKE;
+	}
+    }
+
+  # Now dump the user variables that were defined.
+  foreach my $var (@vars)
+    {
+      my $v = rvar $var;
+      foreach my $cond ($v->conditions->conds)
+	{
+	  $res .= $v->output ($cond)
+	    if $v->rdef ($cond)->owner != VAR_AUTOMAKE;
+	}
+    }
+  return $res;
+}
+
+=item C<$var-E<gt>traverse_recursively (&fun_item, &fun_collect, [cond_filter =E<gt> $cond_filter], [inner_expand =E<gt> 1], [skip_ac_subst =E<gt> 1])>
+
+Split the value of the Automake::Variable C<$var> on space, and
+traverse its components recursively.
+
+If C<$cond_filter> is an C<Automake::Condition>, process any
+conditions which are true when C<$cond_filter> is true.  Otherwise,
+process all conditions.
+
+We distinguish two kinds of items in the content of C<$var>.
+Terms that look like C<$(foo)> or C<${foo}> are subvariables
+and cause recursion.  Other terms are assumed to be filenames.
+
+Each time a filename is encountered, C<&fun_item> is called with the
+following arguments:
+
+  ($var,        -- the Automake::Variable we are currently
+                   traversing
+   $val,        -- the item (i.e., filename) to process
+   $cond,       -- the Condition for the $var definition we are
+                   examining (ignoring the recursion context)
+   $full_cond)  -- the full Condition, taking into account
+                   conditions inherited from parent variables
+                   during recursion
+
+If C<inner_expand> is set, variable references occurring in filenames
+(as in C<$(BASE).ext>) are expanded before the filename is passed to
+C<&fun_item>.
+
+If C<skip_ac_subst> is set, Autoconf @substitutions@ will be skipped,
+i.e., C<&fun_item> will never be called for them.
+
+C<&fun_item> may return a list of items; they will be passed to
+C<&fun_collect> later on.  Define C<&fun_item> or C<&fun_collect> as
+C<undef> when they serve no purpose.
+
+Once all items of a variable have been processed, the results (of the
+calls to C<&fun_item>, or of recursive traversals of subvariables)
+are passed to C<&fun_collect>.  C<&fun_collect> receives three
+arguments:
+
+  ($var,         -- the variable being traversed
+   $parent_cond, -- the Condition inherited from parent
+                    variables during recursion
+   @condlist)    -- a list of [$cond, @results] pairs
+                    where each $cond appears only once, and @results
+                    holds all the results for this condition.
+
+Typically you should do C<$cond-E<gt>merge ($parent_cond)> to recompute
+the C<$full_cond> associated with C<@results>.  C<&fun_collect> may
+return a list of items, that will be used as the result of
+C<Automake::Variable::traverse_recursively> (the top-level, or its
+recursive calls).
+
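+As a sketch (not code taken from automake itself), collecting every
+filename reachable from C<$var>, in any condition, could be written:
+
+  my @files = $var->traverse_recursively
+    (sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       return $val;
+     },
+     sub {
+       my ($subvar, $parent_cond, @allresults) = @_;
+       return map { my ($cond, @vals) = @$_; @vals } @allresults;
+     });
+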
+=cut
+
+# Contains a stack of 'from' and 'to' parts of variable
+# substitutions currently in force.
+my @_substfroms;
+my @_substtos;
+sub traverse_recursively ($&&;%)
+{
+  ++$_traversal;
+  @_substfroms = ();
+  @_substtos = ();
+  my ($var, $fun_item, $fun_collect, %options) = @_;
+  my $cond_filter = $options{'cond_filter'};
+  my $inner_expand = $options{'inner_expand'};
+  my $skip_ac_subst = $options{'skip_ac_subst'};
+  return $var->_do_recursive_traversal ($var,
+					$fun_item, $fun_collect,
+					$cond_filter, TRUE, $inner_expand,
+					$skip_ac_subst)
+}
+
+# The guts of Automake::Variable::traverse_recursively.
+sub _do_recursive_traversal ($$&&$$$$)
+{
+  my ($var, $parent, $fun_item, $fun_collect, $cond_filter, $parent_cond,
+      $inner_expand, $skip_ac_subst) = @_;
+
+  $var->set_seen;
+
+  if ($var->{'scanned'} == $_traversal)
+    {
+      err_var $var, "variable '" . $var->name() . "' recursively defined";
+      return ();
+    }
+  $var->{'scanned'} = $_traversal;
+
+  my @allresults = ();
+  my $cond_once = 0;
+  foreach my $cond ($var->conditions->conds)
+    {
+      if (ref $cond_filter)
+	{
+	  # Ignore conditions that don't match $cond_filter.
+	  next if ! $cond->true_when ($cond_filter);
+	  # If we found out several definitions of $var
+	  # match $cond_filter then we are in trouble.
+	  # Tell the user we don't support this.
+	  $var->check_defined_unconditionally ($parent, $parent_cond)
+	    if $cond_once;
+	  $cond_once = 1;
+	}
+      my @result = ();
+      my $full_cond = $cond->merge ($parent_cond);
+
+      my @to_process = $var->value_as_list ($cond, $parent, $parent_cond);
+      while (@to_process)
+	{
+	  my $val = shift @to_process;
+	  # If $val is a variable (i.e. ${foo} or $(bar), not a filename),
+	  # handle the sub variable recursively.
+	  # (Backslashes before '}' and ')' within brackets are here to
+	  # please Emacs's indentation.)
+	  if ($val =~ /^\$\{([^\}]*)\}$/ || $val =~ /^\$\(([^\)]*)\)$/)
+	    {
+	      my $subvarname = $1;
+
+	      # If the user uses a losing variable name, just ignore it.
+	      # This isn't ideal, but people have requested it.
+	      next if ($subvarname =~ /\@.*\@/);
+
+	      # See if the variable is actually a substitution reference
+	      my ($from, $to);
+              # This handles substitution references like ${foo:.a=.b}.
+	      if ($subvarname =~ /^([^:]*):([^=]*)=(.*)$/o)
+		{
+		  $subvarname = $1;
+		  $to = $3;
+		  $from = quotemeta $2;
+		}
+
+	      my $subvar = var ($subvarname);
+	      # Don't recurse into undefined variables.
+	      next unless $subvar;
+
+	      push @_substfroms, $from;
+	      push @_substtos, $to;
+
+	      my @res = $subvar->_do_recursive_traversal ($parent,
+							  $fun_item,
+							  $fun_collect,
+							  $cond_filter,
+							  $full_cond,
+							  $inner_expand,
+							  $skip_ac_subst);
+	      push (@result, @res);
+
+	      pop @_substfroms;
+	      pop @_substtos;
+
+	      next;
+	    }
+	  # Try to expand variable references inside filenames such as
+	  # '$(NAME).txt'.  We do not handle ':.foo=.bar'
+	  # substitutions, but it would make little sense to use this
+	  # here anyway.
+	  elsif ($inner_expand
+		 && ($val =~ /\$\{([^\}]*)\}/ || $val =~ /\$\(([^\)]*)\)/))
+	    {
+	      my $subvarname = $1;
+	      my $subvar = var $subvarname;
+	      if ($subvar)
+		{
+		  # Replace the reference by its value, and reschedule
+		  # for expansion.
+		  foreach my $c ($subvar->conditions->conds)
+		    {
+		      if (ref $cond_filter)
+			{
+			  # Ignore conditions that don't match $cond_filter.
+			  next if ! $c->true_when ($cond_filter);
+			  # If we found out several definitions of $var
+			  # match $cond_filter then we are in trouble.
+			  # Tell the user we don't support this.
+			  $subvar->check_defined_unconditionally ($var,
+								  $full_cond)
+			    if $cond_once;
+			  $cond_once = 1;
+			}
+		      my $subval = $subvar->rdef ($c)->value;
+		      $val =~ s/\$\{$subvarname\}/$subval/g;
+		      $val =~ s/\$\($subvarname\)/$subval/g;
+		      unshift @to_process, split (' ', $val);
+		    }
+		  next;
+		}
+	      # We do not know any variable with this name.  Fall through
+	      # to filename processing.
+	    }
+	  elsif ($skip_ac_subst && $val =~ /^\@.+\@$/)
+	    {
+	      next;
+	    }
+
+	  if ($fun_item) # $val is a filename we must process
+	    {
+	      my $substnum=$#_substfroms;
+	      while ($substnum >= 0)
+		{
+		  $val =~ s/$_substfroms[$substnum]$/$_substtos[$substnum]/
+		    if defined $_substfroms[$substnum];
+		  $substnum -= 1;
+		}
+
+	      # Make sure you update the doc of
+	      # Automake::Variable::traverse_recursively
+	      # if you change the prototype of &fun_item.
+	      my @transformed = &$fun_item ($var, $val, $cond, $full_cond);
+	      push (@result, @transformed);
+	    }
+	}
+      push (@allresults, [$cond, @result]) if @result;
+    }
+
+  # We only care about _recursive_ variable definitions.  The user
+  # is free to use the same variable several times in the same definition.
+  $var->{'scanned'} = -1;
+
+  return ()
+    unless $fun_collect;
+  # Make sure you update the doc of Automake::Variable::traverse_recursively
+  # if you change the prototype of &fun_collect.
+  return &$fun_collect ($var, $parent_cond, @allresults);
+}
+
+# _hash_varname ($VAR)
+# --------------------
+# Compute the key associated with $VAR in %_gen_varname.
+# See _gen_varname() below.
+sub _hash_varname ($)
+{
+  my ($var) = @_;
+  my $key = '';
+  foreach my $cond ($var->conditions->conds)
+    {
+      my @values = $var->value_as_list ($cond);
+      $key .= "($cond)@values";
+    }
+  return $key;
+}
+
+# _hash_values (@VALUES)
+# ----------------------
+# Hash @VALUES for %_gen_varname.  @VALUES should be a list
+# of pairs: ([$cond, @values], [$cond, @values], ...).
+# See _gen_varname() below.
+sub _hash_values (@)
+{
+  my $key = '';
+  foreach my $pair (@_)
+    {
+      my ($cond, @values) = @$pair;
+      $key .= "($cond)@values";
+    }
+  return $key;
+}
+# ($VARNAME, $GENERATED)
+# _gen_varname ($BASE, @DEFINITIONS)
+# ----------------------------------
+# Return a variable name starting with $BASE, that will be
+# used to store definitions @DEFINITIONS.
+# @DEFINITIONS is a list of pair [$COND, @OBJECTS].
+#
+# If we already have a $BASE-variable containing @DEFINITIONS, reuse
+# it and set $GENERATED to 0.  Otherwise construct a new name and set
+# $GENERATED to 1.
+#
+# This way, we avoid combinatorial explosion of the generated
+# variables.  Especially, in a Makefile such as:
+#
+# | if FOO1
+# | A1=1
+# | endif
+# |
+# | if FOO2
+# | A2=2
+# | endif
+# |
+# | ...
+# |
+# | if FOON
+# | AN=N
+# | endif
+# |
+# | B=$(A1) $(A2) ... $(AN)
+# |
+# | c_SOURCES=$(B)
+# | d_SOURCES=$(B)
+#
+# The generated c_OBJECTS and d_OBJECTS will share the same variable
+# definitions.
+#
+# This setup can occur in a testsuite containing lots (>100) of
+# small C programs, all testing the same set of source files.
+sub _gen_varname ($@)
+{
+  my $base = shift;
+  my $key = _hash_values @_;
+
+  return ($_gen_varname{$base}{$key}, 0)
+    if exists $_gen_varname{$base}{$key};
+
+  my $num = 1 + ($_gen_varname_n{$base} || 0);
+  $_gen_varname_n{$base} = $num;
+  my $name = "${base}_${num}";
+  $_gen_varname{$base}{$key} = $name;
+
+  return ($name, 1);
+}
+
+=item C<$resvar = transform_variable_recursively ($var, $resvar, $base, $nodefine, $where, &fun_item, [%options])>
+
+=item C<$resvar = $var-E<gt>transform_variable_recursively ($resvar, $base, $nodefine, $where, &fun_item, [%options])>
+
+Traverse C<$var> recursively, and create a C<$resvar> variable in
+which each filename in C<$var> has been transformed using
+C<&fun_item>.  (C<$var> may be a variable name in the first syntax.
+It must be an C<Automake::Variable> otherwise.)
+
+Helper variables (corresponding to sub-variables of C<$var>) are
+created as needed, using C<$base> as prefix.
+
+Arguments are:
+  $var       source variable to traverse
+  $resvar    resulting variable to define
+  $base      prefix to use when naming subvariables of $resvar
+  $nodefine  if true, traverse $var but do not define any variable
+             (this assumes &fun_item has some useful side-effect)
+  $where     context into which variable definitions are done
+  &fun_item  a transformation function -- see the documentation
+             of &fun_item in Automake::Variable::traverse_recursively.
+
+This returns the string C<"\$($RESVAR)">.
+
+C<%options> is a list of options to pass to
+C<Variable::traverse_recursively> (see this method).
+
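+A sketch of a possible use (this is not automake's actual object
+computation; C<$where> is assumed to hold an C<Automake::Location>):
+
+  my $objs = transform_variable_recursively
+    ('foo_SOURCES', 'foo_OBJECTS', 'am__objects', 0, $where,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       (my $obj = $val) =~ s/\.c$/.o/;
+       return $obj;
+     });
+  # $objs is now the string '$(foo_OBJECTS)'.
+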
+=cut
+
+sub transform_variable_recursively ($$$$$&;%)
+{
+  my ($var, $resvar, $base, $nodefine, $where, $fun_item, %options) = @_;
+
+  $var = ref $var ? $var : rvar $var;
+
+  my $res = $var->traverse_recursively
+    ($fun_item,
+     # The code that defines the variable holding the result
+     # of the recursive transformation of a subvariable.
+     sub {
+       my ($subvar, $parent_cond, @allresults) = @_;
+       # If no definition is required, return anything: the result is
+       # not expected to be used, only the side effect of $fun_item
+       # should matter.
+       return 'report-me' if $nodefine;
+       # Cache $subvar, so that we reuse it if @allresults is the same.
+       my $key = _hash_varname $subvar;
+       $_gen_varname{$base}{$key} = $subvar->name;
+
+       # Find a name for the variable, unless this is the top-variable
+       # for which we want to use $resvar.
+       my ($varname, $generated) =
+	 ($var != $subvar) ? _gen_varname ($base, @allresults) : ($resvar, 1);
+
+       # Define the variable if we are not reusing a previously
+       # defined variable.  At the top-level, we can also avoid redefining
+       # the variable if it already contains the same values.
+       if ($generated
+	   && !($varname eq $var->name && $key eq _hash_values @allresults))
+	 {
+	   # If the new variable is the source variable, we assume
+	   # we are trying to override a user variable.  Delete
+	   # the old variable first.
+	   variable_delete ($varname) if $varname eq $var->name;
+	   # Define an empty variable in condition TRUE if there is no
+	   # result.
+	   @allresults = ([TRUE, '']) unless @allresults;
+	   # Define the rewritten variable in all conditions not
+	   # already covered by user definitions.
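+For instance (with a made-up variable name), flattening the complete
+source list of a program, following nested variables:
+
+  my @srcs = rvar ('foo_SOURCES')->value_as_list_recursive;
+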
+	   foreach my $pair (@allresults)
+	     {
+	       my ($cond, @result) = @$pair;
+	       my $var = var $varname;
+	       my @conds = ($var
+			    ? $var->not_always_defined_in_cond ($cond)->conds
+			    : $cond);
+
+	       foreach (@conds)
+		 {
+		   define ($varname, VAR_AUTOMAKE, '', $_, "@result",
+			   '', $where, VAR_PRETTY);
+		 }
+	     }
+	 }
+       set_seen $varname;
+       return "\$($varname)";
+     },
+     %options);
+  return $res;
+}
+
+
+=back
+
+=head1 SEE ALSO
+
+L<Automake::VarDef>, L<Automake::Condition>,
+L<Automake::DisjConditions>, L<Automake::Location>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Version.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Version.pm
new file mode 100644
index 0000000..c2d9dd7
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Version.pm
@@ -0,0 +1,159 @@
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Version;
+
+use 5.006;
+use strict;
+use Automake::ChannelDefs;
+
+=head1 NAME
+
+Automake::Version - version comparison
+
+=head1 SYNOPSIS
+
+  use Automake::Version;
+
+  print "Version $version is older than required version $required\n"
+    if Automake::Version::check ($version, $required);
+
+=head1 DESCRIPTION
+
+This module provides support for comparing version strings
+as they are used in Automake.
+
+A version is a string that looks like
+C<MAJOR.MINOR[.MICRO][ALPHA][-FORK]> where C<MAJOR>, C<MINOR>, and
+C<MICRO> are digits, C<ALPHA> is a character, and C<FORK> is any
+alphanumeric word.
+
+Usually, C<ALPHA> is used to label alpha releases or intermediate
+snapshots, C<FORK> is used for git branches or patched releases, and
+C<MICRO> is used for bug-fix releases on the C<MAJOR.MINOR> branch.
+
+For the purpose of ordering, C<1.4> is the same as C<1.4.0>, but
+C<1.4g> is the same as C<1.4.99g>.  The C<FORK> identifier is ignored
+in the ordering, except when it looks like C<-pMINOR[ALPHA]>: some
+versions were labeled like C<1.4-p3a>; this is the same as an alpha
+release labeled C<1.4.3a>.  Yes, it's horrible, but Automake did not
+support two-dot versions in the past.
+
+=head2 FUNCTIONS
+
+=over 4
+
+=item C<split ($version)>
+
+Split the string C<$version> into the corresponding C<(MAJOR, MINOR,
+MICRO, ALPHA, FORK)> tuple.  For instance C<'1.4g'> would be split
+into C<(1, 4, 99, 'g', '')>.  Return C<()> on error.
+
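+A couple of illustrative calls:
+
+  my @v = Automake::Version::split ('1.13.4');   # (1, 13, 4, '', '')
+  @v    = Automake::Version::split ('1.4-p3a');  # (1, 4, 3, 'a', '')
+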
+=cut
+
+sub split ($)
+{
+  my ($ver) = @_;
+
+  # Special case for versions like 1.4-p2a.
+  if ($ver =~ /^(\d+)\.(\d+)(?:-p(\d+)([a-z]+)?)$/)
+  {
+    return ($1, $2, $3, $4 || '', '');
+  }
+  # Common case.
+  elsif ($ver =~ /^(\d+)\.(\d+)(?:\.(\d+))?([a-z])?(?:-([A-Za-z0-9]+))?$/)
+  {
+    return ($1, $2, $3 || (defined $4 ? 99 : 0), $4 || '', $5 || '');
+  }
+  return ();
+}
+
+=item C<compare (\@LVERSION, \@RVERSION)>
+
+Compare two version tuples, as returned by C<split>.
+
+Return 1, 0, or -1, if C<LVERSION> is found to be respectively
+greater than, equal to, or less than C<RVERSION>.
+
+=cut
+
+sub compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+
+  for my $i (0, 1, 2)
+  {
+    return 1  if ($l[$i] > $r[$i]);
+    return -1 if ($l[$i] < $r[$i]);
+  }
+  for my $i (3, 4)
+  {
+    return 1  if ($l[$i] gt $r[$i]);
+    return -1 if ($l[$i] lt $r[$i]);
+  }
+  return 0;
+}
+
+=item C<check($VERSION, $REQUIRED)>
+
+Handles the logic of requiring a version number in Automake.
+C<$VERSION> should be Automake's version, while C<$REQUIRED>
+is the version required by the user input.
+
+Return 0 if the required version is satisfied, 1 otherwise.
+
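+For instance (values chosen only for illustration):
+
+  Automake::Version::check ('1.14', '1.11.2');  # false: 1.14 suffices
+  Automake::Version::check ('1.14', '1.15');    # true: too old
+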
+=cut
+
+sub check ($$)
+{
+  my ($version, $required) = @_;
+  my @version = Automake::Version::split ($version);
+  my @required = Automake::Version::split ($required);
+
+  prog_error "version is incorrect: $version"
+    if $#version == -1;
+
+  # This should not happen, because process_option_list and split_version
+  # use similar regexes.
+  prog_error "required version is incorrect: $required"
+    if $#required == -1;
+
+  # If we require 3.4n-foo then we require something
+  # >= 3.4n, with the 'foo' fork identifier.
+  return 1
+    if ($required[4] ne '' && $required[4] ne $version[4]);
+
+  return 0 > compare (@version, @required);
+}
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Wrap.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Wrap.pm
new file mode 100644
index 0000000..0aa42e1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/Wrap.pm
@@ -0,0 +1,166 @@
+# Copyright (C) 2003-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+package Automake::Wrap;
+
+use 5.006;
+use strict;
+
+require Exporter;
+use vars '@ISA', '@EXPORT_OK';
+@ISA = qw/Exporter/;
+@EXPORT_OK = qw/wrap makefile_wrap/;
+
+=head1 NAME
+
+Automake::Wrap - a paragraph formatter
+
+=head1 SYNOPSIS
+
+  use Automake::Wrap 'wrap', 'makefile_wrap';
+
+  print wrap ($first_ident, $next_ident, $end_of_line, $max_length,
+              @values);
+
+  print makefile_wrap ("VARIABLE = ", "    ", @values);
+
+=head1 DESCRIPTION
+
+This module provides facilities to format lists of strings.  It is
+comparable to Perl's L<Text::Wrap>; however, we can't use L<Text::Wrap>
+because some versions will abort when a word to print exceeds the
+maximum length allowed.  (Ticket #17141, fixed in Perl 5.8.0.)
+
+=head2 Functions
+
+=over 4
+
+=cut
+
+# _tab_length ($TXT)
+# ------------------
+# Compute the length of TXT, counting tab characters as 8 characters.
+sub _tab_length($)
+{
+  my ($txt) = @_;
+  my $len = length ($txt);
+  $len += 7 * ($txt =~ tr/\t/\t/);
+  return $len;
+}
+
+=item C<wrap ($head, $fill, $eol, $max_len, @values)>
+
+Format C<@values> as a block of text that starts with C<$head>,
+followed by the strings in C<@values> separated by spaces or by
+C<"$eol\n$fill"> so that the length of each line never exceeds
+C<$max_len>.
+
+The C<$max_len> constraint is ignored for C<@values> items which
+are too big to fit alone on a line.
+
+The constructed paragraph is C<"\n">-terminated.
+
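+A small illustration (made-up values), wrapping a list at column 40
+with a backslash continuation:
+
+  print wrap ('check_PROGRAMS = ', "\t", ' \\', 40,
+              qw/test-foo test-bar test-baz/);
+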
+=cut
+
+sub wrap($$$$@)
+{
+  my ($head, $fill, $eol, $max_len, @values) = @_;
+
+  my $result = $head;
+  my $column = _tab_length ($head);
+
+  my $fill_len = _tab_length ($fill);
+  my $eol_len = _tab_length ($eol);
+
+  my $not_first_word = 0;
+
+  foreach (@values)
+    {
+      my $len = _tab_length ($_);
+
+      # See if the new variable fits on this line.
+      # (The + 1 is for the space we add in front of the value.).
+      if ($column + $len + $eol_len + 1 > $max_len
+	  # Do not break before the first word if it does not fit on
+	  # the next line anyway.
+	  && ($not_first_word || $fill_len + $len + $eol_len + 1 <= $max_len))
+	{
+	  # Start a new line.
+	  $result .= "$eol\n" . $fill;
+	  $column = $fill_len;
+	}
+      elsif ($not_first_word)
+	{
+	  # Add a space only if result does not already end
+	  # with a space.
+	  $_ = " $_" if $result =~ /\S\z/;
+	  ++$len;
+	}
+      $result .= $_;
+      $column += $len;
+      $not_first_word = 1;
+    }
+
+  $result .= "\n";
+  return $result;
+}
+
+
+=item C<makefile_wrap ($head, $fill, @values)>
+
+Format C<@values> in a way which is suitable for F<Makefile>s.
+This is comparable to C<wrap>, except C<$eol> is known to
+be C<" \\">, and the maximum length has been hardcoded to C<72>.
+
+A space is appended to C<$head> unless it already ends in
+whitespace.
+
+This can be used to format variable definitions or dependency lines.
+
+  makefile_wrap ('VARIABLE =', "\t", @values);
+  makefile_wrap ('rule:', "\t", @dependencies);
+
+=cut
+
+sub makefile_wrap ($$@)
+{
+  my ($head, $fill, @values) = @_;
+  if (@values)
+    {
+      $head .= ' ' if $head =~ /\S\z/;
+      return wrap $head, $fill, " \\", 72, @values;
+    }
+  return "$head\n";
+}
+
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/XFile.pm b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/XFile.pm
new file mode 100644
index 0000000..97307f9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/Automake/XFile.pm
@@ -0,0 +1,324 @@
+# Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Akim Demaille <akim@freefriends.org>.
+
+###############################################################
+# The main copy of this file is in Automake's git repository. #
+# Updates should be sent to automake-patches@gnu.org.         #
+###############################################################
+
+package Automake::XFile;
+
+=head1 NAME
+
+Automake::XFile - supply object methods for filehandles with error handling
+
+=head1 SYNOPSIS
+
+    use Automake::XFile;
+
+    $fh = new Automake::XFile;
+    $fh->open ("file", "<");
+    # No need to check $FH: we died if open failed.
+    print <$fh>;
+    $fh->close;
+    # No need to check the return value of close: we died if it failed.
+
+    $fh = new Automake::XFile "file", ">";
+    # No need to check $FH: we died if new failed.
+    print $fh "bar\n";
+    $fh->close;
+
+    $fh = new Automake::XFile "file", "r";
+    # No need to check $FH: we died if new failed.
+    print <$fh>;
+    undef $fh;   # automatically closes the file and checks for errors.
+
+    $fh = new Automake::XFile "file", O_WRONLY | O_APPEND;
+    # No need to check $FH: we died if new failed.
+    print $fh "corge\n";
+
+    $pos = $fh->getpos;
+    $fh->setpos ($pos);
+
+    undef $fh;   # automatically closes the file and checks for errors.
+
+    autoflush STDOUT 1;
+
+=head1 DESCRIPTION
+
+C<Automake::XFile> inherits from C<IO::File>.  It provides the method
+C<name> returning the file name.  It provides dying versions of the
+methods C<close>, C<lock> (corresponding to C<flock>), C<new>,
+C<open>, C<seek>, and C<truncate>.  It also overrides the C<getline>
+and C<getlines> methods to translate C<\r\n> to C<\n>.
+
+=cut
+
+use 5.006;
+use strict;
+use vars qw($VERSION @EXPORT @EXPORT_OK $AUTOLOAD @ISA);
+use Carp;
+use Errno;
+use IO::File;
+use File::Basename;
+use Automake::ChannelDefs;
+use Automake::Channels qw(msg);
+use Automake::FileUtils;
+
+require Exporter;
+require DynaLoader;
+
+@ISA = qw(IO::File Exporter DynaLoader);
+
+$VERSION = "1.2";
+
+@EXPORT = @IO::File::EXPORT;
+
+eval {
+  # Make all Fcntl O_XXX and LOCK_XXX constants available for importing
+  require Fcntl;
+  my @O = grep /^(LOCK|O)_/, @Fcntl::EXPORT, @Fcntl::EXPORT_OK;
+  Fcntl->import (@O);  # first we import what we want to export
+  push (@EXPORT, @O);
+};
+
+=head2 Methods
+
+=over
+
+=item C<$fh = new Automake::XFile ([$expr, ...])>
+
+Constructs a new XFile object.  Additional arguments
+are passed to C<open>, if any.
+
+=cut
+
+sub new
+{
+  my $type = shift;
+  my $class = ref $type || $type || "Automake::XFile";
+  my $fh = $class->SUPER::new ();
+  if (@_)
+    {
+      $fh->open (@_);
+    }
+  $fh;
+}
+
+=item C<$fh-E<gt>open ([$file, ...])>
+
+Open a file, passing C<$file> and further arguments to C<IO::File::open>.
+Die if opening fails.  Store the name of the file.  Use binmode for writing.
+
+=cut
+
+sub open
+{
+  my $fh = shift;
+  my ($file, $mode) = @_;
+
+  # WARNING: Gross hack: $fh is a typeglob: use its hash slot to store
+  # the 'name' of the file we are opening.  See the example with
+  # io_socket_timeout in IO::Socket for more, and read Graham's
+  # comment in IO::Handle.
+  ${*$fh}{'autom4te_xfile_file'} = "$file";
+
+  if (!$fh->SUPER::open (@_))
+    {
+      fatal "cannot open $file: $!";
+    }
+
+  # In case we're running under MSWindows, don't write with CRLF.
+  # (This circumvents a bug in at least Cygwin bash where the shell
+  # parsing fails on lines ending with the continuation character '\'
+  # and CRLF).
+  # Correctly recognize usages like:
+  #  - open ($file, "w")
+  #  - open ($file, "+<")
+  #  - open (" >$file")
+  binmode $fh
+    if (defined $mode && $mode =~ /^[+>wa]/ or $file =~ /^\s*>/);
+}
+
+=item C<$fh-E<gt>close>
+
+Close the file, handling errors.
+
+=cut
+
+sub close
+{
+  my $fh = shift;
+  if (!$fh->SUPER::close (@_))
+    {
+      my $file = $fh->name;
+      Automake::FileUtils::handle_exec_errors $file
+	unless $!;
+      fatal "cannot close $file: $!";
+    }
+}
+
+=item C<$line = $fh-E<gt>getline>
+
+Read and return a line from the file.  Ensure C<\r\n> is translated to
+C<\n> on input files.
+
+=cut
+
+# Some native Windows/perl installations fail to translate \r\n to \n on
+# input so we do that here.
+sub getline
+{
+  local $_ = $_[0]->SUPER::getline;
+  # Perform a _global_ replacement: $_ may contain many lines
+  # in slurp mode ($/ = undef).
+  s/\015\012/\n/gs if defined $_;
+  return $_;
+}
+
+=item C<@lines = $fh-E<gt>getlines>
+
+Slurp lines from the file.
+
+=cut
+
+sub getlines
+{
+  my @res = ();
+  my $line;
+  push @res, $line while $line = $_[0]->getline;
+  return @res;
+}
+
+=item C<$name = $fh-E<gt>name>
+
+Return the name of the file.
+
+=cut
+
+sub name
+{
+  my $fh = shift;
+  return ${*$fh}{'autom4te_xfile_file'};
+}
+
+=item C<$fh-E<gt>lock>
+
+Lock the file using C<flock>.  If locking fails for reasons other than
+C<flock> being unsupported, then error out if C<$ENV{'MAKEFLAGS'}> indicates
+that we are spawned from a parallel C<make>.
+
+=cut
+
+sub lock
+{
+  my ($fh, $mode) = @_;
+  # Cannot use @_ here.
+
+  # Unless explicitly configured otherwise, Perl implements its 'flock' with the
+  # first of flock(2), fcntl(2), or lockf(3) that works.  These can fail on
+  # NFS-backed files, with ENOLCK (GNU/Linux) or EOPNOTSUPP (FreeBSD); we
+  # usually ignore these errors.  If $ENV{MAKEFLAGS} suggests that a parallel
+  # invocation of 'make' has invoked the tool we serve, report all locking
+  # failures and abort.
+  #
+  # On Unicos, flock(2) and fcntl(2) over NFS hang indefinitely when 'lockd' is
+  # not running.  NetBSD NFS clients silently grant all locks.  We do not
+  # attempt to defend against these dangers.
+  #
+  # -j is for parallel BSD make, -P is for parallel HP-UX make.
+  if (!flock ($fh, $mode))
+    {
+      my $make_j = (exists $ENV{'MAKEFLAGS'}
+		    && " -$ENV{'MAKEFLAGS'}" =~ / (-[BdeikrRsSw]*[jP]|--[jP]|---?jobs)/);
+      my $note = "\nforgo \"make -j\" or use a file system that supports locks";
+      my $file = $fh->name;
+
+      msg ($make_j ? 'fatal' : 'unsupported',
+	   "cannot lock $file with mode $mode: $!" . ($make_j ? $note : ""))
+	if $make_j || !($!{ENOLCK} || $!{EOPNOTSUPP});
+    }
+}
+
+=item C<$fh-E<gt>seek ($position, [$whence])>
+
+Seek file to C<$position>.  Die if seeking fails.
+
+=cut
+
+sub seek
+{
+  my $fh = shift;
+  # Cannot use @_ here.
+  if (!seek ($fh, $_[0], $_[1]))
+    {
+      my $file = $fh->name;
+      fatal "cannot rewind $file with @_: $!";
+    }
+}
+
+=item C<$fh-E<gt>truncate ($len)>
+
+Truncate the file to length C<$len>.  Die on failure.
+
+=cut
+
+sub truncate
+{
+  my ($fh, $len) = @_;
+  if (!truncate ($fh, $len))
+    {
+      my $file = $fh->name;
+      fatal "cannot truncate $file at $len: $!";
+    }
+}
+
+=back
+
+=head1 SEE ALSO
+
+L<perlfunc>,
+L<perlop/"I/O Operators">,
+L<IO::File>,
+L<IO::Handle>,
+L<IO::Seekable>
+
+=head1 HISTORY
+
+Derived from IO::File.pm by Akim Demaille E<lt>F<akim@freefriends.org>E<gt>.
+
+=cut
+
+1;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
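Editor's note: the POD above documents the dying-on-error wrappers that Automake::XFile layers over IO::File.  As a quick orientation aid (not part of the imported file), the following minimal Perl sketch exercises that behavior.  The `use lib' path and the input file name are assumptions about this tree's layout, and the sibling Automake::* modules from the same automake-1.14 share directory must be reachable for the module to load.

    #!/usr/bin/env perl
    # Minimal sketch, not part of the imported sources; paths are assumed.
    use strict;
    use warnings;
    use lib 'nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14';
    use Automake::XFile;

    # new() passes its arguments to open(), which dies if the open fails,
    # so no explicit error checking is needed here.
    my $fh = new Automake::XFile 'Makefile.am', '<';
    while (my $line = $fh->getline)   # getline() translates \r\n to \n
      {
        print $line;
      }
    $fh->close;                       # close() likewise dies on failure

Because every wrapper dies on failure, callers can drop the usual "or die" checks, which is the point the SYNOPSIS comments are making.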
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/COPYING b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/COPYING
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/INSTALL b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/INSTALL
new file mode 100644
index 0000000..2099840
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/INSTALL
@@ -0,0 +1,370 @@
+Installation Instructions
+*************************
+
+Copyright (C) 1994-1996, 1999-2002, 2004-2013 Free Software Foundation,
+Inc.
+
+   Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved.  This file is offered as-is,
+without warranty of any kind.
+
+Basic Installation
+==================
+
+   Briefly, the shell command `./configure && make && make install'
+should configure, build, and install this package.  The following
+more-detailed instructions are generic; see the `README' file for
+instructions specific to this package.  Some packages provide this
+`INSTALL' file but do not implement all of the features documented
+below.  The lack of an optional feature in a given package is not
+necessarily a bug.  More recommendations for GNU packages can be found
+in *note Makefile Conventions: (standards)Makefile Conventions.
+
+   The `configure' shell script attempts to guess correct values for
+various system-dependent variables used during compilation.  It uses
+those values to create a `Makefile' in each directory of the package.
+It may also create one or more `.h' files containing system-dependent
+definitions.  Finally, it creates a shell script `config.status' that
+you can run in the future to recreate the current configuration, and a
+file `config.log' containing compiler output (useful mainly for
+debugging `configure').
+
+   It can also use an optional file (typically called `config.cache'
+and enabled with `--cache-file=config.cache' or simply `-C') that saves
+the results of its tests to speed up reconfiguring.  Caching is
+disabled by default to prevent problems with accidental use of stale
+cache files.
+
+   If you need to do unusual things to compile the package, please try
+to figure out how `configure' could check whether to do them, and mail
+diffs or instructions to the address given in the `README' so they can
+be considered for the next release.  If you are using the cache, and at
+some point `config.cache' contains results you don't want to keep, you
+may remove or edit it.
+
+   The file `configure.ac' (or `configure.in') is used to create
+`configure' by a program called `autoconf'.  You need `configure.ac' if
+you want to change it or regenerate `configure' using a newer version
+of `autoconf'.
+
+   The simplest way to compile this package is:
+
+  1. `cd' to the directory containing the package's source code and type
+     `./configure' to configure the package for your system.
+
+     Running `configure' might take a while.  While running, it prints
+     some messages telling which features it is checking for.
+
+  2. Type `make' to compile the package.
+
+  3. Optionally, type `make check' to run any self-tests that come with
+     the package, generally using the just-built uninstalled binaries.
+
+  4. Type `make install' to install the programs and any data files and
+     documentation.  When installing into a prefix owned by root, it is
+     recommended that the package be configured and built as a regular
+     user, and only the `make install' phase executed with root
+     privileges.
+
+  5. Optionally, type `make installcheck' to repeat any self-tests, but
+     this time using the binaries in their final installed location.
+     This target does not install anything.  Running this target as a
+     regular user, particularly if the prior `make install' required
+     root privileges, verifies that the installation completed
+     correctly.
+
+  6. You can remove the program binaries and object files from the
+     source code directory by typing `make clean'.  To also remove the
+     files that `configure' created (so you can compile the package for
+     a different kind of computer), type `make distclean'.  There is
+     also a `make maintainer-clean' target, but that is intended mainly
+     for the package's developers.  If you use it, you may have to get
+     all sorts of other programs in order to regenerate files that came
+     with the distribution.
+
+  7. Often, you can also type `make uninstall' to remove the installed
+     files again.  In practice, not all packages have tested that
+     uninstallation works correctly, even though it is required by the
+     GNU Coding Standards.
+
+  8. Some packages, particularly those that use Automake, provide `make
+     distcheck', which can be used by developers to test that all other
+     targets like `make install' and `make uninstall' work correctly.
+     This target is generally not run by end users.
+
+Compilers and Options
+=====================
+
+   Some systems require unusual options for compilation or linking that
+the `configure' script does not know about.  Run `./configure --help'
+for details on some of the pertinent environment variables.
+
+   You can give `configure' initial values for configuration parameters
+by setting variables in the command line or in the environment.  Here
+is an example:
+
+     ./configure CC=c99 CFLAGS=-g LIBS=-lposix
+
+   *Note Defining Variables::, for more details.
+
+Compiling For Multiple Architectures
+====================================
+
+   You can compile the package for more than one kind of computer at the
+same time, by placing the object files for each architecture in their
+own directory.  To do this, you can use GNU `make'.  `cd' to the
+directory where you want the object files and executables to go and run
+the `configure' script.  `configure' automatically checks for the
+source code in the directory that `configure' is in and in `..'.  This
+is known as a "VPATH" build.
+
+   With a non-GNU `make', it is safer to compile the package for one
+architecture at a time in the source code directory.  After you have
+installed the package for one architecture, use `make distclean' before
+reconfiguring for another architecture.
+
+   On MacOS X 10.5 and later systems, you can create libraries and
+executables that work on multiple system types--known as "fat" or
+"universal" binaries--by specifying multiple `-arch' options to the
+compiler but only a single `-arch' option to the preprocessor.  Like
+this:
+
+     ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CPP="gcc -E" CXXCPP="g++ -E"
+
+   This is not guaranteed to produce working output in all cases; you
+may have to build one architecture at a time and combine the results
+using the `lipo' tool if you have problems.
+
+Installation Names
+==================
+
+   By default, `make install' installs the package's commands under
+`/usr/local/bin', include files under `/usr/local/include', etc.  You
+can specify an installation prefix other than `/usr/local' by giving
+`configure' the option `--prefix=PREFIX', where PREFIX must be an
+absolute file name.
+
+   You can specify separate installation prefixes for
+architecture-specific files and architecture-independent files.  If you
+pass the option `--exec-prefix=PREFIX' to `configure', the package uses
+PREFIX as the prefix for installing programs and libraries.
+Documentation and other data files still use the regular prefix.
+
+   In addition, if you use an unusual directory layout you can give
+options like `--bindir=DIR' to specify different values for particular
+kinds of files.  Run `configure --help' for a list of the directories
+you can set and what kinds of files go in them.  In general, the
+default for these options is expressed in terms of `${prefix}', so that
+specifying just `--prefix' will affect all of the other directory
+specifications that were not explicitly provided.
+
+   The most portable way to affect installation locations is to pass the
+correct locations to `configure'; however, many packages provide one or
+both of the following shortcuts of passing variable assignments to the
+`make install' command line to change installation locations without
+having to reconfigure or recompile.
+
+   The first method involves providing an override variable for each
+affected directory.  For example, `make install
+prefix=/alternate/directory' will choose an alternate location for all
+directory configuration variables that were expressed in terms of
+`${prefix}'.  Any directories that were specified during `configure',
+but not in terms of `${prefix}', must each be overridden at install
+time for the entire installation to be relocated.  The approach of
+makefile variable overrides for each directory variable is required by
+the GNU Coding Standards, and ideally causes no recompilation.
+However, some platforms have known limitations with the semantics of
+shared libraries that end up requiring recompilation when using this
+method, particularly noticeable in packages that use GNU Libtool.
+
+   The second method involves providing the `DESTDIR' variable.  For
+example, `make install DESTDIR=/alternate/directory' will prepend
+`/alternate/directory' before all installation names.  The approach of
+`DESTDIR' overrides is not required by the GNU Coding Standards, and
+does not work on platforms that have drive letters.  On the other hand,
+it does better at avoiding recompilation issues, and works well even
+when some directory options were not specified in terms of `${prefix}'
+at `configure' time.
+
+Optional Features
+=================
+
+   If the package supports it, you can cause programs to be installed
+with an extra prefix or suffix on their names by giving `configure' the
+option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.
+
+   Some packages pay attention to `--enable-FEATURE' options to
+`configure', where FEATURE indicates an optional part of the package.
+They may also pay attention to `--with-PACKAGE' options, where PACKAGE
+is something like `gnu-as' or `x' (for the X Window System).  The
+`README' should mention any `--enable-' and `--with-' options that the
+package recognizes.
+
+   For packages that use the X Window System, `configure' can usually
+find the X include and library files automatically, but if it doesn't,
+you can use the `configure' options `--x-includes=DIR' and
+`--x-libraries=DIR' to specify their locations.
+
+   Some packages offer the ability to configure how verbose the
+execution of `make' will be.  For these packages, running `./configure
+--enable-silent-rules' sets the default to minimal output, which can be
+overridden with `make V=1'; while running `./configure
+--disable-silent-rules' sets the default to verbose, which can be
+overridden with `make V=0'.
+
+Particular systems
+==================
+
+   On HP-UX, the default C compiler is not ANSI C compatible.  If GNU
+CC is not installed, it is recommended to use the following options in
+order to use an ANSI C compiler:
+
+     ./configure CC="cc -Ae -D_XOPEN_SOURCE=500"
+
+and if that doesn't work, install pre-built binaries of GCC for HP-UX.
+
+   HP-UX `make' updates targets which have the same time stamps as
+their prerequisites, which makes it generally unusable when shipped
+generated files such as `configure' are involved.  Use GNU `make'
+instead.
+
+   On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot
+parse its `<wchar.h>' header file.  The option `-nodtk' can be used as
+a workaround.  If GNU CC is not installed, it is therefore recommended
+to try
+
+     ./configure CC="cc"
+
+and if that doesn't work, try
+
+     ./configure CC="cc -nodtk"
+
+   On Solaris, don't put `/usr/ucb' early in your `PATH'.  This
+directory contains several dysfunctional programs; working variants of
+these programs are available in `/usr/bin'.  So, if you need `/usr/ucb'
+in your `PATH', put it _after_ `/usr/bin'.
+
+   On Haiku, software installed for all users goes in `/boot/common',
+not `/usr/local'.  It is recommended to use the following options:
+
+     ./configure --prefix=/boot/common
+
+Specifying the System Type
+==========================
+
+   There may be some features `configure' cannot figure out
+automatically, but needs to determine from the type of machine the package
+will run on.  Usually, assuming the package is built to be run on the
+_same_ architectures, `configure' can figure that out, but if it prints
+a message saying it cannot guess the machine type, give it the
+`--build=TYPE' option.  TYPE can either be a short name for the system
+type, such as `sun4', or a canonical name which has the form:
+
+     CPU-COMPANY-SYSTEM
+
+where SYSTEM can have one of these forms:
+
+     OS
+     KERNEL-OS
+
+   See the file `config.sub' for the possible values of each field.  If
+`config.sub' isn't included in this package, then this package doesn't
+need to know the machine type.
+
+   If you are _building_ compiler tools for cross-compiling, you should
+use the option `--target=TYPE' to select the type of system they will
+produce code for.
+
+   If you want to _use_ a cross compiler that generates code for a
+platform different from the build platform, you should specify the
+"host" platform (i.e., that on which the generated programs will
+eventually be run) with `--host=TYPE'.
+
+Sharing Defaults
+================
+
+   If you want to set default values for `configure' scripts to share,
+you can create a site shell script called `config.site' that gives
+default values for variables like `CC', `cache_file', and `prefix'.
+`configure' looks for `PREFIX/share/config.site' if it exists, then
+`PREFIX/etc/config.site' if it exists.  Or, you can set the
+`CONFIG_SITE' environment variable to the location of the site script.
+A warning: not all `configure' scripts look for a site script.
+
+Defining Variables
+==================
+
+   Variables not defined in a site shell script can be set in the
+environment passed to `configure'.  However, some packages may run
+configure again during the build, and the customized values of these
+variables may be lost.  In order to avoid this problem, you should set
+them in the `configure' command line, using `VAR=value'.  For example:
+
+     ./configure CC=/usr/local2/bin/gcc
+
+causes the specified `gcc' to be used as the C compiler (unless it is
+overridden in the site shell script).
+
+Unfortunately, this technique does not work for `CONFIG_SHELL' due to
+an Autoconf limitation.  Until the limitation is lifted, you can use
+this workaround:
+
+     CONFIG_SHELL=/bin/bash ./configure CONFIG_SHELL=/bin/bash
+
+`configure' Invocation
+======================
+
+   `configure' recognizes the following options to control how it
+operates.
+
+`--help'
+`-h'
+     Print a summary of all of the options to `configure', and exit.
+
+`--help=short'
+`--help=recursive'
+     Print a summary of the options unique to this package's
+     `configure', and exit.  The `short' variant lists options used
+     only in the top level, while the `recursive' variant lists options
+     also present in any nested packages.
+
+`--version'
+`-V'
+     Print the version of Autoconf used to generate the `configure'
+     script, and exit.
+
+`--cache-file=FILE'
+     Enable the cache: use and save the results of the tests in FILE,
+     traditionally `config.cache'.  FILE defaults to `/dev/null' to
+     disable caching.
+
+`--config-cache'
+`-C'
+     Alias for `--cache-file=config.cache'.
+
+`--quiet'
+`--silent'
+`-q'
+     Do not print messages saying which checks are being made.  To
+     suppress all normal output, redirect it to `/dev/null' (any error
+     messages will still be shown).
+
+`--srcdir=DIR'
+     Look for the package's source code in directory DIR.  Usually
+     `configure' can determine that directory automatically.
+
+`--prefix=DIR'
+     Use DIR as the installation prefix.  *note Installation Names::
+     for more details, including other options available for fine-tuning
+     the installation locations.
+
+`--no-create'
+`-n'
+     Run the configure checks, but stop before creating any output
+     files.
+
+`configure' also accepts some other, not widely useful, options.  Run
+`configure --help' for more details.
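+
+   As a final, purely illustrative example (the directory names are
+placeholders), several of the options above can be combined to run a
+quiet, cached configuration from a separate build directory:
+
+     mkdir build && cd build
+     ../nl-unit-test/configure -C -q --prefix=/usr/local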
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/check.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/check.am
new file mode 100644
index 0000000..7012d5a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/check.am
@@ -0,0 +1,571 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+am__tty_colors_dummy = \
+  mgn= red= grn= lgn= blu= brg= std=; \
+  am__color_tests=no
+
+am__tty_colors = { \
+  $(am__tty_colors_dummy); \
+  if test "X$(AM_COLOR_TESTS)" = Xno; then \
+    am__color_tests=no; \
+  elif test "X$(AM_COLOR_TESTS)" = Xalways; then \
+    am__color_tests=yes; \
+## If stdout is a non-dumb tty, use colors.  If test -t is not supported,
+## then this check fails; a conservative approach.  Of course do not
+## redirect stdout here, just stderr.
+  elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \
+    am__color_tests=yes; \
+  fi; \
+  if test $$am__color_tests = yes; then \
+    red=''; \
+    grn=''; \
+    lgn=''; \
+    blu=''; \
+    mgn=''; \
+    brg=''; \
+    std=''; \
+  fi; \
+}
+
+.PHONY: check-TESTS
+
+if !%?SERIAL_TESTS%
+
+include inst-vars.am
+
+## New parallel test driver.
+##
+## The first version of the code here was adapted from check.mk, which was
+## originally written at EPITA/LRDE, further developed at Gostai, then made
+## its way from GNU coreutils to end up, largely rewritten, in Automake.
+## The current version is a heavy rewrite of that, to allow for support
+## of more test metadata, and the use of custom test drivers and protocols
+## (among them, TAP).
+
+am__recheck_rx = ^[ 	]*:recheck:[ 	]*
+am__global_test_result_rx = ^[ 	]*:global-test-result:[ 	]*
+am__copy_in_global_log_rx = ^[ 	]*:copy-in-global-log:[ 	]*
+
+# A command that, given a newline-separated list of test names on the
+# standard input, prints the names of the tests that are to be re-run
+# upon "make recheck".
+am__list_recheck_tests = $(AWK) '{ \
+## By default, we assume the test is to be re-run.
+  recheck = 1; \
+  while ((rc = (getline line < ($$0 ".trs"))) != 0) \
+    { \
+      if (rc < 0) \
+        { \
+## If we've encountered an I/O error here, there are three possibilities:
+##
+##  [1] The '.log' file exists, but the '.trs' does not; in this case,
+##      we "gracefully" recover by assuming the corresponding test is
+##      to be re-run (which will re-create the missing '.trs' file).
+##
+##  [2] Both the '.log' and '.trs' files are missing; this means that
+##      the corresponding test has not been run, and is thus *not* to
+##      be re-run.
+##
+##  [3] We have encountered some corner-case problem (e.g., a '.log' or
+##      '.trs' file somehow made unreadable, or issues with a bad NFS
+##      connection, or whatever); we don't handle such corner cases.
+##
+          if ((getline line2 < ($$0 ".log")) < 0) \
+	    recheck = 0; \
+          break; \
+        } \
+      else if (line ~ /$(am__recheck_rx)[nN][Oo]/) \
+## A directive explicitly specifying the test is *not* to be re-run.
+        { \
+          recheck = 0; \
+          break; \
+        } \
+      else if (line ~ /$(am__recheck_rx)[yY][eE][sS]/) \
+        { \
+## A directive explicitly specifying the test *is* to be re-run.
+          break; \
+        } \
+## else continue with the next iteration.
+    }; \
+  if (recheck) \
+    print $$0; \
+## Don't leak open file descriptors, as this could cause serious
+## problems when there are many tests (yes, even on Linux).
+  close ($$0 ".trs"); \
+  close ($$0 ".log"); \
+}'
+
+# A command that, given a newline-separated list of test names on the
+# standard input, creates the global log from their .trs and .log files.
+am__create_global_log = $(AWK) ' \
+function fatal(msg) \
+{ \
+  print "fatal: making $@: " msg | "cat >&2"; \
+  exit 1; \
+} \
+function rst_section(header) \
+{ \
+  print header; \
+  len = length(header); \
+  for (i = 1; i <= len; i = i + 1) \
+    printf "="; \
+  printf "\n\n"; \
+} \
+{ \
+## By default, we assume the test log is to be copied into the global log,
+## and that its result is simply "RUN" (i.e., we still don't know what
+## its outcome was, but we know that at least it has run).
+  copy_in_global_log = 1; \
+  global_test_result = "RUN"; \
+  while ((rc = (getline line < ($$0 ".trs"))) != 0) \
+    { \
+      if (rc < 0) \
+         fatal("failed to read from " $$0 ".trs"); \
+      if (line ~ /$(am__global_test_result_rx)/) \
+        { \
+          sub("$(am__global_test_result_rx)", "", line); \
+          sub("[ 	]*$$", "", line); \
+          global_test_result = line; \
+        } \
+      else if (line ~ /$(am__copy_in_global_log_rx)[nN][oO]/) \
+        copy_in_global_log = 0; \
+    }; \
+  if (copy_in_global_log) \
+    { \
+      rst_section(global_test_result ": " $$0); \
+      while ((rc = (getline line < ($$0 ".log"))) != 0) \
+      { \
+        if (rc < 0) \
+          fatal("failed to read from " $$0 ".log"); \
+        print line; \
+      }; \
+      printf "\n"; \
+    }; \
+## Don't leak open file descriptors, as this could cause serious
+## problems when there are many tests (yes, even on Linux).
+  close ($$0 ".trs"); \
+  close ($$0 ".log"); \
+}'
+
+# Restructured Text title.
+am__rst_title = { sed 's/.*/   &   /;h;s/./=/g;p;x;s/ *$$//;p;g' && echo; }
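+# For instance (illustrative): piping the single line "Check results"
+# through $(am__rst_title) prints a line of '=' signs, the title indented
+# by three spaces, another line of '=' signs, and a blank line -- i.e., a
+# reStructuredText-style section title.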
+
+# Solaris 10 'make', and several other traditional 'make' implementations,
+# pass "-e" to $(SHELL), and POSIX 2008 even requires this.  Work around it
+# by disabling -e (using the XSI extension "set +e") if it's set.
+am__sh_e_setup = case $$- in *e*) set +e;; esac
+
+# Default flags passed to test drivers.
+am__common_driver_flags = \
+  --color-tests "$$am__color_tests" \
+  --enable-hard-errors "$$am__enable_hard_errors" \
+  --expect-failure "$$am__expect_failure"
+
+# To be inserted before the command running the test.  Creates the
+# directory for the log if needed.  Stores in $dir the directory
+# containing $f, in $tst the test, in $log the log.  Executes the
+# developer-defined test setup AM_TESTS_ENVIRONMENT (if any), and
+# passes TESTS_ENVIRONMENT.  Sets up options for the wrapper that
+# will run the test scripts (or their associated LOG_COMPILER, if
+# they have one).
+am__check_pre =						\
+$(am__sh_e_setup);					\
+$(am__vpath_adj_setup) $(am__vpath_adj)			\
+$(am__tty_colors);					\
+srcdir=$(srcdir); export srcdir;			\
+case "$@" in						\
+  */*) am__odir=`echo "./$@" | sed 's|/[^/]*$$||'`;;	\
+    *) am__odir=.;; 					\
+esac;							\
+test "x$$am__odir" = x"." || test -d "$$am__odir" 	\
+  || $(MKDIR_P) "$$am__odir" || exit $$?;		\
+if test -f "./$$f"; then dir=./;			\
+elif test -f "$$f"; then dir=;				\
+else dir="$(srcdir)/"; fi;				\
+tst=$$dir$$f; log='$@'; 				\
+if test -n '$(DISABLE_HARD_ERRORS)'; then		\
+  am__enable_hard_errors=no; 				\
+else							\
+  am__enable_hard_errors=yes; 				\
+fi; 							\
+## The use of $dir below is required to account for VPATH
+## rewriting done by Sun make.
+case " $(XFAIL_TESTS) " in				\
+  *[\ \	]$$f[\ \	]* | *[\ \	]$$dir$$f[\ \	]*) \
+    am__expect_failure=yes;;				\
+  *)							\
+    am__expect_failure=no;;				\
+esac; 							\
+$(AM_TESTS_ENVIRONMENT) $(TESTS_ENVIRONMENT)
+
+# A shell command to get the names of the test scripts with any registered
+# extension removed (i.e., equivalently, the names of the test logs, with
+# the '.log' extension removed).  The result is saved in the shell variable
+# '$bases'.  This honors runtime overriding of TESTS and TEST_LOGS.  Sadly,
+# we cannot use something simpler, involving e.g., "$(TEST_LOGS:.log=)",
+# since that might cause problems with VPATH rewrites for suffix-less tests.
+# See also 'test-harness-vpath-rewrite.sh' and 'test-trs-basic.sh'.
+am__set_TESTS_bases = \
+  bases='$(TEST_LOGS)'; \
+  bases=`for i in $$bases; do echo $$i; done | sed 's/\.log$$//'`; \
+## Trim away any extra whitespace.  This has already proved useful
+## in avoiding weird bugs on lesser make implementations.  It also
+## works around the GNU make 3.80 bug where trailing whitespace in
+## "TESTS = foo.test $(empty)" causes $(TEST_LOGS) to erroneously
+## expand to "foo.log .log".
+  bases=`echo $$bases`
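+## For instance (an illustrative value): with TEST_LOGS set to
+## "foo.log sub/bar.log", the commands above leave $bases set to
+## "foo sub/bar".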
+
+# Recover from deleted '.trs' file; this should ensure that
+# "rm -f foo.log; make foo.trs" re-run 'foo.test', and re-create
+# both 'foo.log' and 'foo.trs'.  Break the recipe in two subshells
+# to avoid problems with "make -n".
+.log.trs:
+	rm -f $< $@
+	$(MAKE) $(AM_MAKEFLAGS) $<
+
+# Leading 'am--fnord' is there to ensure the list of targets does not
+# expand to empty, as could happen e.g. with make check TESTS=''.
+am--fnord $(TEST_LOGS) $(TEST_LOGS:.log=.trs): $(am__force_recheck)
+am--force-recheck:
+	@:
+
+$(TEST_SUITE_LOG): $(TEST_LOGS)
+	@$(am__set_TESTS_bases); \
+## Helper shell function, tells whether a path refers to an existing,
+## regular, readable file.
+	am__f_ok () { test -f "$$1" && test -r "$$1"; }; \
+## We need to ensure that all the required '.trs' and '.log' files will
+## be present and readable.  The direct dependencies of $(TEST_SUITE_LOG)
+## only ensure that all the '.log' files exist; they don't ensure that
+## the '.log' files are readable, and worse, they don't ensure that the
+## '.trs' files even exist.
+	redo_bases=`for i in $$bases; do \
+	              am__f_ok $$i.trs && am__f_ok $$i.log || echo $$i; \
+	            done`; \
+	if test -n "$$redo_bases"; then \
+## Uh-oh, either some '.log' files were unreadable, or some '.trs' files
+## were missing (or unreadable).  We need to re-run the corresponding
+## tests in order to re-create them.
+	  redo_logs=`for i in $$redo_bases; do echo $$i.log; done`; \
+	  redo_results=`for i in $$redo_bases; do echo $$i.trs; done`; \
+	  if $(am__make_dryrun); then :; else \
+## Break "rm -f" into two calls to minimize the possibility of exceeding
+## command line length limits.
+	    rm -f $$redo_logs && rm -f $$redo_results || exit 1; \
+	  fi; \
+	fi; \
+## Use a trick to ensure that we don't go into an infinite recursion
+## in case a test log in $(TEST_LOGS) is the same as $(TEST_SUITE_LOG).
+## Yes, this has already happened in practice.  Sigh!
+	if test -n "$$am__remaking_logs"; then \
+	  echo "fatal: making $(TEST_SUITE_LOG): possible infinite" \
+	       "recursion detected" >&2; \
+	else \
+	  am__remaking_logs=yes $(MAKE) $(AM_MAKEFLAGS) $$redo_logs; \
+	fi; \
+	if $(am__make_dryrun); then :; else \
+## Sanity check: each unreadable or non-existent test result file should
+## have been properly remade at this point, as should the corresponding log
+## file.
+	  st=0;  \
+	  errmsg="fatal: making $(TEST_SUITE_LOG): failed to create"; \
+	  for i in $$redo_bases; do \
+	    test -f $$i.trs && test -r $$i.trs \
+	      || { echo "$$errmsg $$i.trs" >&2; st=1; }; \
+	    test -f $$i.log && test -r $$i.log \
+	      || { echo "$$errmsg $$i.log" >&2; st=1; }; \
+	  done; \
+	  test $$st -eq 0 || exit 1; \
+	fi
+## We need a new subshell to work portably with "make -n", since the
+## previous part of the recipe contained a $(MAKE) invocation.
+	@$(am__sh_e_setup); $(am__tty_colors); $(am__set_TESTS_bases); \
+	ws='[ 	]'; \
+## List of test result files.
+	results=`for b in $$bases; do echo $$b.trs; done`; \
+	test -n "$$results" || results=/dev/null; \
+## Prepare data for the test suite summary.  These do not take into account
+## unreadable test results, but they'll be appropriately updated later if
+## needed.
+	all=`  grep "^$$ws*:test-result:"           $$results | wc -l`; \
+	pass=` grep "^$$ws*:test-result:$$ws*PASS"  $$results | wc -l`; \
+	fail=` grep "^$$ws*:test-result:$$ws*FAIL"  $$results | wc -l`; \
+	skip=` grep "^$$ws*:test-result:$$ws*SKIP"  $$results | wc -l`; \
+	xfail=`grep "^$$ws*:test-result:$$ws*XFAIL" $$results | wc -l`; \
+	xpass=`grep "^$$ws*:test-result:$$ws*XPASS" $$results | wc -l`; \
+	error=`grep "^$$ws*:test-result:$$ws*ERROR" $$results | wc -l`; \
+## Whether the testsuite was successful or not.
+	if test `expr $$fail + $$xpass + $$error` -eq 0; then \
+	  success=true; \
+	else \
+	  success=false; \
+	fi; \
+## Make $br a line of exactly 76 '=' characters, which will be used to
+## enclose the testsuite summary report when displayed on the console.
+	br='==================='; br=$$br$$br$$br$$br; \
+## When writing the test summary to the console, we want to color a line
+## reporting the count of some result *only* if at least one test
+## experienced such a result.  This function is handy in this regard.
+	result_count () \
+	{ \
+	    if test x"$$1" = x"--maybe-color"; then \
+	      maybe_colorize=yes; \
+	    elif test x"$$1" = x"--no-color"; then \
+	      maybe_colorize=no; \
+	    else \
+	      echo "$@: invalid 'result_count' usage" >&2; exit 4; \
+	    fi; \
+	    shift; \
+	    desc=$$1 count=$$2; \
+	    if test $$maybe_colorize = yes && test $$count -gt 0; then \
+	      color_start=$$3 color_end=$$std; \
+	    else \
+	      color_start= color_end=; \
+	    fi; \
+	    echo "$${color_start}# $$desc $$count$${color_end}"; \
+	}; \
+## A shell function that creates the testsuite summary.  We need it
+## because we have to create *two* summaries, one for test-suite.log,
+## and a possibly-colorized one for console output.
+	create_testsuite_report () \
+	{ \
+	  result_count $$1 "TOTAL:" $$all   "$$brg"; \
+	  result_count $$1 "PASS: " $$pass  "$$grn"; \
+	  result_count $$1 "SKIP: " $$skip  "$$blu"; \
+	  result_count $$1 "XFAIL:" $$xfail "$$lgn"; \
+	  result_count $$1 "FAIL: " $$fail  "$$red"; \
+	  result_count $$1 "XPASS:" $$xpass "$$red"; \
+	  result_count $$1 "ERROR:" $$error "$$mgn"; \
+	}; \
+## Write "global" testsuite log.
+	{								\
+	  echo "$(PACKAGE_STRING): $(subdir)/$(TEST_SUITE_LOG)" |	\
+	    $(am__rst_title);						\
+	  create_testsuite_report --no-color;				\
+	  echo;								\
+	  echo ".. contents:: :depth: 2";				\
+	  echo;								\
+	  for b in $$bases; do echo $$b; done				\
+	    | $(am__create_global_log);					\
+	} >$(TEST_SUITE_LOG).tmp || exit 1;				\
+	mv $(TEST_SUITE_LOG).tmp $(TEST_SUITE_LOG);			\
+## Emit the test summary on the console.
+	if $$success; then						\
+	  col="$$grn";							\
+	 else								\
+	  col="$$red";							\
+	  test x"$$VERBOSE" = x || cat $(TEST_SUITE_LOG);		\
+	fi;								\
+## Multi line coloring is problematic with "less -R", so we really need
+## to color each line individually.
+	echo "$${col}$$br$${std}"; 					\
+	echo "$${col}Testsuite summary for $(PACKAGE_STRING)$${std}";	\
+	echo "$${col}$$br$${std}"; 					\
+## This is expected to go to the console, so it might have to be colorized.
+	create_testsuite_report --maybe-color;				\
+	echo "$$col$$br$$std";						\
+	if $$success; then :; else					\
+	  echo "$${col}See $(subdir)/$(TEST_SUITE_LOG)$${std}";		\
+	  if test -n "$(PACKAGE_BUGREPORT)"; then			\
+	    echo "$${col}Please report to $(PACKAGE_BUGREPORT)$${std}";	\
+	  fi;								\
+	  echo "$$col$$br$$std";					\
+	fi;								\
+## Be sure to exit with the proper exit status.  The use of "exit 1" below
+## is required to work around a FreeBSD make bug (present only when running
+## in concurrent mode).  See automake bug#9245:
+##  <http://debbugs.gnu.org/cgi/bugreport.cgi?bug=9245>
+## and FreeBSD PR bin/159730:
+##  <http://www.freebsd.org/cgi/query-pr.cgi?pr=159730>.
+	$$success || exit 1
+
+RECHECK_LOGS = $(TEST_LOGS)
+
+## ------------------------------------------ ##
+## Running all tests, or rechecking failures. ##
+## ------------------------------------------ ##
+
+check-TESTS:
+	@list='$(RECHECK_LOGS)';           test -z "$$list" || rm -f $$list
+	@list='$(RECHECK_LOGS:.log=.trs)'; test -z "$$list" || rm -f $$list
+## We always have to remove $(TEST_SUITE_LOG), to ensure its rule is run
+## in any case even in lazy mode: otherwise, if no test needs rerunning,
+## or a prior run plus reruns all happen within the same timestamp (can
+## happen with a prior "make TESTS=<subset>"), then we get no log output.
+## OTOH, this means that, in the rule for '$(TEST_SUITE_LOG)', we
+## cannot use '$?' to compute the set of lazily rerun tests, lest
+## we rely on .PHONY to work portably.
+	@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
+	@set +e; $(am__set_TESTS_bases); \
+	log_list=`for i in $$bases; do echo $$i.log; done`; \
+	trs_list=`for i in $$bases; do echo $$i.trs; done`; \
+## Remove newlines and normalize whitespace.  Trailing (and possibly
+## leading) whitespace is known to cause segmentation faults on
+## Solaris 10 XPG4 make.
+	log_list=`echo $$log_list`; trs_list=`echo $$trs_list`; \
+	$(MAKE) $(AM_MAKEFLAGS) $(TEST_SUITE_LOG) TEST_LOGS="$$log_list"; \
+## Be sure to exit with the proper exit status (automake bug#9245).  See
+## comments in the recipe of $(TEST_SUITE_LOG) above for more information.
+	exit $$?;
+
+## Recheck must depend on $(check_SCRIPTS), $(check_PROGRAMS), etc.
+## It must also depend on the 'all' target.  See automake bug#11252.
+recheck: all %CHECK_DEPS%
+## See comments above in the check-TESTS recipe for why we remove
+## $(TEST_SUITE_LOG) here.
+	@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
+	@set +e; $(am__set_TESTS_bases); \
+## We must only consider tests that had an unexpected outcome (FAIL
+## or XPASS) in the earlier run.
+	bases=`for i in $$bases; do echo $$i; done \
+	         | $(am__list_recheck_tests)` || exit 1; \
+	log_list=`for i in $$bases; do echo $$i.log; done`; \
+## Remove newlines and normalize whitespace.  Trailing (and possibly
+## leading) whitespace is known to cause segmentation faults on
+## Solaris 10 XPG4 make.
+	log_list=`echo $$log_list`; \
+## Move the '.log' and '.trs' files associated with the tests to be
+## re-run out of the way, so that those tests will be re-run by the
+## "make test-suite.log" recursive invocation below.
+## Two tricky requirements:
+##   - we must avoid extra files removal when running under "make -n";
+##   - in case the test is a compiled program whose compilation fails,
+##     we must ensure that any '.log' and '.trs' file referring to such
+##     test are preserved, so that future "make recheck" invocations
+##     will still try to re-compile and re-run it (automake bug#11791).
+## The tricky recursive make invocation below should cater to such
+## requirements.
+	$(MAKE) $(AM_MAKEFLAGS) $(TEST_SUITE_LOG) \
+	        am__force_recheck=am--force-recheck \
+	        TEST_LOGS="$$log_list"; \
+## Be sure to exit with the proper exit status (automake bug#9245).  See
+## comments in the recipe of $(TEST_SUITE_LOG) above for more information.
+	exit $$?
+
+AM_RECURSIVE_TARGETS += check recheck
+
+.PHONY: recheck
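+
+## Illustrative usage: after a full "make check", running "make recheck"
+## re-runs only the tests whose recorded '.trs' result was an unexpected
+## FAIL or XPASS, as selected by $(am__list_recheck_tests) above.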
+
+else %?SERIAL_TESTS%
+
+## Obsolescent serial testsuite driver.
+
+check-TESTS: $(TESTS)
+	@failed=0; all=0; xfail=0; xpass=0; skip=0; \
+	srcdir=$(srcdir); export srcdir; \
+## Make sure Solaris VPATH-expands all members of this list, even
+## the first and the last one; thus the spaces around $(TESTS)
+	list=' $(TESTS) '; \
+	$(am__tty_colors); \
+	if test -n "$$list"; then \
+	  for tst in $$list; do \
+	    if test -f ./$$tst; then dir=./; \
+## Note: Solaris 2.7 seems to expand TESTS using VPATH.  That's
+## why we also try 'dir='.
+	    elif test -f $$tst; then dir=; \
+	    else dir="$(srcdir)/"; fi; \
+	    if $(TESTS_ENVIRONMENT) $${dir}$$tst $(AM_TESTS_FD_REDIRECT); then \
+## Success
+	      all=`expr $$all + 1`; \
+	      case " $(XFAIL_TESTS) " in \
+	      *[\ \	]$$tst[\ \	]*) \
+		xpass=`expr $$xpass + 1`; \
+		failed=`expr $$failed + 1`; \
+		col=$$red; res=XPASS; \
+	      ;; \
+	      *) \
+		col=$$grn; res=PASS; \
+	      ;; \
+	      esac; \
+	    elif test $$? -ne 77; then \
+## Failure
+	      all=`expr $$all + 1`; \
+	      case " $(XFAIL_TESTS) " in \
+	      *[\ \	]$$tst[\ \	]*) \
+		xfail=`expr $$xfail + 1`; \
+		col=$$lgn; res=XFAIL; \
+	      ;; \
+	      *) \
+		failed=`expr $$failed + 1`; \
+		col=$$red; res=FAIL; \
+	      ;; \
+	      esac; \
+	    else \
+## Skipped
+	      skip=`expr $$skip + 1`; \
+	      col=$$blu; res=SKIP; \
+	    fi; \
+	    echo "$${col}$$res$${std}: $$tst"; \
+	  done; \
+## Prepare the banner
+	  if test "$$all" -eq 1; then \
+	    tests="test"; \
+	    All=""; \
+	  else \
+	    tests="tests"; \
+	    All="All "; \
+	  fi; \
+	  if test "$$failed" -eq 0; then \
+	    if test "$$xfail" -eq 0; then \
+	      banner="$$All$$all $$tests passed"; \
+	    else \
+	      if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \
+	      banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \
+	    fi; \
+	  else \
+	    if test "$$xpass" -eq 0; then \
+	      banner="$$failed of $$all $$tests failed"; \
+	    else \
+	      if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \
+	      banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \
+	    fi; \
+	  fi; \
+## DASHES should contain the longest line of the banner.
+	  dashes="$$banner"; \
+	  skipped=""; \
+	  if test "$$skip" -ne 0; then \
+	    if test "$$skip" -eq 1; then \
+	      skipped="($$skip test was not run)"; \
+	    else \
+	      skipped="($$skip tests were not run)"; \
+	    fi; \
+	    test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \
+	      dashes="$$skipped"; \
+	  fi; \
+	  report=""; \
+	  if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \
+	    report="Please report to $(PACKAGE_BUGREPORT)"; \
+	    test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \
+	      dashes="$$report"; \
+	  fi; \
+	  dashes=`echo "$$dashes" | sed s/./=/g`; \
+	  if test "$$failed" -eq 0; then \
+	    col="$$grn"; \
+	  else \
+	    col="$$red"; \
+	  fi; \
+## Multi line coloring is problematic with "less -R", so we really need
+## to color each line individually.
+	  echo "$${col}$$dashes$${std}"; \
+	  echo "$${col}$$banner$${std}"; \
+	  test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \
+	  test -z "$$report" || echo "$${col}$$report$${std}"; \
+	  echo "$${col}$$dashes$${std}"; \
+	  test "$$failed" -eq 0; \
+	else :; fi
+
+endif %?SERIAL_TESTS%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/check2.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/check2.am
new file mode 100644
index 0000000..79d2b5b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/check2.am
@@ -0,0 +1,60 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2008-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?FIRST%
+## When BSD make is run in parallel mode, it apparently strips any
+## leading directory component from the automatic variable '$*' (of
+## course, against what POSIX mandates).  Try to detect and work
+## around this incompatibility.
+am__set_b = \
+  case '$@' in \
+    */*) \
+      case '$*' in \
+        */*) b='$*';; \
+          *) b=`echo '$@' | sed 's/\.log$$//'`; \
+       esac;; \
+    *) \
+      b='$*';; \
+  esac
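+## For example (an illustrative target name): for 'sub/foo.log', a
+## conforming make sets '$*' to 'sub/foo' and the first branch keeps it,
+## whereas a BSD make that has stripped the directory part falls back to
+## deriving b='sub/foo' from '$@' instead.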
+endif %?FIRST%
+
+## From a test file to a .log and .trs file.
+?GENERIC?%EXT%.log:
+?!GENERIC?%OBJ%: %SOURCE%
+	@p='%SOURCE%'; \
+## Another hack to support BSD make in parallel mode.
+?!GENERIC?	b='%BASE%'; \
+?GENERIC?	$(am__set_b); \
+	$(am__check_pre) %DRIVER% --test-name "$$f" \
+	--log-file $$b.log --trs-file $$b.trs \
+	$(am__common_driver_flags) %DRIVER_FLAGS% -- %COMPILE% \
+	"$$tst" $(AM_TESTS_FD_REDIRECT)
+
+## If no programs are built in this package, then this rule is removed
+## at automake time.  Otherwise, %am__EXEEXT% expands to a configure time
+## conditional, true if $(EXEEXT) is nonempty, thus this rule does not
+## conflict with the previous one.
+if %am__EXEEXT%
+?GENERIC?%EXT%$(EXEEXT).log:
+	@p='%SOURCE%'; \
+## Another hack to support BSD make in parallel mode.
+?!GENERIC?	b='%BASE%'; \
+?GENERIC?	$(am__set_b); \
+	$(am__check_pre) %DRIVER% --test-name "$$f" \
+	--log-file $$b.log --trs-file $$b.trs \
+	$(am__common_driver_flags) %DRIVER_FLAGS% -- %COMPILE% \
+	"$$tst" $(AM_TESTS_FD_REDIRECT)
+endif %am__EXEEXT%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/clean-hdr.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/clean-hdr.am
new file mode 100644
index 0000000..3c0d761
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/clean-hdr.am
@@ -0,0 +1,20 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+.PHONY: distclean-hdr
+distclean-am: distclean-hdr
+distclean-hdr:
+	-rm -f %FILES%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/clean.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/clean.am
new file mode 100644
index 0000000..8c37566
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/clean.am
@@ -0,0 +1,62 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## We must test each macro because it might be empty, and an empty "rm
+## -rf" command looks disturbing.  Also, the Solaris 2.4 "rm" will
+## return an error if there are no arguments other than "-f".
+mostlyclean-am: mostlyclean-generic
+mostlyclean-generic:
+%MOSTLYCLEAN_RMS%
+
+clean-am: clean-generic mostlyclean-am
+clean-generic:
+%CLEAN_RMS%
+
+distclean-am: distclean-generic clean-am
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+	-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+%DISTCLEAN_RMS%
+
+## Makefiles and their dependencies cannot be cleaned by
+## an -am dependency, because that would prevent other distclean
+## dependencies from calling make recursively.  (The multilib
+## cleaning rules do this.)
+##
+## If you change distclean here, you probably also want to change
+## maintainer-clean below.
+distclean:
+	-rm -f %MAKEFILE%
+
+maintainer-clean-am: maintainer-clean-generic distclean-am
+maintainer-clean-generic:
+## FIXME: shouldn't we really print these messages before running
+## the dependencies?
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+%MAINTAINER_CLEAN_RMS%
+
+## See comment for distclean.
+maintainer-clean:
+	-rm -f %MAKEFILE%
+
+.PHONY: clean mostlyclean distclean maintainer-clean \
+clean-generic mostlyclean-generic distclean-generic maintainer-clean-generic
+
+?!SUBDIRS?clean: clean-am
+?!SUBDIRS?distclean: distclean-am
+?!SUBDIRS?mostlyclean: mostlyclean-am
+?!SUBDIRS?maintainer-clean: maintainer-clean-am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/compile.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/compile.am
new file mode 100644
index 0000000..3e3b171
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/compile.am
@@ -0,0 +1,29 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+DEFAULT_INCLUDES = %DEFAULT_INCLUDES%
+
+mostlyclean-am: mostlyclean-compile
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+?MOSTLYRMS?%MOSTLYRMS%
+
+distclean-am: distclean-compile
+distclean-compile:
+	-rm -f *.tab.c
+?DISTRMS?%DISTRMS%
+
+.PHONY: mostlyclean-compile distclean-compile
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/configure.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/configure.am
new file mode 100644
index 0000000..6f39f7d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/configure.am
@@ -0,0 +1,162 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+## This dummy rule is called from subdirectories whenever one of the
+## top-level Makefile's dependencies must be updated.  It does depend
+## on %MAKEFILE% for the benefit of non-GNU make implementations (GNU
+## make will always make sure %MAKEFILE% is updated before considering
+## the am--refresh target anyway).
+if %?TOPDIR_P%
+.PHONY: am--refresh
+am--refresh: %MAKEFILE%
+	@:
+endif %?TOPDIR_P%
+
+## --------------------- ##
+## Building Makefile.*.  ##
+## --------------------- ##
+
+## This rule remakes the Makefile.in.
+%MAKEFILE-IN%: %MAINTAINER-MODE% %MAKEFILE-AM% %MAKEFILE-IN-DEPS% $(am__configure_deps)
+## If configure.ac or one of configure's dependencies has changed, all
+## Makefile.in files are to be updated; it is then more efficient to run
+## automake on all the Makefiles at once.  It also allows Automake to be
+## run for newly added directories.
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+?TOPDIR_P?	      echo ' cd $(srcdir) && $(AUTOMAKE) %AUTOMAKE-OPTIONS%'; \
+?TOPDIR_P?	      $(am__cd) $(srcdir) && $(AUTOMAKE) %AUTOMAKE-OPTIONS% \
+?TOPDIR_P?		&& exit 0; \
+?!TOPDIR_P?	      ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+## If, on the other hand, subdir/Makefile.in has been removed, then the toplevel
+## am--refresh will not be aware of any need to run.  We still invoke it
+## due to $? listing all prerequisites.  Fix up for it by running the rebuild
+## rule for this file only, below.
+?!TOPDIR_P?	        && { if test -f $@; then exit 0; else break; fi; }; \
+	      exit 1;; \
+	  esac; \
+	done; \
+## Otherwise, rebuild only this file.
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) %AUTOMAKE-OPTIONS% %MAKEFILE-AM-SOURCES%'; \
+	$(am__cd) $(top_srcdir) && \
+	  $(AUTOMAKE) %AUTOMAKE-OPTIONS% %MAKEFILE-AM-SOURCES%
+
+## Ensure that GNU make doesn't remove Makefile if ./config.status (below)
+## is interrupted.  Otherwise, the user would need to know to rerun
+## ./config.status to recreate the lost Makefile.
+.PRECIOUS: %MAKEFILE%
+## This rule remakes the Makefile.
+%MAKEFILE%: %MAKEFILE-DEPS% $(top_builddir)/config.status
+## If Makefile is to be updated because of config.status, then run
+## config.status without arguments in order to (i) rerun all the
+## AC_CONFIG_COMMANDS, including those that are not visible to
+## Automake, (ii) save time by running config.status once with all
+## the files instead of once per file, and (iii) generate Makefiles
+## in newly added directories.
+	@case '$?' in \
+## Don't prefix $(top_builddir), because GNU make will strip it out
+## when it's '.'.
+	  *config.status*) \
+?TOPDIR_P?	    echo ' $(SHELL) ./config.status'; \
+?TOPDIR_P?	    $(SHELL) ./config.status;; \
+?!TOPDIR_P?	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+## FIXME: $(am__depfiles_maybe) lets us re-run the rule to create the
+## .P files.  Ideally we wouldn't have to do this by hand.
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status %CONFIG-MAKEFILE% $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status %CONFIG-MAKEFILE% $(am__depfiles_maybe);; \
+	esac;
+
+## Avoid the "deleted header file" problem for the dependencies.
+?HAVE-MAKEFILE-IN-DEPS?%MAKEFILE-IN-DEPS%:
+
+DIST_COMMON += %MAKEFILE-AM%
+
+
+## --------------------------- ##
+## config.status & configure.  ##
+## --------------------------- ##
+
+if %?TOPDIR_P%
+## Always require configure.ac and configure at top level, even if they
+## don't exist.  This is especially important for configure, since it
+## won't be created until autoconf is run -- which might be after
+## automake is run.
+DIST_COMMON += $(top_srcdir)/configure $(am__configure_deps)
+endif %?TOPDIR_P%
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+?TOPDIR_P?	$(SHELL) ./config.status --recheck
+?!TOPDIR_P?	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure: %MAINTAINER-MODE% $(am__configure_deps)
+?TOPDIR_P?	$(am__cd) $(srcdir) && $(AUTOCONF)
+?!TOPDIR_P?	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+
+## ------------ ##
+## aclocal.m4.  ##
+## ------------ ##
+
+## Whenever a configure dependency changes we need to rebuild
+## aclocal.m4 too.  Changing configure.ac, or any file included by
+## aclocal.m4 might require adding more files to aclocal.m4.  Hence
+## the $(am__configure_deps) dependency.
+## We still need $(ACLOCAL_AMFLAGS) for the sake of backward compatibility;
+## we should hopefully be able to get rid of it in a not-so-distant
+## future.
+if %?REGEN-ACLOCAL-M4%
+$(ACLOCAL_M4): %MAINTAINER-MODE% $(am__aclocal_m4_deps)
+?TOPDIR_P?	$(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
+?!TOPDIR_P?	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+## Avoid the "deleted header file" problem for the dependencies.
+$(am__aclocal_m4_deps):
+endif %?REGEN-ACLOCAL-M4%
+
+
+## --------- ##
+## cleanup.  ##
+## --------- ##
+
+## We special-case config.status here.  If we do it as part of the
+## normal clean processing for this directory, then it might be
+## removed before some subdir is cleaned.  However, that subdir's
+## Makefile depends on config.status.
+
+if %?TOPDIR_P%
+am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
+ configure.lineno config.status.lineno
+distclean:
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+
+## Note: you might think we should remove Makefile.in, configure, or
+## aclocal.m4 here in a maintainer-clean rule.  However, the GNU
+## Coding Standards explicitly prohibit this.
+
+maintainer-clean:
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+## autom4te.cache is created by Autoconf; the only valid target to
+## remove it is maintainer-clean, not distclean.
+## If you have an autom4te.cache that causes distcheck to fail, then
+## it is good news: you finally discovered that autoconf and/or
+## autoheader is needed to use your tarball, which is wrong.
+	-rm -rf $(top_srcdir)/autom4te.cache
+
+
+endif %?TOPDIR_P%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/data.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/data.am
new file mode 100644
index 0000000..e0ef562
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/data.am
@@ -0,0 +1,100 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?INSTALL%
+include inst-vars.am
+endif %?INSTALL%
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+?EXEC?.PHONY install-exec-am: install-%DIR%%PRIMARY%
+?!EXEC?.PHONY install-data-am: install-%DIR%%PRIMARY%
+install-%DIR%%PRIMARY%: $(%DIR%_%PRIMARY%)
+	@$(NORMAL_INSTALL)
+if %?BASE%
+## Funny invocation because the Makefile variable can be empty, leading to
+## a syntax error in sh.
+	@list='$(%DIR%_%PRIMARY%)'; test -n "$(%NDIR%dir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+## A file can be in the source directory or the build directory.
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+## If the _%PRIMARY% variable has an entry like foo/bar, install it as
+## $(destdir)/bar, not $(destdir)/foo/bar.  The user can make a
+## new dir variable or use a nobase_ target for the latter case.
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_%ONE_PRIMARY%) $$files '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(INSTALL_%ONE_PRIMARY%) $$files "$(DESTDIR)$(%NDIR%dir)" || exit $$?; \
+	done
+else !%?BASE%
+	@list='$(%DIR%_%PRIMARY%)'; test -n "$(%NDIR%dir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+	$(am__nobase_list) | while read dir files; do \
+	  xfiles=; for file in $$files; do \
+	    if test -f "$$file"; then xfiles="$$xfiles $$file"; \
+	    else xfiles="$$xfiles $(srcdir)/$$file"; fi; done; \
+	  test -z "$$xfiles" || { \
+	    test "x$$dir" = x. || { \
+	      echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+	      $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)/$$dir"; }; \
+	    echo " $(INSTALL_%ONE_PRIMARY%) $$xfiles '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+	    $(INSTALL_%ONE_PRIMARY%) $$xfiles "$(DESTDIR)$(%NDIR%dir)/$$dir" || exit $$?; }; \
+	done
+endif !%?BASE%
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+.PHONY uninstall-am: uninstall-%DIR%%PRIMARY%
+uninstall-%DIR%%PRIMARY%:
+	@$(NORMAL_UNINSTALL)
+	@list='$(%DIR%_%PRIMARY%)'; test -n "$(%NDIR%dir)" || list=; \
+?BASE?	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+?!BASE?	$(am__nobase_strip_setup); files=`$(am__nobase_strip)`; \
+	dir='$(DESTDIR)$(%NDIR%dir)'; $(am__uninstall_files_from_dir)
+endif %?INSTALL%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+## Nothing.
+
+
+## -------------- ##
+## Distributing.  ##
+## -------------- ##
+
+if %?DIST%
+DIST_COMMON += %DISTVAR%
+endif %?DIST%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/dejagnu.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/dejagnu.am
new file mode 100644
index 0000000..d2ca534
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/dejagnu.am
@@ -0,0 +1,93 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## Name of tool to use.  Default is the same as the package.
+DEJATOOL = $(PACKAGE)
+
+## Default flags to pass to dejagnu.  The user can override this.
+RUNTESTDEFAULTFLAGS = --tool $$tool --srcdir $$srcdir
+
+EXPECT = expect
+RUNTEST = runtest
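+
+## Illustrative override (not a default set here): extra runtest options
+## can be passed on the make command line, e.g.
+## "make check-DEJAGNU RUNTESTFLAGS='-v'".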
+
+
+.PHONY: check-DEJAGNU
+check-DEJAGNU: site.exp
+## Life is easiest with an absolute srcdir, so do that.
+	srcdir='$(srcdir)'; export srcdir; \
+	EXPECT=$(EXPECT); export EXPECT; \
+## If runtest can't be found, print a warning but don't die.  It is
+## pointless to cause a failure if the tests cannot be run at all.
+	if $(SHELL) -c "$(RUNTEST) --version" > /dev/null 2>&1; then \
+	  exit_status=0; l='$(DEJATOOL)'; for tool in $$l; do \
+	    if $(RUNTEST) $(AM_RUNTESTFLAGS) $(RUNTESTDEFAULTFLAGS) $(RUNTESTFLAGS); \
+	    then :; else exit_status=1; fi; \
+	  done; \
+	else echo "WARNING: could not find '$(RUNTEST)'" 1>&2; :;\
+	fi; \
+	exit $$exit_status
+
+
+## ------------------- ##
+## Building site.exp.  ##
+## ------------------- ##
+
+## Note that in the rule we don't directly generate site.exp to avoid
+## the possibility of a corrupted site.exp if make is interrupted.
+## Jim Meyering has some useful text on this topic.
+site.exp: Makefile $(EXTRA_DEJAGNU_SITE_CONFIG)
+	@echo 'Making a new site.exp file ...'
+	@echo '## these variables are automatically generated by make ##' >site.tmp
+	@echo '# Do not edit here.  If you wish to override these values' >>site.tmp
+	@echo '# edit the last section' >>site.tmp
+	@echo 'set srcdir "$(srcdir)"' >>site.tmp
+	@echo "set objdir `pwd`" >>site.tmp
+## Quote the *_alias variables because they might be empty.
+?BUILD?	@echo 'set build_alias "$(build_alias)"' >>site.tmp
+?BUILD?	@echo 'set build_triplet $(build_triplet)' >>site.tmp
+?HOST?	@echo 'set host_alias "$(host_alias)"' >>site.tmp
+?HOST?	@echo 'set host_triplet $(host_triplet)' >>site.tmp
+?TARGET?	@echo 'set target_alias "$(target_alias)"' >>site.tmp
+?TARGET?	@echo 'set target_triplet $(target_triplet)' >>site.tmp
+## Allow the package author to extend site.exp.
+	@list='$(EXTRA_DEJAGNU_SITE_CONFIG)'; for f in $$list; do \
+	  echo "## Begin content included from file $$f.  Do not modify. ##" \
+	   && cat `test -f "$$f" || echo '$(srcdir)/'`$$f \
+	   && echo "## End content included from file $$f. ##" \
+	   || exit 1; \
+	 done >> site.tmp
+	@echo "## End of auto-generated content; you can edit from here. ##" >> site.tmp
+	@if test -f site.exp; then \
+	   sed -e '1,/^## End of auto-generated content.*##/d' site.exp >> site.tmp; \
+	 fi
+	@-rm -f site.bak
+	@test ! -f site.exp || mv site.exp site.bak
+	@mv site.tmp site.exp
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+.PHONY distclean-am: distclean-DEJAGNU
+
+distclean-DEJAGNU:
+## Any other cleaning must be done by the user or by the test suite
+## itself.  We can't predict what dejagnu or the test suite might
+## generate.
+	-rm -f site.exp site.bak
+	-l='$(DEJATOOL)'; for tool in $$l; do \
+	  rm -f $$tool.sum $$tool.log; \
+	done
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/depend.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/depend.am
new file mode 100644
index 0000000..1c00aae
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/depend.am
@@ -0,0 +1,27 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+am__mv = mv -f
+
+## This Makefile depends on the dependency directories' files, so we should never
+## erase them in -am or -recursive rules; that would prevent any other
+## rules from being recursive (for instance multilib clean rules are
+## recursive).
+distclean:
+	-rm -rf %DEPDIRS%
+
+maintainer-clean:
+	-rm -rf %DEPDIRS%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/depend2.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/depend2.am
new file mode 100644
index 0000000..5c6439a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/depend2.am
@@ -0,0 +1,114 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## This file is read several times:
+## - once per *extension* (not per language) for generic compilation rules
+## - once for each file which requires specific flags.
+
+## Note it is on purpose we wrote "if %AMDEP%", since:
+##
+## - if deps are turned off, %AMDEP% is mapped onto FALSE, and therefore
+##   the "if FALSE" chunk is removed (automake-time conditionals).
+##
+## - if deps are on, %AMDEP% is mapped onto AMDEP, and therefore
+##   the "if AMDEP" chunk is prefixed with @AMDEP_TRUE@ just like for any
+##   other configure-time conditional.
+##
+## We do likewise for %FASTDEP%; this expands to an ordinary configure-time
+## conditional.  %FASTDEP% is used to speed up the common case of building
+## a package with gcc 3.x or later.  In this case we can skip the use of
+## depcomp and easily inline the dependency tracking.
+
+if %?NONLIBTOOL%
+?GENERIC?%EXT%.o:
+?!GENERIC?%OBJ%: %SOURCE%
+if %FASTDEP%
+## In fast-dep mode, we can always use -o.
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?!GENERIC?	%VERBOSE%%COMPILE% -MT %OBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %OBJ% %SOURCEFLAG%`test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%
+?!GENERIC?	%SILENT%$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Po
+?GENERIC??!SUBDIROBJ?	%VERBOSE%%COMPILE% -MT %OBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %OBJ% %SOURCEFLAG%%SOURCE%
+?GENERIC??!SUBDIROBJ?	%SILENT%$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Po
+?GENERIC??SUBDIROBJ?	%VERBOSE%depbase=`echo %OBJ% | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
+?GENERIC??SUBDIROBJ?	%COMPILE% -MT %OBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %OBJ% %SOURCEFLAG%%SOURCE% &&\
+?GENERIC??SUBDIROBJ?	$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Po
+else !%FASTDEP%
+if %AMDEP%
+	%VERBOSE%source='%SOURCE%' object='%OBJ%' libtool=no @AMDEPBACKSLASH@
+	DEPDIR=$(DEPDIR) $(%FPFX%DEPMODE) $(depcomp) @AMDEPBACKSLASH@
+endif %AMDEP%
+if %?GENERIC%
+?-o?	%VERBOSE-NODEP%%COMPILE% %-c% %-o% %OBJ% %SOURCEFLAG%%SOURCE%
+?!-o?	%VERBOSE-NODEP%%COMPILE% %-c% %SOURCEFLAG%%SOURCE%
+else !%?GENERIC%
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?-o?	%VERBOSE-NODEP%%COMPILE% %-c% %-o% %OBJ% %SOURCEFLAG%`test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%
+?!-o?	%VERBOSE-NODEP%%COMPILE% %-c% %SOURCEFLAG%`test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%
+endif !%?GENERIC%
+endif !%FASTDEP%
+
+?GENERIC?%EXT%.obj:
+?!GENERIC?%OBJOBJ%: %SOURCE%
+if %FASTDEP%
+## In fast-dep mode, we can always use -o.
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?!GENERIC?	%VERBOSE%%COMPILE% -MT %OBJOBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %OBJOBJ% %SOURCEFLAG%`if test -f '%SOURCE%'; then $(CYGPATH_W) '%SOURCE%'; else $(CYGPATH_W) '$(srcdir)/%SOURCE%'; fi`
+?!GENERIC?	%SILENT%$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Po
+?GENERIC??!SUBDIROBJ?	%VERBOSE%%COMPILE% -MT %OBJOBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %OBJOBJ% %SOURCEFLAG%`$(CYGPATH_W) '%SOURCE%'`
+?GENERIC??!SUBDIROBJ?	%SILENT%$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Po
+?GENERIC??SUBDIROBJ?	%VERBOSE%depbase=`echo %OBJ% | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\
+?GENERIC??SUBDIROBJ?	%COMPILE% -MT %OBJOBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %OBJOBJ% %SOURCEFLAG%`$(CYGPATH_W) '%SOURCE%'` &&\
+?GENERIC??SUBDIROBJ?	$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Po
+else !%FASTDEP%
+if %AMDEP%
+	%VERBOSE%source='%SOURCE%' object='%OBJOBJ%' libtool=no @AMDEPBACKSLASH@
+	DEPDIR=$(DEPDIR) $(%FPFX%DEPMODE) $(depcomp) @AMDEPBACKSLASH@
+endif %AMDEP%
+if %?GENERIC%
+?-o?	%VERBOSE-NODEP%%COMPILE% %-c% %-o% %OBJOBJ% %SOURCEFLAG%`$(CYGPATH_W) '%SOURCE%'`
+?!-o?	%VERBOSE-NODEP%%COMPILE% %-c% `$(CYGPATH_W) %SOURCEFLAG%'%SOURCE%'`
+else !%?GENERIC%
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?-o?	%VERBOSE-NODEP%%COMPILE% %-c% %-o% %OBJOBJ% %SOURCEFLAG%`if test -f '%SOURCE%'; then $(CYGPATH_W) '%SOURCE%'; else $(CYGPATH_W) '$(srcdir)/%SOURCE%'; fi`
+?!-o?	%VERBOSE-NODEP%%COMPILE% %-c% %SOURCEFLAG%`if test -f '%SOURCE%'; then $(CYGPATH_W) '%SOURCE%'; else $(CYGPATH_W) '$(srcdir)/%SOURCE%'; fi`
+endif !%?GENERIC%
+endif !%FASTDEP%
+endif %?NONLIBTOOL%
+
+if %?LIBTOOL%
+?GENERIC?%EXT%.lo:
+?!GENERIC?%LTOBJ%: %SOURCE%
+if %FASTDEP%
+## In fast-dep mode, we can always use -o.
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?!GENERIC?	%VERBOSE%%LTCOMPILE% -MT %LTOBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %LTOBJ% %SOURCEFLAG%`test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%
+?!GENERIC?	%SILENT%$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Plo
+?GENERIC??!SUBDIROBJ?	%VERBOSE%%LTCOMPILE% -MT %LTOBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %LTOBJ% %SOURCEFLAG%%SOURCE%
+?GENERIC??!SUBDIROBJ?	%SILENT%$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Plo
+?GENERIC??SUBDIROBJ?	%VERBOSE%depbase=`echo %OBJ% | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\
+?GENERIC??SUBDIROBJ?	%LTCOMPILE% -MT %LTOBJ% -MD -MP -MF %DEPBASE%.Tpo %-c% -o %LTOBJ% %SOURCEFLAG%%SOURCE% &&\
+?GENERIC??SUBDIROBJ?	$(am__mv) %DEPBASE%.Tpo %DEPBASE%.Plo
+else !%FASTDEP%
+if %AMDEP%
+	%VERBOSE%source='%SOURCE%' object='%LTOBJ%' libtool=yes @AMDEPBACKSLASH@
+	DEPDIR=$(DEPDIR) $(%FPFX%DEPMODE) $(depcomp) @AMDEPBACKSLASH@
+endif %AMDEP%
+## We can always use '-o' with Libtool.
+?GENERIC?	%VERBOSE-NODEP%%LTCOMPILE% %-c% -o %LTOBJ% %SOURCEFLAG%%SOURCE%
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?!GENERIC?	%VERBOSE-NODEP%%LTCOMPILE% %-c% -o %LTOBJ% %SOURCEFLAG%`test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%
+endif !%FASTDEP%
+endif %?LIBTOOL%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/distdir.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/distdir.am
new file mode 100644
index 0000000..a8ad63c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/distdir.am
@@ -0,0 +1,546 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+
+if %?TOPDIR_P%
+distdir = $(PACKAGE)-$(VERSION)
+top_distdir = $(distdir)
+
+am__remove_distdir = \
+  if test -d "$(distdir)"; then \
+    find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \
+      && rm -rf "$(distdir)" \
+## On MSYS (1.0.17) it is not possible to remove a directory that is in
+## use; so, if the first rm fails, we sleep some seconds and retry, to
+## give pending processes some time to exit and "release" the directory
+## before we remove it.  The value of "some seconds" is 5 for the moment,
+## which is mostly an arbitrary value, but seems high enough in practice.
+## See automake bug#10470.
+      || { sleep 5 && rm -rf "$(distdir)"; }; \
+  else :; fi
+am__post_remove_distdir = $(am__remove_distdir)
+endif %?TOPDIR_P%
+
+if %?SUBDIRS%
+## computes a relative pathname RELDIR such that DIR1/RELDIR = DIR2.
+## Input:
+## - DIR1            relative pathname, relative to the current directory
+## - DIR2            relative pathname, relative to the current directory
+## Output:
+## - reldir          relative pathname of DIR2, relative to DIR1
+am__relativize = \
+  dir0=`pwd`; \
+  sed_first='s,^\([^/]*\)/.*$$,\1,'; \
+  sed_rest='s,^[^/]*/*,,'; \
+  sed_last='s,^.*/\([^/]*\)$$,\1,'; \
+  sed_butlast='s,/*[^/]*$$,,'; \
+  while test -n "$$dir1"; do \
+    first=`echo "$$dir1" | sed -e "$$sed_first"`; \
+    if test "$$first" != "."; then \
+      if test "$$first" = ".."; then \
+        dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \
+        dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \
+      else \
+        first2=`echo "$$dir2" | sed -e "$$sed_first"`; \
+        if test "$$first2" = "$$first"; then \
+          dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \
+        else \
+          dir2="../$$dir2"; \
+        fi; \
+        dir0="$$dir0"/"$$first"; \
+      fi; \
+    fi; \
+    dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \
+  done; \
+  reldir="$$dir2"
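+## A worked example (hypothetical values): with dir1="sub" and
+## dir2="pkg-1.0/sub", the loop sees first="sub" and first2="pkg-1.0",
+## so dir2 becomes "../pkg-1.0/sub", and the result is
+## reldir="../pkg-1.0/sub"; indeed sub/../pkg-1.0/sub resolves to
+## pkg-1.0/sub, matching the DIR1/RELDIR = DIR2 contract stated above.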
+endif %?SUBDIRS%
+
+.PHONY: distdir
+if %?SUBDIRS%
+AM_RECURSIVE_TARGETS += distdir
+endif %?SUBDIRS%
+
+distdir: $(DISTFILES)
+##
+## For Gnits users, this is pretty handy.  Look at the first 15 lines of
+## NEWS, in case some explanatory text precedes the version entry.
+##
+if %?TOPDIR_P%
+if  %?CK-NEWS%
+	@case `sed 15q $(srcdir)/NEWS` in \
+	*"$(VERSION)"*) : ;; \
+	*) \
+	  echo "NEWS not updated; not releasing" 1>&2; \
+	  exit 1;; \
+	esac
+endif  %?CK-NEWS%
+endif %?TOPDIR_P%
+##
+## Only for the top dir.
+##
+if %?TOPDIR_P%
+	$(am__remove_distdir)
+	test -d "$(distdir)" || mkdir "$(distdir)"
+endif %?TOPDIR_P%
+##
+##
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+##
+## Yet another hack to support SUN make.
+##
+## Let's assume 'foo' appears in DISTFILES and is not a built file.
+## When building with VPATH=$(srcdir), SUN make and OSF1/Tru64 will
+## rewrite 'foo' as '$(srcdir)/foo'.  An attempt to install the file
+## with
+##    cp $file $(distdir)/$file
+## will thus install $(srcdir)/foo as $(distdir)/$(srcdir)/foo
+## instead of $(distdir)/foo.
+##
+## So let's strip this leading $(srcdir)/ when it exists.  (As far as we
+## know, only SUN make and OSF1/Tru64 make add it.)  Searching whether
+## the file is to be found in the source or build directory will be
+## done later.
+##
+## In case we are _not_ using SUN or OSF1/Tru64 make, how can we be sure
+## we are not stripping a legitimate filename that starts with the
+## same pattern as $(srcdir)?
+## Well, it can't happen without the Makefile author distributing
+## something out of the distribution (which is bad).  As an example,
+## consider "EXTRA_DIST = ../bar".  This is an issue if $srcdir is
+## '..', however getting this value for srcdir is impossible:
+## "EXTRA_DIST = ../bar" implies we are in a subdirectory (so '../bar'
+## is within the package), hence '$srcdir' is something like
+## '../../subdir'.
+##
+## There is more to say about files which are above the current directory,
+## like '../bar' in the previous example.  The OSF1/Tru64 make
+## implementation can simplify filenames resulting from a VPATH lookup.
+## For instance if "VPATH = ../../subdir" and '../bar' is found in that
+## VPATH directory, then occurrences of '../bar' will be replaced by
+## '../../bar' (instead of '../../subdir/../bar').  This obviously defeats
+## any attempt to strip a leading $srcdir.  Presently we have no workaround
+## for this.  We avoid this issue by writing "EXTRA_DIST = $(srcdir)/../bar"
+## instead of "EXTRA_DIST = ../bar".  This prefixing is needed only for files
+## above the current directory.  Fortunately, apart from auxdir files which
+## can be located in .. or ../.., this situation hardly occurs in practice.
+##
+## Also rewrite $(top_srcdir) (which sometimes appears in DISTFILES, and can
+## be absolute) by $(top_builddir) (which is always relative).  $(srcdir) will
+## be prepended later.
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+## (The second 't' command clears the flag for the next round.)
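+##
+## For illustration (hypothetical values): with srcdir='../pkg', the first
+## substitution strips a leading '../pkg/' added by such a make, turning
+## '../pkg/foo.c' back into plain 'foo.c'; the second rewrites a leading
+## '$(top_srcdir)/' into '$(top_builddir)/'.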
+##
+## Make the subdirectories for the files.
+##
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+##
+##
+	for file in $$dist_files; do \
+##
+## Always look for the file in the build directory first.  That way
+## for something like yacc output we will correctly pick up the latest
+## version.  Also check for directories in the build directory first,
+## so one can ship generated directories.
+##
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+##
+## Use cp, not ln.  There are situations in which "ln" can fail.  For
+## instance a file to distribute could actually be a cross-filesystem
+## symlink -- this can easily happen if "gettextize" was run on the
+## distribution.
+##
+	  if test -d $$d/$$file; then \
+## Don't mention $$file in the destination argument, since this fails if
+## the destination directory already exists.  Also, use '-R' and not '-r'.
+## '-r' is almost always incorrect.
+##
+## If a directory exists both in '.' and $(srcdir), then we copy the
+## files from $(srcdir) first and then install those from '.'.  This
+## can help people who distribute directories made of source files
+## *and* generated files.  It is also important when the directory
+## exists only in $(srcdir), because some vendor Make (such as Tru64)
+## will magically create an empty directory in '.'.
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+## If the destination directory already exists, it may contain read-only
+## files, e.g., during "make distcheck".
+	    if test -d "$(distdir)/$$file"; then \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+	  else \
+## Test for file existence because sometimes a file gets included in
+## DISTFILES twice.  For example this happens when a single source
+## file is used in building more than one program.
+## See also test 'dist-repeated.sh'.
+	    test -f "$(distdir)/$$file" \
+	    || cp -p $$d/$$file "$(distdir)/$$file" \
+	    || exit 1; \
+	  fi; \
+	done
+##
+## Test for directory existence here because previous automake
+## invocation might have created some directories.  Note that we
+## explicitly set distdir for the subdir make; that lets us mix-n-match
+## many automake-using packages into one large package, and have "dist"
+## at the top level do the right thing.  If we're in the topmost
+## directory, then we use 'distdir' instead of 'top_distdir'; this lets
+## us work correctly with an enclosing package.
+if %?SUBDIRS%
+	@list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    $(am__make_dryrun) \
+	      || test -d "$(distdir)/$$subdir" \
+	      || $(MKDIR_P) "$(distdir)/$$subdir" \
+	      || exit 1; \
+	    dir1=$$subdir; dir2="$(distdir)/$$subdir"; \
+	    $(am__relativize); \
+	    new_distdir=$$reldir; \
+	    dir1=$$subdir; dir2="$(top_distdir)"; \
+	    $(am__relativize); \
+	    new_top_distdir=$$reldir; \
+	    echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \
+	    echo "     am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \
+	    ($(am__cd) $$subdir && \
+	      $(MAKE) $(AM_MAKEFLAGS) \
+	        top_distdir="$$new_top_distdir" \
+	        distdir="$$new_distdir" \
+## Disable am__remove_distdir so that sub-packages do not clear a
+## directory we have already cleared and might even have populated
+## (e.g. shared AUX dir in the sub-package).
+		am__remove_distdir=: \
+## Disable filename length check:
+		am__skip_length_check=: \
+## No need to fix modes more than once:
+		am__skip_mode_fix=: \
+	        distdir) \
+	      || exit 1; \
+	  fi; \
+	done
+endif %?SUBDIRS%
+##
+## We might have to perform some last second updates, such as updating
+## info files.
+## We must explicitly set distdir and top_distdir for these sub-makes.
+##
+if %?DIST-TARGETS%
+	$(MAKE) $(AM_MAKEFLAGS) \
+	  top_distdir="$(top_distdir)" distdir="$(distdir)" \
+	  %DIST-TARGETS%
+endif %?DIST-TARGETS%
+##
+## This complex find command will try to avoid changing the modes of
+## links into the source tree, in case they're hard-linked.
+##
+## Ignore return result from chmod, because it might give an error
+## if we chmod a symlink.
+##
+## Another nastiness: if the file is unreadable by us, we make it
+## readable regardless of the number of links to it.  This only
+## happens in perverse cases.
+##
+## We use $(install_sh) because that is a known-portable way to modify
+## the file in place in the source tree.
+##
+## If we are being invoked recursively, then there is no need to walk
+## the whole subtree again.  This is a complexity reduction for a deep
+## hierarchy of subpackages.
+##
+if %?TOPDIR_P%
+	-test -n "$(am__skip_mode_fix)" \
+	|| find "$(distdir)" -type d ! -perm -755 \
+		-exec chmod u+rwx,go+rx {} \; -o \
+	  ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -400 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
+	|| chmod -R a+r "$(distdir)"
+if %?FILENAME_FILTER%
+	@if test -z "$(am__skip_length_check)" && find "$(distdir)" -type f -print | \
+	  grep '^%FILENAME_FILTER%' 1>&2; then \
+	  echo 'error: the above filenames are too long' 1>&2; \
+	  exit 1; \
+	else :; fi
+endif %?FILENAME_FILTER%
+endif %?TOPDIR_P%
+
+
+
+## --------------------------------------- ##
+## Building various distribution flavors.  ##
+## --------------------------------------- ##
+
+## Note that we don't use GNU tar's '-z' option.  One reason (but not
+## the only reason) is that some versions of tar (e.g., OSF1)
+## interpret '-z' differently.
+##
+## The -o option of GNU tar used to exclude empty directories.  This
+## behavior was fixed in tar 1.12 (released on 1997-04-25).  But older
+## versions of tar are still used (for instance NetBSD 1.6.1 ships
+## with tar 1.11.2).  We do not do anything specific w.r.t. this
+## incompatibility since packages where empty directories need to be
+## present in the archive are really unusual.
+##
+## We order DIST_TARGETS by expected duration of the compressors,
+## slowest first, for better parallelism in "make dist".  Do not
+## reorder DIST_ARCHIVES, users may expect gzip to be first.
+
+if %?TOPDIR_P%
+
+?GZIP?DIST_ARCHIVES += $(distdir).tar.gz
+GZIP_ENV = --best
+.PHONY: dist-gzip
+dist-gzip: distdir
+	tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+	$(am__post_remove_distdir)
+
+?BZIP2?DIST_ARCHIVES += $(distdir).tar.bz2
+.PHONY: dist-bzip2
+dist-bzip2: distdir
+	tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2
+	$(am__post_remove_distdir)
+
+?LZIP?DIST_ARCHIVES += $(distdir).tar.lz
+.PHONY: dist-lzip
+dist-lzip: distdir
+	tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz
+	$(am__post_remove_distdir)
+
+?XZ?DIST_ARCHIVES += $(distdir).tar.xz
+.PHONY: dist-xz
+dist-xz: distdir
+	tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz
+	$(am__post_remove_distdir)
+
+?COMPRESS?DIST_ARCHIVES += $(distdir).tar.Z
+.PHONY: dist-tarZ
+dist-tarZ: distdir
+	@echo WARNING: "Support for distribution archives compressed with" \
+	               "legacy program 'compress' is deprecated." >&2
+	@echo WARNING: "It will be removed altogether in Automake 2.0" >&2
+	tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
+	$(am__post_remove_distdir)
+
+?SHAR?DIST_ARCHIVES += $(distdir).shar.gz
+.PHONY: dist-shar
+dist-shar: distdir
+	@echo WARNING: "Support for shar distribution archives is" \
+	               "deprecated." >&2
+	@echo WARNING: "It will be removed altogether in Automake 2.0" >&2
+	shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
+	$(am__post_remove_distdir)
+
+?ZIP?DIST_ARCHIVES += $(distdir).zip
+.PHONY: dist-zip
+dist-zip: distdir
+	-rm -f $(distdir).zip
+	zip -rq $(distdir).zip $(distdir)
+	$(am__post_remove_distdir)
+
+?LZIP?DIST_TARGETS += dist-lzip
+?XZ?DIST_TARGETS += dist-xz
+?SHAR?DIST_TARGETS += dist-shar
+?BZIP2?DIST_TARGETS += dist-bzip2
+?GZIP?DIST_TARGETS += dist-gzip
+?ZIP?DIST_TARGETS += dist-zip
+?COMPRESS?DIST_TARGETS += dist-tarZ
+
+endif %?TOPDIR_P%
+
+
+
+## ------------------------------------------------- ##
+## Building all the requested distribution flavors.  ##
+## ------------------------------------------------- ##
+
+## Currently we cannot use if/endif inside a rule.  The file_contents
+## parser needs work.
+
+if %?TOPDIR_P%
+
+.PHONY: dist dist-all
+if %?SUBDIRS%
+AM_RECURSIVE_TARGETS += dist dist-all
+endif %?SUBDIRS%
+
+dist dist-all:
+	$(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:'
+	$(am__post_remove_distdir)
+
+endif %?TOPDIR_P%
+
+
+## ------------------------- ##
+## Checking a distribution.  ##
+## ------------------------- ##
+
+
+if %?TOPDIR_P%
+if %?SUBDIRS%
+AM_RECURSIVE_TARGETS += distcheck
+endif %?SUBDIRS%
+
+# This target untars the dist file and tries a VPATH configuration.  Then
+# it guarantees that the distribution is self-contained by making another
+# tarfile.
+.PHONY: distcheck
+distcheck: dist
+	case '$(DIST_ARCHIVES)' in \
+	*.tar.gz*) \
+	  GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\
+	*.tar.bz2*) \
+	  bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\
+	*.tar.lz*) \
+	  lzip -dc $(distdir).tar.lz | $(am__untar) ;;\
+	*.tar.xz*) \
+	  xz -dc $(distdir).tar.xz | $(am__untar) ;;\
+	*.tar.Z*) \
+	  uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
+	*.shar.gz*) \
+	  GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\
+	*.zip*) \
+	  unzip $(distdir).zip ;;\
+	esac
+## Make the new source tree read-only.  Distributions ought to work in
+## this case.  However, make the top-level directory writable so we
+## can make our new subdirs.
+	chmod -R a-w $(distdir)
+	chmod u+w $(distdir)
+	mkdir $(distdir)/_build $(distdir)/_inst
+## Undo the write access.
+	chmod a-w $(distdir)
+## With GNU make, the following command will be executed even with "make -n",
+## due to the presence of '$(MAKE)'.  That is normally all well and good
+## (and '$(MAKE)' is necessary for things like parallel distcheck), but here
+## we don't want execution.  To avoid MAKEFLAGS parsing hassles, use a
+## witness file that a non-'-n' run would have just created.
+	test -d $(distdir)/_build || exit 0; \
+## Compute the absolute path of '_inst'.  Strip any leading DOS drive
+## to allow DESTDIR installations.  Otherwise "$(DESTDIR)$(prefix)" would
+## expand to "c:/temp/am-dc-5668/c:/src/package/package-1.0/_inst".
+	dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
+## We will attempt a DESTDIR install in $dc_destdir.  We don't
+## create this directory under $dc_install_base, because it would
+## create very long directory names.
+	  && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
+?DISTCHECK-HOOK?	  && $(MAKE) $(AM_MAKEFLAGS) distcheck-hook \
+## Parallel BSD make may not start a new shell for each command in a recipe,
+## so be sure to 'cd' back to the original directory after this.
+	  && am__cwd=`pwd` \
+	  && $(am__cd) $(distdir)/_build \
+	  && ../configure \
+?GETTEXT?	    --with-included-gettext \
+## Additional flags for configure.
+	    $(AM_DISTCHECK_CONFIGURE_FLAGS) \
+	    $(DISTCHECK_CONFIGURE_FLAGS) \
+## At the moment, the code doesn't actually support changes in these --srcdir
+## and --prefix values, so don't allow them to be overridden by the user or
+## the developer.  That used to be allowed, and caused issues in practice
+## (in corner-case usages); see automake bug#14991.
+	    --srcdir=.. --prefix="$$dc_install_base" \
+	  && $(MAKE) $(AM_MAKEFLAGS) \
+	  && $(MAKE) $(AM_MAKEFLAGS) dvi \
+	  && $(MAKE) $(AM_MAKEFLAGS) check \
+	  && $(MAKE) $(AM_MAKEFLAGS) install \
+	  && $(MAKE) $(AM_MAKEFLAGS) installcheck \
+	  && $(MAKE) $(AM_MAKEFLAGS) uninstall \
+	  && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
+	        distuninstallcheck \
+## Make sure the package has proper DESTDIR support (we could not test this
+## in the previous install/installcheck/uninstall test, because it's reasonable
+## for installcheck to fail in a DESTDIR install).
+## We make the '$dc_install_base' read-only because this is where files
+## with missing DESTDIR support are likely to be installed.
+	  && chmod -R a-w "$$dc_install_base" \
+## The logic here is quite convoluted because we must clean $dc_destdir
+## whatever happens (it won't be erased by the next run of distcheck like
+## $(distdir) is).
+	  && ({ \
+## Build the directory, so we can cd into it even if "make install"
+## didn't create it.  Use mkdir, not $(MKDIR_P) because we want to
+## fail if the directory already exists (PR/413).
+	       (cd ../.. && umask 077 && mkdir "$$dc_destdir") \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
+	            distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
+	      } || { rm -rf "$$dc_destdir"; exit 1; }) \
+	  && rm -rf "$$dc_destdir" \
+	  && $(MAKE) $(AM_MAKEFLAGS) dist \
+## Make sure to remove the dists we created in the test build directory.
+	  && rm -rf $(DIST_ARCHIVES) \
+	  && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \
+## Cater to parallel BSD make (see above).
+	  && cd "$$am__cwd" \
+	  || exit 1
+	$(am__post_remove_distdir)
+	@(echo "$(distdir) archives ready for distribution: "; \
+	  list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
+	  sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
+
+## Define distuninstallcheck_listfiles and distuninstallcheck separately
+## from distcheck, so that they can be overridden by the user.
+.PHONY: distuninstallcheck
+distuninstallcheck_listfiles = find . -type f -print
+## The 'dir' file (created by install-info) might still exist after
+## uninstall, so we must be prepared to account for it.  The following
+## check is not 100% strict, but is definitely good enough, and even
+## accounts for overridden $(infodir).
+am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \
+  | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$'
+distuninstallcheck:
+	@test -n '$(distuninstallcheck_dir)' || { \
+	  echo 'ERROR: trying to run $@ with an empty' \
+	       '$$(distuninstallcheck_dir)' >&2; \
+	  exit 1; \
+	}; \
+	$(am__cd) '$(distuninstallcheck_dir)' || { \
+	  echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \
+	  exit 1; \
+	}; \
+	test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \
+	   || { echo "ERROR: files left after uninstall:" ; \
+	        if test -n "$(DESTDIR)"; then \
+	          echo "  (check DESTDIR support)"; \
+	        fi ; \
+	        $(distuninstallcheck_listfiles) ; \
+	        exit 1; } >&2
+
+## Define distcleancheck_listfiles and distcleancheck separately
+## from distcheck, so that they can be overridden by the user.
+.PHONY: distcleancheck
+distcleancheck_listfiles = find . -type f -print
+distcleancheck: distclean
+	@if test '$(srcdir)' = . ; then \
+	  echo "ERROR: distcleancheck can only run from a VPATH build" ; \
+	  exit 1 ; \
+	fi
+	@test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
+	  || { echo "ERROR: files left in build directory after distclean:" ; \
+	       $(distcleancheck_listfiles) ; \
+	       exit 1; } >&2
+endif %?TOPDIR_P%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/footer.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/footer.am
new file mode 100644
index 0000000..dbc3ae4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/footer.am
@@ -0,0 +1,19 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/header-vars.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/header-vars.am
new file mode 100644
index 0000000..d25efa6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/header-vars.am
@@ -0,0 +1,141 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+VPATH = @srcdir@
+
+@SET_MAKE@
+
+## We used to define this.  However, we don't because vendor makes
+## (e.g., Solaris, Irix) won't correctly propagate variables that are
+## defined in Makefile.  This particular variable can't be correctly
+## defined by configure (at least, not the current configure), so we
+## simply avoid defining it to allow the user to use this feature with
+## a vendor make.
+## DESTDIR =
+
+## Shell code that determines whether we are running under GNU make.
+## This is somewhat of a hack, and might be improved, but is good
+## enough for now.
+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
+
+## Shell code that determines whether the current make instance is
+## running with a given one-letter option (e.g., -k, -n) that takes
+## no argument.
+am__make_running_with_option = \
+  case $${target_option-} in \
+      ?) ;; \
+      *) echo "am__make_running_with_option: internal error: invalid" \
+              "target option '$${target_option-}' specified" >&2; \
+         exit 1;; \
+  esac; \
+  has_opt=no; \
+  sane_makeflags=$$MAKEFLAGS; \
+  if $(am__is_gnu_make); then \
+## The format of $(MAKEFLAGS) is quite tricky with GNU make; the
+## variable $(MFLAGS) behaves much better in that regard.  So use it.
+    sane_makeflags=$$MFLAGS; \
+  else \
+## Non-GNU make: we must rely on $(MAKEFLAGS).  This is trickier and more
+## brittle, but is the best we can do.
+    case $$MAKEFLAGS in \
+## If we run "make TESTS='snooze nap'", FreeBSD make will export MAKEFLAGS
+## to " TESTS=foo\ nap", so that the simpler loop below (on word-split
+## $$MAKEFLAGS) would see a "make flag" equal to "nap", and would wrongly
+## interpret that as an indication that make is running in dry mode.
+## This has already happened in practice.  So we need this hack.
+      *\\[\ \	]*) \
+## Extra indirection with ${bs} required by FreeBSD 8.x make.
+## Not sure why (so sorry for the cargo-cult programming here).
+        bs=\\; \
+        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+          | sed "s/$$bs$$bs[$$bs $$bs	]*//g"`;; \
+    esac; \
+  fi; \
+  skip_next=no; \
+  strip_trailopt () \
+  { \
+    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+  }; \
+  for flg in $$sane_makeflags; do \
+    test $$skip_next = yes && { skip_next=no; continue; }; \
+    case $$flg in \
+      *=*|--*) continue;; \
+##
+## GNU make 4.0 has changed the format of $MFLAGS, and removed the space
+## between an option and its argument (e.g., from "-I dir" to "-Idir").
+## So we need to handle both formats, at least for options valid in GNU
+## make.  OTOH, BSD make formats $(MAKEFLAGS) by separating all options,
+## and separating any option from its argument, so things are easier
+## there.
+##
+## For GNU make and BSD make.
+        -*I) strip_trailopt 'I'; skip_next=yes;; \
+      -*I?*) strip_trailopt 'I';; \
+## For GNU make >= 4.0.
+        -*O) strip_trailopt 'O'; skip_next=yes;; \
+      -*O?*) strip_trailopt 'O';; \
+## For GNU make (possibly overkill, this one).
+        -*l) strip_trailopt 'l'; skip_next=yes;; \
+      -*l?*) strip_trailopt 'l';; \
+## For BSD make.
+      -[dEDm]) skip_next=yes;; \
+## For NetBSD make.
+      -[JT]) skip_next=yes;; \
+    esac; \
+    case $$flg in \
+      *$$target_option*) has_opt=yes; break;; \
+    esac; \
+  done; \
+  test $$has_opt = yes
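+
+## For example (hypothetical): with target_option=n and a plain "make -n"
+## run, the loop sees the single flag "-n", the "*n*" case matches, and the
+## final test succeeds; an option argument such as the directory passed to
+## "-I" is stripped or skipped first, so it cannot be mistaken for a flag.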
+
+## Shell code that determines whether make is running in "dry mode"
+## ("make -n") or not.  Useful in rules that invoke make recursively,
+## and are thus executed also with "make -n" -- either because they
+## are declared as dependencies to '.MAKE' (NetBSD make), or because
+## their recipes contain the "$(MAKE)" string (GNU and Solaris make).
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+
+## Shell code that determines whether make is running in "keep-going mode"
+## ("make -k") or not.  Useful in rules that must recursively descend into
+## subdirectories, and decide whether to stop at the first error or not.
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
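+
+## Typical use (see the DIST_SUBDIRS loop in distdir.am): a recipe writes
+## "$(am__make_dryrun) || $(MKDIR_P) ..." so that a rule whose commands
+## mention '$(MAKE)', and therefore still run under "make -n", performs no
+## real side effects in a dry run.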
+
+## Some derived variables that have been found to be useful.
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+
+## These are defined because otherwise make on NetBSD V1.1 will print
+## (eg): $(NORMAL_INSTALL) expands to empty string.
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+
+## dejagnu.am uses these variables.  Some users might rely on them too.
+?BUILD?build_triplet = @build@
+?HOST?host_triplet = @host@
+?TARGET?target_triplet = @target@
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/header.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/header.am
new file mode 100644
index 0000000..0d93293
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/header.am
@@ -0,0 +1,19 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+## Exactly the same as data.am.
+include data.am
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/inst-vars.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/inst-vars.am
new file mode 100644
index 0000000..6e5c37b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/inst-vars.am
@@ -0,0 +1,73 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2004-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?FIRST%
+
+## These variables help stripping any $(VPATH) that some
+## Make implementations prepend before VPATH-found files.
+## The issue is discussed at length in distdir.am.
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
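+## Example (hypothetical values): with srcdir='../src', a VPATH-found
+## p='../src/sub/foo.h' yields f='sub/foo.h', while a build-directory
+## p='sub/foo.h' falls through the default case unchanged.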
+## Strip all directories.
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+## Number of files to install concurrently.
+am__install_max = 40
+## Take a $list of nobase files, strip $(srcdir) from them.
+## Split apart in setup variable and an action that can be used
+## in backticks or in a pipe.
+am__nobase_strip_setup = \
+  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+## Take a $list of nobase files, collect them, indexed by their
+## srcdir-stripped dirnames.  For up to am__install_max files, output
+## a line containing the dirname and the files, space-separated.
+## The arbitrary limit helps avoid the quadratic scaling exhibited by
+## string concatenation in most shells, and should avoid line length
+## limitations, at the cost of spawning a few more install commands than
+## absolutely needed, which has only a negligible performance impact.
+am__nobase_list = $(am__nobase_strip_setup); \
+  for p in $$list; do echo "$$p $$p"; done | \
+  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+    if (++n[$$2] == $(am__install_max)) \
+      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+    END { for (dir in files) print dir, files[dir] }'
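+## Example (hypothetical values): for list='sub/a.h sub/b.h c.h' the
+## pipeline emits roughly "sub sub/a.h sub/b.h" and ". c.h", which callers
+## read with "while read dir files" to install per-directory batches of at
+## most $(am__install_max) files.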
+## Collect up to 40 files per line from stdin.
+am__base_list = \
+  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
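+## (The first sed script joins runs of up to 8 input lines and the second
+## joins up to 5 of those, giving at most 40 names per output line.)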
+
+## A shell code fragment to uninstall files from a given directory.
+## It expects the $dir and $files shell variables to be defined respectively
+## to the directory where the files to be removed are, and to the list of
+## such files.
+am__uninstall_files_from_dir = { \
+## Some rm implementations complain if 'rm -f' is used without arguments.
+  test -z "$$files" \
+## At least Solaris /bin/sh still lacks 'test -e', so we use the multiple
+## tests below instead.  We expect $dir to be either non-existent or a
+## directory, so the failure we'll experience if it is a regular file
+## is indeed desired and welcome (better to fail loudly than silently).
+    || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+    || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+         $(am__cd) "$$dir" && rm -f $$files; }; \
+  }
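+
+## Typical use (see, e.g., the uninstall rules in libs.am): the caller sets
+## dir to the installation directory and files to the list of basenames to
+## delete, then invokes "$(am__uninstall_files_from_dir)" within the same
+## shell command.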
+
+endif %?FIRST%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/install.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/install.am
new file mode 100644
index 0000000..47f7e67
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/install.am
@@ -0,0 +1,105 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## ----------------------------------------- ##
+## installdirs -- Creating the installdirs.  ##
+## ----------------------------------------- ##
+
+## The reason we loop over %am__installdirs% (instead of simply running
+## $(MKDIR_P) %am__installdirs%) is that directories variable such as
+## "$(DESTDIR)$(mydir)" can potentially expand to "" if $(mydir) is
+## conditionally defined.  BTW, those directories are quoted in order
+## to support installation paths with spaces.
+
+if %?SUBDIRS%
+.PHONY: installdirs installdirs-am
+RECURSIVE_TARGETS += installdirs-recursive
+installdirs: installdirs-recursive
+installdirs-am:%installdirs-local%
+?am__installdirs?	for dir in %am__installdirs%; do \
+?am__installdirs?	  test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+?am__installdirs?	done
+else !%?SUBDIRS%
+.PHONY: installdirs
+installdirs:%installdirs-local%
+?am__installdirs?	for dir in %am__installdirs%; do \
+?am__installdirs?	  test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+?am__installdirs?	done
+endif !%?SUBDIRS%
+
+
+## ----------------- ##
+## Install targets.  ##
+## ----------------- ##
+
+.PHONY: install install-exec install-data uninstall
+.PHONY: install-exec-am install-data-am uninstall-am
+
+if %?SUBDIRS%
+RECURSIVE_TARGETS += install-data-recursive install-exec-recursive \
+		     install-recursive uninstall-recursive
+install:%maybe_BUILT_SOURCES% install-recursive
+install-exec: install-exec-recursive
+install-data: install-data-recursive
+uninstall: uninstall-recursive
+else !%?SUBDIRS%
+install:%maybe_BUILT_SOURCES% install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+endif !%?SUBDIRS%
+
+if %?maybe_BUILT_SOURCES%
+.MAKE: install
+endif %?maybe_BUILT_SOURCES%
+
+.MAKE .PHONY: install-am
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+
+.PHONY: installcheck
+?SUBDIRS?installcheck: installcheck-recursive
+?!SUBDIRS?installcheck: installcheck-am
+?!SUBDIRS?.PHONY: installcheck-am
+?!SUBDIRS?installcheck-am:
+
+## If you ever modify this, keep in mind that INSTALL_PROGRAM is used
+## in subdirectories, so never set it to a value relative to the top
+## directory.
+.MAKE .PHONY: install-strip
+install-strip:
+## Beware that there are two variables used to install programs:
+##   INSTALL_PROGRAM is used for ordinary *_PROGRAMS
+##   install_sh_PROGRAM is used for nobase_*_PROGRAMS (because install-sh
+##                                                     creates directories)
+## It's OK to override both with INSTALL_STRIP_PROGRAM, because
+## INSTALL_STRIP_PROGRAM uses install-sh (see m4/strip.m4 for a rationale).
+##
+## Use double quotes for the *_PROGRAM settings because we might need to
+## interpolate some backquotes at runtime.
+##
+## The case for empty $(STRIP) is separate so that it is quoted correctly for
+## multiple words, but does not expand to an empty word if STRIP is empty.
+	if test -z '$(STRIP)'; then \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	      install; \
+	else \
+	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	    "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+	fi
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/java.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/java.am
new file mode 100644
index 0000000..e541214
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/java.am
@@ -0,0 +1,92 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1998-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+## ---------- ##
+## Building.  ##
+## ---------- ##
+
+if %?FIRST%
+JAVAC = javac
+CLASSPATH_ENV = CLASSPATH=$(JAVAROOT):$(srcdir)/$(JAVAROOT)$${CLASSPATH:+":$$CLASSPATH"}
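+## Note: the $${CLASSPATH:+":$$CLASSPATH"} expansion above appends the
+## user's class path only when CLASSPATH is already set and non-empty, so
+## an unset CLASSPATH does not leave a trailing ':' (an empty class-path
+## element would be taken as the current directory).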
+JAVAROOT = $(top_builddir)
+endif %?FIRST%
+
+class%NDIR%.stamp: $(am__java_sources)
+	@list1='$?'; list2=; if test -n "$$list1"; then \
+	  for p in $$list1; do \
+	    if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
+	    list2="$$list2 $$d$$p"; \
+	  done; \
+	  echo '$(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) '"$$list2"; \
+	  $(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) $$list2; \
+	else :; fi
+	echo timestamp > $@
+
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+?EXEC?.PHONY install-exec-am: install-%DIR%JAVA
+?!EXEC?.PHONY install-data-am: install-%DIR%JAVA
+install-%DIR%JAVA: class%NDIR%.stamp
+	@$(NORMAL_INSTALL)
+## A single .java file can be compiled into multiple .class files.  So
+## we just install all the .class files that got built into this
+## directory.  This is not optimal, but will have to do for now.
+	@test -n "$(%DIR%_JAVA)" && test -n "$(%NDIR%dir)" || exit 0; \
+	echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	$(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)"; \
+	set x *.class; shift; test "$$1" != "*.class" || exit 0; \
+	echo " $(INSTALL_DATA)" "$$@" "'$(DESTDIR)$(%NDIR%dir)'"; \
+	$(INSTALL_DATA) "$$@" "$(DESTDIR)$(%NDIR%dir)"
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+.PHONY uninstall-am: uninstall-%DIR%JAVA
+uninstall-%DIR%JAVA:
+	@$(NORMAL_UNINSTALL)
+	@test -n "$(%DIR%_JAVA)" && test -n "$(%NDIR%dir)" || exit 0; \
+	set x *.class; shift; test "$$1" != "*.class" || exit 0; \
+	echo " ( cd '$(DESTDIR)$(%NDIR%dir)' && rm -f" "$$@" ")"; \
+	cd "$(DESTDIR)$(%NDIR%dir)" && rm -f "$$@"
+endif %?INSTALL%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+.PHONY clean-am: clean-%NDIR%JAVA
+clean-%NDIR%JAVA:
+	-rm -f *.class class%NDIR%.stamp
+
+
+## -------------- ##
+## Distributing.  ##
+## -------------- ##
+
+if %?DIST%
+DIST_COMMON += %DISTVAR%
+endif %?DIST%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lang-compile.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lang-compile.am
new file mode 100644
index 0000000..9002720
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lang-compile.am
@@ -0,0 +1,39 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## This file is read once per *language*, not per extension.
+
+## ------------------------- ##
+## Preprocessed Fortran 77.  ##
+## ------------------------- ##
+
+## We also handle the case of preprocessing '.F' files into '.f' files.
+if %?PPF77%
+.F.f:
+	$(F77COMPILE) -F $<
+endif %?PPF77%
+
+
+
+## -------- ##
+## Ratfor.  ##
+## -------- ##
+
+## We also handle the case of preprocessing `.r' files into `.f' files.
+if %?RATFOR%
+.r.f:
+	$(RCOMPILE) -F $<
+endif %?RATFOR%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lex.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lex.am
new file mode 100644
index 0000000..7fcc9c1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lex.am
@@ -0,0 +1,30 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2001-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## See the comment about am__skipyacc in yacc.am.
+if %?MAINTAINER-MODE%
+if %?FIRST%
+@MAINTAINER_MODE_FALSE@am__skiplex = test -f $@ ||
+endif %?FIRST%
+endif %?MAINTAINER-MODE%
+
+?GENERIC?%EXT%%DERIVED-EXT%:
+?!GENERIC?%OBJ%: %SOURCE%
+?GENERIC?	%VERBOSE%$(am__skiplex) $(SHELL) $(YLWRAP) %SOURCE% $(LEX_OUTPUT_ROOT).c %OBJ% -- %COMPILE%
+?!GENERIC?	%VERBOSE% \
+?!GENERIC??DIST_SOURCE?	$(am__skiplex) \
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?!GENERIC?	$(SHELL) $(YLWRAP) `test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE% $(LEX_OUTPUT_ROOT).c %OBJ% -- %COMPILE%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/library.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/library.am
new file mode 100644
index 0000000..c01d221
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/library.am
@@ -0,0 +1,20 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+%LIBRARY%: $(%XLIBRARY%_OBJECTS) $(%XLIBRARY%_DEPENDENCIES) $(EXTRA_%XLIBRARY%_DEPENDENCIES) %DIRSTAMP%
+	%SILENT%-rm -f %LIBRARY%
+	%VERBOSE%$(%XLIBRARY%_AR) %LIBRARY% $(%XLIBRARY%_OBJECTS) $(%XLIBRARY%_LIBADD)
+	%SILENT%$(RANLIB) %LIBRARY%
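+## For a library "libfoo.a" (hypothetical name), automake substitutes
+## %LIBRARY%=libfoo.a and %XLIBRARY%=libfoo_a, so the rule reads
+## "libfoo.a: $(libfoo_a_OBJECTS) ...", removes any stale archive, runs
+## $(libfoo_a_AR) to create it, and finishes with $(RANLIB).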
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/libs.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/libs.am
new file mode 100644
index 0000000..f2eb17b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/libs.am
@@ -0,0 +1,106 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?INSTALL%
+include inst-vars.am
+endif %?INSTALL%
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+?EXEC?.PHONY install-exec-am: install-%DIR%LIBRARIES
+?!EXEC?.PHONY install-data-am: install-%DIR%LIBRARIES
+install-%DIR%LIBRARIES: $(%DIR%_LIBRARIES)
+	@$(NORMAL_INSTALL)
+if %?BASE%
+## Funny invocation because Makefile variable can be empty, leading to
+## a syntax error in sh.
+	@list='$(%DIR%_LIBRARIES)'; test -n "$(%NDIR%dir)" || list=; \
+	list2=; for p in $$list; do \
+	  if test -f $$p; then \
+	    list2="$$list2 $$p"; \
+	  else :; fi; \
+	done; \
+	test -z "$$list2" || { \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	  echo " $(INSTALL_DATA) $$list2 '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(INSTALL_DATA) $$list2 "$(DESTDIR)$(%NDIR%dir)" || exit $$?; }
+else !%?BASE%
+## Funny invocation because Makefile variable can be empty, leading to
+## a syntax error in sh.
+	@list='$(%DIR%_LIBRARIES)'; test -n "$(%NDIR%dir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+	$(am__nobase_list) | while read dir files; do \
+	  xfiles=; for p in $$files; do \
+	    if test -f "$$p"; then xfiles="$$xfiles $$p"; else :; fi; done; \
+	  test -z "$$xfiles" || { \
+	    test "x$$dir" = x. || { \
+	      echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+	      $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)/$$dir"; }; \
+	    echo " $(INSTALL_DATA) $$xfiles '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+	    $(INSTALL_DATA) $$xfiles "$(DESTDIR)$(%NDIR%dir)/$$dir" || exit $$?; }; \
+	done
+endif !%?BASE%
+## We do two loops here so that $(POST_INSTALL) can be empty.  If we
+## merge the two loops, we get a syntax error from sh.  Anyway, having
+## $(POST_INSTALL) in the middle of the loop essentially renders it
+## useless; sh never actually executes this command.  Read the GNU
+## Standards for a little enlightenment on this.
+	@$(POST_INSTALL)
+	@list='$(%DIR%_LIBRARIES)'; test -n "$(%NDIR%dir)" || list=; \
+	for p in $$list; do \
+	  if test -f $$p; then \
+?BASE?	    $(am__strip_dir) \
+?!BASE?	    f=$$p; \
+## Must ranlib after installing because mod time changes.
+## cd to target directory because AIX ranlib messes up with whitespace
+## in the argument.
+	    echo " ( cd '$(DESTDIR)$(%NDIR%dir)' && $(RANLIB) $$f )"; \
+	    ( cd "$(DESTDIR)$(%NDIR%dir)" && $(RANLIB) $$f ) || exit $$?; \
+	  else :; fi; \
+	done
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+.PHONY uninstall-am: uninstall-%DIR%LIBRARIES
+uninstall-%DIR%LIBRARIES:
+	@$(NORMAL_UNINSTALL)
+	@list='$(%DIR%_LIBRARIES)'; test -n "$(%NDIR%dir)" || list=; \
+?BASE?	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+?!BASE?	$(am__nobase_strip_setup); files=`$(am__nobase_strip)`; \
+	dir='$(DESTDIR)$(%NDIR%dir)'; $(am__uninstall_files_from_dir)
+endif %?INSTALL%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+.PHONY clean-am: clean-%DIR%LIBRARIES
+clean-%DIR%LIBRARIES:
+	-test -z "$(%DIR%_LIBRARIES)" || rm -f $(%DIR%_LIBRARIES)
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/libtool.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/libtool.am
new file mode 100644
index 0000000..0aa8c08
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/libtool.am
@@ -0,0 +1,28 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+.PHONY: mostlyclean-libtool clean-libtool distclean-libtool
+mostlyclean-am: mostlyclean-libtool
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-am: clean-libtool
+clean-libtool:
+?LTRMS?%LTRMS%
+
+?TOPDIR_P?distclean-am: distclean-libtool
+?TOPDIR_P?distclean-libtool:
+?TOPDIR_P?	-rm -f libtool config.lt
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lisp.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lisp.am
new file mode 100644
index 0000000..bfb023f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/lisp.am
@@ -0,0 +1,124 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?INSTALL%
+include inst-vars.am
+endif %?INSTALL%
+
+## ---------- ##
+## Building.  ##
+## ---------- ##
+
+.el.elc:
+## We add $(builddir) and $(srcdir) to load-path, so that any '.el' files
+## that $< depends upon can be found (including generated ones).
+## We prefer files from the build directory to those from the source
+## directory, in true VPATH spirit.
+## The destination file is normally determined by appending "c" to the
+## input (which would erroneously put it in $(srcdir) in VPATH builds),
+## so we override that, too.
+	if test "$(EMACS)" != "no"; then \
+	  am__dir=. am__subdir_includes=''; \
+	  case $@ in */*) \
+	    am__dir=`echo '$@' | sed 's,/[^/]*$$,,'`; \
+	    am__subdir_includes="-L $$am__dir -L $(srcdir)/$$am__dir"; \
+	  esac; \
+## Emacs byte-compilation won't create this automatically, sadly.
+	  test -d "$$am__dir" || $(MKDIR_P) "$$am__dir" || exit 1; \
+	  $(EMACS) --batch \
+	    $(AM_ELCFLAGS) $(ELCFLAGS) \
+	    $$am__subdir_includes -L $(builddir) -L $(srcdir) \
+	    --eval "(defun byte-compile-dest-file (f) \"$@\")" \
+	    --eval "(unless (byte-compile-file \"$<\") (kill-emacs 1))"; \
+	else :; fi
+
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+
+?BASE?%DIR%LISP_INSTALL = $(INSTALL_DATA)
+?!BASE?%DIR%LISP_INSTALL = $(install_sh_DATA)
+
+?EXEC?.PHONY install-exec-am: install-%DIR%LISP
+?!EXEC?.PHONY install-data-am: install-%DIR%LISP
+
+install-%DIR%LISP: $(%DIR%_LISP) $(ELCFILES)
+	@$(NORMAL_INSTALL)
+## Do not install anything if EMACS was not found.
+	@if test "$(EMACS)" != no && test -n "$(%NDIR%dir)"; then \
+?!BASE?	  $(am__vpath_adj_setup) \
+## Funny invocation because Makefile variable can be empty, leading to
+## a syntax error in sh.
+	  list='$(%DIR%_LISP)'; \
+	  if test -n "$$list"; then \
+	    echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	    $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	  fi; \
+	  for p in $$list; do \
+## A lisp file can be in the source directory or the build directory.
+	    if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+?BASE?	    $(am__strip_dir) \
+?!BASE?	    $(am__vpath_adj) \
+	    echo " $(%DIR%LISP_INSTALL) '$$d$$p' '$(DESTDIR)$(%NDIR%dir)/$$f'"; \
+	    $(%DIR%LISP_INSTALL) "$$d$$p" "$(DESTDIR)$(%NDIR%dir)/$$f" || exit $$?; \
+## Only install .elc file if it exists.
+	    if test -f $${p}c; then \
+	      echo " $(%DIR%LISP_INSTALL) '$${p}c' '$(DESTDIR)$(%NDIR%dir)/$${f}c'"; \
+	      $(%DIR%LISP_INSTALL) "$${p}c" "$(DESTDIR)$(%NDIR%dir)/$${f}c" || exit $$?; \
+	    else : ; fi; \
+	  done; \
+	else : ; fi
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+.PHONY uninstall-am: uninstall-%DIR%LISP
+uninstall-%DIR%LISP:
+	@$(NORMAL_UNINSTALL)
+## Do not uninstall anything if EMACS was not found.
+	@test "$(EMACS)" != no && test -n "$(%NDIR%dir)" || exit 0; \
+	list='$(%DIR%_LISP)'; \
+?BASE?	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+?!BASE?	$(am__nobase_strip_setup); files=`$(am__nobase_strip)`; \
+	files="$$files "`echo "$$files" | sed 's|$$|c|'`; \
+	dir='$(DESTDIR)$(%NDIR%dir)'; $(am__uninstall_files_from_dir)
+endif %?INSTALL%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+.PHONY clean-am: clean-lisp
+clean-lisp:
+	-rm -f $(ELCFILES)
+
+
+## -------------- ##
+## Distributing.  ##
+## -------------- ##
+
+if %?DIST%
+DIST_COMMON += %DISTVAR%
+endif %?DIST%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/ltlib.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/ltlib.am
new file mode 100644
index 0000000..9cf0704
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/ltlib.am
@@ -0,0 +1,123 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?INSTALL%
+include inst-vars.am
+endif %?INSTALL%
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+
+?EXEC?.PHONY install-exec-am: install-%DIR%LTLIBRARIES
+?!EXEC?.PHONY install-data-am: install-%DIR%LTLIBRARIES
+
+install-%DIR%LTLIBRARIES: $(%DIR%_LTLIBRARIES)
+	@$(NORMAL_INSTALL)
+if %?BASE%
+## Funny invocation because a Makefile variable can be empty, leading to
+## a syntax error in sh.
+	@list='$(%DIR%_LTLIBRARIES)'; test -n "$(%NDIR%dir)" || list=; \
+	list2=; for p in $$list; do \
+	  if test -f $$p; then \
+	    list2="$$list2 $$p"; \
+	  else :; fi; \
+	done; \
+	test -z "$$list2" || { \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+## Note that we explicitly set the libtool mode.  This avoids any lossage
+## if the program doesn't have a name that libtool expects.
+## Use INSTALL and not INSTALL_DATA because libtool knows the right
+## permissions to use.
+?LIBTOOL?	  echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(%NDIR%dir)'"; \
+?LIBTOOL?	  $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(%NDIR%dir)"; \
+?!LIBTOOL?	  echo " $(INSTALL) $(INSTALL_STRIP_FLAG) $$list '$(DESTDIR)$(%NDIR%dir)'"; \
+?!LIBTOOL?	  $(INSTALL) $(INSTALL_STRIP_FLAG) $$list "$(DESTDIR)$(%NDIR%dir)"; \
+	}
+else !%?BASE%
+	@list='$(%DIR%_LTLIBRARIES)'; test -n "$(%NDIR%dir)"  || list=; \
+	if test -n "$$list"; then \
+	    echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	    $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+	for p in $$list; do if test -f "$$p"; then echo "$$p $$p"; else :; fi; done | \
+	sed '/ .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+	$(AWK) 'BEGIN { cur = "." } \
+	  { if ($$2 == cur) { files = files " " $$1 } \
+	    else { print cur, files; files = $$1; cur = $$2 } } \
+	  END { print cur, files }' | \
+	while read dir files; do \
+	  test -z "$$files" || { \
+	    test "x$$dir" = x. || { \
+	      echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+	      $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)/$$dir"; }; \
+## Note that we explicitly set the libtool mode.  This avoids any lossage
+## if the program doesn't have a name that libtool expects.
+## Use INSTALL and not INSTALL_DATA because libtool knows the right
+## permissions to use.
+?LIBTOOL?	    echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$files '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+?LIBTOOL?	    $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$files "$(DESTDIR)$(%NDIR%dir)/$$dir" || exit $$?; \
+?!LIBTOOL?	    echo " $(INSTALL) $(INSTALL_STRIP_FLAG) $$files '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+?!LIBTOOL?	    $(INSTALL) $(INSTALL_STRIP_FLAG) $$files "$(DESTDIR)$(%NDIR%dir)/$$dir" || exit $$?; \
+	  }; \
+	done
+endif !%?BASE%
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+.PHONY uninstall-am: uninstall-%DIR%LTLIBRARIES
+uninstall-%DIR%LTLIBRARIES:
+	@$(NORMAL_UNINSTALL)
+	@list='$(%DIR%_LTLIBRARIES)'; test -n "$(%NDIR%dir)" || list=; \
+	for p in $$list; do \
+?BASE?	  $(am__strip_dir) \
+?!BASE?	  f=$$p; \
+?LIBTOOL?	  echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(%NDIR%dir)/$$f'"; \
+?LIBTOOL?	  $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(%NDIR%dir)/$$f"; \
+?!LIBTOOL?	  echo " rm -f '$(DESTDIR)$(%NDIR%dir)/$$f'"; \
+?!LIBTOOL?	  rm -f "$(DESTDIR)$(%NDIR%dir)/$$f"; \
+	done
+endif %?INSTALL%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+.PHONY clean-am: clean-%DIR%LTLIBRARIES
+clean-%DIR%LTLIBRARIES:
+	-test -z "$(%DIR%_LTLIBRARIES)" || rm -f $(%DIR%_LTLIBRARIES)
+## 'so_locations' files are created by some linkers (IRIX, OSF) when
+## building a shared object.  Libtool places these files in the
+## directory where the shared object is created.
+	@list='$(%DIR%_LTLIBRARIES)'; \
+	locs=`for p in $$list; do echo $$p; done | \
+	      sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \
+	      sort -u`; \
+	test -z "$$locs" || { \
+	  echo rm -f $${locs}; \
+	  rm -f $${locs}; \
+	}
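
The clean rule's sed pipeline maps each libtool library path onto the 'so_locations' file of its directory before removing it; run on its own with made-up library names it behaves like this:

    # Map each library to the so_locations file that a linker may have
    # dropped next to it, deduplicating with sort -u.
    printf 'libfoo.la\nsub/libbar.la\n' | \
      sed 's|^[^/]*$|.|; s|/[^/]*$||; s|$|/so_locations|' | sort -u
    # prints:
    #   ./so_locations
    #   sub/so_locations
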
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/ltlibrary.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/ltlibrary.am
new file mode 100644
index 0000000..642d032
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/ltlibrary.am
@@ -0,0 +1,18 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+%LTLIBRARY%: $(%XLTLIBRARY%_OBJECTS) $(%XLTLIBRARY%_DEPENDENCIES) $(EXTRA_%XLTLIBRARY%_DEPENDENCIES) %DIRSTAMP%
+	%VERBOSE%$(%XLINK%) %RPATH% $(%XLTLIBRARY%_OBJECTS) $(%XLTLIBRARY%_LIBADD) $(LIBS)
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/mans-vars.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/mans-vars.am
new file mode 100644
index 0000000..7e866e7
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/mans-vars.am
@@ -0,0 +1,20 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+NROFF = nroff
+## We don't really need this, but we use it in case we ever want to
+## support noinst_MANS.
+MANS = %MANS%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/mans.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/mans.am
new file mode 100644
index 0000000..4abaa0d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/mans.am
@@ -0,0 +1,154 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1998-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+include inst-vars.am
+man%SECTION%dir = $(mandir)/man%SECTION%
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+## MANS primary are always installed in mandir, hence install-data
+## is hard coded.
+
+.PHONY: install-man
+?INSTALL-MAN?install-data-am: install-man
+?INSTALL-MAN?am__installdirs += "$(DESTDIR)$(man%SECTION%dir)"
+.PHONY install-man: install-man%SECTION%
+install-man%SECTION%: %DEPS%
+	@$(NORMAL_INSTALL)
+if %?NOTRANS_MANS%
+## Handle MANS with notrans_ prefix
+	@list1='%NOTRANS_SECT_LIST%'; \
+?!HAVE_NOTRANS?	list2=''; \
+?HAVE_NOTRANS?	list2='%NOTRANS_LIST%'; \
+	test -n "$(man%SECTION%dir)" \
+	  && test -n "`echo $$list1$$list2`" \
+	  || exit 0; \
+	echo " $(MKDIR_P) '$(DESTDIR)$(man%SECTION%dir)'"; \
+	$(MKDIR_P) "$(DESTDIR)$(man%SECTION%dir)" || exit 1; \
+	{ for i in $$list1; do echo "$$i"; done;  \
+## Extract all items from notrans_man_MANS that should go in this section.
+## This must be done dynamically to support conditionals.
+	if test -n "$$list2"; then \
+	  for i in $$list2; do echo "$$i"; done \
+## For 'man1', accept files like 'foo.1c' but not 'sub.1/foo.2' or 'foo-2.1.4'.
+	    | sed -n '/\.%SECTION%[a-z]*$$/p'; \
+	fi; \
+## Extract basename of manpage, change the extension if needed.
+	} | while read p; do \
+## Find the file.
+	  if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; echo "$$p"; \
+	done | \
+## Extract the basename of the man page and change the extension if needed.
+	sed 'n;s,.*/,,;p;s,\.[^%SECTION%][0-9a-z]*$$,.%SECTION%,' | \
+	sed 'N;N;s,\n, ,g' | { \
+## We now have a list "sourcefile basename installed-name".
+	list=; while read file base inst; do \
+	  if test "$$base" = "$$inst"; then list="$$list $$file"; else \
+	    echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man%SECTION%dir)/$$inst'"; \
+	    $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man%SECTION%dir)/$$inst" || exit $$?; \
+	  fi; \
+	done; \
+	for i in $$list; do echo "$$i"; done | $(am__base_list) | \
+	while read files; do \
+	  test -z "$$files" || { \
+	    echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man%SECTION%dir)'"; \
+	    $(INSTALL_DATA) $$files "$(DESTDIR)$(man%SECTION%dir)" || exit $$?; }; \
+	done; }
+endif %?NOTRANS_MANS%
+if %?TRANS_MANS%
+## Handle MANS without notrans_ prefix
+	@list1='%TRANS_SECT_LIST%'; \
+?!HAVE_TRANS?	list2=''; \
+?HAVE_TRANS?	list2='%TRANS_LIST%'; \
+	test -n "$(man%SECTION%dir)" \
+	  && test -n "`echo $$list1$$list2`" \
+	  || exit 0; \
+	echo " $(MKDIR_P) '$(DESTDIR)$(man%SECTION%dir)'"; \
+	$(MKDIR_P) "$(DESTDIR)$(man%SECTION%dir)" || exit 1; \
+	{ for i in $$list1; do echo "$$i"; done;  \
+## Extract all items from notrans_man_MANS that should go in this section.
+## This must be done dynamically to support conditionals.
+	if test -n "$$list2"; then \
+	  for i in $$list2; do echo "$$i"; done \
+## For 'man1', accept files like 'foo.1c' but not 'sub.1/foo.2' or 'foo-2.1.4'.
+	    | sed -n '/\.%SECTION%[a-z]*$$/p'; \
+	fi; \
+## Extract basename of manpage, change the extension if needed.
+	} | while read p; do \
+## Find the file.
+	  if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; echo "$$p"; \
+	done | \
+## Extract the basename of the man page and change the extension if needed.
+	sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^%SECTION%][0-9a-z]*$$,%SECTION%,;x' \
+	      -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \
+	sed 'N;N;s,\n, ,g' | { \
+## We now have a list "sourcefile basename installed-name".
+	list=; while read file base inst; do \
+	  if test "$$base" = "$$inst"; then list="$$list $$file"; else \
+	    echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man%SECTION%dir)/$$inst'"; \
+	    $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man%SECTION%dir)/$$inst" || exit $$?; \
+	  fi; \
+	done; \
+	for i in $$list; do echo "$$i"; done | $(am__base_list) | \
+	while read files; do \
+	  test -z "$$files" || { \
+	    echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man%SECTION%dir)'"; \
+	    $(INSTALL_DATA) $$files "$(DESTDIR)$(man%SECTION%dir)" || exit $$?; }; \
+	done; }
+endif %?TRANS_MANS%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+.PHONY: uninstall-man
+?INSTALL-MAN?uninstall-am: uninstall-man
+.PHONY uninstall-man: uninstall-man%SECTION%
+uninstall-man%SECTION%:
+	@$(NORMAL_UNINSTALL)
+if %?NOTRANS_MANS%
+## Handle MANS with notrans_ prefix
+	@list='%NOTRANS_SECT_LIST%'; test -n "$(man%SECTION%dir)" || exit 0; \
+	files=`{ for i in $$list; do echo "$$i"; done; \
+## Extract all items from notrans_man_MANS that should go in this section.
+## This must be done dynamically to support conditionals.
+?HAVE_NOTRANS?	l2='%NOTRANS_LIST%'; for i in $$l2; do echo "$$i"; done | \
+## For 'man1', accept files like 'foo.1c' but not 'sub.1/foo.2' or 'foo-2.1.4'.
+?HAVE_NOTRANS?	  sed -n '/\.%SECTION%[a-z]*$$/p'; \
+## Extract basename of manpage, change the extension if needed.
+	} | sed 's,.*/,,;s,\.[^%SECTION%][0-9a-z]*$$,.%SECTION%,'`; \
+	dir='$(DESTDIR)$(man%SECTION%dir)'; $(am__uninstall_files_from_dir)
+endif %?NOTRANS_MANS%
+if %?TRANS_MANS%
+## Handle MANS without notrans_ prefix
+	@list='%TRANS_SECT_LIST%'; test -n "$(man%SECTION%dir)" || exit 0; \
+	files=`{ for i in $$list; do echo "$$i"; done; \
+## Extract all items from man_MANS that should go in this section.
+## This must be done dynamically to support conditionals.
+?HAVE_TRANS?	l2='%TRANS_LIST%'; for i in $$l2; do echo "$$i"; done | \
+## Accept for 'man1' files like 'foo.1c' but not 'sub.1/foo.2' or 'foo-2.1.4'.
+?HAVE_TRANS?	  sed -n '/\.%SECTION%[a-z]*$$/p'; \
+## Extract basename of manpage, run it through the program rename
+## transform, and change the extension if needed.
+	} | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^%SECTION%][0-9a-z]*$$,%SECTION%,;x' \
+	      -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \
+	dir='$(DESTDIR)$(man%SECTION%dir)'; $(am__uninstall_files_from_dir)
+endif %?TRANS_MANS%
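
The uninstall rules reduce each man page to its installed basename with a single sed; hard-coding the section to 1 and using invented page names, the notrans_ rewrite looks like this:

    # Strip any directory, then force a '.1' extension unless the page
    # already carries one (so 'foo.1c' stays, 'doc/foo.man' becomes 'foo.1').
    printf 'doc/foo.man\nfoo.1c\n' | \
      sed 's,.*/,,;s,\.[^1][0-9a-z]*$,.1,'
    # prints:
    #   foo.1
    #   foo.1c
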
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/program.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/program.am
new file mode 100644
index 0000000..131c4a9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/program.am
@@ -0,0 +1,24 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+%PROGRAM%%EXEEXT%: $(%XPROGRAM%_OBJECTS) $(%XPROGRAM%_DEPENDENCIES) $(EXTRA_%XPROGRAM%_DEPENDENCIES) %DIRSTAMP%
+## Remove program before linking.  Otherwise the link will fail if the
+## program is running somewhere.  FIXME: this could be a loss if
+## you're using an incremental linker.  Maybe we should think twice?
+## Or maybe not... sadly, incremental linkers are rarer than losing
+## systems.
+	@rm -f %PROGRAM%%EXEEXT%
+	%VERBOSE%$(%XLINK%) $(%XPROGRAM%_OBJECTS) $(%XPROGRAM%_LDADD) $(LIBS)
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/progs.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/progs.am
new file mode 100644
index 0000000..9879279
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/progs.am
@@ -0,0 +1,152 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+?EXEC?.PHONY install-exec-am: install-%DIR%PROGRAMS
+?!EXEC?.PHONY install-data-am: install-%DIR%PROGRAMS
+install-%DIR%PROGRAMS: $(%DIR%_PROGRAMS)
+	@$(NORMAL_INSTALL)
+## Funny invocation because a Makefile variable can be empty, leading to
+## a syntax error in sh.
+	@list='$(%DIR%_PROGRAMS)'; test -n "$(%NDIR%dir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+	for p in $$list; do echo "$$p $$p"; done | \
+## On Cygwin with libtool, 'test' won't see 'foo.exe' but 'foo' instead.
+## So we check for both.
+	sed 's/$(EXEEXT)$$//' | \
+	while read p p1; do if test -f $$p \
+?LIBTOOL?	 || test -f $$p1 \
+	  ; then echo "$$p"; echo "$$p"; else :; fi; \
+	done | \
+## We now have a list of sourcefile pairs, separated by newline.
+## Turn that into "sourcefile source_base target_dir xformed_target_base",
+## with newlines being turned into spaces in a second step.
+	sed -e 'p;s,.*/,,;n;h' \
+?BASE?	    -e 's|.*|.|' \
+?!BASE?	    -e 's|[^/]*$$||; s|^$$|.|' \
+	    -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \
+	sed 'N;N;N;s,\n, ,g' | \
+## The following awk script turns that into one line containing directories
+## and then lines of 'type target_name_or_directory sources ...', with type
+## 'd' designating directories, and 'f' files.
+	$(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \
+	  { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
+	    if ($$2 == $$4) files[d] = files[d] " " $$1; \
+	    else { print "f", $$3 "/" $$4, $$1; } } \
+	  END { for (d in files) print "f", d, files[d] }' | \
+	while read type dir files; do \
+?!BASE?	  case $$type in \
+?!BASE?	  d) echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+?!BASE?	     $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)/$$dir" || exit $$?;; \
+?!BASE?	  f) \
+	    if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
+	    test -z "$$files" || { \
+?!LIBTOOL?	      echo " $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(%NDIR%dir)$$dir'"; \
+?!LIBTOOL?	      $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(%NDIR%dir)$$dir" || exit $$?; \
+## Note that we explicitly set the libtool mode.  This avoids any
+## lossage if the install program doesn't have a name that libtool
+## expects.
+?LIBTOOL?	    echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(%NDIR%dir)$$dir'"; \
+?LIBTOOL?	    $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(%NDIR%dir)$$dir" || exit $$?; \
+	    } \
+?!BASE?	  ;; esac \
+	; done
+
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+.PHONY uninstall-am: uninstall-%DIR%PROGRAMS
+uninstall-%DIR%PROGRAMS:
+	@$(NORMAL_UNINSTALL)
+	@list='$(%DIR%_PROGRAMS)'; test -n "$(%NDIR%dir)" || list=; \
+	files=`for p in $$list; do echo "$$p"; done | \
+## Remove any leading directory before applying $(transform),
+## but keep the directory part in the hold buffer, in order to
+## reapply it again afterwards in the nobase case.  Append $(EXEEXT).
+	  sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \
+	      -e 's/$$/$(EXEEXT)/' \
+?!BASE?	      -e 'x;s,[^/]*$$,,;G;s,\n,,' \
+	`; \
+	test -n "$$list" || exit 0; \
+	echo " ( cd '$(DESTDIR)$(%NDIR%dir)' && rm -f" $$files ")"; \
+	cd "$(DESTDIR)$(%NDIR%dir)" && rm -f $$files
+endif %?INSTALL%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+.PHONY clean-am: clean-%DIR%PROGRAMS
+clean-%DIR%PROGRAMS:
+?!LIBTOOL?	-test -z "$(%DIR%_PROGRAMS)" || rm -f $(%DIR%_PROGRAMS)
+## Under Cygwin, we build 'program$(EXEEXT)'.  However, if this
+## program uses a Libtool library, Libtool will move it in
+## '_libs/program$(EXEEXT)' and create a 'program' wrapper (without
+## '$(EXEEXT)').  Therefore, if Libtool is used, we must try to erase
+## both 'program$(EXEEXT)' and 'program'.
+## Cleaning the '_libs/' or '.libs/' directory is done from clean-libtool.
+## FIXME: In the future (i.e., when it works) it would be nice to delegate
+## this task to "libtool --mode=clean".
+?LIBTOOL?	@list='$(%DIR%_PROGRAMS)'; test -n "$$list" || exit 0; \
+?LIBTOOL?	echo " rm -f" $$list; \
+?LIBTOOL?	rm -f $$list || exit $$?; \
+?LIBTOOL?	test -n "$(EXEEXT)" || exit 0; \
+?LIBTOOL?	list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
+?LIBTOOL?	echo " rm -f" $$list; \
+?LIBTOOL?	rm -f $$list
+
+
+## ---------- ##
+## Checking.  ##
+## ---------- ##
+
+if %?CK-OPTS%
+.PHONY installcheck-am: installcheck-%DIR%PROGRAMS
+installcheck-%DIR%PROGRAMS: $(%DIR%_PROGRAMS)
+	bad=0; pid=$$$$; list="$(%DIR%_PROGRAMS)"; for p in $$list; do \
+	  case ' $(AM_INSTALLCHECK_STD_OPTIONS_EXEMPT) ' in \
+## Match $(srcdir)/$$p in addition to $$p because Sun make might rewrite
+## filenames in AM_INSTALLCHECK_STD_OPTIONS_EXEMPT during VPATH builds.
+	   *" $$p "* | *" $(srcdir)/$$p "*) continue;; \
+	  esac; \
+## Strip the directory and $(EXEEXT) before applying $(transform).
+	  f=`echo "$$p" | \
+	     sed 's,^.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/'`; \
+## Insert the directory back if nobase_ is used.
+?!BASE?	  f=`echo "$$p" | sed 's|[^/]*$$||'`"$$f"; \
+	  for opt in --help --version; do \
+	    if "$(DESTDIR)$(%NDIR%dir)/$$f" $$opt >c$${pid}_.out \
+	         2>c$${pid}_.err </dev/null \
+		 && test -n "`cat c$${pid}_.out`" \
+		 && test -z "`cat c$${pid}_.err`"; then :; \
+	    else echo "$$f does not support $$opt" 1>&2; bad=1; fi; \
+	  done; \
+	done; rm -f c$${pid}_.???; exit $$bad
+endif %?CK-OPTS%
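
The installcheck rule above is the GNU "standard options" check; stripped of the automake plumbing (DESTDIR, $(transform), the exemption list) it is roughly the sketch below, where the program name is only a stand-in:

    # Every installed program must answer --help and --version with output
    # on stdout and nothing on stderr.
    prog=ls            # stand-in for an installed program
    bad=0
    for opt in --help --version; do
      if "$prog" $opt >opt.out 2>opt.err </dev/null \
           && test -n "`cat opt.out`" && test -z "`cat opt.err`"; then
        :
      else
        echo "$prog does not support $opt" 1>&2; bad=1
      fi
    done
    rm -f opt.out opt.err
    exit $bad
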
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/python.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/python.am
new file mode 100644
index 0000000..06545b1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/python.am
@@ -0,0 +1,148 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1999-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?INSTALL%
+include inst-vars.am
+endif %?INSTALL%
+
+?FIRST?am__py_compile = PYTHON=$(PYTHON) $(SHELL) $(py_compile)
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+?EXEC?.PHONY install-exec-am: install-%DIR%PYTHON
+?!EXEC?.PHONY install-data-am: install-%DIR%PYTHON
+install-%DIR%PYTHON: $(%DIR%_PYTHON)
+	@$(NORMAL_INSTALL)
+if %?BASE%
+	@list='$(%DIR%_PYTHON)'; dlist=; list2=; test -n "$(%NDIR%dir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+## A file can be in the source directory or the build directory.
+	  if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \
+	  if test -f $$b$$p; then \
+## Compute basename of source file.  Unless this is a nobase_ target, we
+## want to install 'python/foo.py' as '$(DESTDIR)$(%NDIR%dir)/foo.py',
+## not '$(DESTDIR)$(%NDIR%dir)/python/foo.py'.
+	    $(am__strip_dir) \
+	    dlist="$$dlist $$f"; \
+	    list2="$$list2 $$b$$p"; \
+	  else :; fi; \
+	done; \
+	for file in $$list2; do echo $$file; done | $(am__base_list) | \
+	while read files; do \
+## Don't perform translation, since script name is important.
+	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(INSTALL_DATA) $$files "$(DESTDIR)$(%NDIR%dir)" || exit $$?; \
+	done || exit $$?; \
+## Byte-compile must be done at install time, since file times are
+## encoded in the actual files.
+	if test -n "$$dlist"; then \
+	  $(am__py_compile) --destdir "$(DESTDIR)" \
+	                    --basedir "$(%NDIR%dir)" $$dlist; \
+	else :; fi
+else !%?BASE%
+	@list='$(%DIR%_PYTHON)'; test -n "$(%NDIR%dir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+	$(am__nobase_list) | { while read dir files; do \
+	  xfiles=; for p in $$files; do \
+## A file can be in the source directory or the build directory.
+	    if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \
+	    if test -f "$$b$$p"; then xfiles="$$xfiles $$b$$p"; dlist="$$dlist $$p"; \
+	    else :; fi; done; \
+	  test -z "$$xfiles" || { \
+	    test "x$$dir" = x. || { \
+	      echo "$(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+	      $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)/$$dir"; }; \
+## Don't perform translation, since script name is important.
+	    echo " $(INSTALL_DATA) $$xfiles '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+	    $(INSTALL_DATA) $$xfiles "$(DESTDIR)$(%NDIR%dir)/$$dir" || exit $$?; }; \
+	done; \
+## Byte-compile must be done at install time, since file times are
+## encoded in the actual files.
+	if test -n "$$dlist"; then \
+	  $(am__py_compile) --destdir "$(DESTDIR)" \
+	                    --basedir "$(%NDIR%dir)" $$dlist; \
+	else :; fi; }
+endif !%?BASE%
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+
+?FIRST?am__pep3147_tweak = \
+?FIRST?  sed -e 's|\.py$$||' -e 's|[^/]*$$|__pycache__/&.*.py|'
+
+.PHONY uninstall-am: uninstall-%DIR%PYTHON
+uninstall-%DIR%PYTHON:
+	@$(NORMAL_UNINSTALL)
+	@list='$(%DIR%_PYTHON)'; test -n "$(%NDIR%dir)" || list=; \
+?BASE?	py_files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+?!BASE?	$(am__nobase_strip_setup); py_files=`$(am__nobase_strip)`; \
+	test -n "$$py_files" || exit 0; \
+	dir='$(DESTDIR)$(%NDIR%dir)'; \
+## Also remove the .pyc and .pyo byte compiled versions.
+## This is somewhat tricky, because for newer pythons we have to take
+## PEP-3147 into account.
+	pyc_files=`echo "$$py_files" | sed 's|$$|c|'`; \
+	pyo_files=`echo "$$py_files" | sed 's|$$|o|'`; \
+	py_files_pep3147=`echo "$$py_files" | $(am__pep3147_tweak)`; \
+	echo "$$py_files_pep3147";\
+	pyc_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|c|'`; \
+	pyo_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|o|'`; \
+	st=0; \
+	for files in \
+	  "$$py_files" \
+	  "$$pyc_files" \
+	  "$$pyo_files" \
+## Installation of '.py' files is not influenced by PEP-3147, so it
+## is correct *not* to have $py_files_pep3147 here.
+	  "$$pyc_files_pep3147" \
+	  "$$pyo_files_pep3147" \
+	; do \
+	  $(am__uninstall_files_from_dir) || st=$$?; \
+	done; \
+	exit $$st
+endif %?INSTALL%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+## There is nothing to clean here since files are
+## byte-compiled when (and where) they are installed.
+
+## -------------- ##
+## Distributing.  ##
+## -------------- ##
+
+if %?DIST%
+DIST_COMMON += %DISTVAR%
+endif %?DIST%
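
The am__pep3147_tweak substitution turns each installed .py name into the __pycache__ glob that matches its byte-compiled forms (the 'c' and 'o' suffixes are appended afterwards, as above); standalone, with an invented module path, it does the following:

    # PEP 3147: newer Pythons keep compiled files under __pycache__, so each
    # foo.py is rewritten into a glob covering those files.
    printf 'pkg/mod.py\n' | \
      sed -e 's|\.py$||' -e 's|[^/]*$|__pycache__/&.*.py|'
    # prints: pkg/__pycache__/mod.*.py
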
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/remake-hdr.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/remake-hdr.am
new file mode 100644
index 0000000..979427d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/remake-hdr.am
@@ -0,0 +1,71 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+%CONFIG_H%: %STAMP%
+## Recover from removal of CONFIG_HEADER.
+	@test -f $@ || rm -f %STAMP%
+	@test -f $@ || $(MAKE) $(AM_MAKEFLAGS) %STAMP%
+
+
+%STAMP%: %CONFIG_H_DEPS% $(top_builddir)/config.status
+	@rm -f %STAMP%
+	cd $(top_builddir) && $(SHELL) ./config.status %CONFIG_H_PATH%
+
+
+## Only the first file of AC_CONFIG_HEADERS is assumed to be generated
+## by autoheader.
+if %?FIRST-HDR%
+%CONFIG_HIN%: %MAINTAINER-MODE% $(am__configure_deps) %FILES%
+## Cater to parallel BSD make.
+	($(am__cd) $(top_srcdir) && $(AUTOHEADER))
+## Whenever $(AUTOHEADER) has run, we must make sure that
+## ./config.status will rebuild config.h.  The dependency from %STAMP%
+## on %CONFIG_H_DEPS% (which contains config.hin) is not enough to
+## express this.
+##
+## There are some tricky cases where this rule will build a
+## config.hin which has the same timestamp as %STAMP%, in which case
+## ./config.status will not be rerun (meaning that users will use an
+## out-of-date config.h without knowing it).  One situation where this
+## can occur is the following:
+## 1. the user updates some configure dependency (let's say foo.m4)
+##    and runs 'make';
+## 2. the rebuild rules detect that a foo.m4 has changed,
+##    run aclocal, autoconf, automake, and then run ./config.status.
+##    (Note that autoheader hasn't been called yet, so ./config.status
+##    outputs a config.h from an obsolete config.hin);
+## 3. once Makefile has been regenerated, make continues, and
+##    discovers that config.h is a dependency of the 'all' rule.
+##    Because config.h depends on stamp-h1, stamp-h1 depends on
+##    config.hin, and config.hin depends on aclocal.m4, make runs
+##    autoheader to rebuild config.hin.
+## Now make ought to call ./config.status once again to rebuild
+## config.h from the new config.hin, but if you have a sufficiently
+## fast box, steps 2 and 3 will occur within the same second: the
+## config.h/stamp-h1 generated from the outdated config.hin will have
+## the same mtime as the new config.hin.  Hence make will think that
+## config.h is up to date.
+##
+## A solution is to erase %STAMP% here so that the %STAMP% rule
+## is always triggered after this one.
+	rm -f %STAMP%
+## Autoheader has the bad habit of not changing the timestamp if
+## config.hin is unchanged, which breaks Make targets.  Since what
+## must not change gratuitously is config.h, which is already handled
+## by config.status, there is no reason to make things complex for
+## config.hin.
+	touch $@
+endif %?FIRST-HDR%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/scripts.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/scripts.am
new file mode 100644
index 0000000..0f5730a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/scripts.am
@@ -0,0 +1,127 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+if %?INSTALL%
+include inst-vars.am
+endif %?INSTALL%
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+if %?INSTALL%
+## 'if' doesn't work properly for Automake variables yet.
+am__installdirs += "$(DESTDIR)$(%NDIR%dir)"
+?EXEC?.PHONY install-exec-am: install-%DIR%SCRIPTS
+?!EXEC?.PHONY install-data-am: install-%DIR%SCRIPTS
+install-%DIR%SCRIPTS: $(%DIR%_SCRIPTS)
+	@$(NORMAL_INSTALL)
+## Funny invocation because a Makefile variable can be empty, leading to
+## a syntax error in sh.
+	@list='$(%DIR%_SCRIPTS)'; test -n "$(%NDIR%dir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)" || exit 1; \
+	fi; \
+?!BASE?	$(am__nobase_strip_setup); \
+	for p in $$list; do \
+## A file can be in the source directory or the build directory.
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+## A script may or may not exist.
+	  if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \
+	done | \
+## We now have a list of "sourcefile newline (nobase-)target" pairs.
+## Turn that into "sourcefile source_base target_dir xformed_target_base",
+## with newlines being turned into spaces in a second step.
+	sed -e 'p;s,.*/,,;n' \
+?BASE?	    -e 'h;s|.*|.|' \
+?!BASE?	    -e "s|$$srcdirstrip/||" -e 'h;s|[^/]*$$||; s|^$$|.|' \
+	    -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \
+	$(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \
+	  { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
+	    if ($$2 == $$4) { files[d] = files[d] " " $$1; \
+	      if (++n[d] == $(am__install_max)) { \
+		print "f", d, files[d]; n[d] = 0; files[d] = "" } } \
+	    else { print "f", d "/" $$4, $$1 } } \
+	  END { for (d in files) print "f", d, files[d] }' | \
+	while read type dir files; do \
+?!BASE?	  case $$type in \
+?!BASE?	  d) echo " $(MKDIR_P) '$(DESTDIR)$(%NDIR%dir)/$$dir'"; \
+?!BASE?	     $(MKDIR_P) "$(DESTDIR)$(%NDIR%dir)/$$dir" || exit $$?;; \
+?!BASE?	  f) \
+	     if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
+	     test -z "$$files" || { \
+	       echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(%NDIR%dir)$$dir'"; \
+	       $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(%NDIR%dir)$$dir" || exit $$?; \
+	     } \
+?!BASE?	  ;; esac \
+	; done
+endif %?INSTALL%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?INSTALL%
+.PHONY uninstall-am: uninstall-%DIR%SCRIPTS
+uninstall-%DIR%SCRIPTS:
+	@$(NORMAL_UNINSTALL)
+	@list='$(%DIR%_SCRIPTS)'; test -n "$(%NDIR%dir)" || exit 0; \
+?BASE?	files=`for p in $$list; do echo "$$p"; done | \
+?BASE?	       sed -e 's,.*/,,;$(transform)'`; \
+?!BASE?	$(am__nobase_strip_setup); \
+?!BASE?	files=`$(am__nobase_strip) \
+?!BASE?	       -e 'h;s,.*/,,;$(transform);x;s|[^/]*$$||;G;s,\n,,'`; \
+	dir='$(DESTDIR)$(%NDIR%dir)'; $(am__uninstall_files_from_dir)
+endif %?INSTALL%
+
+
+## -------------- ##
+## Distributing.  ##
+## -------------- ##
+
+if %?DIST%
+DIST_COMMON += %DISTVAR%
+endif %?DIST%
+
+
+## ---------- ##
+## Checking.  ##
+## ---------- ##
+
+if %?CK-OPTS%
+.PHONY installcheck-am: installcheck-%DIR%SCRIPTS
+installcheck-%DIR%SCRIPTS: $(%DIR%_SCRIPTS)
+	bad=0; pid=$$$$; list="$(%DIR%_SCRIPTS)"; for p in $$list; do \
+	  case ' $(AM_INSTALLCHECK_STD_OPTIONS_EXEMPT) ' in \
+## Match $(srcdir)/$$p in addition to $$p because Sun make might rewrite
+## filenames in AM_INSTALLCHECK_STD_OPTIONS_EXEMPT during VPATH builds.
+	   *" $$p "* | *" $(srcdir)/$$p "*) continue;; \
+	  esac; \
+## Strip any leading directory before applying $(transform).
+	  f=`echo "$$p" | sed 's,^.*/,,;$(transform)'`; \
+## Insert the directory back if nobase_ is used.
+?!BASE?	  f=`echo "$$p" | sed 's|[^/]*$$||'`"$$f"; \
+	  for opt in --help --version; do \
+	    if "$(DESTDIR)$(%NDIR%dir)/$$f" $$opt >c$${pid}_.out \
+	         2>c$${pid}_.err </dev/null \
+		 && test -n "`cat c$${pid}_.out`" \
+		 && test -z "`cat c$${pid}_.err`"; then :; \
+	    else echo "$$f does not support $$opt" 1>&2; bad=1; fi; \
+	  done; \
+	done; rm -f c$${pid}_.???; exit $$bad
+endif %?CK-OPTS%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/subdirs.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/subdirs.am
new file mode 100644
index 0000000..999aa78
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/subdirs.am
@@ -0,0 +1,76 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+RECURSIVE_TARGETS += all-recursive check-recursive installcheck-recursive
+RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive	\
+  distclean-recursive maintainer-clean-recursive
+
+am__recursive_targets = \
+  $(RECURSIVE_TARGETS) \
+  $(RECURSIVE_CLEAN_TARGETS) \
+  $(am__extra_recursive_targets)
+
+## All documented targets which invoke 'make' recursively, or depend
+## on targets that do so.  GNUmakefile from gnulib depends on this.
+AM_RECURSIVE_TARGETS += $(am__recursive_targets:-recursive=)
+
+.PHONY .MAKE: $(am__recursive_targets)
+
+# This directory's subdirectories are mostly independent; you can cd
+# into them and run 'make' without going through this Makefile.
+# To change the values of 'make' variables: instead of editing Makefiles,
+# (1) if the variable is set in 'config.status', edit 'config.status'
+#     (which will cause the Makefiles to be regenerated when you run 'make');
+# (2) otherwise, pass the desired values on the 'make' command line.
+
+$(am__recursive_targets):
+## Using $failcom allows "-k" to keep its natural meaning when running a
+## recursive rule.
+	@fail=; \
+	if $(am__make_keepgoing); then \
+	  failcom='fail=yes'; \
+	else \
+	  failcom='exit 1'; \
+	fi; \
+	dot_seen=no; \
+	target=`echo $@ | sed s/-recursive//`; \
+## For distclean and maintainer-clean we make sure to use the full
+## list of subdirectories.  We do this so that 'configure; make
+## distclean' really is a no-op, even if SUBDIRS is conditional.
+	case "$@" in \
+	  distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
+	  *) list='$(SUBDIRS)' ;; \
+	esac; \
+	for subdir in $$list; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    dot_seen=yes; \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done; \
+	if test "$$dot_seen" = "no"; then \
+	  $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
+	fi; test -z "$$fail"
+
+mostlyclean: mostlyclean-recursive
+clean: clean-recursive
+distclean: distclean-recursive
+maintainer-clean: maintainer-clean-recursive
+
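
Every rule generated from am__recursive_targets runs the same loop: strip the '-recursive' suffix and re-invoke make on the plain target in each SUBDIRS entry (the real rule additionally handles '.' and 'make -k'); a bare sketch with invented directory names:

    # Derive the real target from the -recursive alias and recurse.
    target=`echo all-recursive | sed s/-recursive//`
    for subdir in lib src tests; do
      echo "Making $target in $subdir"
      (cd "$subdir" && make "$target") || exit 1
    done
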
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/tags.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/tags.am
new file mode 100644
index 0000000..57eb116
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/tags.am
@@ -0,0 +1,182 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Read a list of newline-separated strings from the standard input,
+# and print each of them once, without duplicates.  Input order is
+# *not* preserved.
+am__uniquify_input = $(AWK) '\
+  BEGIN { nonempty = 0; } \
+  { items[$$0] = 1; nonempty = 1; } \
+  END { if (nonempty) { for (i in items) print i; }; } \
+'
+
+# Make sure the list of sources is unique.  This is necessary because,
+# e.g., the same source file might be shared among _SOURCES variables
+# for different programs/libraries.
+am__define_uniq_tagged_files = \
+  list='$(am__tagged_files)'; \
+  unique=`for i in $$list; do \
+## Handle VPATH correctly.
+    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+  done | $(am__uniquify_input)`
+
+## ---- ##
+## ID.  ##
+## ---- ##
+
+ID: $(am__tagged_files)
+	$(am__define_uniq_tagged_files); mkid -fID $$unique
+
+
+## ------ ##
+## TAGS.  ##
+## ------ ##
+
+ETAGS = etags
+.PHONY: TAGS tags
+if %?SUBDIRS%
+AM_RECURSIVE_TARGETS += TAGS
+RECURSIVE_TARGETS += tags-recursive
+tags: tags-recursive
+else !%?SUBDIRS%
+tags: tags-am
+endif !%?SUBDIRS%
+TAGS: tags
+
+tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+## We use the positional parameters to build the subdir list with
+## absolute names, without the need to worry about white space in `pwd`.
+	set x; \
+	here=`pwd`; \
+## Exuberant Ctags wants --etags-include,
+## GNU Etags             --include
+## Furthermore Exuberant Ctags 5.5.4 fails to create TAGS files
+## when no files are supplied, despite any --etags-include option.
+## A workaround is to pass '.' as a file.  This is what $empty_fix is for.
+?SUBDIRS?	if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
+?SUBDIRS?	  include_option=--etags-include; \
+?SUBDIRS?	  empty_fix=.; \
+?SUBDIRS?	else \
+?SUBDIRS?	  include_option=--include; \
+?SUBDIRS?	  empty_fix=; \
+?SUBDIRS?	fi; \
+?SUBDIRS?	list='$(SUBDIRS)'; for subdir in $$list; do \
+## Do nothing if we're trying to look in '.'.
+?SUBDIRS?	  if test "$$subdir" = .; then :; else \
+?SUBDIRS?	    test ! -f $$subdir/TAGS || \
+## Note that the = is mandatory for --etags-include.
+?SUBDIRS?	      set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \
+?SUBDIRS?	  fi; \
+?SUBDIRS?	done; \
+	$(am__define_uniq_tagged_files); \
+## Remove the 'x' we added first:
+	shift; \
+## Make sure we have something to run etags on.
+	if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  if test $$# -gt 0; then \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      "$$@" $$unique; \
+	  else \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      $$unique; \
+	  fi; \
+	fi
+
+
+## --------------- ##
+## vi-style tags.  ##
+## --------------- ##
+
+CTAGS = ctags
+.PHONY: CTAGS ctags
+if %?SUBDIRS%
+AM_RECURSIVE_TARGETS += CTAGS
+RECURSIVE_TARGETS += ctags-recursive
+ctags: ctags-recursive
+else !%?SUBDIRS%
+ctags: ctags-am
+endif !%?SUBDIRS%
+
+CTAGS: ctags
+ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+	$(am__define_uniq_tagged_files); \
+## Make sure we have something to run ctags on.
+	test -z "$(CTAGS_ARGS)$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$unique
+
+
+## --------------- ##
+## "Global tags".  ##
+## --------------- ##
+
+.PHONY: GTAGS
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && $(am__cd) $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) "$$here"
+
+
+## ------- ##
+## cscope  ##
+## ------- ##
+
+if %?TOPDIR_P%
+CSCOPE = cscope
+.PHONY: cscope clean-cscope
+AM_RECURSIVE_TARGETS += cscope
+cscope: cscope.files
+	test ! -s cscope.files \
+	  || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS)
+clean-cscope:
+	-rm -f cscope.files
+cscope.files: clean-cscope cscopelist
+endif %?TOPDIR_P%
+
+if %?SUBDIRS%
+RECURSIVE_TARGETS += cscopelist-recursive
+cscopelist: cscopelist-recursive
+else !%?SUBDIRS%
+cscopelist: cscopelist-am
+endif !%?SUBDIRS%
+
+cscopelist-am: $(am__tagged_files)
+	list='$(am__tagged_files)'; \
+	case "$(srcdir)" in \
+	  [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
+	  *) sdir=$(subdir)/$(srcdir) ;; \
+	esac; \
+	for i in $$list; do \
+	  if test -f "$$i"; then \
+	    echo "$(subdir)/$$i"; \
+	  else \
+	    echo "$$sdir/$$i"; \
+	  fi; \
+	done >> $(top_builddir)/cscope.files
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+.PHONY distclean-am: distclean-tags
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+if %?TOPDIR_P%
+	-rm -f cscope.out cscope.in.out cscope.po.out cscope.files
+endif %?TOPDIR_P%
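
am__uniquify_input is a plain awk de-duplicator for the newline-separated source list (needed because the same file may appear in several _SOURCES variables); fed a repeated file it prints each name once, in arbitrary order:

    # Deduplicate a newline-separated list; input order is not preserved.
    printf 'a.c\nb.c\na.c\n' | awk '
      BEGIN { nonempty = 0; }
      { items[$0] = 1; nonempty = 1; }
      END { if (nonempty) { for (i in items) print i; }; }'
    # prints a.c and b.c, each exactly once
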
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texi-vers.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texi-vers.am
new file mode 100644
index 0000000..bddf382
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texi-vers.am
@@ -0,0 +1,58 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+DIST_COMMON += %VTEXI% %STAMPVTI%
+
+## Don't give this rule a command (even '@:').
+## %STAMPVTI% is always newer than %VTEXI%, so this rule is always
+## triggered.  If you equip this rule with a command, GNU make will
+## assume %VTEXI% has been rebuilt in the current directory and
+## discard any %VTEXI% file found in a VPATH search.
+%VTEXI%: %MAINTAINER-MODE% %STAMPVTI%
+
+## Depend on configure so that version number updates cause a rebuild.
+## (Not configure.ac, because not all setups define the version number
+## in this file.)
+%STAMPVTI%: %TEXI% $(top_srcdir)/configure
+## It is wrong to have %STAMPVTI% dependent on %DIRSTAMP%, because
+## %STAMPVTI% is distributed and %DIRSTAMP% isn't: a distributed file
+## should never be dependent upon a non-distributed built file.
+## Therefore we ensure that %DIRSTAMP% exists in the rule.
+?DIRSTAMP?	@test -f %DIRSTAMP% || $(MAKE) $(AM_MAKEFLAGS) %DIRSTAMP%
+	@(dir=.; test -f ./%TEXI% || dir=$(srcdir); \
+	set `$(SHELL) %MDDIR%mdate-sh $$dir/%TEXI%`; \
+	echo "@set UPDATED $$1 $$2 $$3"; \
+	echo "@set UPDATED-MONTH $$2 $$3"; \
+	echo "@set EDITION $(VERSION)"; \
+	echo "@set VERSION $(VERSION)") > %VTI%.tmp
+## Use cp and rm here because some older "mv"s can't move across
+## filesystems.  Furthermore, GNU "mv" in the AmigaDOS environment
+## can't handle this.
+	@cmp -s %VTI%.tmp %VTEXI% \
+	  || (echo "Updating %VTEXI%"; \
+	      cp %VTI%.tmp %VTEXI%)
+	-@rm -f %VTI%.tmp
+	@cp %VTEXI% $@
+
+mostlyclean-am: mostlyclean-%VTI%
+mostlyclean-%VTI%:
+	-rm -f %VTI%.tmp
+
+maintainer-clean-am: maintainer-clean-%VTI%
+maintainer-clean-%VTI%:
+%MAINTAINER-MODE%	-rm -f %STAMPVTI% %VTEXI%
+
+.PHONY: mostlyclean-%VTI% maintainer-clean-%VTI%
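
The %STAMPVTI% recipe boils down to writing four @set lines and copying them over version.texi only when they changed; a sketch with invented date and version values (mdate-sh would normally supply the "day month year" triple):

    # Regenerate version.texi only when its contents actually change.
    set -- 12 March 2016
    { echo "@set UPDATED $1 $2 $3"
      echo "@set UPDATED-MONTH $2 $3"
      echo "@set EDITION 1.0"
      echo "@set VERSION 1.0"
    } > vti.tmp
    cmp -s vti.tmp version.texi \
      || { echo "Updating version.texi"; cp vti.tmp version.texi; }
    rm -f vti.tmp
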
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texibuild.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texibuild.am
new file mode 100644
index 0000000..a59d443
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texibuild.am
@@ -0,0 +1,128 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+?GENERIC_INFO?%SOURCE_SUFFIX%%DEST_SUFFIX%:
+?!GENERIC_INFO?%DEST_INFO_PREFIX%%DEST_SUFFIX%: %SOURCE_INFO% %DEPS%
+## It is wrong to have 'info' files dependent on %DIRSTAMP%, because
+## 'info' files are distributed and %DIRSTAMP% isn't: a distributed file
+## should never be dependent upon a non-distributed built file.
+## Therefore we ensure that %DIRSTAMP% exists in the rule.
+?!INSRC??DIRSTAMP?	@test -f %DIRSTAMP% || $(MAKE) $(AM_MAKEFLAGS) %DIRSTAMP%
+## Back up the info files before running makeinfo. This is the cheapest
+## way to ensure that
+## 1) If the texinfo file shrinks (or if you start using --no-split),
+##    you'll not be left with some dead info files lying around -- dead
+##    files which would end up in the distribution.
+## 2) If the texinfo file has some minor mistakes which cause makeinfo
+##    to fail, the info files are not removed.  (They are needed by the
+##    developer while he writes documentation.)
+## *.iNN files are used on DJGPP.  See the comments in install-info-am.
+	%AM_V_MAKEINFO%restore=: && backupdir="$(am__leading_dot)am$$$$" && \
+?INSRC?	am__cwd=`pwd` && $(am__cd) $(srcdir) && \
+	rm -rf $$backupdir && mkdir $$backupdir && \
+## If makeinfo is not installed we must not backup the files so
+## 'missing' can do its job and touch $@ if it exists.
+	if ($(MAKEINFO) --version) >/dev/null 2>&1; then \
+	  for f in $@ $@-[0-9] $@-[0-9][0-9] $(@:.info=).i[0-9] $(@:.info=).i[0-9][0-9]; do \
+	    if test -f $$f; then mv $$f $$backupdir; restore=mv; else :; fi; \
+	  done; \
+	else :; fi && \
+?INSRC?	cd "$$am__cwd"; \
+	if $(MAKEINFO) $(AM_MAKEINFOFLAGS) $(MAKEINFOFLAGS) %MAKEINFOFLAGS% \
+?!INSRC?	 -o $@ `test -f '%SOURCE_INFO%' || echo '$(srcdir)/'`%SOURCE_INFO%; \
+?INSRC??!GENERIC_INFO?	 -o $@ $(srcdir)/%SOURCE_INFO%; \
+?INSRC??GENERIC_INFO?	 -o $@ $<; \
+	then \
+	  rc=0; \
+?INSRC?	  $(am__cd) $(srcdir); \
+	else \
+	  rc=$$?; \
+## Beware that backup info files might come from a subdirectory.
+?INSRC?	  $(am__cd) $(srcdir) && \
+	  $$restore $$backupdir/* `echo "./$@" | sed 's|[^/]*$$||'`; \
+	fi; \
+	rm -rf $$backupdir; exit $$rc
+
+INFO_DEPS += %DEST_INFO_PREFIX%%DEST_SUFFIX%
+
+?GENERIC?%SOURCE_SUFFIX%.dvi:
+?!GENERIC?%DEST_PREFIX%.dvi: %SOURCE% %DEPS% %DIRSTAMP%
+	%AM_V_TEXI2DVI%TEXINPUTS="$(am__TEXINFO_TEX_DIR)$(PATH_SEPARATOR)$$TEXINPUTS" \
+## Must set MAKEINFO like this so that version.texi will be found even
+## if it is in srcdir (-I $(srcdir) is set in %MAKEINFOFLAGS%).
+	MAKEINFO='$(MAKEINFO) $(AM_MAKEINFOFLAGS) $(MAKEINFOFLAGS) %MAKEINFOFLAGS%' \
+## texi2dvi doesn't silence everything with -q, so we redirect to /dev/null instead.
+## We still want -q (%TEXIQUIET%) because it turns on batch mode.
+## Use '--build-dir' so that TeX and Texinfo auxiliary files and build
+## by-products are left in there, instead of cluttering the current
+## directory (see automake bug#11146).  Use a different build-dir for
+## each file (and distinct from that of the corresponding PDF file) to
+## avoid hitting a Texinfo bug that could cause low-probability racy
+## failure when doing parallel builds; see:
+## http://lists.gnu.org/archive/html/automake-patches/2012-06/msg00073.html
+	$(TEXI2DVI) %TEXIQUIET% --build-dir=$(@:.dvi=.t2d) -o $@ %TEXIDEVNULL% \
+?GENERIC?	%SOURCE%
+?!GENERIC?	`test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%
+
+?GENERIC?%SOURCE_SUFFIX%.pdf:
+?!GENERIC?%DEST_PREFIX%.pdf: %SOURCE% %DEPS% %DIRSTAMP%
+	%AM_V_TEXI2PDF%TEXINPUTS="$(am__TEXINFO_TEX_DIR)$(PATH_SEPARATOR)$$TEXINPUTS" \
+## Must set MAKEINFO like this so that version.texi will be found even
+## if it is in srcdir (-I $(srcdir) is set in %MAKEINFOFLAGS%).
+	MAKEINFO='$(MAKEINFO) $(AM_MAKEINFOFLAGS) $(MAKEINFOFLAGS) %MAKEINFOFLAGS%' \
+## texi2pdf doesn't silence everything with -q, so we redirect to /dev/null instead.
+## We still want -q (%TEXIQUIET%) because it turns on batch mode.
+## Use '--build-dir' so that TeX and Texinfo auxiliary files and build
+## by-products are left in there, instead of cluttering the current
+## directory (see automake bug#11146).  Use a different build-dir for
+## each file (and distinct from that of the corresponding DVI file) to
+## avoid hitting a Texinfo bug that could cause low-probability racy
+## failure when doing parallel builds; see:
+## http://lists.gnu.org/archive/html/automake-patches/2012-06/msg00073.html
+	$(TEXI2PDF) %TEXIQUIET% --build-dir=$(@:.pdf=.t2p) -o $@ %TEXIDEVNULL% \
+?GENERIC?	%SOURCE%
+?!GENERIC?	`test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%
+
+?GENERIC?%SOURCE_SUFFIX%.html:
+?!GENERIC?%DEST_PREFIX%.html: %SOURCE% %DEPS% %DIRSTAMP%
+## When --split (the default) is used, makeinfo will output a
+## directory.  However it will not update the time stamp of a
+## previously existing directory, and when the names of the nodes
+## in the manual change, it may leave unused pages.  Our fix
+## is to build under a temporary name, and replace the target on
+## success.
+	%AM_V_MAKEINFO%rm -rf $(@:.html=.htp)
+	%SILENT%if $(MAKEINFOHTML) $(AM_MAKEINFOHTMLFLAGS) $(MAKEINFOFLAGS) %MAKEINFOFLAGS% \
+?GENERIC?	 -o $(@:.html=.htp) %SOURCE%; \
+?!GENERIC?	 -o $(@:.html=.htp) `test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE%; \
+	then \
+	  rm -rf $@ && mv $(@:.html=.htp) $@; \
+	else \
+	  rm -rf $(@:.html=.htp); exit 1; \
+	fi
+
+## If we are using the generic rules, we need separate dependencies.
+## (Don't wonder about %DIRSTAMP% here, this is used only by non-generic
+## rules.)
+if %?GENERIC_INFO%
+%DEST_INFO_PREFIX%%DEST_SUFFIX%: %SOURCE_REAL% %DEPS%
+endif %?GENERIC_INFO%
+if %?GENERIC%
+%DEST_PREFIX%.dvi: %SOURCE_REAL% %DEPS%
+%DEST_PREFIX%.pdf: %SOURCE_REAL% %DEPS%
+%DEST_PREFIX%.html: %SOURCE_REAL% %DEPS%
+endif %?GENERIC%
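
The .html rule's build-under-a-temporary-name trick, reduced to plain shell with an invented manual name: a failed makeinfo run never removes the previously built output, and stale split pages cannot linger in the final directory.

    # Build into a scratch directory and swap it in only on success.
    rm -rf manual.htp
    if makeinfo --html -o manual.htp manual.texi; then
      rm -rf manual.html && mv manual.htp manual.html
    else
      rm -rf manual.htp; exit 1
    fi
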
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texinfos.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texinfos.am
new file mode 100644
index 0000000..c2b080a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/texinfos.am
@@ -0,0 +1,411 @@
+## automake - create Makefile.in from Makefile.am
+
+## Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## ----------- ##
+## Variables.  ##
+## ----------- ##
+
+if %?LOCAL-TEXIS%
+TEXI2DVI = texi2dvi
+TEXI2PDF = $(TEXI2DVI) --pdf --batch
+MAKEINFOHTML = $(MAKEINFO) --html
+AM_MAKEINFOHTMLFLAGS = $(AM_MAKEINFOFLAGS)
+endif %?LOCAL-TEXIS%
+
+
+## ---------- ##
+## Building.  ##
+## ---------- ##
+
+## The way to make PostScript, for those who want it.
+if %?LOCAL-TEXIS%
+DVIPS = dvips
+.dvi.ps:
+	%AM_V_DVIPS%TEXINPUTS="$(am__TEXINFO_TEX_DIR)$(PATH_SEPARATOR)$$TEXINPUTS" \
+	$(DVIPS) %TEXIQUIET% -o $@ $<
+endif %?LOCAL-TEXIS%
+
+.PHONY: dvi dvi-am html html-am info info-am pdf pdf-am ps ps-am
+if %?SUBDIRS%
+RECURSIVE_TARGETS += dvi-recursive html-recursive info-recursive
+RECURSIVE_TARGETS += pdf-recursive ps-recursive
+dvi: dvi-recursive
+html: html-recursive
+info: info-recursive
+pdf: pdf-recursive
+ps: ps-recursive
+else !%?SUBDIRS%
+dvi: dvi-am
+html: html-am
+info: info-am
+pdf: pdf-am
+ps: ps-am
+endif !%?SUBDIRS%
+
+if %?LOCAL-TEXIS%
+dvi-am: $(DVIS)
+html-am: $(HTMLS)
+info-am: $(INFO_DEPS)
+pdf-am: $(PDFS)
+ps-am: $(PSS)
+else ! %?LOCAL-TEXIS%
+dvi-am:
+html-am:
+info-am:
+pdf-am:
+ps-am:
+endif ! %?LOCAL-TEXIS%
+
+
+## ------------ ##
+## Installing.  ##
+## ------------ ##
+
+## Some code should be run only if install-info actually exists, and
+## if the user doesn't request it not to be run (through the
+## 'AM_UPDATE_INFO_DIR' environment variable).  See automake bug#9773
+## and Debian Bug#543992.
+am__can_run_installinfo = \
+  case $$AM_UPDATE_INFO_DIR in \
+    n|no|NO) false;; \
+    *) (install-info --version) >/dev/null 2>&1;; \
+  esac
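+## Editorial note, not part of upstream automake 1.14: a minimal sketch of
+## how the guard above behaves.  Setting AM_UPDATE_INFO_DIR=no in the
+## environment makes it fail, skipping the install-info step, e.g.
+##
+##   AM_UPDATE_INFO_DIR=no make install   # dir file left untouched
+##   make install                         # run install-info if available
+##
+## With any other value, the guard succeeds only when "install-info
+## --version" can actually be executed.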
+
+## Look in both . and srcdir because the info pages might have been
+## rebuilt in the build directory.  Can't cd to srcdir; that might
+## break a possible install-sh reference.
+##
+## Funny name due to --cygnus influence; we want to reserve
+## 'install-info' for the user.
+##
+## TEXINFOS primary are always installed in infodir, hence install-data
+## is hard coded.
+if %?INSTALL-INFO%
+if %?LOCAL-TEXIS%
+am__installdirs += "$(DESTDIR)$(infodir)"
+install-data-am: install-info-am
+endif %?LOCAL-TEXIS%
+endif %?INSTALL-INFO%
+.PHONY: \
+  install-dvi  install-dvi-am \
+  install-html install-html-am \
+  install-info install-info-am \
+  install-pdf  install-pdf-am \
+  install-ps   install-ps-am
+
+if %?SUBDIRS%
+RECURSIVE_TARGETS += \
+  install-dvi-recursive \
+  install-html-recursive \
+  install-info-recursive \
+  install-pdf-recursive \
+  install-ps-recursive
+install-dvi: install-dvi-recursive
+install-html: install-html-recursive
+install-info: install-info-recursive
+install-pdf: install-pdf-recursive
+install-ps: install-ps-recursive
+else !%?SUBDIRS%
+install-dvi: install-dvi-am
+install-html: install-html-am
+install-info: install-info-am
+install-pdf: install-pdf-am
+install-ps: install-ps-am
+endif !%?SUBDIRS%
+
+if %?LOCAL-TEXIS%
+
+include inst-vars.am
+
+install-dvi-am: $(DVIS)
+	@$(NORMAL_INSTALL)
+	@list='$(DVIS)'; test -n "$(dvidir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(dvidir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(dvidir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(dvidir)'"; \
+	  $(INSTALL_DATA) $$files "$(DESTDIR)$(dvidir)" || exit $$?; \
+	done
+
+install-html-am: $(HTMLS)
+	@$(NORMAL_INSTALL)
+	@list='$(HTMLS)'; list2=; test -n "$(htmldir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(htmldir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(htmldir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+	  if test -f "$$p" || test -d "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  $(am__strip_dir) \
+## This indirection is required to work around a bug of the Solaris 10
+## shell /usr/xpg4/bin/sh.  The description of the bug can be found at
+## <http://lists.gnu.org/archive/html/bug-autoconf/2011-11/msg00005.html>
+## and the report of the original failure can be found at automake
+## bug#10026 <http://debbugs.gnu.org/cgi/bugreport.cgi?bug=10026#23>
+	  d2=$$d$$p; \
+	  if test -d "$$d2"; then \
+	    echo " $(MKDIR_P) '$(DESTDIR)$(htmldir)/$$f'"; \
+	    $(MKDIR_P) "$(DESTDIR)$(htmldir)/$$f" || exit 1; \
+	    echo " $(INSTALL_DATA) '$$d2'/* '$(DESTDIR)$(htmldir)/$$f'"; \
+	    $(INSTALL_DATA) "$$d2"/* "$(DESTDIR)$(htmldir)/$$f" || exit $$?; \
+	  else \
+	    list2="$$list2 $$d2"; \
+	  fi; \
+	done; \
+	test -z "$$list2" || { echo "$$list2" | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(htmldir)'"; \
+	  $(INSTALL_DATA) $$files "$(DESTDIR)$(htmldir)" || exit $$?; \
+	done; }
+
+install-info-am: $(INFO_DEPS)
+	@$(NORMAL_INSTALL)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+	list='$(INFO_DEPS)'; test -n "$(infodir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(infodir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(infodir)" || exit 1; \
+	fi; \
+	for file in $$list; do \
+## Strip possible $(srcdir) prefix.
+	  case $$file in \
+	    $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \
+	  esac; \
+	  if test -f $$file; then d=.; else d=$(srcdir); fi; \
+## 8+3 filesystems cannot deal with foo.info-N filenames: they all
+## conflict.  DJGPP comes with a tool, DJTAR, that will rename these
+## files to foo.iNN while extracting the archive.  DJGPP's makeinfo
+## is patched to grok these filenames.  However we have to account
+## for the renaming when installing the info files.
+##
+## If $file == foo.info, then $file_i == foo.i.  The reason we use two
+## shell commands instead of one ('s|\.info$$|.i|') is so that a suffix-less
+## 'foo' becomes 'foo.i' too.
+	  file_i=`echo "$$file" | sed 's|\.info$$||;s|$$|.i|'`; \
+	  for ifile in $$d/$$file $$d/$$file-[0-9] $$d/$$file-[0-9][0-9] \
+	               $$d/$$file_i[0-9] $$d/$$file_i[0-9][0-9] ; do \
+	    if test -f $$ifile; then \
+	      echo "$$ifile"; \
+	    else : ; fi; \
+	  done; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(infodir)'"; \
+	  $(INSTALL_DATA) $$files "$(DESTDIR)$(infodir)" || exit $$?; done
+	@$(POST_INSTALL)
+	@if $(am__can_run_installinfo); then \
+	  list='$(INFO_DEPS)'; test -n "$(infodir)" || list=; \
+	  for file in $$list; do \
+## Strip directory
+	    relfile=`echo "$$file" | sed 's|^.*/||'`; \
+## Run ":" after install-info in case install-info fails.  We really
+## don't care about failures here, because they can be spurious.  For
+## instance if you don't have a dir file, install-info will fail.  I
+## think instead it should create a new dir file for you.  This bug
+## causes the "make distcheck" target to fail reliably.
+	    echo " install-info --info-dir='$(DESTDIR)$(infodir)' '$(DESTDIR)$(infodir)/$$relfile'";\
+## Use "|| :" here because Sun make passes -e to sh; if install-info
+## fails then we'd fail if we used ";".
+	    install-info --info-dir="$(DESTDIR)$(infodir)" "$(DESTDIR)$(infodir)/$$relfile" || :;\
+	  done; \
+	else : ; fi
+
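+## Editorial note, not part of upstream automake 1.14: a worked example of
+## the two-step sed in install-info-am above, assuming a manual foo.texi:
+##
+##   echo foo.info | sed 's|\.info$||;s|$|.i|'   =>  foo.i
+##   echo foo      | sed 's|\.info$||;s|$|.i|'   =>  foo.i
+##
+## so the globs $d/$file-[0-9] and $d/$file_i[0-9] match both ordinary split
+## files (foo.info-1, ...) and their DJGPP-renamed forms (foo.i1, ...).
+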
+install-pdf-am: $(PDFS)
+	@$(NORMAL_INSTALL)
+	@list='$(PDFS)'; test -n "$(pdfdir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(pdfdir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(pdfdir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pdfdir)'"; \
+	  $(INSTALL_DATA) $$files "$(DESTDIR)$(pdfdir)" || exit $$?; done
+
+install-ps-am: $(PSS)
+	@$(NORMAL_INSTALL)
+	@list='$(PSS)'; test -n "$(psdir)" || list=; \
+	if test -n "$$list"; then \
+	  echo " $(MKDIR_P) '$(DESTDIR)$(psdir)'"; \
+	  $(MKDIR_P) "$(DESTDIR)$(psdir)" || exit 1; \
+	fi; \
+	for p in $$list; do \
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(psdir)'"; \
+	  $(INSTALL_DATA) $$files "$(DESTDIR)$(psdir)" || exit $$?; done
+
+else ! %?LOCAL-TEXIS%
+install-dvi-am:
+install-html-am:
+install-info-am:
+install-pdf-am:
+install-ps-am:
+endif ! %?LOCAL-TEXIS%
+
+
+## -------------- ##
+## Uninstalling.  ##
+## -------------- ##
+
+if %?LOCAL-TEXIS%
+.PHONY uninstall-am: \
+  uninstall-dvi-am \
+  uninstall-html-am \
+  uninstall-info-am \
+  uninstall-ps-am \
+  uninstall-pdf-am
+
+uninstall-dvi-am:
+	@$(NORMAL_UNINSTALL)
+	@list='$(DVIS)'; test -n "$(dvidir)" || list=; \
+	for p in $$list; do \
+	  $(am__strip_dir) \
+	  echo " rm -f '$(DESTDIR)$(dvidir)/$$f'"; \
+	  rm -f "$(DESTDIR)$(dvidir)/$$f"; \
+	done
+
+uninstall-html-am:
+	@$(NORMAL_UNINSTALL)
+	@list='$(HTMLS)'; test -n "$(htmldir)" || list=; \
+	for p in $$list; do \
+	  $(am__strip_dir) \
+## $f can be a directory, hence the -r.
+	  echo " rm -rf '$(DESTDIR)$(htmldir)/$$f'"; \
+	  rm -rf "$(DESTDIR)$(htmldir)/$$f"; \
+	done
+
+uninstall-info-am:
+	@$(PRE_UNINSTALL)
+## Run two loops here so that we can handle PRE_UNINSTALL and
+## NORMAL_UNINSTALL correctly.
+	@if test -d '$(DESTDIR)$(infodir)' && $(am__can_run_installinfo); then \
+	  list='$(INFO_DEPS)'; \
+	  for file in $$list; do \
+	    relfile=`echo "$$file" | sed 's|^.*/||'`; \
+## install-info needs the actual info file.  We use the installed one,
+## rather than relying on one still being in srcdir or builddir.
+## However, "make uninstall && make uninstall" should not fail,
+## so we ignore failure if the file did not exist.
+	    echo " install-info --info-dir='$(DESTDIR)$(infodir)' --remove '$(DESTDIR)$(infodir)/$$relfile'"; \
+	    if install-info --info-dir="$(DESTDIR)$(infodir)" --remove "$(DESTDIR)$(infodir)/$$relfile"; \
+	    then :; else test ! -f "$(DESTDIR)$(infodir)/$$relfile" || exit 1; fi; \
+	  done; \
+	else :; fi
+	@$(NORMAL_UNINSTALL)
+	@list='$(INFO_DEPS)'; \
+	for file in $$list; do \
+	  relfile=`echo "$$file" | sed 's|^.*/||'`; \
+## DJGPP-style info files.  See comment in install-info-am.
+	  relfile_i=`echo "$$relfile" | sed 's|\.info$$||;s|$$|.i|'`; \
+	  (if test -d "$(DESTDIR)$(infodir)" && cd "$(DESTDIR)$(infodir)"; then \
+	     echo " cd '$(DESTDIR)$(infodir)' && rm -f $$relfile $$relfile-[0-9] $$relfile-[0-9][0-9] $$relfile_i[0-9] $$relfile_i[0-9][0-9]"; \
+	     rm -f $$relfile $$relfile-[0-9] $$relfile-[0-9][0-9] $$relfile_i[0-9] $$relfile_i[0-9][0-9]; \
+	   else :; fi); \
+	done
+
+uninstall-pdf-am:
+	@$(NORMAL_UNINSTALL)
+	@list='$(PDFS)'; test -n "$(pdfdir)" || list=; \
+	for p in $$list; do \
+	  $(am__strip_dir) \
+	  echo " rm -f '$(DESTDIR)$(pdfdir)/$$f'"; \
+	  rm -f "$(DESTDIR)$(pdfdir)/$$f"; \
+	done
+
+uninstall-ps-am:
+	@$(NORMAL_UNINSTALL)
+	@list='$(PSS)'; test -n "$(psdir)" || list=; \
+	for p in $$list; do \
+	  $(am__strip_dir) \
+	  echo " rm -f '$(DESTDIR)$(psdir)/$$f'"; \
+	  rm -f "$(DESTDIR)$(psdir)/$$f"; \
+	done
+endif %?LOCAL-TEXIS%
+
+if %?LOCAL-TEXIS%
+.PHONY: dist-info
+dist-info: $(INFO_DEPS)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+	list='$(INFO_DEPS)'; \
+	for base in $$list; do \
+## Strip possible $(srcdir) prefix.
+	  case $$base in \
+	    $(srcdir)/*) base=`echo "$$base" | sed "s|^$$srcdirstrip/||"`;; \
+	  esac; \
+	  if test -f $$base; then d=.; else d=$(srcdir); fi; \
+	  base_i=`echo "$$base" | sed 's|\.info$$||;s|$$|.i|'`; \
+	  for file in $$d/$$base $$d/$$base-[0-9] $$d/$$base-[0-9][0-9] $$d/$$base_i[0-9] $$d/$$base_i[0-9][0-9]; do \
+	    if test -f $$file; then \
+## Strip leading '$$d/'.
+	      relfile=`expr "$$file" : "$$d/\(.*\)"`; \
+	      test -f "$(distdir)/$$relfile" || \
+		cp -p $$file "$(distdir)/$$relfile"; \
+	    else :; fi; \
+	  done; \
+	done
+endif %?LOCAL-TEXIS%
+
+
+## ---------- ##
+## Cleaning.  ##
+## ---------- ##
+
+## The funny name is due to --cygnus influence; in Cygnus mode,
+## 'clean-info' is a target that users can use.
+
+if %?LOCAL-TEXIS%
+.PHONY mostlyclean-am: mostlyclean-aminfo
+.PHONY: mostlyclean-aminfo
+mostlyclean-aminfo:
+## Use '-rf', not just '-f', because the %*CLEAN% substitutions can also
+## contain any directory created by "makeinfo --html", as well as the
+## '*.t2d' and '*.t2p' directories used by texi2dvi and texi2pdf.
+	-rm -rf %MOSTLYCLEAN%
+
+.PHONY clean-am: clean-aminfo
+clean-aminfo:
+## Use '-rf', not just '-f'; see comments in 'mostlyclean-aminfo'
+## above for details.
+?TEXICLEAN?	-test -z "%TEXICLEAN%" \
+?TEXICLEAN?	|| rm -rf %TEXICLEAN%
+
+.PHONY maintainer-clean-am: maintainer-clean-aminfo
+maintainer-clean-aminfo:
+	@list='$(INFO_DEPS)'; for i in $$list; do \
+## .iNN files are DJGPP-style info files.
+	  i_i=`echo "$$i" | sed 's|\.info$$||;s|$$|.i|'`; \
+	  echo " rm -f $$i $$i-[0-9] $$i-[0-9][0-9] $$i_i[0-9] $$i_i[0-9][0-9]"; \
+	  rm -f $$i $$i-[0-9] $$i-[0-9][0-9] $$i_i[0-9] $$i_i[0-9][0-9]; \
+	done
+## Use '-rf', not just '-f'; see comments in 'mostlyclean-aminfo'
+## above for details.
+?MAINTCLEAN?	-test -z "%MAINTCLEAN%" \
+?MAINTCLEAN?	|| rm -rf %MAINTCLEAN%
+
+endif %?LOCAL-TEXIS%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/vala.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/vala.am
new file mode 100644
index 0000000..1289175
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/vala.am
@@ -0,0 +1,17 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 2008-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## There is no rule here.  :-)
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/yacc.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/yacc.am
new file mode 100644
index 0000000..a1fb43e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/am/yacc.am
@@ -0,0 +1,50 @@
+## automake - create Makefile.in from Makefile.am
+## Copyright (C) 1998-2013 Free Software Foundation, Inc.
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## We want to disable the Yacc rebuild rule when
+##   1. AM_MAINTAINER_MODE is used, and
+##   2. --enable-maintainer-mode is not specified, and
+##   3. parser.c already exist, and
+##   4. parser.y and parser.c are distributed.
+## Point #3 is because "make maintainer-clean" erases parser.c, yet
+## the GNU Coding Standards require that ./configure; make works even
+## after that.
+## Point #4 is because parsers listed in nodist_*_SOURCES are always
+## built on the user's side, so it makes no sense to disable them.
+##
+## Points #1, #2, #3 are solved by unconditionally prefixing the rule
+## with $(am__skipyacc) defined below only when needed.
+##
+## Point #4 requires a condition on whether parser.y/parser.c are
+## distributed or not.  We cannot have a generic rule that works in
+## both cases, so we ensure in automake that nodist_ parsers always
+## use non-generic rules.
+if %?FIRST%
+if %?MAINTAINER-MODE%
+@MAINTAINER_MODE_FALSE@am__skipyacc = test -f $@ ||
+endif %?MAINTAINER-MODE%
+## The 's/c$/h/' substitution *must* be the last one.
+am__yacc_c2h = sed -e s/cc$$/hh/ -e s/cpp$$/hpp/ -e s/cxx$$/hxx/ \
+		   -e s/c++$$/h++/ -e s/c$$/h/
+endif %?FIRST%
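+## Editorial note, not part of upstream automake 1.14: illustrative only.
+## am__yacc_c2h maps a generated parser name onto its header name, e.g.
+##
+##   echo parser.c   | $(am__yacc_c2h)   =>  parser.h
+##   echo parser.cc  | $(am__yacc_c2h)   =>  parser.hh
+##   echo parser.cpp | $(am__yacc_c2h)   =>  parser.hpp
+##
+## and, when AM_MAINTAINER_MODE is in effect and --enable-maintainer-mode
+## was not given, am__skipyacc expands to "test -f $@ ||", so the ylwrap
+## invocation below runs only if the generated parser is missing.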
+
+?GENERIC?%EXT%%DERIVED-EXT%:
+?!GENERIC?%OBJ%: %SOURCE%
+?GENERIC?	%VERBOSE%$(am__skipyacc) $(SHELL) $(YLWRAP) %SOURCE% y.tab.c %OBJ% y.tab.h `echo %OBJ% | $(am__yacc_c2h)` y.output %BASE%.output -- %COMPILE%
+?!GENERIC?	%VERBOSE% \
+?!GENERIC??DIST_SOURCE?	$(am__skipyacc) \
+## For non-suffix rules, we must emulate a VPATH search on %SOURCE%.
+?!GENERIC?	$(SHELL) $(YLWRAP) `test -f '%SOURCE%' || echo '$(srcdir)/'`%SOURCE% y.tab.c %OBJ% y.tab.h `echo %OBJ% | $(am__yacc_c2h)` y.output %BASE%.output -- %COMPILE%
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/ar-lib b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/ar-lib
new file mode 100755
index 0000000..fe2301e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/ar-lib
@@ -0,0 +1,270 @@
+#! /bin/sh
+# Wrapper for Microsoft lib.exe
+
+me=ar-lib
+scriptversion=2012-03-01.08; # UTC
+
+# Copyright (C) 2010-2013 Free Software Foundation, Inc.
+# Written by Peter Rosin <peda@lysator.liu.se>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+
+# func_error message
+func_error ()
+{
+  echo "$me: $1" 1>&2
+  exit 1
+}
+
+file_conv=
+
+# func_file_conv build_file
+# Convert a $build file to $host form and store it in $file
+# Currently only supports Windows hosts.
+func_file_conv ()
+{
+  file=$1
+  case $file in
+    / | /[!/]*) # absolute file, and not a UNC file
+      if test -z "$file_conv"; then
+	# lazily determine how to convert abs files
+	case `uname -s` in
+	  MINGW*)
+	    file_conv=mingw
+	    ;;
+	  CYGWIN*)
+	    file_conv=cygwin
+	    ;;
+	  *)
+	    file_conv=wine
+	    ;;
+	esac
+      fi
+      case $file_conv in
+	mingw)
+	  file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
+	  ;;
+	cygwin)
+	  file=`cygpath -m "$file" || echo "$file"`
+	  ;;
+	wine)
+	  file=`winepath -w "$file" || echo "$file"`
+	  ;;
+      esac
+      ;;
+  esac
+}
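+# Editorial note, not part of the upstream ar-lib script: an illustrative
+# sketch of func_file_conv on a Cygwin host, where cygpath does the work:
+#
+#   func_file_conv /home/user/libfoo.lib
+#   echo "$file"        # -> C:/cygwin/home/user/libfoo.lib (typical output)
+#
+# Relative paths (anything not starting with "/") pass through unchanged,
+# and the conversion style is detected only once, via `uname -s`.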
+
+# func_at_file at_file operation archive
+# Iterate over all members in AT_FILE performing OPERATION on ARCHIVE
+# for each of them.
+# When interpreting the content of the @FILE, do NOT use func_file_conv,
+# since the user would need to supply preconverted file names to
+# binutils ar, at least for MinGW.
+func_at_file ()
+{
+  operation=$2
+  archive=$3
+  at_file_contents=`cat "$1"`
+  eval set x "$at_file_contents"
+  shift
+
+  for member
+  do
+    $AR -NOLOGO $operation:"$member" "$archive" || exit $?
+  done
+}
+
+case $1 in
+  '')
+     func_error "no command.  Try '$0 --help' for more information."
+     ;;
+  -h | --h*)
+    cat <<EOF
+Usage: $me [--help] [--version] PROGRAM ACTION ARCHIVE [MEMBER...]
+
+Members may be specified in a file named with @FILE.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "$me, version $scriptversion"
+    exit $?
+    ;;
+esac
+
+if test $# -lt 3; then
+  func_error "you must specify a program, an action and an archive"
+fi
+
+AR=$1
+shift
+while :
+do
+  if test $# -lt 2; then
+    func_error "you must specify a program, an action and an archive"
+  fi
+  case $1 in
+    -lib | -LIB \
+    | -ltcg | -LTCG \
+    | -machine* | -MACHINE* \
+    | -subsystem* | -SUBSYSTEM* \
+    | -verbose | -VERBOSE \
+    | -wx* | -WX* )
+      AR="$AR $1"
+      shift
+      ;;
+    *)
+      action=$1
+      shift
+      break
+      ;;
+  esac
+done
+orig_archive=$1
+shift
+func_file_conv "$orig_archive"
+archive=$file
+
+# strip leading dash in $action
+action=${action#-}
+
+delete=
+extract=
+list=
+quick=
+replace=
+index=
+create=
+
+while test -n "$action"
+do
+  case $action in
+    d*) delete=yes  ;;
+    x*) extract=yes ;;
+    t*) list=yes    ;;
+    q*) quick=yes   ;;
+    r*) replace=yes ;;
+    s*) index=yes   ;;
+    S*)             ;; # the index is always updated implicitly
+    c*) create=yes  ;;
+    u*)             ;; # TODO: don't ignore the update modifier
+    v*)             ;; # TODO: don't ignore the verbose modifier
+    *)
+      func_error "unknown action specified"
+      ;;
+  esac
+  action=${action#?}
+done
+
+case $delete$extract$list$quick$replace,$index in
+  yes,* | ,yes)
+    ;;
+  yesyes*)
+    func_error "more than one action specified"
+    ;;
+  *)
+    func_error "no action specified"
+    ;;
+esac
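+# Editorial note, not part of the upstream ar-lib script: as a rough
+# illustration of the overall mapping, an Automake-generated call such as
+#
+#   ./ar-lib lib cru libfoo.lib a.obj b.obj
+#
+# takes the quick/replace path below and runs approximately
+#
+#   lib -NOLOGO -OUT:libfoo.lib a.obj b.obj
+#
+# listing libfoo.lib as an additional input when it already exists.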
+
+if test -n "$delete"; then
+  if test ! -f "$orig_archive"; then
+    func_error "archive not found"
+  fi
+  for member
+  do
+    case $1 in
+      @*)
+        func_at_file "${1#@}" -REMOVE "$archive"
+        ;;
+      *)
+        func_file_conv "$1"
+        $AR -NOLOGO -REMOVE:"$file" "$archive" || exit $?
+        ;;
+    esac
+  done
+
+elif test -n "$extract"; then
+  if test ! -f "$orig_archive"; then
+    func_error "archive not found"
+  fi
+  if test $# -gt 0; then
+    for member
+    do
+      case $1 in
+        @*)
+          func_at_file "${1#@}" -EXTRACT "$archive"
+          ;;
+        *)
+          func_file_conv "$1"
+          $AR -NOLOGO -EXTRACT:"$file" "$archive" || exit $?
+          ;;
+      esac
+    done
+  else
+    $AR -NOLOGO -LIST "$archive" | sed -e 's/\\/\\\\/g' | while read member
+    do
+      $AR -NOLOGO -EXTRACT:"$member" "$archive" || exit $?
+    done
+  fi
+
+elif test -n "$quick$replace"; then
+  if test ! -f "$orig_archive"; then
+    if test -z "$create"; then
+      echo "$me: creating $orig_archive"
+    fi
+    orig_archive=
+  else
+    orig_archive=$archive
+  fi
+
+  for member
+  do
+    case $1 in
+    @*)
+      func_file_conv "${1#@}"
+      set x "$@" "@$file"
+      ;;
+    *)
+      func_file_conv "$1"
+      set x "$@" "$file"
+      ;;
+    esac
+    shift
+    shift
+  done
+
+  if test -n "$orig_archive"; then
+    $AR -NOLOGO -OUT:"$archive" "$orig_archive" "$@" || exit $?
+  else
+    $AR -NOLOGO -OUT:"$archive" "$@" || exit $?
+  fi
+
+elif test -n "$list"; then
+  if test ! -f "$orig_archive"; then
+    func_error "archive not found"
+  fi
+  $AR -NOLOGO -LIST "$archive" || exit $?
+fi
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/compile b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/compile
new file mode 100755
index 0000000..531136b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/compile
@@ -0,0 +1,347 @@
+#! /bin/sh
+# Wrapper for compilers which do not understand '-c -o'.
+
+scriptversion=2012-10-14.11; # UTC
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+# Written by Tom Tromey <tromey@cygnus.com>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+nl='
+'
+
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent tools from complaining about whitespace usage.
+IFS=" ""	$nl"
+
+file_conv=
+
+# func_file_conv build_file lazy
+# Convert a $build file to $host form and store it in $file
+# Currently only supports Windows hosts. If the determined conversion
+# type is listed in (the comma separated) LAZY, no conversion will
+# take place.
+func_file_conv ()
+{
+  file=$1
+  case $file in
+    / | /[!/]*) # absolute file, and not a UNC file
+      if test -z "$file_conv"; then
+	# lazily determine how to convert abs files
+	case `uname -s` in
+	  MINGW*)
+	    file_conv=mingw
+	    ;;
+	  CYGWIN*)
+	    file_conv=cygwin
+	    ;;
+	  *)
+	    file_conv=wine
+	    ;;
+	esac
+      fi
+      case $file_conv/,$2, in
+	*,$file_conv,*)
+	  ;;
+	mingw/*)
+	  file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
+	  ;;
+	cygwin/*)
+	  file=`cygpath -m "$file" || echo "$file"`
+	  ;;
+	wine/*)
+	  file=`winepath -w "$file" || echo "$file"`
+	  ;;
+      esac
+      ;;
+  esac
+}
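+# Editorial note, not part of the upstream compile script: the optional
+# second argument lists conversion styles to skip.  For example, when
+# file_conv has been detected as "mingw":
+#
+#   func_file_conv /c/src/foo.c mingw   # left untouched (mingw is "lazy")
+#   func_file_conv /c/src/foo.c         # converted via `cmd //C echo ...`
+#
+# This is why -I paths below are converted lazily while -o targets are not.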
+
+# func_cl_dashL linkdir
+# Make cl look for libraries in LINKDIR
+func_cl_dashL ()
+{
+  func_file_conv "$1"
+  if test -z "$lib_path"; then
+    lib_path=$file
+  else
+    lib_path="$lib_path;$file"
+  fi
+  linker_opts="$linker_opts -LIBPATH:$file"
+}
+
+# func_cl_dashl library
+# Do a library search-path lookup for cl
+func_cl_dashl ()
+{
+  lib=$1
+  found=no
+  save_IFS=$IFS
+  IFS=';'
+  for dir in $lib_path $LIB
+  do
+    IFS=$save_IFS
+    if $shared && test -f "$dir/$lib.dll.lib"; then
+      found=yes
+      lib=$dir/$lib.dll.lib
+      break
+    fi
+    if test -f "$dir/$lib.lib"; then
+      found=yes
+      lib=$dir/$lib.lib
+      break
+    fi
+    if test -f "$dir/lib$lib.a"; then
+      found=yes
+      lib=$dir/lib$lib.a
+      break
+    fi
+  done
+  IFS=$save_IFS
+
+  if test "$found" != yes; then
+    lib=$lib.lib
+  fi
+}
+
+# func_cl_wrapper cl arg...
+# Adjust compile command to suit cl
+func_cl_wrapper ()
+{
+  # Assume a capable shell
+  lib_path=
+  shared=:
+  linker_opts=
+  for arg
+  do
+    if test -n "$eat"; then
+      eat=
+    else
+      case $1 in
+	-o)
+	  # configure might choose to run compile as 'compile cc -o foo foo.c'.
+	  eat=1
+	  case $2 in
+	    *.o | *.[oO][bB][jJ])
+	      func_file_conv "$2"
+	      set x "$@" -Fo"$file"
+	      shift
+	      ;;
+	    *)
+	      func_file_conv "$2"
+	      set x "$@" -Fe"$file"
+	      shift
+	      ;;
+	  esac
+	  ;;
+	-I)
+	  eat=1
+	  func_file_conv "$2" mingw
+	  set x "$@" -I"$file"
+	  shift
+	  ;;
+	-I*)
+	  func_file_conv "${1#-I}" mingw
+	  set x "$@" -I"$file"
+	  shift
+	  ;;
+	-l)
+	  eat=1
+	  func_cl_dashl "$2"
+	  set x "$@" "$lib"
+	  shift
+	  ;;
+	-l*)
+	  func_cl_dashl "${1#-l}"
+	  set x "$@" "$lib"
+	  shift
+	  ;;
+	-L)
+	  eat=1
+	  func_cl_dashL "$2"
+	  ;;
+	-L*)
+	  func_cl_dashL "${1#-L}"
+	  ;;
+	-static)
+	  shared=false
+	  ;;
+	-Wl,*)
+	  arg=${1#-Wl,}
+	  save_ifs="$IFS"; IFS=','
+	  for flag in $arg; do
+	    IFS="$save_ifs"
+	    linker_opts="$linker_opts $flag"
+	  done
+	  IFS="$save_ifs"
+	  ;;
+	-Xlinker)
+	  eat=1
+	  linker_opts="$linker_opts $2"
+	  ;;
+	-*)
+	  set x "$@" "$1"
+	  shift
+	  ;;
+	*.cc | *.CC | *.cxx | *.CXX | *.[cC]++)
+	  func_file_conv "$1"
+	  set x "$@" -Tp"$file"
+	  shift
+	  ;;
+	*.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO])
+	  func_file_conv "$1" mingw
+	  set x "$@" "$file"
+	  shift
+	  ;;
+	*)
+	  set x "$@" "$1"
+	  shift
+	  ;;
+      esac
+    fi
+    shift
+  done
+  if test -n "$linker_opts"; then
+    linker_opts="-link$linker_opts"
+  fi
+  exec "$@" $linker_opts
+  exit 1
+}
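+# Editorial note, not part of the upstream compile script: a hypothetical
+# invocation showing roughly how func_cl_wrapper rewrites a GCC-style
+# command line for cl:
+#
+#   ./compile cl -c -o sub/foo.obj foo.c -Iinclude -DFOO=1
+#
+# ends up executing approximately
+#
+#   cl -c -Fosub/foo.obj foo.c -Iinclude -DFOO=1
+#
+# while any -L/-l options are collected into "-link -LIBPATH:..." linker
+# options appended at the end of the command.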
+
+eat=
+
+case $1 in
+  '')
+     echo "$0: No command.  Try '$0 --help' for more information." 1>&2
+     exit 1;
+     ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: compile [--help] [--version] PROGRAM [ARGS]
+
+Wrapper for compilers which do not understand '-c -o'.
+Remove '-o dest.o' from ARGS, run PROGRAM with the remaining
+arguments, and rename the output as expected.
+
+If you are trying to build a whole package this is not the
+right script to run: please start by reading the file 'INSTALL'.
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "compile $scriptversion"
+    exit $?
+    ;;
+  cl | *[/\\]cl | cl.exe | *[/\\]cl.exe )
+    func_cl_wrapper "$@"      # Doesn't return...
+    ;;
+esac
+
+ofile=
+cfile=
+
+for arg
+do
+  if test -n "$eat"; then
+    eat=
+  else
+    case $1 in
+      -o)
+	# configure might choose to run compile as 'compile cc -o foo foo.c'.
+	# So we strip '-o arg' only if arg is an object.
+	eat=1
+	case $2 in
+	  *.o | *.obj)
+	    ofile=$2
+	    ;;
+	  *)
+	    set x "$@" -o "$2"
+	    shift
+	    ;;
+	esac
+	;;
+      *.c)
+	cfile=$1
+	set x "$@" "$1"
+	shift
+	;;
+      *)
+	set x "$@" "$1"
+	shift
+	;;
+    esac
+  fi
+  shift
+done
+
+if test -z "$ofile" || test -z "$cfile"; then
+  # If no '-o' option was seen then we might have been invoked from a
+  # pattern rule where we don't need one.  That is ok -- this is a
+  # normal compilation that the losing compiler can handle.  If no
+  # '.c' file was seen then we are probably linking.  That is also
+  # ok.
+  exec "$@"
+fi
+
+# Name of file we expect compiler to create.
+cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`
+
+# Create the lock directory.
+# Note: use '[/\\:.-]' here to ensure that we don't use the same name
+# that we are using for the .o file.  Also, base the name on the expected
+# object file name, since that is what matters with a parallel build.
+lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
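+# Editorial note, not part of the upstream compile script: for example,
+# compiling sub/dir/foo.c with "-o sub/dir/foo.o" yields
+#
+#   cofile  = foo.o        (directory and drive prefix stripped)
+#   lockdir = foo_o.d
+#
+# so two parallel compilations producing the same object serialize on the
+# mkdir below, and the lock name can never collide with the .o file itself.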
+while true; do
+  if mkdir "$lockdir" >/dev/null 2>&1; then
+    break
+  fi
+  sleep 1
+done
+# FIXME: race condition here if user kills between mkdir and trap.
+trap "rmdir '$lockdir'; exit 1" 1 2 15
+
+# Run the compile.
+"$@"
+ret=$?
+
+if test -f "$cofile"; then
+  test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
+elif test -f "${cofile}bj"; then
+  test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
+fi
+
+rmdir "$lockdir"
+exit $ret
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/config.guess b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/config.guess
new file mode 100755
index 0000000..9afd676
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/config.guess
@@ -0,0 +1,1568 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+#   Copyright 1992-2013 Free Software Foundation, Inc.
+
+timestamp='2013-11-29'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program.  This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+#
+# Originally written by Per Bothner.
+#
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
+#
+# Please send patches with a ChangeLog entry to config-patches@gnu.org.
+
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright 1992-2013 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+    * )
+       break ;;
+  esac
+done
+
+if test $# != 0; then
+  echo "$me: too many arguments$help" >&2
+  exit 1
+fi
+
+trap 'exit 1' 1 2 15
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+set_cc_for_build='
+trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
+: ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
+dummy=$tmp/dummy ;
+tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
+case $CC_FOR_BUILD,$HOST_CC,$CC in
+ ,,)    echo "int x;" > $dummy.c ;
+	for c in cc gcc c89 c99 ; do
+	  if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
+	     CC_FOR_BUILD="$c"; break ;
+	  fi ;
+	done ;
+	if test x"$CC_FOR_BUILD" = x ; then
+	  CC_FOR_BUILD=no_compiler_found ;
+	fi
+	;;
+ ,,*)   CC_FOR_BUILD=$CC ;;
+ ,*,*)  CC_FOR_BUILD=$HOST_CC ;;
+esac ; set_cc_for_build= ;'
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi@noc.rutgers.edu 1994-08-24)
+if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
+	PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null`  || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+case "${UNAME_SYSTEM}" in
+Linux|GNU|GNU/*)
+	# If the system lacks a compiler, then just pick glibc.
+	# We could probably try harder.
+	LIBC=gnu
+
+	eval $set_cc_for_build
+	cat <<-EOF > $dummy.c
+	#include <features.h>
+	#if defined(__UCLIBC__)
+	LIBC=uclibc
+	#elif defined(__dietlibc__)
+	LIBC=dietlibc
+	#else
+	LIBC=gnu
+	#endif
+	EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'`
+	;;
+esac
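+# Editorial note, not part of the upstream config.guess: the probe above
+# only preprocesses the snippet; the surviving LIBC= line is then eval'd.
+# On an ordinary glibc-based Linux box the effect is roughly
+#
+#   $CC_FOR_BUILD -E $dummy.c | grep '^LIBC'   # -> LIBC=gnu
+#
+# which later produces a triplet such as x86_64-unknown-linux-gnu rather
+# than one ending in -linux-uclibc or -linux-dietlibc.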
+
+# Note: order is significant - the case branches are not exclusive.
+
+case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+    *:NetBSD:*:*)
+	# NetBSD (nbsd) targets should (where applicable) match one or
+	# more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+	# *-*-netbsdecoff* and *-*-netbsd*.  For targets that recently
+	# switched to ELF, *-*-netbsd* would select the old
+	# object file format.  This provides both forward
+	# compatibility and a consistent mechanism for selecting the
+	# object file format.
+	#
+	# Note: NetBSD doesn't particularly care about the vendor
+	# portion of the name.  We always set it to "unknown".
+	sysctl="sysctl -n hw.machine_arch"
+	UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
+	    /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
+	case "${UNAME_MACHINE_ARCH}" in
+	    armeb) machine=armeb-unknown ;;
+	    arm*) machine=arm-unknown ;;
+	    sh3el) machine=shl-unknown ;;
+	    sh3eb) machine=sh-unknown ;;
+	    sh5el) machine=sh5le-unknown ;;
+	    *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
+	esac
+	# The Operating System including object format, if it has switched
+	# to ELF recently, or will in the future.
+	case "${UNAME_MACHINE_ARCH}" in
+	    arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+		eval $set_cc_for_build
+		if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+			| grep -q __ELF__
+		then
+		    # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+		    # Return netbsd for either.  FIX?
+		    os=netbsd
+		else
+		    os=netbsdelf
+		fi
+		;;
+	    *)
+		os=netbsd
+		;;
+	esac
+	# The OS release
+	# Debian GNU/NetBSD machines have a different userland, and
+	# thus, need a distinct triplet. However, they do not need
+	# kernel version information, so it can be replaced with a
+	# suitable tag, in the style of linux-gnu.
+	case "${UNAME_VERSION}" in
+	    Debian*)
+		release='-gnu'
+		;;
+	    *)
+		release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
+		;;
+	esac
+	# Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+	# contains redundant information, the shorter form:
+	# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+	echo "${machine}-${os}${release}"
+	exit ;;
+    *:Bitrig:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'`
+	echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE}
+	exit ;;
+    *:OpenBSD:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+	echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
+	exit ;;
+    *:ekkoBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
+	exit ;;
+    *:SolidBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE}
+	exit ;;
+    macppc:MirBSD:*:*)
+	echo powerpc-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    *:MirBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    alpha:OSF1:*:*)
+	case $UNAME_RELEASE in
+	*4.0)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+		;;
+	*5.*)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+		;;
+	esac
+	# According to Compaq, /usr/sbin/psrinfo has been available on
+	# OSF/1 and Tru64 systems produced since 1995.  I hope that
+	# covers most systems running today.  This code pipes the CPU
+	# types through head -n 1, so we only detect the type of CPU 0.
+	ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^  The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+	case "$ALPHA_CPU_TYPE" in
+	    "EV4 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV4.5 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "LCA4 (21066/21068)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV5 (21164)")
+		UNAME_MACHINE="alphaev5" ;;
+	    "EV5.6 (21164A)")
+		UNAME_MACHINE="alphaev56" ;;
+	    "EV5.6 (21164PC)")
+		UNAME_MACHINE="alphapca56" ;;
+	    "EV5.7 (21164PC)")
+		UNAME_MACHINE="alphapca57" ;;
+	    "EV6 (21264)")
+		UNAME_MACHINE="alphaev6" ;;
+	    "EV6.7 (21264A)")
+		UNAME_MACHINE="alphaev67" ;;
+	    "EV6.8CB (21264C)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8AL (21264B)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8CX (21264D)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.9A (21264/EV69A)")
+		UNAME_MACHINE="alphaev69" ;;
+	    "EV7 (21364)")
+		UNAME_MACHINE="alphaev7" ;;
+	    "EV7.9 (21364A)")
+		UNAME_MACHINE="alphaev79" ;;
+	esac
+	# A Pn.n version is a patched version.
+	# A Vn.n version is a released version.
+	# A Tn.n version is a released field test version.
+	# A Xn.n version is an unreleased experimental baselevel.
+	# 1.2 uses "1.2" for uname -r.
+	echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	# Reset EXIT trap before exiting to avoid spurious non-zero exit code.
+	exitcode=$?
+	trap '' 0
+	exit $exitcode ;;
+    Alpha\ *:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# Should we change UNAME_MACHINE based on the output of uname instead
+	# of the specific Alpha model?
+	echo alpha-pc-interix
+	exit ;;
+    21064:Windows_NT:50:3)
+	echo alpha-dec-winnt3.5
+	exit ;;
+    Amiga*:UNIX_System_V:4.0:*)
+	echo m68k-unknown-sysv4
+	exit ;;
+    *:[Aa]miga[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-amigaos
+	exit ;;
+    *:[Mm]orph[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-morphos
+	exit ;;
+    *:OS/390:*:*)
+	echo i370-ibm-openedition
+	exit ;;
+    *:z/VM:*:*)
+	echo s390-ibm-zvmoe
+	exit ;;
+    *:OS400:*:*)
+	echo powerpc-ibm-os400
+	exit ;;
+    arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+	echo arm-acorn-riscix${UNAME_RELEASE}
+	exit ;;
+    arm*:riscos:*:*|arm*:RISCOS:*:*)
+	echo arm-unknown-riscos
+	exit ;;
+    SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+	echo hppa1.1-hitachi-hiuxmpp
+	exit ;;
+    Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+	# akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+	if test "`(/bin/universe) 2>/dev/null`" = att ; then
+		echo pyramid-pyramid-sysv3
+	else
+		echo pyramid-pyramid-bsd
+	fi
+	exit ;;
+    NILE*:*:*:dcosx)
+	echo pyramid-pyramid-svr4
+	exit ;;
+    DRS?6000:unix:4.0:6*)
+	echo sparc-icl-nx6
+	exit ;;
+    DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+	case `/usr/bin/uname -p` in
+	    sparc) echo sparc-icl-nx7; exit ;;
+	esac ;;
+    s390x:SunOS:*:*)
+	echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4H:SunOS:5.*:*)
+	echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+	echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
+	echo i386-pc-auroraux${UNAME_RELEASE}
+	exit ;;
+    i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+	eval $set_cc_for_build
+	SUN_ARCH="i386"
+	# If there is a compiler, see if it is configured for 64-bit objects.
+	# Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+	# This test works for both compilers.
+	if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+	    if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+		(CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+		grep IS_64BIT_ARCH >/dev/null
+	    then
+		SUN_ARCH="x86_64"
+	    fi
+	fi
+	echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:6*:*)
+	# According to config.sub, this is the proper way to canonicalize
+	# SunOS6.  Hard to guess exactly what SunOS6 will be like, but
+	# it's likely to be more like Solaris than SunOS4.
+	echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:*:*)
+	case "`/usr/bin/arch -k`" in
+	    Series*|S4*)
+		UNAME_RELEASE=`uname -v`
+		;;
+	esac
+	# Japanese Language versions have a version number like `4.1.3-JL'.
+	echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
+	exit ;;
+    sun3*:SunOS:*:*)
+	echo m68k-sun-sunos${UNAME_RELEASE}
+	exit ;;
+    sun*:*:4.2BSD:*)
+	UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+	test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
+	case "`/bin/arch`" in
+	    sun3)
+		echo m68k-sun-sunos${UNAME_RELEASE}
+		;;
+	    sun4)
+		echo sparc-sun-sunos${UNAME_RELEASE}
+		;;
+	esac
+	exit ;;
+    aushp:SunOS:*:*)
+	echo sparc-auspex-sunos${UNAME_RELEASE}
+	exit ;;
+    # The situation for MiNT is a little confusing.  The machine name
+    # can be virtually everything (everything which is not
+    # "atarist" or "atariste" at least should have a processor
+    # > m68000).  The system name ranges from "MiNT" over "FreeMiNT"
+    # to the lowercase version "mint" (or "freemint").  Finally
+    # the system name "TOS" denotes a system which is actually not
+    # MiNT.  But MiNT is downward compatible to TOS, so this should
+    # be no problem.
+    atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+	echo m68k-milan-mint${UNAME_RELEASE}
+	exit ;;
+    hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+	echo m68k-hades-mint${UNAME_RELEASE}
+	exit ;;
+    *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+	echo m68k-unknown-mint${UNAME_RELEASE}
+	exit ;;
+    m68k:machten:*:*)
+	echo m68k-apple-machten${UNAME_RELEASE}
+	exit ;;
+    powerpc:machten:*:*)
+	echo powerpc-apple-machten${UNAME_RELEASE}
+	exit ;;
+    RISC*:Mach:*:*)
+	echo mips-dec-mach_bsd4.3
+	exit ;;
+    RISC*:ULTRIX:*:*)
+	echo mips-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    VAX*:ULTRIX*:*:*)
+	echo vax-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    2020:CLIX:*:* | 2430:CLIX:*:*)
+	echo clipper-intergraph-clix${UNAME_RELEASE}
+	exit ;;
+    mips:*:*:UMIPS | mips:*:*:RISCos)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+#ifdef __cplusplus
+#include <stdio.h>  /* for printf() prototype */
+	int main (int argc, char *argv[]) {
+#else
+	int main (argc, argv) int argc; char *argv[]; {
+#endif
+	#if defined (host_mips) && defined (MIPSEB)
+	#if defined (SYSTYPE_SYSV)
+	  printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_SVR4)
+	  printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+	  printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
+	#endif
+	#endif
+	  exit (-1);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c &&
+	  dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+	  SYSTEM_NAME=`$dummy $dummyarg` &&
+	    { echo "$SYSTEM_NAME"; exit; }
+	echo mips-mips-riscos${UNAME_RELEASE}
+	exit ;;
+    Motorola:PowerMAX_OS:*:*)
+	echo powerpc-motorola-powermax
+	exit ;;
+    Motorola:*:4.3:PL8-*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:Power_UNIX:*:*)
+	echo powerpc-harris-powerunix
+	exit ;;
+    m88k:CX/UX:7*:*)
+	echo m88k-harris-cxux7
+	exit ;;
+    m88k:*:4*:R4*)
+	echo m88k-motorola-sysv4
+	exit ;;
+    m88k:*:3*:R3*)
+	echo m88k-motorola-sysv3
+	exit ;;
+    AViiON:dgux:*:*)
+	# DG/UX returns AViiON for all architectures
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+	then
+	    if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+	       [ ${TARGET_BINARY_INTERFACE}x = x ]
+	    then
+		echo m88k-dg-dgux${UNAME_RELEASE}
+	    else
+		echo m88k-dg-dguxbcs${UNAME_RELEASE}
+	    fi
+	else
+	    echo i586-dg-dgux${UNAME_RELEASE}
+	fi
+	exit ;;
+    M88*:DolphinOS:*:*)	# DolphinOS (SVR3)
+	echo m88k-dolphin-sysv3
+	exit ;;
+    M88*:*:R3*:*)
+	# Delta 88k system running SVR3
+	echo m88k-motorola-sysv3
+	exit ;;
+    XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+	echo m88k-tektronix-sysv3
+	exit ;;
+    Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+	echo m68k-tektronix-bsd
+	exit ;;
+    *:IRIX*:*:*)
+	echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
+	exit ;;
+    ????????:AIX?:[12].1:2)   # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+	echo romp-ibm-aix     # uname -m gives an 8 hex-code CPU id
+	exit ;;               # Note that: echo "'`uname -s`'" gives 'AIX '
+    i*86:AIX:*:*)
+	echo i386-ibm-aix
+	exit ;;
+    ia64:AIX:*:*)
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:2:3)
+	if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+		eval $set_cc_for_build
+		sed 's/^		//' << EOF >$dummy.c
+		#include <sys/systemcfg.h>
+
+		main()
+			{
+			if (!__power_pc())
+				exit(1);
+			puts("powerpc-ibm-aix3.2.5");
+			exit(0);
+			}
+EOF
+		if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
+		then
+			echo "$SYSTEM_NAME"
+		else
+			echo rs6000-ibm-aix3.2.5
+		fi
+	elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+		echo rs6000-ibm-aix3.2.4
+	else
+		echo rs6000-ibm-aix3.2
+	fi
+	exit ;;
+    *:AIX:*:[4567])
+	IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+	if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
+		IBM_ARCH=rs6000
+	else
+		IBM_ARCH=powerpc
+	fi
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${IBM_ARCH}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:*:*)
+	echo rs6000-ibm-aix
+	exit ;;
+    ibmrt:4.4BSD:*|romp-ibm:BSD:*)
+	echo romp-ibm-bsd4.4
+	exit ;;
+    ibmrt:*BSD:*|romp-ibm:BSD:*)            # covers RT/PC BSD and
+	echo romp-ibm-bsd${UNAME_RELEASE}   # 4.3 with uname added to
+	exit ;;                             # report: romp-ibm BSD 4.3
+    *:BOSX:*:*)
+	echo rs6000-bull-bosx
+	exit ;;
+    DPX/2?00:B.O.S.:*:*)
+	echo m68k-bull-sysv3
+	exit ;;
+    9000/[34]??:4.3bsd:1.*:*)
+	echo m68k-hp-bsd
+	exit ;;
+    hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+	echo m68k-hp-bsd4.4
+	exit ;;
+    9000/[34678]??:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	case "${UNAME_MACHINE}" in
+	    9000/31? )            HP_ARCH=m68000 ;;
+	    9000/[34]?? )         HP_ARCH=m68k ;;
+	    9000/[678][0-9][0-9])
+		if [ -x /usr/bin/getconf ]; then
+		    sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+		    sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+		    case "${sc_cpu_version}" in
+		      523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
+		      528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
+		      532)                      # CPU_PA_RISC2_0
+			case "${sc_kernel_bits}" in
+			  32) HP_ARCH="hppa2.0n" ;;
+			  64) HP_ARCH="hppa2.0w" ;;
+			  '') HP_ARCH="hppa2.0" ;;   # HP-UX 10.20
+			esac ;;
+		    esac
+		fi
+		if [ "${HP_ARCH}" = "" ]; then
+		    eval $set_cc_for_build
+		    sed 's/^		//' << EOF >$dummy.c
+
+		#define _HPUX_SOURCE
+		#include <stdlib.h>
+		#include <unistd.h>
+
+		int main ()
+		{
+		#if defined(_SC_KERNEL_BITS)
+		    long bits = sysconf(_SC_KERNEL_BITS);
+		#endif
+		    long cpu  = sysconf (_SC_CPU_VERSION);
+
+		    switch (cpu)
+			{
+			case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+			case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+			case CPU_PA_RISC2_0:
+		#if defined(_SC_KERNEL_BITS)
+			    switch (bits)
+				{
+				case 64: puts ("hppa2.0w"); break;
+				case 32: puts ("hppa2.0n"); break;
+				default: puts ("hppa2.0"); break;
+				} break;
+		#else  /* !defined(_SC_KERNEL_BITS) */
+			    puts ("hppa2.0"); break;
+		#endif
+			default: puts ("hppa1.0"); break;
+			}
+		    exit (0);
+		}
+EOF
+		    (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
+		    test -z "$HP_ARCH" && HP_ARCH=hppa
+		fi ;;
+	esac
+	if [ ${HP_ARCH} = "hppa2.0w" ]
+	then
+	    eval $set_cc_for_build
+
+	    # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+	    # 32-bit code.  hppa64-hp-hpux* has the same kernel and a compiler
+	    # generating 64-bit code.  GNU and HP use different nomenclature:
+	    #
+	    # $ CC_FOR_BUILD=cc ./config.guess
+	    # => hppa2.0w-hp-hpux11.23
+	    # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+	    # => hppa64-hp-hpux11.23
+
+	    if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
+		grep -q __LP64__
+	    then
+		HP_ARCH="hppa2.0w"
+	    else
+		HP_ARCH="hppa64"
+	    fi
+	fi
+	echo ${HP_ARCH}-hp-hpux${HPUX_REV}
+	exit ;;
+    ia64:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	echo ia64-hp-hpux${HPUX_REV}
+	exit ;;
+    3050*:HI-UX:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#include <unistd.h>
+	int
+	main ()
+	{
+	  long cpu = sysconf (_SC_CPU_VERSION);
+	  /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+	     true for CPU_PA_RISC1_0.  CPU_IS_PA_RISC returns correct
+	     results, however.  */
+	  if (CPU_IS_PA_RISC (cpu))
+	    {
+	      switch (cpu)
+		{
+		  case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+		  default: puts ("hppa-hitachi-hiuxwe2"); break;
+		}
+	    }
+	  else if (CPU_IS_HP_MC68K (cpu))
+	    puts ("m68k-hitachi-hiuxwe2");
+	  else puts ("unknown-hitachi-hiuxwe2");
+	  exit (0);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
+		{ echo "$SYSTEM_NAME"; exit; }
+	echo unknown-hitachi-hiuxwe2
+	exit ;;
+    9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
+	echo hppa1.1-hp-bsd
+	exit ;;
+    9000/8??:4.3bsd:*:*)
+	echo hppa1.0-hp-bsd
+	exit ;;
+    *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+	echo hppa1.0-hp-mpeix
+	exit ;;
+    hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
+	echo hppa1.1-hp-osf
+	exit ;;
+    hp8??:OSF1:*:*)
+	echo hppa1.0-hp-osf
+	exit ;;
+    i*86:OSF1:*:*)
+	if [ -x /usr/sbin/sysversion ] ; then
+	    echo ${UNAME_MACHINE}-unknown-osf1mk
+	else
+	    echo ${UNAME_MACHINE}-unknown-osf1
+	fi
+	exit ;;
+    parisc*:Lites*:*:*)
+	echo hppa1.1-hp-lites
+	exit ;;
+    C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+	echo c1-convex-bsd
+	exit ;;
+    C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+	echo c34-convex-bsd
+	exit ;;
+    C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+	echo c38-convex-bsd
+	exit ;;
+    C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+	echo c4-convex-bsd
+	exit ;;
+    CRAY*Y-MP:*:*:*)
+	echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*[A-Z]90:*:*:*)
+	echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
+	| sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+	      -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+	      -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*TS:*:*:*)
+	echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*T3E:*:*:*)
+	echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*SV1:*:*:*)
+	echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    *:UNICOS/mp:*:*)
+	echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+	FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+	FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
+	echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit ;;
+    5000:UNIX_System_V:4.*:*)
+	FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+	FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
+	echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit ;;
+    i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+	echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
+	exit ;;
+    sparc*:BSD/OS:*:*)
+	echo sparc-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:BSD/OS:*:*)
+	echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:FreeBSD:*:*)
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	case ${UNAME_PROCESSOR} in
+	    amd64)
+		echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	    *)
+		echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	esac
+	exit ;;
+    i*:CYGWIN*:*)
+	echo ${UNAME_MACHINE}-pc-cygwin
+	exit ;;
+    *:MINGW64*:*)
+	echo ${UNAME_MACHINE}-pc-mingw64
+	exit ;;
+    *:MINGW*:*)
+	echo ${UNAME_MACHINE}-pc-mingw32
+	exit ;;
+    i*:MSYS*:*)
+	echo ${UNAME_MACHINE}-pc-msys
+	exit ;;
+    i*:windows32*:*)
+	# uname -m includes "-pc" on this system.
+	echo ${UNAME_MACHINE}-mingw32
+	exit ;;
+    i*:PW*:*)
+	echo ${UNAME_MACHINE}-pc-pw32
+	exit ;;
+    *:Interix*:*)
+	case ${UNAME_MACHINE} in
+	    x86)
+		echo i586-pc-interix${UNAME_RELEASE}
+		exit ;;
+	    authenticamd | genuineintel | EM64T)
+		echo x86_64-unknown-interix${UNAME_RELEASE}
+		exit ;;
+	    IA64)
+		echo ia64-unknown-interix${UNAME_RELEASE}
+		exit ;;
+	esac ;;
+    [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
+	echo i${UNAME_MACHINE}-pc-mks
+	exit ;;
+    8664:Windows_NT:*)
+	echo x86_64-pc-mks
+	exit ;;
+    i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
+	# set UNAME_MACHINE based on the output of uname instead of i386?
+	echo i586-pc-interix
+	exit ;;
+    i*:UWIN*:*)
+	echo ${UNAME_MACHINE}-pc-uwin
+	exit ;;
+    amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+	echo x86_64-unknown-cygwin
+	exit ;;
+    p*:CYGWIN*:*)
+	echo powerpcle-unknown-cygwin
+	exit ;;
+    prep*:SunOS:5.*:*)
+	echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    *:GNU:*:*)
+	# the GNU system
+	echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
+	exit ;;
+    *:GNU/*:*:*)
+	# other systems with GNU libc and userland
+	echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC}
+	exit ;;
+    i*86:Minix:*:*)
+	echo ${UNAME_MACHINE}-pc-minix
+	exit ;;
+    aarch64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    aarch64_be:Linux:*:*)
+	UNAME_MACHINE=aarch64_be
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    alpha:Linux:*:*)
+	case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+	  EV5)   UNAME_MACHINE=alphaev5 ;;
+	  EV56)  UNAME_MACHINE=alphaev56 ;;
+	  PCA56) UNAME_MACHINE=alphapca56 ;;
+	  PCA57) UNAME_MACHINE=alphapca56 ;;
+	  EV6)   UNAME_MACHINE=alphaev6 ;;
+	  EV67)  UNAME_MACHINE=alphaev67 ;;
+	  EV68*) UNAME_MACHINE=alphaev68 ;;
+	esac
+	objdump --private-headers /bin/sh | grep -q ld.so.1
+	if test "$?" = 0 ; then LIBC="gnulibc1" ; fi
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    arc:Linux:*:* | arceb:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    arm*:Linux:*:*)
+	eval $set_cc_for_build
+	if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+	    | grep -q __ARM_EABI__
+	then
+	    echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	else
+	    if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+		| grep -q __ARM_PCS_VFP
+	    then
+		echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi
+	    else
+		echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf
+	    fi
+	fi
+	exit ;;
+    avr32*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    cris:Linux:*:*)
+	echo ${UNAME_MACHINE}-axis-linux-${LIBC}
+	exit ;;
+    crisv32:Linux:*:*)
+	echo ${UNAME_MACHINE}-axis-linux-${LIBC}
+	exit ;;
+    frv:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    hexagon:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    i*86:Linux:*:*)
+	echo ${UNAME_MACHINE}-pc-linux-${LIBC}
+	exit ;;
+    ia64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    m32r*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    m68*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    mips:Linux:*:* | mips64:Linux:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#undef CPU
+	#undef ${UNAME_MACHINE}
+	#undef ${UNAME_MACHINE}el
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	CPU=${UNAME_MACHINE}el
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	CPU=${UNAME_MACHINE}
+	#else
+	CPU=
+	#endif
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'`
+	test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; }
+	;;
+    or1k:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    or32:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    padre:Linux:*:*)
+	echo sparc-unknown-linux-${LIBC}
+	exit ;;
+    parisc64:Linux:*:* | hppa64:Linux:*:*)
+	echo hppa64-unknown-linux-${LIBC}
+	exit ;;
+    parisc:Linux:*:* | hppa:Linux:*:*)
+	# Look for CPU level
+	case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+	  PA7*) echo hppa1.1-unknown-linux-${LIBC} ;;
+	  PA8*) echo hppa2.0-unknown-linux-${LIBC} ;;
+	  *)    echo hppa-unknown-linux-${LIBC} ;;
+	esac
+	exit ;;
+    ppc64:Linux:*:*)
+	echo powerpc64-unknown-linux-${LIBC}
+	exit ;;
+    ppc:Linux:*:*)
+	echo powerpc-unknown-linux-${LIBC}
+	exit ;;
+    ppc64le:Linux:*:*)
+	echo powerpc64le-unknown-linux-${LIBC}
+	exit ;;
+    ppcle:Linux:*:*)
+	echo powerpcle-unknown-linux-${LIBC}
+	exit ;;
+    s390:Linux:*:* | s390x:Linux:*:*)
+	echo ${UNAME_MACHINE}-ibm-linux-${LIBC}
+	exit ;;
+    sh64*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    sh*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    sparc:Linux:*:* | sparc64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    tile*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    vax:Linux:*:*)
+	echo ${UNAME_MACHINE}-dec-linux-${LIBC}
+	exit ;;
+    x86_64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    xtensa*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+	exit ;;
+    i*86:DYNIX/ptx:4*:*)
+	# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+	# Earlier versions are messed up and put the nodename in both
+	# sysname and nodename.
+	echo i386-sequent-sysv4
+	exit ;;
+    i*86:UNIX_SV:4.2MP:2.*)
+	# Unixware is an offshoot of SVR4, but it has its own version
+	# number series starting with 2...
+	# I am not positive that other SVR4 systems won't match this,
+	# I just have to hope.  -- rms.
+	# Use sysv4.2uw... so that sysv4* matches it.
+	echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
+	exit ;;
+    i*86:OS/2:*:*)
+	# If we were able to find `uname', then EMX Unix compatibility
+	# is probably installed.
+	echo ${UNAME_MACHINE}-pc-os2-emx
+	exit ;;
+    i*86:XTS-300:*:STOP)
+	echo ${UNAME_MACHINE}-unknown-stop
+	exit ;;
+    i*86:atheos:*:*)
+	echo ${UNAME_MACHINE}-unknown-atheos
+	exit ;;
+    i*86:syllable:*:*)
+	echo ${UNAME_MACHINE}-pc-syllable
+	exit ;;
+    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
+	echo i386-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    i*86:*DOS:*:*)
+	echo ${UNAME_MACHINE}-pc-msdosdjgpp
+	exit ;;
+    i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
+	UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
+	if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+		echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
+	else
+		echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
+	fi
+	exit ;;
+    i*86:*:5:[678]*)
+	# UnixWare 7.x, OpenUNIX and OpenServer 6.
+	case `/bin/uname -X | grep "^Machine"` in
+	    *486*)	     UNAME_MACHINE=i486 ;;
+	    *Pentium)	     UNAME_MACHINE=i586 ;;
+	    *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+	esac
+	echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+	exit ;;
+    i*86:*:3.2:*)
+	if test -f /usr/options/cb.name; then
+		UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+		echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
+	elif /bin/uname -X 2>/dev/null >/dev/null ; then
+		UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+		(/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+		(/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+			&& UNAME_MACHINE=i586
+		(/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		(/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
+	else
+		echo ${UNAME_MACHINE}-pc-sysv32
+	fi
+	exit ;;
+    pc:*:*:*)
+	# Left here for compatibility:
+	# uname -m always prints 'pc' for DJGPP, but it prints nothing about
+	# the processor, so we play safe by assuming i586.
+	# Note: whatever this is, it MUST be the same as what config.sub
+	# prints for the "djgpp" host, or else GDB configury will decide that
+	# this is a cross-build.
+	echo i586-pc-msdosdjgpp
+	exit ;;
+    Intel:Mach:3*:*)
+	echo i386-pc-mach3
+	exit ;;
+    paragon:*:*:*)
+	echo i860-intel-osf1
+	exit ;;
+    i860:*:4.*:*) # i860-SVR4
+	if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+	  echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
+	else # Add other i860-SVR4 vendors below as they are discovered.
+	  echo i860-unknown-sysv${UNAME_RELEASE}  # Unknown i860-SVR4
+	fi
+	exit ;;
+    mini*:CTIX:SYS*5:*)
+	# "miniframe"
+	echo m68010-convergent-sysv
+	exit ;;
+    mc68k:UNIX:SYSTEM5:3.51m)
+	echo m68k-convergent-sysv
+	exit ;;
+    M680?0:D-NIX:5.3:*)
+	echo m68k-diab-dnix
+	exit ;;
+    M68*:*:R3V[5678]*:*)
+	test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+	OS_REL=''
+	test -r /etc/.relid \
+	&& OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	  && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4; exit; } ;;
+    NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+	OS_REL='.3'
+	test -r /etc/.relid \
+	    && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	    && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	    && { echo i586-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
+	    && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+	echo m68k-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    mc68030:UNIX_System_V:4.*:*)
+	echo m68k-atari-sysv4
+	exit ;;
+    TSUNAMI:LynxOS:2.*:*)
+	echo sparc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    rs6000:LynxOS:2.*:*)
+	echo rs6000-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
+	echo powerpc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    SM[BE]S:UNIX_SV:*:*)
+	echo mips-dde-sysv${UNAME_RELEASE}
+	exit ;;
+    RM*:ReliantUNIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    RM*:SINIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    *:SINIX-*:*:*)
+	if uname -p 2>/dev/null >/dev/null ; then
+		UNAME_MACHINE=`(uname -p) 2>/dev/null`
+		echo ${UNAME_MACHINE}-sni-sysv4
+	else
+		echo ns32k-sni-sysv
+	fi
+	exit ;;
+    PENTIUM:*:4.0*:*)	# Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+			# says <Richard.M.Bartel@ccMail.Census.GOV>
+	echo i586-unisys-sysv4
+	exit ;;
+    *:UNIX_System_V:4*:FTX*)
+	# From Gerald Hewes <hewes@openmarket.com>.
+	# How about differentiating between stratus architectures? -djm
+	echo hppa1.1-stratus-sysv4
+	exit ;;
+    *:*:*:FTX*)
+	# From seanf@swdc.stratus.com.
+	echo i860-stratus-sysv4
+	exit ;;
+    i*86:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	echo ${UNAME_MACHINE}-stratus-vos
+	exit ;;
+    *:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	echo hppa1.1-stratus-vos
+	exit ;;
+    mc68*:A/UX:*:*)
+	echo m68k-apple-aux${UNAME_RELEASE}
+	exit ;;
+    news*:NEWS-OS:6*:*)
+	echo mips-sony-newsos6
+	exit ;;
+    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+	if [ -d /usr/nec ]; then
+		echo mips-nec-sysv${UNAME_RELEASE}
+	else
+		echo mips-unknown-sysv${UNAME_RELEASE}
+	fi
+	exit ;;
+    BeBox:BeOS:*:*)	# BeOS running on hardware made by Be, PPC only.
+	echo powerpc-be-beos
+	exit ;;
+    BeMac:BeOS:*:*)	# BeOS running on Mac or Mac clone, PPC only.
+	echo powerpc-apple-beos
+	exit ;;
+    BePC:BeOS:*:*)	# BeOS running on Intel PC compatible.
+	echo i586-pc-beos
+	exit ;;
+    BePC:Haiku:*:*)	# Haiku running on Intel PC compatible.
+	echo i586-pc-haiku
+	exit ;;
+    x86_64:Haiku:*:*)
+	echo x86_64-unknown-haiku
+	exit ;;
+    SX-4:SUPER-UX:*:*)
+	echo sx4-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-5:SUPER-UX:*:*)
+	echo sx5-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-6:SUPER-UX:*:*)
+	echo sx6-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-7:SUPER-UX:*:*)
+	echo sx7-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-8:SUPER-UX:*:*)
+	echo sx8-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-8R:SUPER-UX:*:*)
+	echo sx8r-nec-superux${UNAME_RELEASE}
+	exit ;;
+    Power*:Rhapsody:*:*)
+	echo powerpc-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Rhapsody:*:*)
+	echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Darwin:*:*)
+	UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+	eval $set_cc_for_build
+	if test "$UNAME_PROCESSOR" = unknown ; then
+	    UNAME_PROCESSOR=powerpc
+	fi
+	if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then
+	    if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+		if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+		    (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+		    grep IS_64BIT_ARCH >/dev/null
+		then
+		    case $UNAME_PROCESSOR in
+			i386) UNAME_PROCESSOR=x86_64 ;;
+			powerpc) UNAME_PROCESSOR=powerpc64 ;;
+		    esac
+		fi
+	    fi
+	elif test "$UNAME_PROCESSOR" = i386 ; then
+	    # Avoid executing cc on OS X 10.9, as it ships with a stub
+	    # that puts up a graphical alert prompting to install
+	    # developer tools.  Any system running Mac OS X 10.7 or
+	    # later (Darwin 11 and later) is required to have a 64-bit
+	    # processor. This is not true of the ARM version of Darwin
+	    # that Apple uses in portable devices.
+	    UNAME_PROCESSOR=x86_64
+	fi
+	echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
+	exit ;;
+    *:procnto*:*:* | *:QNX:[0123456789]*:*)
+	UNAME_PROCESSOR=`uname -p`
+	if test "$UNAME_PROCESSOR" = "x86"; then
+		UNAME_PROCESSOR=i386
+		UNAME_MACHINE=pc
+	fi
+	echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
+	exit ;;
+    *:QNX:*:4*)
+	echo i386-pc-qnx
+	exit ;;
+    NEO-?:NONSTOP_KERNEL:*:*)
+	echo neo-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    NSE-*:NONSTOP_KERNEL:*:*)
+	echo nse-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    NSR-?:NONSTOP_KERNEL:*:*)
+	echo nsr-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    *:NonStop-UX:*:*)
+	echo mips-compaq-nonstopux
+	exit ;;
+    BS2000:POSIX*:*:*)
+	echo bs2000-siemens-sysv
+	exit ;;
+    DS/*:UNIX_System_V:*:*)
+	echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
+	exit ;;
+    *:Plan9:*:*)
+	# "uname -m" is not consistent, so use $cputype instead. 386
+	# is converted to i386 for consistency with other x86
+	# operating systems.
+	if test "$cputype" = "386"; then
+	    UNAME_MACHINE=i386
+	else
+	    UNAME_MACHINE="$cputype"
+	fi
+	echo ${UNAME_MACHINE}-unknown-plan9
+	exit ;;
+    *:TOPS-10:*:*)
+	echo pdp10-unknown-tops10
+	exit ;;
+    *:TENEX:*:*)
+	echo pdp10-unknown-tenex
+	exit ;;
+    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+	echo pdp10-dec-tops20
+	exit ;;
+    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+	echo pdp10-xkl-tops20
+	exit ;;
+    *:TOPS-20:*:*)
+	echo pdp10-unknown-tops20
+	exit ;;
+    *:ITS:*:*)
+	echo pdp10-unknown-its
+	exit ;;
+    SEI:*:*:SEIUX)
+	echo mips-sei-seiux${UNAME_RELEASE}
+	exit ;;
+    *:DragonFly:*:*)
+	echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+	exit ;;
+    *:*VMS:*:*)
+	UNAME_MACHINE=`(uname -p) 2>/dev/null`
+	case "${UNAME_MACHINE}" in
+	    A*) echo alpha-dec-vms ; exit ;;
+	    I*) echo ia64-dec-vms ; exit ;;
+	    V*) echo vax-dec-vms ; exit ;;
+	esac ;;
+    *:XENIX:*:SysV)
+	echo i386-pc-xenix
+	exit ;;
+    i*86:skyos:*:*)
+	echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
+	exit ;;
+    i*86:rdos:*:*)
+	echo ${UNAME_MACHINE}-pc-rdos
+	exit ;;
+    i*86:AROS:*:*)
+	echo ${UNAME_MACHINE}-pc-aros
+	exit ;;
+    x86_64:VMkernel:*:*)
+	echo ${UNAME_MACHINE}-unknown-esx
+	exit ;;
+esac
+
+eval $set_cc_for_build
+cat >$dummy.c <<EOF
+#ifdef _SEQUENT_
+# include <sys/types.h>
+# include <sys/utsname.h>
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
+     I don't know....  */
+  printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+  printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+	"4"
+#else
+	""
+#endif
+	); exit (0);
+#endif
+#endif
+
+#if defined (__arm) && defined (__acorn) && defined (__unix)
+  printf ("arm-acorn-riscix\n"); exit (0);
+#endif
+
+#if defined (hp300) && !defined (hpux)
+  printf ("m68k-hp-bsd\n"); exit (0);
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+  int version;
+  version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+  if (version < 4)
+    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+  else
+    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+  exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+  printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+  printf ("ns32k-encore-mach\n"); exit (0);
+#else
+  printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+  printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+  printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+  printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+    struct utsname un;
+
+    uname(&un);
+
+    if (strncmp(un.version, "V2", 2) == 0) {
+	printf ("i386-sequent-ptx2\n"); exit (0);
+    }
+    if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+	printf ("i386-sequent-ptx1\n"); exit (0);
+    }
+    printf ("i386-sequent-ptx\n"); exit (0);
+
+#endif
+
+#if defined (vax)
+# if !defined (ultrix)
+#  include <sys/param.h>
+#  if defined (BSD)
+#   if BSD == 43
+      printf ("vax-dec-bsd4.3\n"); exit (0);
+#   else
+#    if BSD == 199006
+      printf ("vax-dec-bsd4.3reno\n"); exit (0);
+#    else
+      printf ("vax-dec-bsd\n"); exit (0);
+#    endif
+#   endif
+#  else
+    printf ("vax-dec-bsd\n"); exit (0);
+#  endif
+# else
+    printf ("vax-dec-ultrix\n"); exit (0);
+# endif
+#endif
+
+#if defined (alliant) && defined (i860)
+  printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+  exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
+	{ echo "$SYSTEM_NAME"; exit; }
+
+# Apollos put the system type in the environment.
+
+test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
+
+# Convex versions that predate uname can use getsysinfo(1)
+
+if [ -x /usr/convex/getsysinfo ]
+then
+    case `getsysinfo -f cpu_type` in
+    c1*)
+	echo c1-convex-bsd
+	exit ;;
+    c2*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    c34*)
+	echo c34-convex-bsd
+	exit ;;
+    c38*)
+	echo c38-convex-bsd
+	exit ;;
+    c4*)
+	echo c4-convex-bsd
+	exit ;;
+    esac
+fi
+
+cat >&2 <<EOF
+$0: unable to guess system type
+
+This script, last modified $timestamp, has failed to recognize
+the operating system you are using. It is advised that you
+download the most up to date version of the config scripts from
+
+  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
+and
+  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+If the version you run ($0) is already up to date, please
+send the following data and any information you think might be
+pertinent to <config-patches@gnu.org> in order to provide the needed
+information to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo               = `(hostinfo) 2>/dev/null`
+/bin/universe          = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch              = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = ${UNAME_MACHINE}
+UNAME_RELEASE = ${UNAME_RELEASE}
+UNAME_SYSTEM  = ${UNAME_SYSTEM}
+UNAME_VERSION = ${UNAME_VERSION}
+EOF
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/config.sub b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/config.sub
new file mode 100755
index 0000000..61cb4bc
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/config.sub
@@ -0,0 +1,1793 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+#   Copyright 1992-2013 Free Software Foundation, Inc.
+
+timestamp='2013-10-01'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program.  This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+
+
+# Please send patches with a ChangeLog entry to config-patches@gnu.org.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support.  The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+#	CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+#	CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
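+#
+# For illustration only -- a sketch of typical inputs and the canonical
+# forms this script is expected to print (the inputs below are examples,
+# not an exhaustive list):
+#
+#   $ ./config.sub amd64-linux
+#   => x86_64-pc-linux-gnu
+#   $ ./config.sub sun4
+#   => sparc-sun-sunos4.1.1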
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS
+       $0 [OPTION] ALIAS
+
+Canonicalize a configuration name.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright 1992-2013 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help"
+       exit 1 ;;
+
+    *local*)
+       # First pass through any local machine types.
+       echo $1
+       exit ;;
+
+    * )
+       break ;;
+  esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+    exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+    exit 1;;
+esac
+
+# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
+# Here we must recognize all the valid KERNEL-OS combinations.
+maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
+case $maybe_os in
+  nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \
+  linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \
+  knetbsd*-gnu* | netbsd*-gnu* | \
+  kopensolaris*-gnu* | \
+  storm-chaos* | os2-emx* | rtmk-nova*)
+    os=-$maybe_os
+    basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
+    ;;
+  android-linux)
+    os=-linux-android
+    basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown
+    ;;
+  *)
+    basic_machine=`echo $1 | sed 's/-[^-]*$//'`
+    if [ $basic_machine != $1 ]
+    then os=`echo $1 | sed 's/.*-/-/'`
+    else os=; fi
+    ;;
+esac
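+
+# For example (an illustrative trace of the split above, not exhaustive):
+# an input of `arm-unknown-linux-gnueabi' yields maybe_os=linux-gnueabi,
+# hence os=-linux-gnueabi and basic_machine=arm-unknown.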
+
+### Let's recognize common machines as not being operating systems so
+### that things like config.sub decstation-3100 work.  We also
+### recognize some manufacturers as not being operating systems, so we
+### can provide default operating systems below.
+case $os in
+	-sun*os*)
+		# Prevent following clause from handling this invalid input.
+		;;
+	-dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
+	-att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
+	-unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
+	-convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
+	-c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
+	-harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
+	-apple | -axis | -knuth | -cray | -microblaze*)
+		os=
+		basic_machine=$1
+		;;
+	-bluegene*)
+		os=-cnk
+		;;
+	-sim | -cisco | -oki | -wec | -winbond)
+		os=
+		basic_machine=$1
+		;;
+	-scout)
+		;;
+	-wrs)
+		os=-vxworks
+		basic_machine=$1
+		;;
+	-chorusos*)
+		os=-chorusos
+		basic_machine=$1
+		;;
+	-chorusrdb)
+		os=-chorusrdb
+		basic_machine=$1
+		;;
+	-hiux*)
+		os=-hiuxwe2
+		;;
+	-sco6)
+		os=-sco5v6
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5)
+		os=-sco3.2v5
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco4)
+		os=-sco3.2v4
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2.[4-9]*)
+		os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2v[4-9]*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5v6*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco*)
+		os=-sco3.2v2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-udk*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-isc)
+		os=-isc2.2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-clix*)
+		basic_machine=clipper-intergraph
+		;;
+	-isc*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-lynx*178)
+		os=-lynxos178
+		;;
+	-lynx*5)
+		os=-lynxos5
+		;;
+	-lynx*)
+		os=-lynxos
+		;;
+	-ptx*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
+		;;
+	-windowsnt*)
+		os=`echo $os | sed -e 's/windowsnt/winnt/'`
+		;;
+	-psos*)
+		os=-psos
+		;;
+	-mint | -mint[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+esac
+
+# Decode aliases for certain CPU-COMPANY combinations.
+case $basic_machine in
+	# Recognize the basic CPU types without company name.
+	# Some are omitted here because they have special meanings below.
+	1750a | 580 \
+	| a29k \
+	| aarch64 | aarch64_be \
+	| alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
+	| alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
+	| am33_2.0 \
+	| arc | arceb \
+	| arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \
+	| avr | avr32 \
+	| be32 | be64 \
+	| bfin \
+	| c4x | c8051 | clipper \
+	| d10v | d30v | dlx | dsp16xx \
+	| epiphany \
+	| fido | fr30 | frv \
+	| h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+	| hexagon \
+	| i370 | i860 | i960 | ia64 \
+	| ip2k | iq2000 \
+	| k1om \
+	| le32 | le64 \
+	| lm32 \
+	| m32c | m32r | m32rle | m68000 | m68k | m88k \
+	| maxq | mb | microblaze | microblazeel | mcore | mep | metag \
+	| mips | mipsbe | mipseb | mipsel | mipsle \
+	| mips16 \
+	| mips64 | mips64el \
+	| mips64octeon | mips64octeonel \
+	| mips64orion | mips64orionel \
+	| mips64r5900 | mips64r5900el \
+	| mips64vr | mips64vrel \
+	| mips64vr4100 | mips64vr4100el \
+	| mips64vr4300 | mips64vr4300el \
+	| mips64vr5000 | mips64vr5000el \
+	| mips64vr5900 | mips64vr5900el \
+	| mipsisa32 | mipsisa32el \
+	| mipsisa32r2 | mipsisa32r2el \
+	| mipsisa64 | mipsisa64el \
+	| mipsisa64r2 | mipsisa64r2el \
+	| mipsisa64sb1 | mipsisa64sb1el \
+	| mipsisa64sr71k | mipsisa64sr71kel \
+	| mipsr5900 | mipsr5900el \
+	| mipstx39 | mipstx39el \
+	| mn10200 | mn10300 \
+	| moxie \
+	| mt \
+	| msp430 \
+	| nds32 | nds32le | nds32be \
+	| nios | nios2 | nios2eb | nios2el \
+	| ns16k | ns32k \
+	| open8 \
+	| or1k | or32 \
+	| pdp10 | pdp11 | pj | pjl \
+	| powerpc | powerpc64 | powerpc64le | powerpcle \
+	| pyramid \
+	| rl78 | rx \
+	| score \
+	| sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
+	| sh64 | sh64le \
+	| sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
+	| sparcv8 | sparcv9 | sparcv9b | sparcv9v \
+	| spu \
+	| tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \
+	| ubicom32 \
+	| v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \
+	| we32k \
+	| x86 | xc16x | xstormy16 | xtensa \
+	| z8k | z80)
+		basic_machine=$basic_machine-unknown
+		;;
+	c54x)
+		basic_machine=tic54x-unknown
+		;;
+	c55x)
+		basic_machine=tic55x-unknown
+		;;
+	c6x)
+		basic_machine=tic6x-unknown
+		;;
+	m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip)
+		basic_machine=$basic_machine-unknown
+		os=-none
+		;;
+	m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
+		;;
+	ms1)
+		basic_machine=mt-unknown
+		;;
+
+	strongarm | thumb | xscale)
+		basic_machine=arm-unknown
+		;;
+	xgate)
+		basic_machine=$basic_machine-unknown
+		os=-none
+		;;
+	xscaleeb)
+		basic_machine=armeb-unknown
+		;;
+
+	xscaleel)
+		basic_machine=armel-unknown
+		;;
+
+	# We use `pc' rather than `unknown'
+	# because (1) that's what they normally are, and
+	# (2) the word "unknown" tends to confuse beginning users.
+	i*86 | x86_64)
+	  basic_machine=$basic_machine-pc
+	  ;;
+	# Object if more than one company name word.
+	*-*-*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+	# Recognize the basic CPU types with company name.
+	580-* \
+	| a29k-* \
+	| aarch64-* | aarch64_be-* \
+	| alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
+	| alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
+	| alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \
+	| arm-*  | armbe-* | armle-* | armeb-* | armv*-* \
+	| avr-* | avr32-* \
+	| be32-* | be64-* \
+	| bfin-* | bs2000-* \
+	| c[123]* | c30-* | [cjt]90-* | c4x-* \
+	| c8051-* | clipper-* | craynv-* | cydra-* \
+	| d10v-* | d30v-* | dlx-* \
+	| elxsi-* \
+	| f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \
+	| h8300-* | h8500-* \
+	| hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
+	| hexagon-* \
+	| i*86-* | i860-* | i960-* | ia64-* \
+	| ip2k-* | iq2000-* \
+	| k1om-* \
+	| le32-* | le64-* \
+	| lm32-* \
+	| m32c-* | m32r-* | m32rle-* \
+	| m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
+	| m88110-* | m88k-* | maxq-* | mcore-* | metag-* \
+	| microblaze-* | microblazeel-* \
+	| mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
+	| mips16-* \
+	| mips64-* | mips64el-* \
+	| mips64octeon-* | mips64octeonel-* \
+	| mips64orion-* | mips64orionel-* \
+	| mips64r5900-* | mips64r5900el-* \
+	| mips64vr-* | mips64vrel-* \
+	| mips64vr4100-* | mips64vr4100el-* \
+	| mips64vr4300-* | mips64vr4300el-* \
+	| mips64vr5000-* | mips64vr5000el-* \
+	| mips64vr5900-* | mips64vr5900el-* \
+	| mipsisa32-* | mipsisa32el-* \
+	| mipsisa32r2-* | mipsisa32r2el-* \
+	| mipsisa64-* | mipsisa64el-* \
+	| mipsisa64r2-* | mipsisa64r2el-* \
+	| mipsisa64sb1-* | mipsisa64sb1el-* \
+	| mipsisa64sr71k-* | mipsisa64sr71kel-* \
+	| mipsr5900-* | mipsr5900el-* \
+	| mipstx39-* | mipstx39el-* \
+	| mmix-* \
+	| mt-* \
+	| msp430-* \
+	| nds32-* | nds32le-* | nds32be-* \
+	| nios-* | nios2-* | nios2eb-* | nios2el-* \
+	| none-* | np1-* | ns16k-* | ns32k-* \
+	| open8-* \
+	| orion-* \
+	| pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
+	| powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \
+	| pyramid-* \
+	| rl78-* | romp-* | rs6000-* | rx-* \
+	| sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
+	| shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
+	| sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
+	| sparclite-* \
+	| sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \
+	| tahoe-* \
+	| tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
+	| tile*-* \
+	| tron-* \
+	| ubicom32-* \
+	| v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \
+	| vax-* \
+	| we32k-* \
+	| x86-* | x86_64-* | xc16x-* | xps100-* \
+	| xstormy16-* | xtensa*-* \
+	| ymp-* \
+	| z8k-* | z80-*)
+		;;
+	# Recognize the basic CPU types without company name, with glob match.
+	xtensa*)
+		basic_machine=$basic_machine-unknown
+		;;
+	# Recognize the various machine names and aliases which stand
+	# for a CPU type and a company and sometimes even an OS.
+	386bsd)
+		basic_machine=i386-unknown
+		os=-bsd
+		;;
+	3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+		basic_machine=m68000-att
+		;;
+	3b*)
+		basic_machine=we32k-att
+		;;
+	a29khif)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	abacus)
+		basic_machine=abacus-unknown
+		;;
+	adobe68k)
+		basic_machine=m68010-adobe
+		os=-scout
+		;;
+	alliant | fx80)
+		basic_machine=fx80-alliant
+		;;
+	altos | altos3068)
+		basic_machine=m68k-altos
+		;;
+	am29k)
+		basic_machine=a29k-none
+		os=-bsd
+		;;
+	amd64)
+		basic_machine=x86_64-pc
+		;;
+	amd64-*)
+		basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	amdahl)
+		basic_machine=580-amdahl
+		os=-sysv
+		;;
+	amiga | amiga-*)
+		basic_machine=m68k-unknown
+		;;
+	amigaos | amigados)
+		basic_machine=m68k-unknown
+		os=-amigaos
+		;;
+	amigaunix | amix)
+		basic_machine=m68k-unknown
+		os=-sysv4
+		;;
+	apollo68)
+		basic_machine=m68k-apollo
+		os=-sysv
+		;;
+	apollo68bsd)
+		basic_machine=m68k-apollo
+		os=-bsd
+		;;
+	aros)
+		basic_machine=i386-pc
+		os=-aros
+		;;
+	aux)
+		basic_machine=m68k-apple
+		os=-aux
+		;;
+	balance)
+		basic_machine=ns32k-sequent
+		os=-dynix
+		;;
+	blackfin)
+		basic_machine=bfin-unknown
+		os=-linux
+		;;
+	blackfin-*)
+		basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	bluegene*)
+		basic_machine=powerpc-ibm
+		os=-cnk
+		;;
+	c54x-*)
+		basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c55x-*)
+		basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c6x-*)
+		basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c90)
+		basic_machine=c90-cray
+		os=-unicos
+		;;
+	cegcc)
+		basic_machine=arm-unknown
+		os=-cegcc
+		;;
+	convex-c1)
+		basic_machine=c1-convex
+		os=-bsd
+		;;
+	convex-c2)
+		basic_machine=c2-convex
+		os=-bsd
+		;;
+	convex-c32)
+		basic_machine=c32-convex
+		os=-bsd
+		;;
+	convex-c34)
+		basic_machine=c34-convex
+		os=-bsd
+		;;
+	convex-c38)
+		basic_machine=c38-convex
+		os=-bsd
+		;;
+	cray | j90)
+		basic_machine=j90-cray
+		os=-unicos
+		;;
+	craynv)
+		basic_machine=craynv-cray
+		os=-unicosmp
+		;;
+	cr16 | cr16-*)
+		basic_machine=cr16-unknown
+		os=-elf
+		;;
+	crds | unos)
+		basic_machine=m68k-crds
+		;;
+	crisv32 | crisv32-* | etraxfs*)
+		basic_machine=crisv32-axis
+		;;
+	cris | cris-* | etrax*)
+		basic_machine=cris-axis
+		;;
+	crx)
+		basic_machine=crx-unknown
+		os=-elf
+		;;
+	da30 | da30-*)
+		basic_machine=m68k-da30
+		;;
+	decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
+		basic_machine=mips-dec
+		;;
+	decsystem10* | dec10*)
+		basic_machine=pdp10-dec
+		os=-tops10
+		;;
+	decsystem20* | dec20*)
+		basic_machine=pdp10-dec
+		os=-tops20
+		;;
+	delta | 3300 | motorola-3300 | motorola-delta \
+	      | 3300-motorola | delta-motorola)
+		basic_machine=m68k-motorola
+		;;
+	delta88)
+		basic_machine=m88k-motorola
+		os=-sysv3
+		;;
+	dicos)
+		basic_machine=i686-pc
+		os=-dicos
+		;;
+	djgpp)
+		basic_machine=i586-pc
+		os=-msdosdjgpp
+		;;
+	dpx20 | dpx20-*)
+		basic_machine=rs6000-bull
+		os=-bosx
+		;;
+	dpx2* | dpx2*-bull)
+		basic_machine=m68k-bull
+		os=-sysv3
+		;;
+	ebmon29k)
+		basic_machine=a29k-amd
+		os=-ebmon
+		;;
+	elxsi)
+		basic_machine=elxsi-elxsi
+		os=-bsd
+		;;
+	encore | umax | mmax)
+		basic_machine=ns32k-encore
+		;;
+	es1800 | OSE68k | ose68k | ose | OSE)
+		basic_machine=m68k-ericsson
+		os=-ose
+		;;
+	fx2800)
+		basic_machine=i860-alliant
+		;;
+	genix)
+		basic_machine=ns32k-ns
+		;;
+	gmicro)
+		basic_machine=tron-gmicro
+		os=-sysv
+		;;
+	go32)
+		basic_machine=i386-pc
+		os=-go32
+		;;
+	h3050r* | hiux*)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	h8300hms)
+		basic_machine=h8300-hitachi
+		os=-hms
+		;;
+	h8300xray)
+		basic_machine=h8300-hitachi
+		os=-xray
+		;;
+	h8500hms)
+		basic_machine=h8500-hitachi
+		os=-hms
+		;;
+	harris)
+		basic_machine=m88k-harris
+		os=-sysv3
+		;;
+	hp300-*)
+		basic_machine=m68k-hp
+		;;
+	hp300bsd)
+		basic_machine=m68k-hp
+		os=-bsd
+		;;
+	hp300hpux)
+		basic_machine=m68k-hp
+		os=-hpux
+		;;
+	hp3k9[0-9][0-9] | hp9[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k2[0-9][0-9] | hp9k31[0-9])
+		basic_machine=m68000-hp
+		;;
+	hp9k3[2-9][0-9])
+		basic_machine=m68k-hp
+		;;
+	hp9k6[0-9][0-9] | hp6[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k7[0-79][0-9] | hp7[0-79][0-9])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k78[0-9] | hp78[0-9])
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][13679] | hp8[0-9][13679])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][0-9] | hp8[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hppa-next)
+		os=-nextstep3
+		;;
+	hppaosf)
+		basic_machine=hppa1.1-hp
+		os=-osf
+		;;
+	hppro)
+		basic_machine=hppa1.1-hp
+		os=-proelf
+		;;
+	i370-ibm* | ibm*)
+		basic_machine=i370-ibm
+		;;
+	i*86v32)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv32
+		;;
+	i*86v4*)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv4
+		;;
+	i*86v)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv
+		;;
+	i*86sol2)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-solaris2
+		;;
+	i386mach)
+		basic_machine=i386-mach
+		os=-mach
+		;;
+	i386-vsta | vsta)
+		basic_machine=i386-unknown
+		os=-vsta
+		;;
+	iris | iris4d)
+		basic_machine=mips-sgi
+		case $os in
+		    -irix*)
+			;;
+		    *)
+			os=-irix4
+			;;
+		esac
+		;;
+	isi68 | isi)
+		basic_machine=m68k-isi
+		os=-sysv
+		;;
+	m68knommu)
+		basic_machine=m68k-unknown
+		os=-linux
+		;;
+	m68knommu-*)
+		basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	m88k-omron*)
+		basic_machine=m88k-omron
+		;;
+	magnum | m3230)
+		basic_machine=mips-mips
+		os=-sysv
+		;;
+	merlin)
+		basic_machine=ns32k-utek
+		os=-sysv
+		;;
+	microblaze*)
+		basic_machine=microblaze-xilinx
+		;;
+	mingw64)
+		basic_machine=x86_64-pc
+		os=-mingw64
+		;;
+	mingw32)
+		basic_machine=i686-pc
+		os=-mingw32
+		;;
+	mingw32ce)
+		basic_machine=arm-unknown
+		os=-mingw32ce
+		;;
+	miniframe)
+		basic_machine=m68000-convergent
+		;;
+	*mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+	mips3*-*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
+		;;
+	mips3*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
+		;;
+	monitor)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	morphos)
+		basic_machine=powerpc-unknown
+		os=-morphos
+		;;
+	msdos)
+		basic_machine=i386-pc
+		os=-msdos
+		;;
+	ms1-*)
+		basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'`
+		;;
+	msys)
+		basic_machine=i686-pc
+		os=-msys
+		;;
+	mvs)
+		basic_machine=i370-ibm
+		os=-mvs
+		;;
+	nacl)
+		basic_machine=le32-unknown
+		os=-nacl
+		;;
+	ncr3000)
+		basic_machine=i486-ncr
+		os=-sysv4
+		;;
+	netbsd386)
+		basic_machine=i386-unknown
+		os=-netbsd
+		;;
+	netwinder)
+		basic_machine=armv4l-rebel
+		os=-linux
+		;;
+	news | news700 | news800 | news900)
+		basic_machine=m68k-sony
+		os=-newsos
+		;;
+	news1000)
+		basic_machine=m68030-sony
+		os=-newsos
+		;;
+	news-3600 | risc-news)
+		basic_machine=mips-sony
+		os=-newsos
+		;;
+	necv70)
+		basic_machine=v70-nec
+		os=-sysv
+		;;
+	next | m*-next )
+		basic_machine=m68k-next
+		case $os in
+		    -nextstep* )
+			;;
+		    -ns2*)
+		      os=-nextstep2
+			;;
+		    *)
+		      os=-nextstep3
+			;;
+		esac
+		;;
+	nh3000)
+		basic_machine=m68k-harris
+		os=-cxux
+		;;
+	nh[45]000)
+		basic_machine=m88k-harris
+		os=-cxux
+		;;
+	nindy960)
+		basic_machine=i960-intel
+		os=-nindy
+		;;
+	mon960)
+		basic_machine=i960-intel
+		os=-mon960
+		;;
+	nonstopux)
+		basic_machine=mips-compaq
+		os=-nonstopux
+		;;
+	np1)
+		basic_machine=np1-gould
+		;;
+	neo-tandem)
+		basic_machine=neo-tandem
+		;;
+	nse-tandem)
+		basic_machine=nse-tandem
+		;;
+	nsr-tandem)
+		basic_machine=nsr-tandem
+		;;
+	op50n-* | op60c-*)
+		basic_machine=hppa1.1-oki
+		os=-proelf
+		;;
+	openrisc | openrisc-*)
+		basic_machine=or32-unknown
+		;;
+	os400)
+		basic_machine=powerpc-ibm
+		os=-os400
+		;;
+	OSE68000 | ose68000)
+		basic_machine=m68000-ericsson
+		os=-ose
+		;;
+	os68k)
+		basic_machine=m68k-none
+		os=-os68k
+		;;
+	pa-hitachi)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	paragon)
+		basic_machine=i860-intel
+		os=-osf
+		;;
+	parisc)
+		basic_machine=hppa-unknown
+		os=-linux
+		;;
+	parisc-*)
+		basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	pbd)
+		basic_machine=sparc-tti
+		;;
+	pbb)
+		basic_machine=m68k-tti
+		;;
+	pc532 | pc532-*)
+		basic_machine=ns32k-pc532
+		;;
+	pc98)
+		basic_machine=i386-pc
+		;;
+	pc98-*)
+		basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium | p5 | k5 | k6 | nexgen | viac3)
+		basic_machine=i586-pc
+		;;
+	pentiumpro | p6 | 6x86 | athlon | athlon_*)
+		basic_machine=i686-pc
+		;;
+	pentiumii | pentium2 | pentiumiii | pentium3)
+		basic_machine=i686-pc
+		;;
+	pentium4)
+		basic_machine=i786-pc
+		;;
+	pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+		basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumpro-* | p6-* | 6x86-* | athlon-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium4-*)
+		basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pn)
+		basic_machine=pn-gould
+		;;
+	power)	basic_machine=power-ibm
+		;;
+	ppc | ppcbe)	basic_machine=powerpc-unknown
+		;;
+	ppc-* | ppcbe-*)
+		basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppcle | powerpclittle | ppc-le | powerpc-little)
+		basic_machine=powerpcle-unknown
+		;;
+	ppcle-* | powerpclittle-*)
+		basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64)	basic_machine=powerpc64-unknown
+		;;
+	ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64le | powerpc64little | ppc64-le | powerpc64-little)
+		basic_machine=powerpc64le-unknown
+		;;
+	ppc64le-* | powerpc64little-*)
+		basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ps2)
+		basic_machine=i386-ibm
+		;;
+	pw32)
+		basic_machine=i586-unknown
+		os=-pw32
+		;;
+	rdos | rdos64)
+		basic_machine=x86_64-pc
+		os=-rdos
+		;;
+	rdos32)
+		basic_machine=i386-pc
+		os=-rdos
+		;;
+	rom68k)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	rm[46]00)
+		basic_machine=mips-siemens
+		;;
+	rtpc | rtpc-*)
+		basic_machine=romp-ibm
+		;;
+	s390 | s390-*)
+		basic_machine=s390-ibm
+		;;
+	s390x | s390x-*)
+		basic_machine=s390x-ibm
+		;;
+	sa29200)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	sb1)
+		basic_machine=mipsisa64sb1-unknown
+		;;
+	sb1el)
+		basic_machine=mipsisa64sb1el-unknown
+		;;
+	sde)
+		basic_machine=mipsisa32-sde
+		os=-elf
+		;;
+	sei)
+		basic_machine=mips-sei
+		os=-seiux
+		;;
+	sequent)
+		basic_machine=i386-sequent
+		;;
+	sh)
+		basic_machine=sh-hitachi
+		os=-hms
+		;;
+	sh5el)
+		basic_machine=sh5le-unknown
+		;;
+	sh64)
+		basic_machine=sh64-unknown
+		;;
+	sparclite-wrs | simso-wrs)
+		basic_machine=sparclite-wrs
+		os=-vxworks
+		;;
+	sps7)
+		basic_machine=m68k-bull
+		os=-sysv2
+		;;
+	spur)
+		basic_machine=spur-unknown
+		;;
+	st2000)
+		basic_machine=m68k-tandem
+		;;
+	stratus)
+		basic_machine=i860-stratus
+		os=-sysv4
+		;;
+	strongarm-* | thumb-*)
+		basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	sun2)
+		basic_machine=m68000-sun
+		;;
+	sun2os3)
+		basic_machine=m68000-sun
+		os=-sunos3
+		;;
+	sun2os4)
+		basic_machine=m68000-sun
+		os=-sunos4
+		;;
+	sun3os3)
+		basic_machine=m68k-sun
+		os=-sunos3
+		;;
+	sun3os4)
+		basic_machine=m68k-sun
+		os=-sunos4
+		;;
+	sun4os3)
+		basic_machine=sparc-sun
+		os=-sunos3
+		;;
+	sun4os4)
+		basic_machine=sparc-sun
+		os=-sunos4
+		;;
+	sun4sol2)
+		basic_machine=sparc-sun
+		os=-solaris2
+		;;
+	sun3 | sun3-*)
+		basic_machine=m68k-sun
+		;;
+	sun4)
+		basic_machine=sparc-sun
+		;;
+	sun386 | sun386i | roadrunner)
+		basic_machine=i386-sun
+		;;
+	sv1)
+		basic_machine=sv1-cray
+		os=-unicos
+		;;
+	symmetry)
+		basic_machine=i386-sequent
+		os=-dynix
+		;;
+	t3e)
+		basic_machine=alphaev5-cray
+		os=-unicos
+		;;
+	t90)
+		basic_machine=t90-cray
+		os=-unicos
+		;;
+	tile*)
+		basic_machine=$basic_machine-unknown
+		os=-linux-gnu
+		;;
+	tx39)
+		basic_machine=mipstx39-unknown
+		;;
+	tx39el)
+		basic_machine=mipstx39el-unknown
+		;;
+	toad1)
+		basic_machine=pdp10-xkl
+		os=-tops20
+		;;
+	tower | tower-32)
+		basic_machine=m68k-ncr
+		;;
+	tpf)
+		basic_machine=s390x-ibm
+		os=-tpf
+		;;
+	udi29k)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	ultra3)
+		basic_machine=a29k-nyu
+		os=-sym1
+		;;
+	v810 | necv810)
+		basic_machine=v810-nec
+		os=-none
+		;;
+	vaxv)
+		basic_machine=vax-dec
+		os=-sysv
+		;;
+	vms)
+		basic_machine=vax-dec
+		os=-vms
+		;;
+	vpp*|vx|vx-*)
+		basic_machine=f301-fujitsu
+		;;
+	vxworks960)
+		basic_machine=i960-wrs
+		os=-vxworks
+		;;
+	vxworks68)
+		basic_machine=m68k-wrs
+		os=-vxworks
+		;;
+	vxworks29k)
+		basic_machine=a29k-wrs
+		os=-vxworks
+		;;
+	w65*)
+		basic_machine=w65-wdc
+		os=-none
+		;;
+	w89k-*)
+		basic_machine=hppa1.1-winbond
+		os=-proelf
+		;;
+	xbox)
+		basic_machine=i686-pc
+		os=-mingw32
+		;;
+	xps | xps100)
+		basic_machine=xps100-honeywell
+		;;
+	xscale-* | xscalee[bl]-*)
+		basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'`
+		;;
+	ymp)
+		basic_machine=ymp-cray
+		os=-unicos
+		;;
+	z8k-*-coff)
+		basic_machine=z8k-unknown
+		os=-sim
+		;;
+	z80-*-coff)
+		basic_machine=z80-unknown
+		os=-sim
+		;;
+	none)
+		basic_machine=none-none
+		os=-none
+		;;
+
+# Here we handle the default manufacturer of certain CPU types.  It is in
+# some cases the only manufacturer, in others, it is the most popular.
+	w89k)
+		basic_machine=hppa1.1-winbond
+		;;
+	op50n)
+		basic_machine=hppa1.1-oki
+		;;
+	op60c)
+		basic_machine=hppa1.1-oki
+		;;
+	romp)
+		basic_machine=romp-ibm
+		;;
+	mmix)
+		basic_machine=mmix-knuth
+		;;
+	rs6000)
+		basic_machine=rs6000-ibm
+		;;
+	vax)
+		basic_machine=vax-dec
+		;;
+	pdp10)
+		# there are many clones, so DEC is not a safe bet
+		basic_machine=pdp10-unknown
+		;;
+	pdp11)
+		basic_machine=pdp11-dec
+		;;
+	we32k)
+		basic_machine=we32k-att
+		;;
+	sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele)
+		basic_machine=sh-unknown
+		;;
+	sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v)
+		basic_machine=sparc-sun
+		;;
+	cydra)
+		basic_machine=cydra-cydrome
+		;;
+	orion)
+		basic_machine=orion-highlevel
+		;;
+	orion105)
+		basic_machine=clipper-highlevel
+		;;
+	mac | mpw | mac-mpw)
+		basic_machine=m68k-apple
+		;;
+	pmac | pmac-mpw)
+		basic_machine=powerpc-apple
+		;;
+	*-unknown)
+		# Make sure to match an already-canonicalized machine name.
+		;;
+	*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $basic_machine in
+	*-digital*)
+		basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
+		;;
+	*-commodore*)
+		basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
+		;;
+	*)
+		;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if [ x"$os" != x"" ]
+then
+case $os in
+	# First match some system type aliases
+	# that might get confused with valid system types.
+	# -solaris* is a basic system type, with this one exception.
+	-auroraux)
+		os=-auroraux
+		;;
+	-solaris1 | -solaris1.*)
+		os=`echo $os | sed -e 's|solaris1|sunos4|'`
+		;;
+	-solaris)
+		os=-solaris2
+		;;
+	-svr4*)
+		os=-sysv4
+		;;
+	-unixware*)
+		os=-sysv4.2uw
+		;;
+	-gnu/linux*)
+		os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
+		;;
+	# First accept the basic system types.
+	# The portable systems come first.
+	# Each alternative MUST END IN A *, to match a version number.
+	# -sysv* is not here because it comes later, after sysvr4.
+	-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
+	      | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\
+	      | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \
+	      | -sym* | -kopensolaris* | -plan9* \
+	      | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
+	      | -aos* | -aros* \
+	      | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
+	      | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
+	      | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \
+	      | -bitrig* | -openbsd* | -solidbsd* \
+	      | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
+	      | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
+	      | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
+	      | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
+	      | -chorusos* | -chorusrdb* | -cegcc* \
+	      | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
+	      | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \
+	      | -linux-newlib* | -linux-musl* | -linux-uclibc* \
+	      | -uxpv* | -beos* | -mpeix* | -udk* \
+	      | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
+	      | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
+	      | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
+	      | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
+	      | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
+	      | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
+	      | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*)
+	# Remember, each alternative MUST END IN *, to match a version number.
+		;;
+	-qnx*)
+		case $basic_machine in
+		    x86-* | i*86-*)
+			;;
+		    *)
+			os=-nto$os
+			;;
+		esac
+		;;
+	-nto-qnx*)
+		;;
+	-nto*)
+		os=`echo $os | sed -e 's|nto|nto-qnx|'`
+		;;
+	-sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
+	      | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
+	      | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
+		;;
+	-mac*)
+		os=`echo $os | sed -e 's|mac|macos|'`
+		;;
+	-linux-dietlibc)
+		os=-linux-dietlibc
+		;;
+	-linux*)
+		os=`echo $os | sed -e 's|linux|linux-gnu|'`
+		;;
+	-sunos5*)
+		os=`echo $os | sed -e 's|sunos5|solaris2|'`
+		;;
+	-sunos6*)
+		os=`echo $os | sed -e 's|sunos6|solaris3|'`
+		;;
+	-opened*)
+		os=-openedition
+		;;
+	-os400*)
+		os=-os400
+		;;
+	-wince*)
+		os=-wince
+		;;
+	-osfrose*)
+		os=-osfrose
+		;;
+	-osf*)
+		os=-osf
+		;;
+	-utek*)
+		os=-bsd
+		;;
+	-dynix*)
+		os=-bsd
+		;;
+	-acis*)
+		os=-aos
+		;;
+	-atheos*)
+		os=-atheos
+		;;
+	-syllable*)
+		os=-syllable
+		;;
+	-386bsd)
+		os=-bsd
+		;;
+	-ctix* | -uts*)
+		os=-sysv
+		;;
+	-nova*)
+		os=-rtmk-nova
+		;;
+	-ns2 )
+		os=-nextstep2
+		;;
+	-nsk*)
+		os=-nsk
+		;;
+	# Preserve the version number of sinix5.
+	-sinix5.*)
+		os=`echo $os | sed -e 's|sinix|sysv|'`
+		;;
+	-sinix*)
+		os=-sysv4
+		;;
+	-tpf*)
+		os=-tpf
+		;;
+	-triton*)
+		os=-sysv3
+		;;
+	-oss*)
+		os=-sysv3
+		;;
+	-svr4)
+		os=-sysv4
+		;;
+	-svr3)
+		os=-sysv3
+		;;
+	-sysvr4)
+		os=-sysv4
+		;;
+	# This must come after -sysvr4.
+	-sysv*)
+		;;
+	-ose*)
+		os=-ose
+		;;
+	-es1800*)
+		os=-ose
+		;;
+	-xenix)
+		os=-xenix
+		;;
+	-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+		os=-mint
+		;;
+	-aros*)
+		os=-aros
+		;;
+	-zvmoe)
+		os=-zvmoe
+		;;
+	-dicos*)
+		os=-dicos
+		;;
+	-nacl*)
+		;;
+	-none)
+		;;
+	*)
+		# Get rid of the `-' at the beginning of $os.
+		os=`echo $os | sed 's/[^-]*-//'`
+		echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
+		exit 1
+		;;
+esac
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine, or put another way, the most popular os provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system.  Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
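+#
+# For example (illustrative only): `./config.sub decstation' reaches this
+# point with basic_machine=mips-dec and an empty $os, and the *-dec entry
+# below supplies os=-ultrix4.2, giving mips-dec-ultrix4.2.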
+
+case $basic_machine in
+	score-*)
+		os=-elf
+		;;
+	spu-*)
+		os=-elf
+		;;
+	*-acorn)
+		os=-riscix1.2
+		;;
+	arm*-rebel)
+		os=-linux
+		;;
+	arm*-semi)
+		os=-aout
+		;;
+	c4x-* | tic4x-*)
+		os=-coff
+		;;
+	c8051-*)
+		os=-elf
+		;;
+	hexagon-*)
+		os=-elf
+		;;
+	tic54x-*)
+		os=-coff
+		;;
+	tic55x-*)
+		os=-coff
+		;;
+	tic6x-*)
+		os=-coff
+		;;
+	# This must come before the *-dec entry.
+	pdp10-*)
+		os=-tops20
+		;;
+	pdp11-*)
+		os=-none
+		;;
+	*-dec | vax-*)
+		os=-ultrix4.2
+		;;
+	m68*-apollo)
+		os=-domain
+		;;
+	i386-sun)
+		os=-sunos4.0.2
+		;;
+	m68000-sun)
+		os=-sunos3
+		;;
+	m68*-cisco)
+		os=-aout
+		;;
+	mep-*)
+		os=-elf
+		;;
+	mips*-cisco)
+		os=-elf
+		;;
+	mips*-*)
+		os=-elf
+		;;
+	or1k-*)
+		os=-elf
+		;;
+	or32-*)
+		os=-coff
+		;;
+	*-tti)	# must be before sparc entry or we get the wrong os.
+		os=-sysv3
+		;;
+	sparc-* | *-sun)
+		os=-sunos4.1.1
+		;;
+	*-be)
+		os=-beos
+		;;
+	*-haiku)
+		os=-haiku
+		;;
+	*-ibm)
+		os=-aix
+		;;
+	*-knuth)
+		os=-mmixware
+		;;
+	*-wec)
+		os=-proelf
+		;;
+	*-winbond)
+		os=-proelf
+		;;
+	*-oki)
+		os=-proelf
+		;;
+	*-hp)
+		os=-hpux
+		;;
+	*-hitachi)
+		os=-hiux
+		;;
+	i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+		os=-sysv
+		;;
+	*-cbm)
+		os=-amigaos
+		;;
+	*-dg)
+		os=-dgux
+		;;
+	*-dolphin)
+		os=-sysv3
+		;;
+	m68k-ccur)
+		os=-rtu
+		;;
+	m88k-omron*)
+		os=-luna
+		;;
+	*-next )
+		os=-nextstep
+		;;
+	*-sequent)
+		os=-ptx
+		;;
+	*-crds)
+		os=-unos
+		;;
+	*-ns)
+		os=-genix
+		;;
+	i370-*)
+		os=-mvs
+		;;
+	*-next)
+		os=-nextstep3
+		;;
+	*-gould)
+		os=-sysv
+		;;
+	*-highlevel)
+		os=-bsd
+		;;
+	*-encore)
+		os=-bsd
+		;;
+	*-sgi)
+		os=-irix
+		;;
+	*-siemens)
+		os=-sysv4
+		;;
+	*-masscomp)
+		os=-rtu
+		;;
+	f30[01]-fujitsu | f700-fujitsu)
+		os=-uxpv
+		;;
+	*-rom68k)
+		os=-coff
+		;;
+	*-*bug)
+		os=-coff
+		;;
+	*-apple)
+		os=-macos
+		;;
+	*-atari*)
+		os=-mint
+		;;
+	*)
+		os=-none
+		;;
+esac
+fi
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer.  We pick the logical manufacturer.
+vendor=unknown
+case $basic_machine in
+	*-unknown)
+		case $os in
+			-riscix*)
+				vendor=acorn
+				;;
+			-sunos*)
+				vendor=sun
+				;;
+			-cnk*|-aix*)
+				vendor=ibm
+				;;
+			-beos*)
+				vendor=be
+				;;
+			-hpux*)
+				vendor=hp
+				;;
+			-mpeix*)
+				vendor=hp
+				;;
+			-hiux*)
+				vendor=hitachi
+				;;
+			-unos*)
+				vendor=crds
+				;;
+			-dgux*)
+				vendor=dg
+				;;
+			-luna*)
+				vendor=omron
+				;;
+			-genix*)
+				vendor=ns
+				;;
+			-mvs* | -opened*)
+				vendor=ibm
+				;;
+			-os400*)
+				vendor=ibm
+				;;
+			-ptx*)
+				vendor=sequent
+				;;
+			-tpf*)
+				vendor=ibm
+				;;
+			-vxsim* | -vxworks* | -windiss*)
+				vendor=wrs
+				;;
+			-aux*)
+				vendor=apple
+				;;
+			-hms*)
+				vendor=hitachi
+				;;
+			-mpw* | -macos*)
+				vendor=apple
+				;;
+			-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+				vendor=atari
+				;;
+			-vos*)
+				vendor=stratus
+				;;
+		esac
+		basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
+		;;
+esac
+
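+# At this point $basic_machine holds the cpu-vendor pair and $os the '-os'
+# suffix (if any); the echo below joins them into the canonical configuration
+# name, e.g. "i686-pc-linux-gnu".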
+echo $basic_machine$os
+exit
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/depcomp b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/depcomp
new file mode 100755
index 0000000..4ebd5b3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/depcomp
@@ -0,0 +1,791 @@
+#! /bin/sh
+# depcomp - compile a program generating dependencies as side-effects
+
+scriptversion=2013-05-30.07; # UTC
+
+# Copyright (C) 1999-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
+
+case $1 in
+  '')
+    echo "$0: No command.  Try '$0 --help' for more information." 1>&2
+    exit 1;
+    ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: depcomp [--help] [--version] PROGRAM [ARGS]
+
+Run PROGRAM ARGS to compile a file, generating dependencies
+as side-effects.
+
+Environment variables:
+  depmode     Dependency tracking mode.
+  source      Source file read by 'PROGRAM ARGS'.
+  object      Object file output by 'PROGRAM ARGS'.
+  DEPDIR      directory where to store dependencies.
+  depfile     Dependency file to output.
+  tmpdepfile  Temporary file to use when outputting dependencies.
+  libtool     Whether libtool is used (yes/no).
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "depcomp $scriptversion"
+    exit $?
+    ;;
+esac
+
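+# Illustrative invocation (hypothetical file names): automake-generated
+# Makefiles run this script roughly as
+#   depmode=gcc3 source=sub/foo.c object=sub/foo.o \
+#     ./depcomp gcc -c -o sub/foo.o sub/foo.c
+# which compiles sub/foo.c and leaves its dependency list in sub/.deps/foo.Po
+# (a directory the generated Makefiles create beforehand).
+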
+# Get the directory component of the given path, and save it in the
+# global variable '$dir'.  Note that this directory component will
+# either be empty or end with a '/' character.  This is deliberate.
+set_dir_from ()
+{
+  case $1 in
+    */*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;;
+      *) dir=;;
+  esac
+}
+
+# Get the suffix-stripped basename of the given path, and save it in the
+# global variable '$base'.
+set_base_from ()
+{
+  base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'`
+}
+
+# If no dependency file was actually created by the compiler invocation,
+# we still have to create a dummy depfile, to avoid errors with the
+# Makefile "include basename.Plo" scheme.
+make_dummy_depfile ()
+{
+  echo "#dummy" > "$depfile"
+}
+
+# Factor out some common post-processing of the generated depfile.
+# Requires the auxiliary global variable '$tmpdepfile' to be set.
+aix_post_process_depfile ()
+{
+  # If the compiler actually managed to produce a dependency file,
+  # post-process it.
+  if test -f "$tmpdepfile"; then
+    # Each line is of the form 'foo.o: dependency.h'.
+    # Do two passes, one to just change these to
+    #   $object: dependency.h
+    # and one to simply output
+    #   dependency.h:
+    # which is needed to avoid the deleted-header problem.
+    { sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile"
+      sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile"
+    } > "$depfile"
+    rm -f "$tmpdepfile"
+  else
+    make_dummy_depfile
+  fi
+}
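+# For instance, a line 'bar.o: bar.h' in $tmpdepfile ends up in $depfile as
+# both '$object: bar.h' and a bare 'bar.h:' rule.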
+
+# A tabulation character.
+tab='	'
+# A newline character.
+nl='
+'
+# Character ranges might be problematic outside the C locale.
+# These definitions help.
+upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ
+lower=abcdefghijklmnopqrstuvwxyz
+digits=0123456789
+alpha=${upper}${lower}
+
+if test -z "$depmode" || test -z "$source" || test -z "$object"; then
+  echo "depcomp: Variables source, object and depmode must be set" 1>&2
+  exit 1
+fi
+
+# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
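+# The temporary working file adds a 'T' to the suffix, e.g. sub/.deps/bar.TPo
+# (assuming DEPDIR is left at its '.deps' default).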
+depfile=${depfile-`echo "$object" |
+  sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
+tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
+
+rm -f "$tmpdepfile"
+
+# Avoid interferences from the environment.
+gccflag= dashmflag=
+
+# Some modes work just like other modes, but use different flags.  We
+# parameterize here, but still list the modes in the big case below,
+# to make depend.m4 easier to write.  Note that we *cannot* use a case
+# here, because this file can only contain one case statement.
+if test "$depmode" = hp; then
+  # HP compiler uses -M and no extra arg.
+  gccflag=-M
+  depmode=gcc
+fi
+
+if test "$depmode" = dashXmstdout; then
+  # This is just like dashmstdout with a different argument.
+  dashmflag=-xM
+  depmode=dashmstdout
+fi
+
+cygpath_u="cygpath -u -f -"
+if test "$depmode" = msvcmsys; then
+  # This is just like msvisualcpp but w/o cygpath translation.
+  # Just convert the backslash-escaped backslashes to single forward
+  # slashes to satisfy depend.m4
+  cygpath_u='sed s,\\\\,/,g'
+  depmode=msvisualcpp
+fi
+
+if test "$depmode" = msvc7msys; then
+  # This is just like msvc7 but w/o cygpath translation.
+  # Just convert the backslash-escaped backslashes to single forward
+  # slashes to satisfy depend.m4
+  cygpath_u='sed s,\\\\,/,g'
+  depmode=msvc7
+fi
+
+if test "$depmode" = xlc; then
+  # IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information.
+  gccflag=-qmakedep=gcc,-MF
+  depmode=gcc
+fi
+
+case "$depmode" in
+gcc3)
+## gcc 3 implements dependency tracking that does exactly what
+## we want.  Yay!  Note: for some reason libtool 1.4 doesn't like
+## it if -MD -MP comes after the -MF stuff.  Hmm.
+## Unfortunately, FreeBSD c89 acceptance of flags depends upon
+## the command line argument order; so add the flags where they
+## appear in depend2.am.  Note that the slowdown incurred here
+## affects only configure: in makefiles, %FASTDEP% shortcuts this.
+  for arg
+  do
+    case $arg in
+    -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
+    *)  set fnord "$@" "$arg" ;;
+    esac
+    shift # fnord
+    shift # $arg
+  done
+  "$@"
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  mv "$tmpdepfile" "$depfile"
+  ;;
+
+gcc)
+## Note that this doesn't just cater to obsolete pre-3.x GCC compilers,
+## but also to in-use compilers like IBM xlc/xlC and the HP C compiler.
+## (see the conditional assignment to $gccflag above).
+## There are various ways to get dependency output from gcc.  Here's
+## why we pick this rather obscure method:
+## - Don't want to use -MD because we'd like the dependencies to end
+##   up in a subdir.  Having to rename by hand is ugly.
+##   (We might end up doing this anyway to support other compilers.)
+## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
+##   -MM, not -M (despite what the docs say).  Also, it might not be
+##   supported by the other compilers which use the 'gcc' depmode.
+## - Using -M directly means running the compiler twice (even worse
+##   than renaming).
+  if test -z "$gccflag"; then
+    gccflag=-MD,
+  fi
+  "$@" -Wp,"$gccflag$tmpdepfile"
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  # The second -e expression handles DOS-style file names with drive
+  # letters.
+  sed -e 's/^[^:]*: / /' \
+      -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
+## This next piece of magic avoids the "deleted header file" problem.
+## The problem is that when a header file which appears in a .P file
+## is deleted, the dependency causes make to die (because there is
+## typically no way to rebuild the header).  We avoid this by adding
+## dummy dependencies for each header file.  Too bad gcc doesn't do
+## this for us directly.
+## Some versions of gcc put a space before the ':'.  On the theory
+## that the space means something, we add a space to the output as
+## well.  hp depmode also adds that space, but also prefixes the VPATH
+## to the object.  Take care to not repeat it in the output.
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+  tr ' ' "$nl" < "$tmpdepfile" \
+    | sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
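+# (An illustration of the gcc mode above, with hypothetical names: if
+# $tmpdepfile contained
+#   foo.o: foo.c foo.h
+# then $depfile ends up as roughly
+#   foo.o : \
+#    foo.c foo.h
+#   foo.c :
+#   foo.h :
+# so deleting foo.h later cannot break the build.)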
+
+hp)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+sgi)
+  if test "$libtool" = yes; then
+    "$@" "-Wp,-MDupdate,$tmpdepfile"
+  else
+    "$@" -MDupdate "$tmpdepfile"
+  fi
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+
+  if test -f "$tmpdepfile"; then  # yes, the source file depends on other files
+    echo "$object : \\" > "$depfile"
+    # Clip off the initial element (the dependent).  Don't try to be
+    # clever and replace this with sed code, as IRIX sed won't handle
+    # lines with more than a fixed number of characters (4096 in
+    # IRIX 6.2 sed, 8192 in IRIX 6.5).  We also remove comment lines;
+    # the IRIX cc adds comments like '#:fec' to the end of the
+    # dependency line.
+    tr ' ' "$nl" < "$tmpdepfile" \
+      | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \
+      | tr "$nl" ' ' >> "$depfile"
+    echo >> "$depfile"
+    # The second pass generates a dummy entry for each header file.
+    tr ' ' "$nl" < "$tmpdepfile" \
+      | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
+      >> "$depfile"
+  else
+    make_dummy_depfile
+  fi
+  rm -f "$tmpdepfile"
+  ;;
+
+xlc)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+aix)
+  # The C for AIX Compiler uses -M and outputs the dependencies
+  # in a .u file.  In older versions, this file always lives in the
+  # current directory.  Also, the AIX compiler puts '$object:' at the
+  # start of each line; $object doesn't have directory information.
+  # Version 6 uses the directory in both cases.
+  set_dir_from "$object"
+  set_base_from "$object"
+  if test "$libtool" = yes; then
+    tmpdepfile1=$dir$base.u
+    tmpdepfile2=$base.u
+    tmpdepfile3=$dir.libs/$base.u
+    "$@" -Wc,-M
+  else
+    tmpdepfile1=$dir$base.u
+    tmpdepfile2=$dir$base.u
+    tmpdepfile3=$dir$base.u
+    "$@" -M
+  fi
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+    exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  aix_post_process_depfile
+  ;;
+
+tcc)
+  # tcc (Tiny C Compiler) understands '-MD -MF file' since version 0.9.26.
+  # FIXME: That version was still under development at the time of writing.
+  #        Make sure that this statement remains true also for stable,
+  #        released versions.
+  # It will wrap lines (doesn't matter whether long or short) with a
+  # trailing '\', as in:
+  #
+  #   foo.o : \
+  #    foo.c \
+  #    foo.h \
+  #
+  # It will put a trailing '\' even on the last line, and will use leading
+  # spaces rather than leading tabs (at least since its commit 0394caf7
+  # "Emit spaces for -MD").
+  "$@" -MD -MF "$tmpdepfile"
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  # Each non-empty line is of the form 'foo.o : \' or ' dep.h \'.
+  # We have to change lines of the first kind to '$object: \'.
+  sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile"
+  # And for each line of the second kind, we have to emit a 'dep.h:'
+  # dummy dependency, to avoid the deleted-header problem.
+  sed -n -e 's|^  *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+## The order of this option in the case statement is important, since the
+## shell code in configure will try each of these formats in the order
+## listed in this file.  A plain '-MD' option would be understood by many
+## compilers, so we must ensure this comes after the gcc and icc options.
+pgcc)
+  # Portland's C compiler understands '-MD'.
+  # Will always output deps to 'file.d' where file is the root name of the
+  # source file under compilation, even if file resides in a subdirectory.
+  # The object file name does not affect the name of the '.d' file.
+  # pgcc 10.2 will output
+  #    foo.o: sub/foo.c sub/foo.h
+  # and will wrap long lines using '\' :
+  #    foo.o: sub/foo.c ... \
+  #     sub/foo.h ... \
+  #     ...
+  set_dir_from "$object"
+  # Use the source, not the object, to determine the base name, since
+  # that's sadly what pgcc will do too.
+  set_base_from "$source"
+  tmpdepfile=$base.d
+
+  # For projects that build the same source file twice into different object
+  # files, the pgcc approach of using the *source* file root name can cause
+  # problems in parallel builds.  Use a locking strategy to avoid stomping on
+  # the same $tmpdepfile.
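+  # Because mkdir fails if the directory already exists, whichever process
+  # creates $lockdir first wins; the others wait for it to be removed.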
+  lockdir=$base.d-lock
+  trap "
+    echo '$0: caught signal, cleaning up...' >&2
+    rmdir '$lockdir'
+    exit 1
+  " 1 2 13 15
+  numtries=100
+  i=$numtries
+  while test $i -gt 0; do
+    # mkdir is a portable test-and-set.
+    if mkdir "$lockdir" 2>/dev/null; then
+      # This process acquired the lock.
+      "$@" -MD
+      stat=$?
+      # Release the lock.
+      rmdir "$lockdir"
+      break
+    else
+      # If the lock is being held by a different process, wait
+      # until the winning process is done or we timeout.
+      while test -d "$lockdir" && test $i -gt 0; do
+        sleep 1
+        i=`expr $i - 1`
+      done
+    fi
+    i=`expr $i - 1`
+  done
+  trap - 1 2 13 15
+  if test $i -le 0; then
+    echo "$0: failed to acquire lock after $numtries attempts" >&2
+    echo "$0: check lockdir '$lockdir'" >&2
+    exit 1
+  fi
+
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  # Each line is of the form `foo.o: dependent.h',
+  # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
+  # Do two passes, one to just change these to
+  # `$object: dependent.h' and one to simply `dependent.h:'.
+  sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process this invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+hp2)
+  # The "hp" stanza above does not work with aCC (C++) and HP's ia64
+  # compilers, which have integrated preprocessors.  The correct option
+  # to use with these is +Maked; it writes dependencies to a file named
+  # 'foo.d', which lands next to the object file, wherever that
+  # happens to be.
+  # Much of this is similar to the tru64 case; see comments there.
+  set_dir_from  "$object"
+  set_base_from "$object"
+  if test "$libtool" = yes; then
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir.libs/$base.d
+    "$@" -Wc,+Maked
+  else
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir$base.d
+    "$@" +Maked
+  fi
+  stat=$?
+  if test $stat -ne 0; then
+     rm -f "$tmpdepfile1" "$tmpdepfile2"
+     exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  if test -f "$tmpdepfile"; then
+    sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile"
+    # Add 'dependent.h:' lines.
+    sed -ne '2,${
+               s/^ *//
+               s/ \\*$//
+               s/$/:/
+               p
+             }' "$tmpdepfile" >> "$depfile"
+  else
+    make_dummy_depfile
+  fi
+  rm -f "$tmpdepfile" "$tmpdepfile2"
+  ;;
+
+tru64)
+  # The Tru64 compiler uses -MD to generate dependencies as a side
+  # effect.  'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'.
+  # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
+  # dependencies in 'foo.d' instead, so we check for that too.
+  # Subdirectories are respected.
+  set_dir_from  "$object"
+  set_base_from "$object"
+
+  if test "$libtool" = yes; then
+    # Libtool generates 2 separate objects for the 2 libraries.  These
+    # two compilations output dependencies in $dir.libs/$base.o.d and
+    # in $dir$base.o.d.  We have to check for both files, because
+    # one of the two compilations can be disabled.  We should prefer
+    # $dir$base.o.d over $dir.libs/$base.o.d because the latter is
+    # automatically cleaned when .libs/ is deleted, while ignoring
+    # the former would cause a distcleancheck panic.
+    tmpdepfile1=$dir$base.o.d          # libtool 1.5
+    tmpdepfile2=$dir.libs/$base.o.d    # Likewise.
+    tmpdepfile3=$dir.libs/$base.d      # Compaq CCC V6.2-504
+    "$@" -Wc,-MD
+  else
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir$base.d
+    tmpdepfile3=$dir$base.d
+    "$@" -MD
+  fi
+
+  stat=$?
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+    exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  # Same post-processing that is required for AIX mode.
+  aix_post_process_depfile
+  ;;
+
+msvc7)
+  if test "$libtool" = yes; then
+    showIncludes=-Wc,-showIncludes
+  else
+    showIncludes=-showIncludes
+  fi
+  "$@" $showIncludes > "$tmpdepfile"
+  stat=$?
+  grep -v '^Note: including file: ' "$tmpdepfile"
+  if test $stat -ne 0; then
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  # The first sed program below extracts the file names and escapes
+  # backslashes for cygpath.  The second sed program outputs the file
+  # name when reading, but also accumulates all include files in the
+  # hold buffer in order to output them again at the end.  This only
+  # works with sed implementations that can handle large buffers.
+  sed < "$tmpdepfile" -n '
+/^Note: including file:  *\(.*\)/ {
+  s//\1/
+  s/\\/\\\\/g
+  p
+}' | $cygpath_u | sort -u | sed -n '
+s/ /\\ /g
+s/\(.*\)/'"$tab"'\1 \\/p
+s/.\(.*\) \\/\1:/
+H
+$ {
+  s/.*/'"$tab"'/
+  G
+  p
+}' >> "$depfile"
+  echo >> "$depfile" # make sure the fragment doesn't end with a backslash
+  rm -f "$tmpdepfile"
+  ;;
+
+msvc7msys)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+#nosideeffect)
+  # This comment above is used by automake to tell side-effect
+  # dependency tracking mechanisms from slower ones.
+
+dashmstdout)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout, regardless of -o.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove '-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  test -z "$dashmflag" && dashmflag=-M
+  # Require at least two characters before searching for ':'
+  # in the target name.  This is to cope with DOS-style filenames:
+  # a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise.
+  "$@" $dashmflag |
+    sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile"
+  rm -f "$depfile"
+  cat < "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process this sed invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  tr ' ' "$nl" < "$tmpdepfile" \
+    | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+dashXmstdout)
+  # This case only exists to satisfy depend.m4.  It is never actually
+  # run, as this mode is specially recognized in the preamble.
+  exit 1
+  ;;
+
+makedepend)
+  "$@" || exit $?
+  # Remove any Libtool call
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+  # X makedepend
+  shift
+  cleared=no eat=no
+  for arg
+  do
+    case $cleared in
+    no)
+      set ""; shift
+      cleared=yes ;;
+    esac
+    if test $eat = yes; then
+      eat=no
+      continue
+    fi
+    case "$arg" in
+    -D*|-I*)
+      set fnord "$@" "$arg"; shift ;;
+    # Strip any option that makedepend may not understand.  Remove
+    # the object too, otherwise makedepend will parse it as a source file.
+    -arch)
+      eat=yes ;;
+    -*|$object)
+      ;;
+    *)
+      set fnord "$@" "$arg"; shift ;;
+    esac
+  done
+  obj_suffix=`echo "$object" | sed 's/^.*\././'`
+  touch "$tmpdepfile"
+  ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
+  rm -f "$depfile"
+  # makedepend may prepend the VPATH from the source file name to the object.
+  # No need to regex-escape $object, excess matching of '.' is harmless.
+  sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process the last invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  sed '1,2d' "$tmpdepfile" \
+    | tr ' ' "$nl" \
+    | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
+    | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile" "$tmpdepfile".bak
+  ;;
+
+cpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove '-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  "$@" -E \
+    | sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
+             -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
+    | sed '$ s: \\$::' > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  cat < "$tmpdepfile" >> "$depfile"
+  sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+msvisualcpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  IFS=" "
+  for arg
+  do
+    case "$arg" in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
+        set fnord "$@"
+        shift
+        shift
+        ;;
+    *)
+        set fnord "$@" "$arg"
+        shift
+        shift
+        ;;
+    esac
+  done
+  "$@" -E 2>/dev/null |
+  sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile"
+  echo "$tab" >> "$depfile"
+  sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+msvcmsys)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+none)
+  exec "$@"
+  ;;
+
+*)
+  echo "Unknown depmode $depmode" 1>&2
+  exit 1
+  ;;
+esac
+
+exit 0
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/install-sh b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/install-sh
new file mode 100755
index 0000000..377bb86
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/install-sh
@@ -0,0 +1,527 @@
+#!/bin/sh
+# install - install a program, script, or datafile
+
+scriptversion=2011-11-20.07; # UTC
+
+# This originates from X11R5 (mit/util/scripts/install.sh), which was
+# later released in X11R6 (xc/config/util/install.sh) with the
+# following copyright and license.
+#
+# Copyright (C) 1994 X Consortium
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
+# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+# Except as contained in this notice, the name of the X Consortium shall not
+# be used in advertising or otherwise to promote the sale, use or other deal-
+# ings in this Software without prior written authorization from the X Consor-
+# tium.
+#
+#
+# FSF changes to this file are in the public domain.
+#
+# Calling this script install-sh is preferred over install.sh, to prevent
+# 'make' implicit rules from creating a file called install from it
+# when there is no Makefile.
+#
+# This script is compatible with the BSD install script, but was written
+# from scratch.
+
+nl='
+'
+IFS=" ""	$nl"
+
+# set DOITPROG to echo to test this script
+
+# Don't use :- since 4.3BSD and earlier shells don't like it.
+doit=${DOITPROG-}
+if test -z "$doit"; then
+  doit_exec=exec
+else
+  doit_exec=$doit
+fi
+
+# Put in absolute file names if you don't have them in your path;
+# or use environment vars.
+
+chgrpprog=${CHGRPPROG-chgrp}
+chmodprog=${CHMODPROG-chmod}
+chownprog=${CHOWNPROG-chown}
+cmpprog=${CMPPROG-cmp}
+cpprog=${CPPROG-cp}
+mkdirprog=${MKDIRPROG-mkdir}
+mvprog=${MVPROG-mv}
+rmprog=${RMPROG-rm}
+stripprog=${STRIPPROG-strip}
+
+posix_glob='?'
+initialize_posix_glob='
+  test "$posix_glob" != "?" || {
+    if (set -f) 2>/dev/null; then
+      posix_glob=
+    else
+      posix_glob=:
+    fi
+  }
+'
+
+posix_mkdir=
+
+# Desired mode of installed file.
+mode=0755
+
+chgrpcmd=
+chmodcmd=$chmodprog
+chowncmd=
+mvcmd=$mvprog
+rmcmd="$rmprog -f"
+stripcmd=
+
+src=
+dst=
+dir_arg=
+dst_arg=
+
+copy_on_change=false
+no_target_directory=
+
+usage="\
+Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
+   or: $0 [OPTION]... SRCFILES... DIRECTORY
+   or: $0 [OPTION]... -t DIRECTORY SRCFILES...
+   or: $0 [OPTION]... -d DIRECTORIES...
+
+In the 1st form, copy SRCFILE to DSTFILE.
+In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
+In the 4th, create DIRECTORIES.
+
+Options:
+     --help     display this help and exit.
+     --version  display version info and exit.
+
+  -c            (ignored)
+  -C            install only if different (preserve the last data modification time)
+  -d            create directories instead of installing files.
+  -g GROUP      $chgrpprog installed files to GROUP.
+  -m MODE       $chmodprog installed files to MODE.
+  -o USER       $chownprog installed files to USER.
+  -s            $stripprog installed files.
+  -t DIRECTORY  install into DIRECTORY.
+  -T            report an error if DSTFILE is a directory.
+
+Environment variables override the default commands:
+  CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
+  RMPROG STRIPPROG
+"
+
+while test $# -ne 0; do
+  case $1 in
+    -c) ;;
+
+    -C) copy_on_change=true;;
+
+    -d) dir_arg=true;;
+
+    -g) chgrpcmd="$chgrpprog $2"
+	shift;;
+
+    --help) echo "$usage"; exit $?;;
+
+    -m) mode=$2
+	case $mode in
+	  *' '* | *'	'* | *'
+'*	  | *'*'* | *'?'* | *'['*)
+	    echo "$0: invalid mode: $mode" >&2
+	    exit 1;;
+	esac
+	shift;;
+
+    -o) chowncmd="$chownprog $2"
+	shift;;
+
+    -s) stripcmd=$stripprog;;
+
+    -t) dst_arg=$2
+	# Protect names problematic for 'test' and other utilities.
+	case $dst_arg in
+	  -* | [=\(\)!]) dst_arg=./$dst_arg;;
+	esac
+	shift;;
+
+    -T) no_target_directory=true;;
+
+    --version) echo "$0 $scriptversion"; exit $?;;
+
+    --)	shift
+	break;;
+
+    -*)	echo "$0: invalid option: $1" >&2
+	exit 1;;
+
+    *)  break;;
+  esac
+  shift
+done
+
+if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
+  # When -d is used, all remaining arguments are directories to create.
+  # When -t is used, the destination is already specified.
+  # Otherwise, the last argument is the destination.  Remove it from $@.
+  for arg
+  do
+    if test -n "$dst_arg"; then
+      # $@ is not empty: it contains at least $arg.
+      set fnord "$@" "$dst_arg"
+      shift # fnord
+    fi
+    shift # arg
+    dst_arg=$arg
+    # Protect names problematic for 'test' and other utilities.
+    case $dst_arg in
+      -* | [=\(\)!]) dst_arg=./$dst_arg;;
+    esac
+  done
+fi
+
+if test $# -eq 0; then
+  if test -z "$dir_arg"; then
+    echo "$0: no input file specified." >&2
+    exit 1
+  fi
+  # It's OK to call 'install-sh -d' without argument.
+  # This can happen when creating conditional directories.
+  exit 0
+fi
+
+if test -z "$dir_arg"; then
+  do_exit='(exit $ret); exit $ret'
+  trap "ret=129; $do_exit" 1
+  trap "ret=130; $do_exit" 2
+  trap "ret=141; $do_exit" 13
+  trap "ret=143; $do_exit" 15
+
+  # Set umask so as not to create temps with too-generous modes.
+  # However, 'strip' requires both read and write access to temps.
+  case $mode in
+    # Optimize common cases.
+    *644) cp_umask=133;;
+    *755) cp_umask=22;;
+
+    *[0-7])
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw='% 200'
+      fi
+      cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
+    *)
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw=,u+rw
+      fi
+      cp_umask=$mode$u_plus_rw;;
+  esac
+fi
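+# A worked example: for mode=444 without stripping, cp_umask is
+# (777 - 444) = 333, so the temporary copy is created with at most mode 444;
+# with stripping enabled it is 333 % 200 = 133, a umask that never masks the
+# owner's read/write bits, so strip can still rewrite the temporary file.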
+
+for src
+do
+  # Protect names problematic for 'test' and other utilities.
+  case $src in
+    -* | [=\(\)!]) src=./$src;;
+  esac
+
+  if test -n "$dir_arg"; then
+    dst=$src
+    dstdir=$dst
+    test -d "$dstdir"
+    dstdir_status=$?
+  else
+
+    # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
+    # might cause directories to be created, which would be especially bad
+    # if $src (and thus $dsttmp) contains '*'.
+    if test ! -f "$src" && test ! -d "$src"; then
+      echo "$0: $src does not exist." >&2
+      exit 1
+    fi
+
+    if test -z "$dst_arg"; then
+      echo "$0: no destination specified." >&2
+      exit 1
+    fi
+    dst=$dst_arg
+
+    # If destination is a directory, append the input filename; won't work
+    # if double slashes aren't ignored.
+    if test -d "$dst"; then
+      if test -n "$no_target_directory"; then
+	echo "$0: $dst_arg: Is a directory" >&2
+	exit 1
+      fi
+      dstdir=$dst
+      dst=$dstdir/`basename "$src"`
+      dstdir_status=0
+    else
+      # Prefer dirname, but fall back on a substitute if dirname fails.
+      dstdir=`
+	(dirname "$dst") 2>/dev/null ||
+	expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	     X"$dst" : 'X\(//\)[^/]' \| \
+	     X"$dst" : 'X\(//\)$' \| \
+	     X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
+	echo X"$dst" |
+	    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)[^/].*/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\).*/{
+		   s//\1/
+		   q
+		 }
+		 s/.*/./; q'
+      `
+
+      test -d "$dstdir"
+      dstdir_status=$?
+    fi
+  fi
+
+  obsolete_mkdir_used=false
+
+  if test $dstdir_status != 0; then
+    case $posix_mkdir in
+      '')
+	# Create intermediate dirs using mode 755 as modified by the umask.
+	# This is like FreeBSD 'install' as of 1997-10-28.
+	umask=`umask`
+	case $stripcmd.$umask in
+	  # Optimize common cases.
+	  *[2367][2367]) mkdir_umask=$umask;;
+	  .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
+
+	  *[0-7])
+	    mkdir_umask=`expr $umask + 22 \
+	      - $umask % 100 % 40 + $umask % 20 \
+	      - $umask % 10 % 4 + $umask % 2
+	    `;;
+	  *) mkdir_umask=$umask,go-w;;
+	esac
+
+	# With -d, create the new directory with the user-specified mode.
+	# Otherwise, rely on $mkdir_umask.
+	if test -n "$dir_arg"; then
+	  mkdir_mode=-m$mode
+	else
+	  mkdir_mode=
+	fi
+
+	posix_mkdir=false
+	case $umask in
+	  *[123567][0-7][0-7])
+	    # POSIX mkdir -p sets u+wx bits regardless of umask, which
+	    # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
+	    ;;
+	  *)
+	    tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
+	    trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
+
+	    if (umask $mkdir_umask &&
+		exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
+	    then
+	      if test -z "$dir_arg" || {
+		   # Check for POSIX incompatibilities with -m.
+		   # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
+		   # other-writable bit of parent directory when it shouldn't.
+		   # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
+		   ls_ld_tmpdir=`ls -ld "$tmpdir"`
+		   case $ls_ld_tmpdir in
+		     d????-?r-*) different_mode=700;;
+		     d????-?--*) different_mode=755;;
+		     *) false;;
+		   esac &&
+		   $mkdirprog -m$different_mode -p -- "$tmpdir" && {
+		     ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
+		     test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
+		   }
+		 }
+	      then posix_mkdir=:
+	      fi
+	      rmdir "$tmpdir/d" "$tmpdir"
+	    else
+	      # Remove any dirs left behind by ancient mkdir implementations.
+	      rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
+	    fi
+	    trap '' 0;;
+	esac;;
+    esac
+
+    if
+      $posix_mkdir && (
+	umask $mkdir_umask &&
+	$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
+      )
+    then :
+    else
+
+      # The umask is ridiculous, or mkdir does not conform to POSIX,
+      # or it failed possibly due to a race condition.  Create the
+      # directory the slow way, step by step, checking for races as we go.
+
+      case $dstdir in
+	/*) prefix='/';;
+	[-=\(\)!]*) prefix='./';;
+	*)  prefix='';;
+      esac
+
+      eval "$initialize_posix_glob"
+
+      oIFS=$IFS
+      IFS=/
+      $posix_glob set -f
+      set fnord $dstdir
+      shift
+      $posix_glob set +f
+      IFS=$oIFS
+
+      prefixes=
+
+      for d
+      do
+	test X"$d" = X && continue
+
+	prefix=$prefix$d
+	if test -d "$prefix"; then
+	  prefixes=
+	else
+	  if $posix_mkdir; then
+	    (umask $mkdir_umask &&
+	     $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
+	    # Don't fail if two instances are running concurrently.
+	    test -d "$prefix" || exit 1
+	  else
+	    case $prefix in
+	      *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
+	      *) qprefix=$prefix;;
+	    esac
+	    prefixes="$prefixes '$qprefix'"
+	  fi
+	fi
+	prefix=$prefix/
+      done
+
+      if test -n "$prefixes"; then
+	# Don't fail if two instances are running concurrently.
+	(umask $mkdir_umask &&
+	 eval "\$doit_exec \$mkdirprog $prefixes") ||
+	  test -d "$dstdir" || exit 1
+	obsolete_mkdir_used=true
+      fi
+    fi
+  fi
+
+  if test -n "$dir_arg"; then
+    { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
+    { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
+    { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
+      test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
+  else
+
+    # Make a couple of temp file names in the proper directory.
+    dsttmp=$dstdir/_inst.$$_
+    rmtmp=$dstdir/_rm.$$_
+
+    # Trap to clean up those temp files at exit.
+    trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
+
+    # Copy the file name to the temp name.
+    (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
+
+    # and set any options; do chmod last to preserve setuid bits.
+    #
+    # If any of these fail, we abort the whole thing.  If we want to
+    # ignore errors from any of these, just make sure not to ignore
+    # errors from the above "$doit $cpprog $src $dsttmp" command.
+    #
+    { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
+    { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
+    { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
+    { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
+
+    # If -C, don't bother to copy if it wouldn't change the file.
+    if $copy_on_change &&
+       old=`LC_ALL=C ls -dlL "$dst"	2>/dev/null` &&
+       new=`LC_ALL=C ls -dlL "$dsttmp"	2>/dev/null` &&
+
+       eval "$initialize_posix_glob" &&
+       $posix_glob set -f &&
+       set X $old && old=:$2:$4:$5:$6 &&
+       set X $new && new=:$2:$4:$5:$6 &&
+       $posix_glob set +f &&
+
+       test "$old" = "$new" &&
+       $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
+    then
+      rm -f "$dsttmp"
+    else
+      # Rename the file to the real destination.
+      $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
+
+      # The rename failed, perhaps because mv can't rename something else
+      # to itself, or perhaps because mv is so ancient that it does not
+      # support -f.
+      {
+	# Now remove or move aside any old file at destination location.
+	# We try this two ways since rm can't unlink itself on some
+	# systems and the destination file might be busy for other
+	# reasons.  In this case, the final cleanup might fail but the new
+	# file should still install successfully.
+	{
+	  test ! -f "$dst" ||
+	  $doit $rmcmd -f "$dst" 2>/dev/null ||
+	  { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
+	    { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
+	  } ||
+	  { echo "$0: cannot unlink or rename $dst" >&2
+	    (exit 1); exit 1
+	  }
+	} &&
+
+	# Now rename the file to the real destination.
+	$doit $mvcmd "$dsttmp" "$dst"
+      }
+    fi || exit 1
+
+    trap '' 0
+  fi
+done
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/mdate-sh b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/mdate-sh
new file mode 100755
index 0000000..b3719cf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/mdate-sh
@@ -0,0 +1,224 @@
+#!/bin/sh
+# Get modification time of a file or directory and pretty-print it.
+
+scriptversion=2010-08-21.06; # UTC
+
+# Copyright (C) 1995-2013 Free Software Foundation, Inc.
+# written by Ulrich Drepper <drepper@gnu.ai.mit.edu>, June 1995
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+fi
+
+case $1 in
+  '')
+     echo "$0: No file.  Try '$0 --help' for more information." 1>&2
+     exit 1;
+     ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: mdate-sh [--help] [--version] FILE
+
+Pretty-print the modification day of FILE, in the format:
+1 January 1970
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "mdate-sh $scriptversion"
+    exit $?
+    ;;
+esac
+
+error ()
+{
+  echo "$0: $1" >&2
+  exit 1
+}
+
+
+# Prevent date giving response in another language.
+LANG=C
+export LANG
+LC_ALL=C
+export LC_ALL
+LC_TIME=C
+export LC_TIME
+
+# GNU ls changes its time format in response to the TIME_STYLE
+# variable.  Since we cannot assume 'unset' works, revert this
+# variable to its documented default.
+if test "${TIME_STYLE+set}" = set; then
+  TIME_STYLE=posix-long-iso
+  export TIME_STYLE
+fi
+
+save_arg1=$1
+
+# Find out how to get the extended ls output of a file or directory.
+if ls -L /dev/null 1>/dev/null 2>&1; then
+  ls_command='ls -L -l -d'
+else
+  ls_command='ls -l -d'
+fi
+# Avoid user/group names that might have spaces, when possible.
+if ls -n /dev/null 1>/dev/null 2>&1; then
+  ls_command="$ls_command -n"
+fi
+
+# A 'ls -l' line looks as follows on OS/2.
+#  drwxrwx---        0 Aug 11  2001 foo
+# This differs from Unix, which adds ownership information.
+#  drwxrwx---   2 root  root      4096 Aug 11  2001 foo
+#
+# To find the date, we split the line on spaces and iterate on words
+# until we find a month.  This cannot work with files whose owner is a
+# user named "Jan", or "Feb", etc.  However, it's unlikely that '/'
+# will be owned by a user whose name is a month.  So we first look at
+# the extended ls output of the root directory to decide how many
+# words should be skipped to get the date.
+
+# On HPUX /bin/sh, "set" interprets "-rw-r--r--" as options, so the "x" below.
+set x`$ls_command /`
+
+# Find which argument is the month.
+month=
+command=
+until test $month
+do
+  test $# -gt 0 || error "failed parsing '$ls_command /' output"
+  shift
+  # Add another shift to the command.
+  command="$command shift;"
+  case $1 in
+    Jan) month=January; nummonth=1;;
+    Feb) month=February; nummonth=2;;
+    Mar) month=March; nummonth=3;;
+    Apr) month=April; nummonth=4;;
+    May) month=May; nummonth=5;;
+    Jun) month=June; nummonth=6;;
+    Jul) month=July; nummonth=7;;
+    Aug) month=August; nummonth=8;;
+    Sep) month=September; nummonth=9;;
+    Oct) month=October; nummonth=10;;
+    Nov) month=November; nummonth=11;;
+    Dec) month=December; nummonth=12;;
+  esac
+done
+
+test -n "$month" || error "failed parsing '$ls_command /' output"
+
+# Get the extended ls output of the file or directory.
+set dummy x`eval "$ls_command \"\\\$save_arg1\""`
+
+# Remove all preceding arguments
+eval $command
+
+# Because of the dummy argument above, month is in $2.
+#
+# On a POSIX system, we should have
+#
+# $# = 5
+# $1 = file size
+# $2 = month
+# $3 = day
+# $4 = year or time
+# $5 = filename
+#
+# On Darwin 7.7.0 and 7.6.0, we have
+#
+# $# = 4
+# $1 = day
+# $2 = month
+# $3 = year or time
+# $4 = filename
+
+# Get the month.
+case $2 in
+  Jan) month=January; nummonth=1;;
+  Feb) month=February; nummonth=2;;
+  Mar) month=March; nummonth=3;;
+  Apr) month=April; nummonth=4;;
+  May) month=May; nummonth=5;;
+  Jun) month=June; nummonth=6;;
+  Jul) month=July; nummonth=7;;
+  Aug) month=August; nummonth=8;;
+  Sep) month=September; nummonth=9;;
+  Oct) month=October; nummonth=10;;
+  Nov) month=November; nummonth=11;;
+  Dec) month=December; nummonth=12;;
+esac
+
+case $3 in
+  ???*) day=$1;;
+  *) day=$3; shift;;
+esac
+
+# Here we have to deal with the problem that the ls output gives either
+# the time of day or the year.
+case $3 in
+  *:*) set `date`; eval year=\$$#
+       case $2 in
+	 Jan) nummonthtod=1;;
+	 Feb) nummonthtod=2;;
+	 Mar) nummonthtod=3;;
+	 Apr) nummonthtod=4;;
+	 May) nummonthtod=5;;
+	 Jun) nummonthtod=6;;
+	 Jul) nummonthtod=7;;
+	 Aug) nummonthtod=8;;
+	 Sep) nummonthtod=9;;
+	 Oct) nummonthtod=10;;
+	 Nov) nummonthtod=11;;
+	 Dec) nummonthtod=12;;
+       esac
+       # For the first six months of the year, the time notation can also
+       # be used for files modified in the previous year.
+       if (expr $nummonth \> $nummonthtod) > /dev/null;
+       then
+	 year=`expr $year - 1`
+       fi;;
+  *) year=$3;;
+esac
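+# For instance, if today is 3 March 2016 and ls printed "Aug 11 14:32" for
+# the file, the file's month (August) is later than the current month
+# (March), so it must have been modified in 2015 and the year is adjusted.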
+
+# The result.
+echo $day $month $year
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/missing b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/missing
new file mode 100755
index 0000000..db98974
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/missing
@@ -0,0 +1,215 @@
+#! /bin/sh
+# Common wrapper for a few potentially missing GNU programs.
+
+scriptversion=2013-10-28.13; # UTC
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+# Originally written by François Pinard <pinard@iro.umontreal.ca>, 1996.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+if test $# -eq 0; then
+  echo 1>&2 "Try '$0 --help' for more information"
+  exit 1
+fi
+
+case $1 in
+
+  --is-lightweight)
+    # Used by our autoconf macros to check whether the available missing
+    # script is modern enough.
+    exit 0
+    ;;
+
+  --run)
+    # Back-compat with the calling convention used by older automake.
+    shift
+    ;;
+
+  -h|--h|--he|--hel|--help)
+    echo "\
+$0 [OPTION]... PROGRAM [ARGUMENT]...
+
+Run 'PROGRAM [ARGUMENT]...', returning proper advice when this fails due
+to PROGRAM being missing or too old.
+
+Options:
+  -h, --help      display this help and exit
+  -v, --version   output version information and exit
+
+Supported PROGRAM values:
+  aclocal   autoconf  autoheader   autom4te  automake  makeinfo
+  bison     yacc      flex         lex       help2man
+
+Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and
+'g' are ignored when checking the name.
+
+Send bug reports to <bug-automake@gnu.org>."
+    exit $?
+    ;;
+
+  -v|--v|--ve|--ver|--vers|--versi|--versio|--version)
+    echo "missing $scriptversion (GNU Automake)"
+    exit $?
+    ;;
+
+  -*)
+    echo 1>&2 "$0: unknown '$1' option"
+    echo 1>&2 "Try '$0 --help' for more information"
+    exit 1
+    ;;
+
+esac
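+# Configure-generated Makefiles invoke this wrapper as, for instance
+# (hypothetical paths), '/bin/sh build-aux/missing aclocal-1.14 -I m4', so
+# the real tool runs when it is available and the advice below is printed
+# when it is not.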
+
+# Run the given program, remember its exit status.
+"$@"; st=$?
+
+# If it succeeded, we are done.
+test $st -eq 0 && exit 0
+
+# Also exit now if it failed (or wasn't found), and '--version' was
+# passed; such an option is passed most likely to detect whether the
+# program is present and works.
+case $2 in --version|--help) exit $st;; esac
+
+# Exit code 63 means version mismatch.  This often happens when the user
+# tries to use an ancient version of a tool on a file that requires a
+# minimum version.
+if test $st -eq 63; then
+  msg="probably too old"
+elif test $st -eq 127; then
+  # Program was missing.
+  msg="missing on your system"
+else
+  # Program was found and executed, but failed.  Give up.
+  exit $st
+fi
+
+perl_URL=http://www.perl.org/
+flex_URL=http://flex.sourceforge.net/
+gnu_software_URL=http://www.gnu.org/software
+
+program_details ()
+{
+  case $1 in
+    aclocal|automake)
+      echo "The '$1' program is part of the GNU Automake package:"
+      echo "<$gnu_software_URL/automake>"
+      echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:"
+      echo "<$gnu_software_URL/autoconf>"
+      echo "<$gnu_software_URL/m4/>"
+      echo "<$perl_URL>"
+      ;;
+    autoconf|autom4te|autoheader)
+      echo "The '$1' program is part of the GNU Autoconf package:"
+      echo "<$gnu_software_URL/autoconf/>"
+      echo "It also requires GNU m4 and Perl in order to run:"
+      echo "<$gnu_software_URL/m4/>"
+      echo "<$perl_URL>"
+      ;;
+  esac
+}
+
+give_advice ()
+{
+  # Normalize program name to check for.
+  normalized_program=`echo "$1" | sed '
+    s/^gnu-//; t
+    s/^gnu//; t
+    s/^g//; t'`
+
+  printf '%s\n' "'$1' is $msg."
+
+  configure_deps="'configure.ac' or m4 files included by 'configure.ac'"
+  case $normalized_program in
+    autoconf*)
+      echo "You should only need it if you modified 'configure.ac',"
+      echo "or m4 files included by it."
+      program_details 'autoconf'
+      ;;
+    autoheader*)
+      echo "You should only need it if you modified 'acconfig.h' or"
+      echo "$configure_deps."
+      program_details 'autoheader'
+      ;;
+    automake*)
+      echo "You should only need it if you modified 'Makefile.am' or"
+      echo "$configure_deps."
+      program_details 'automake'
+      ;;
+    aclocal*)
+      echo "You should only need it if you modified 'acinclude.m4' or"
+      echo "$configure_deps."
+      program_details 'aclocal'
+      ;;
+   autom4te*)
+      echo "You might have modified some maintainer files that require"
+      echo "the 'autom4te' program to be rebuilt."
+      program_details 'autom4te'
+      ;;
+    bison*|yacc*)
+      echo "You should only need it if you modified a '.y' file."
+      echo "You may want to install the GNU Bison package:"
+      echo "<$gnu_software_URL/bison/>"
+      ;;
+    lex*|flex*)
+      echo "You should only need it if you modified a '.l' file."
+      echo "You may want to install the Fast Lexical Analyzer package:"
+      echo "<$flex_URL>"
+      ;;
+    help2man*)
+      echo "You should only need it if you modified a dependency" \
+           "of a man page."
+      echo "You may want to install the GNU Help2man package:"
+      echo "<$gnu_software_URL/help2man/>"
+    ;;
+    makeinfo*)
+      echo "You should only need it if you modified a '.texi' file, or"
+      echo "any other file indirectly affecting the aspect of the manual."
+      echo "You might want to install the Texinfo package:"
+      echo "<$gnu_software_URL/texinfo/>"
+      echo "The spurious makeinfo call might also be the consequence of"
+      echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might"
+      echo "want to install GNU make:"
+      echo "<$gnu_software_URL/make/>"
+      ;;
+    *)
+      echo "You might have modified some files without having the proper"
+      echo "tools for further handling them.  Check the 'README' file, it"
+      echo "often tells you about the needed prerequisites for installing"
+      echo "this package.  You may also peek at any GNU archive site, in"
+      echo "case some other package contains this missing '$1' program."
+      ;;
+  esac
+}
+
+give_advice "$1" | sed -e '1s/^/WARNING: /' \
+                       -e '2,$s/^/         /' >&2
+
+# Propagate the correct exit status (expected to be 127 for a program
+# not found, 63 for a program that failed due to version mismatch).
+exit $st
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/mkinstalldirs b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/mkinstalldirs
new file mode 100755
index 0000000..55d537f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/mkinstalldirs
@@ -0,0 +1,162 @@
+#! /bin/sh
+# mkinstalldirs --- make directory hierarchy
+
+scriptversion=2009-04-28.21; # UTC
+
+# Original author: Noah Friedman <friedman@prep.ai.mit.edu>
+# Created: 1993-05-16
+# Public domain.
+#
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+nl='
+'
+IFS=" ""	$nl"
+errstatus=0
+dirmode=
+
+usage="\
+Usage: mkinstalldirs [-h] [--help] [--version] [-m MODE] DIR ...
+
+Create each directory DIR (with mode MODE, if specified), including all
+leading file name components.
+
+Report bugs to <bug-automake@gnu.org>."
+
+# process command line arguments
+while test $# -gt 0 ; do
+  case $1 in
+    -h | --help | --h*)         # -h for help
+      echo "$usage"
+      exit $?
+      ;;
+    -m)                         # -m PERM arg
+      shift
+      test $# -eq 0 && { echo "$usage" 1>&2; exit 1; }
+      dirmode=$1
+      shift
+      ;;
+    --version)
+      echo "$0 $scriptversion"
+      exit $?
+      ;;
+    --)                         # stop option processing
+      shift
+      break
+      ;;
+    -*)                         # unknown option
+      echo "$usage" 1>&2
+      exit 1
+      ;;
+    *)                          # first non-opt arg
+      break
+      ;;
+  esac
+done
+
+for file
+do
+  if test -d "$file"; then
+    shift
+  else
+    break
+  fi
+done
+
+case $# in
+  0) exit 0 ;;
+esac
+
+# Solaris 8's mkdir -p isn't thread-safe.  If you mkdir -p a/b and
+# mkdir -p a/c at the same time, both will detect that a is missing,
+# one will create a, then the other will try to create a and die with
+# a "File exists" error.  This is a problem when calling mkinstalldirs
+# from a parallel make.  We use --version in the probe to restrict
+# ourselves to GNU mkdir, which is thread-safe.
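+# Illustrative note (hypothetical paths, not part of the upstream logic):
+# under a parallel make, two rules might run, e.g.,
+#   mkinstalldirs dist/a/b & mkinstalldirs dist/a/c & wait
+# With a non-thread-safe "mkdir -p", one invocation can die with "File
+# exists" while creating the shared prefix "dist/a".  The GNU mkdir probe
+# below avoids that; otherwise the fallback loop further down creates each
+# path component one at a time.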
+case $dirmode in
+  '')
+    if mkdir -p --version . >/dev/null 2>&1 && test ! -d ./--version; then
+      echo "mkdir -p -- $*"
+      exec mkdir -p -- "$@"
+    else
+      # On NextStep and OpenStep, the 'mkdir' command does not
+      # recognize any option.  It will interpret all options as
+      # directories to create, and then abort because '.' already
+      # exists.
+      test -d ./-p && rmdir ./-p
+      test -d ./--version && rmdir ./--version
+    fi
+    ;;
+  *)
+    if mkdir -m "$dirmode" -p --version . >/dev/null 2>&1 &&
+       test ! -d ./--version; then
+      echo "mkdir -m $dirmode -p -- $*"
+      exec mkdir -m "$dirmode" -p -- "$@"
+    else
+      # Clean up after NextStep and OpenStep mkdir.
+      for d in ./-m ./-p ./--version "./$dirmode";
+      do
+        test -d $d && rmdir $d
+      done
+    fi
+    ;;
+esac
+
+for file
+do
+  case $file in
+    /*) pathcomp=/ ;;
+    *)  pathcomp= ;;
+  esac
+  oIFS=$IFS
+  IFS=/
+  set fnord $file
+  shift
+  IFS=$oIFS
+
+  for d
+  do
+    test "x$d" = x && continue
+
+    pathcomp=$pathcomp$d
+    case $pathcomp in
+      -*) pathcomp=./$pathcomp ;;
+    esac
+
+    if test ! -d "$pathcomp"; then
+      echo "mkdir $pathcomp"
+
+      mkdir "$pathcomp" || lasterr=$?
+
+      if test ! -d "$pathcomp"; then
+	errstatus=$lasterr
+      else
+	if test ! -z "$dirmode"; then
+	  echo "chmod $dirmode $pathcomp"
+	  lasterr=
+	  chmod "$dirmode" "$pathcomp" || lasterr=$?
+
+	  if test ! -z "$lasterr"; then
+	    errstatus=$lasterr
+	  fi
+	fi
+      fi
+    fi
+
+    pathcomp=$pathcomp/
+  done
+done
+
+exit $errstatus
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/py-compile b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/py-compile
new file mode 100755
index 0000000..46ea866
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/py-compile
@@ -0,0 +1,170 @@
+#!/bin/sh
+# py-compile - Compile a Python program
+
+scriptversion=2011-06-08.12; # UTC
+
+# Copyright (C) 2000-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+if [ -z "$PYTHON" ]; then
+  PYTHON=python
+fi
+
+me=py-compile
+
+usage_error ()
+{
+  echo "$me: $*" >&2
+  echo "Try '$me --help' for more information." >&2
+  exit 1
+}
+
+basedir=
+destdir=
+while test $# -ne 0; do
+  case "$1" in
+    --basedir)
+      if test $# -lt 2; then
+        usage_error "option '--basedir' requires an argument"
+      else
+        basedir=$2
+      fi
+      shift
+      ;;
+    --destdir)
+      if test $# -lt 2; then
+        usage_error "option '--destdir' requires an argument"
+      else
+        destdir=$2
+      fi
+      shift
+      ;;
+    -h|--help)
+      cat <<\EOF
+Usage: py-compile [--help] [--version] [--basedir DIR] [--destdir DIR] FILES...
+
+Byte compile some python scripts FILES.  Use --destdir to specify any
+leading directory path to the FILES that you don't want to include in the
+byte compiled file.  Specify --basedir for any additional path information you
+do want to be shown in the byte compiled file.
+
+Example:
+  py-compile --destdir /tmp/pkg-root --basedir /usr/share/test test.py test2.py
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+      exit $?
+      ;;
+    -v|--version)
+      echo "$me $scriptversion"
+      exit $?
+      ;;
+    --)
+      shift
+      break
+      ;;
+    -*)
+      usage_error "unrecognized option '$1'"
+      ;;
+    *)
+      break
+      ;;
+  esac
+  shift
+done
+
+files=$*
+if test -z "$files"; then
+    usage_error "no files given"
+fi
+
+# if basedir was given, then it should be prepended to filenames before
+# byte compilation.
+if [ -z "$basedir" ]; then
+    pathtrans="path = file"
+else
+    pathtrans="path = os.path.join('$basedir', file)"
+fi
+
+# if destdir was given, then it needs to be prepended to the filename to
+# byte compile but not go into the compiled file.
+if [ -z "$destdir" ]; then
+    filetrans="filepath = path"
+else
+    filetrans="filepath = os.path.normpath('$destdir' + os.sep + path)"
+fi
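+
+# Worked example (illustrative values, mirroring the usage example above):
+#   py-compile --destdir /tmp/pkg-root --basedir /usr/share/test test.py
+# makes the snippet below use path = /usr/share/test/test.py (the name
+# recorded inside the byte-compiled file) and
+# filepath = /tmp/pkg-root/usr/share/test/test.py (the file actually
+# compiled on disk).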
+
+$PYTHON -c "
+import sys, os, py_compile, imp
+
+files = '''$files'''
+
+sys.stdout.write('Byte-compiling python modules...\n')
+for file in files.split():
+    $pathtrans
+    $filetrans
+    if not os.path.exists(filepath) or not (len(filepath) >= 3
+                                            and filepath[-3:] == '.py'):
+	    continue
+    sys.stdout.write(file)
+    sys.stdout.flush()
+    if hasattr(imp, 'get_tag'):
+        py_compile.compile(filepath, imp.cache_from_source(filepath), path)
+    else:
+        py_compile.compile(filepath, filepath + 'c', path)
+sys.stdout.write('\n')" || exit $?
+
+# this will fail for python < 1.5, but that doesn't matter ...
+$PYTHON -O -c "
+import sys, os, py_compile, imp
+
+# pypy does not use .pyo optimization
+if hasattr(sys, 'pypy_translation_info'):
+    sys.exit(0)
+
+files = '''$files'''
+sys.stdout.write('Byte-compiling python modules (optimized versions) ...\n')
+for file in files.split():
+    $pathtrans
+    $filetrans
+    if not os.path.exists(filepath) or not (len(filepath) >= 3
+                                            and filepath[-3:] == '.py'):
+	    continue
+    sys.stdout.write(file)
+    sys.stdout.flush()
+    if hasattr(imp, 'get_tag'):
+        py_compile.compile(filepath, imp.cache_from_source(filepath, False), path)
+    else:
+        py_compile.compile(filepath, filepath + 'o', path)
+sys.stdout.write('\n')" 2>/dev/null || :
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/tap-driver.pl b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/tap-driver.pl
new file mode 100755
index 0000000..aca65fe
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/tap-driver.pl
@@ -0,0 +1,564 @@
+#! /usr/bin/env perl
+# Copyright (C) 2011-2013 Free Software Foundation, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+# ---------------------------------- #
+#  Imports, static data, and setup.  #
+# ---------------------------------- #
+
+use warnings FATAL => 'all';
+use strict;
+use Getopt::Long ();
+use TAP::Parser;
+
+my $VERSION = '2012-02-01.19'; # UTC
+
+my $ME = "tap-driver.pl";
+
+my $USAGE = <<'END';
+Usage:
+  tap-driver --test-name=NAME --log-file=PATH --trs-file=PATH
+             [--expect-failure={yes|no}] [--color-tests={yes|no}]
+             [--enable-hard-errors={yes|no}] [--ignore-exit]
+             [--diagnostic-string=STRING] [--merge|--no-merge]
+             [--comments|--no-comments] [--] TEST-COMMAND
+The `--test-name', `--log-file' and `--trs-file' options are mandatory.
+END
+
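+# Illustrative invocation (file and option values here are hypothetical):
+# an Automake-generated test rule typically runs something like
+#   tap-driver.pl --test-name=foo.test --log-file=foo.log \
+#                 --trs-file=foo.trs -- ./foo.test
+# and the driver then parses the TAP stream produced by ./foo.test.
+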
+my $HELP = "$ME: TAP-aware test driver for Automake testsuite harness." .
+           "\n" . $USAGE;
+
+# Keep this in sync with `lib/am/check.am:$(am__tty_colors)'.
+my %COLOR = (
+  red => "\e[0;31m",
+  grn => "\e[0;32m",
+  lgn => "\e[1;32m",
+  blu => "\e[1;34m",
+  mgn => "\e[0;35m",
+  brg => "\e[1m",
+  std => "\e[m",
+);
+
+# It's important that NO_PLAN evaluates "false" as a boolean.
+use constant NO_PLAN => 0;
+use constant EARLY_PLAN => 1;
+use constant LATE_PLAN => 2;
+
+# ------------------- #
+#  Global variables.  #
+# ------------------- #
+
+my $testno = 0;     # Number of test results seen so far.
+my $bailed_out = 0; # Whether a "Bail out!" directive has been seen.
+my $parser;         # TAP parser object (will be initialized later).
+
+# Whether the TAP plan has been seen or not, and if yes, which kind
+# it is ("early" is seen before any test result, "late" otherwise).
+my $plan_seen = NO_PLAN;
+
+# ----------------- #
+#  Option parsing.  #
+# ----------------- #
+
+my %cfg = (
+  "color-tests" => 0,
+  "expect-failure" => 0,
+  "merge" => 0,
+  "comments" => 0,
+  "ignore-exit" => 0,
+);
+
+my $test_script_name = undef;
+my $log_file = undef;
+my $trs_file = undef;
+my $diag_string = "#";
+
+Getopt::Long::GetOptions
+  (
+    'help' => sub { print $HELP; exit 0; },
+    'version' => sub { print "$ME $VERSION\n"; exit 0; },
+    'test-name=s' => \$test_script_name,
+    'log-file=s' => \$log_file,
+    'trs-file=s' => \$trs_file,
+    'color-tests=s'  => \&bool_opt,
+    'expect-failure=s'  => \&bool_opt,
+    'enable-hard-errors=s' => sub {}, # No-op.
+    'diagnostic-string=s' => \$diag_string,
+    'comments' => sub { $cfg{"comments"} = 1; },
+    'no-comments' => sub { $cfg{"comments"} = 0; },
+    'merge' => sub { $cfg{"merge"} = 1; },
+    'no-merge' => sub { $cfg{"merge"} = 0; },
+    'ignore-exit' => sub { $cfg{"ignore-exit"} = 1; },
+  ) or exit 1;
+
+# ------------- #
+#  Prototypes.  #
+# ------------- #
+
+sub add_test_result ($);
+sub bool_opt ($$);
+sub colored ($$);
+sub copy_in_global_log ();
+sub decorate_result ($);
+sub extract_tap_comment ($);
+sub finish ();
+sub get_global_test_result ();
+sub get_test_exit_message ();
+sub get_test_results ();
+sub handle_tap_bailout ($);
+sub handle_tap_plan ($);
+sub handle_tap_result ($);
+sub is_null_string ($);
+sub main (@);
+sub must_recheck ();
+sub report ($;$);
+sub setup_io ();
+sub setup_parser (@);
+sub stringify_result_obj ($);
+sub testsuite_error ($);
+sub trap_perl_warnings_and_errors ();
+sub write_test_results ();
+sub yn ($);
+
+# -------------- #
+#  Subroutines.  #
+# -------------- #
+
+sub bool_opt ($$)
+{
+  my ($opt, $val) = @_;
+  if ($val =~ /^(?:y|yes)\z/i)
+    {
+      $cfg{$opt} = 1;
+    }
+  elsif ($val =~ /^(?:n|no)\z/i)
+    {
+      $cfg{$opt} = 0;
+    }
+  else
+    {
+      die "$ME: invalid argument '$val' for option '$opt'\n";
+    }
+}
+
+# If the given string is undefined or empty, return true, otherwise
+# return false.  This function is useful to avoid pitfalls like:
+#   if ($message) { print "$message\n"; }
+# which wouldn't print anything if $message is the literal "0".
+sub is_null_string ($)
+{
+  my $str = shift;
+  return ! (defined $str and length $str);
+}
+
+# Convert a boolean to a "yes"/"no" string.
+sub yn ($)
+{
+  my $bool = shift;
+  return $bool ? "yes" : "no";
+}
+
+TEST_RESULTS :
+{
+  my (@test_results_list, %test_results_seen);
+
+  sub add_test_result ($)
+  {
+    my $res = shift;
+    push @test_results_list, $res;
+    $test_results_seen{$res} = 1;
+  }
+
+  sub get_test_results ()
+  {
+    return @test_results_list;
+  }
+
+  # Whether the test script should be re-run by "make recheck".
+  sub must_recheck ()
+  {
+    return grep { !/^(?:XFAIL|PASS|SKIP)$/ } (keys %test_results_seen);
+  }
+
+  # Whether the content of the log file associated to this test should
+  # be copied into the "global" test-suite.log.
+  sub copy_in_global_log ()
+  {
+    return grep { not $_ eq "PASS" } (keys %test_results_seen);
+  }
+
+  # FIXME: this can certainly be improved ...
+  sub get_global_test_result ()
+  {
+    return "ERROR"
+      if $test_results_seen{"ERROR"};
+    return "FAIL"
+      if $test_results_seen{"FAIL"} || $test_results_seen{"XPASS"};
+    return "SKIP"
+      if scalar keys %test_results_seen == 1 && $test_results_seen{"SKIP"};
+    return "PASS";
+  }
+
+}
+
+sub write_test_results ()
+{
+  open RES, ">", $trs_file or die "$ME: opening $trs_file: $!\n";
+  print RES ":global-test-result: " . get_global_test_result . "\n";
+  print RES ":recheck: " . yn (must_recheck) . "\n";
+  print RES ":copy-in-global-log: " . yn (copy_in_global_log) . "\n";
+  foreach my $result (get_test_results)
+    {
+      print RES ":test-result: $result\n";
+    }
+  close RES or die "$ME: closing $trs_file: $!\n";
+}
+
+sub trap_perl_warnings_and_errors ()
+{
+  $SIG{__WARN__} = $SIG{__DIE__} = sub
+    {
+      # Be sure to send the warning/error message to the original stderr
+      # (presumably the console), not into the log file.
+      open STDERR, ">&OLDERR";
+      die @_;
+    }
+}
+
+sub setup_io ()
+{
+  # Redirect stderr and stdout to a temporary log file.  Save the
+  # original stdout stream, since we need it to print testsuite
+  # progress output. Save original stderr stream, so that we can
+  # redirect warning and error messages from perl there.
+  open LOG, ">", $log_file or die "$ME: opening $log_file: $!\n";
+  open OLDOUT, ">&STDOUT" or die "$ME: duplicating stdout: $!\n";
+open OLDERR, ">&STDERR" or die "$ME: duplicating stderr: $!\n";
+  *OLDERR = *OLDERR; # To pacify a "used only once" warning.
+  trap_perl_warnings_and_errors;
+  open STDOUT, ">&LOG" or die "$ME: redirecting stdout: $!\n";
+  open STDERR, ">&LOG" or die "$ME: redirecting stderr: $!\n";
+}
+
+sub setup_parser (@)
+{
+  local $@ = '';
+  eval { $parser = TAP::Parser->new ({exec => \@_, merge => $cfg{merge}}) };
+  if ($@ ne '')
+    {
+      # Don't use the error message in $@ as set by TAP::Parser, since
+      # currently it's both too generic (at the point of being basically
+      # useless) and quite long.
+      report "ERROR", "- couldn't execute test script";
+      finish;
+    }
+}
+
+sub get_test_exit_message ()
+{
+  my $wstatus = $parser->wait;
+  # Watch out for possible internal errors.
+  die "$ME: couldn't get the exit status of the TAP producer"
+    unless defined $wstatus;
+  # Return an undefined value if the producer exited with success.
+  return unless $wstatus;
+  # Otherwise, determine whether it exited with error or was terminated
+  # by a signal.
+  use POSIX qw (WIFEXITED WEXITSTATUS WIFSIGNALED WTERMSIG);
+  if (WIFEXITED ($wstatus))
+  {
+    return sprintf "exited with status %d", WEXITSTATUS ($wstatus);
+  }
+  elsif (WIFSIGNALED ($wstatus))
+    {
+      return sprintf "terminated by signal %d", WTERMSIG ($wstatus);
+    }
+  else
+    {
+      return "terminated abnormally";
+    }
+}
+
+sub stringify_result_obj ($)
+{
+  my $result_obj = shift;
+  my $COOKED_PASS = $cfg{"expect-failure"} ? "XPASS": "PASS";
+  my $COOKED_FAIL = $cfg{"expect-failure"} ? "XFAIL": "FAIL";
+  if ($result_obj->is_unplanned || $result_obj->number != $testno)
+    {
+      return "ERROR";
+    }
+  elsif ($plan_seen == LATE_PLAN)
+    {
+      return "ERROR";
+    }
+  elsif (!$result_obj->directive)
+    {
+      return $result_obj->is_ok ? $COOKED_PASS: $COOKED_FAIL;
+    }
+  elsif ($result_obj->has_todo)
+    {
+      return $result_obj->is_actual_ok ? "XPASS" : "XFAIL";
+    }
+  elsif ($result_obj->has_skip)
+    {
+      return $result_obj->is_ok ? "SKIP" : $COOKED_FAIL;
+    }
+  die "$ME: INTERNAL ERROR"; # NOTREACHED
+}
+
+sub colored ($$)
+{
+  my ($color_name, $text) = @_;
+  return $COLOR{$color_name} . $text . $COLOR{'std'};
+}
+
+sub decorate_result ($)
+{
+  my $result = shift;
+  return $result unless $cfg{"color-tests"};
+  my %color_for_result =
+    (
+      "ERROR" => 'mgn',
+      "PASS"  => 'grn',
+      "XPASS" => 'red',
+      "FAIL"  => 'red',
+      "XFAIL" => 'lgn',
+      "SKIP"  => 'blu',
+    );
+  if (my $color = $color_for_result{$result})
+    {
+      return colored ($color, $result);
+    }
+  else
+    {
+      return $result; # Don't colorize unknown stuff.
+    }
+}
+
+sub report ($;$)
+{
+  my ($msg, $result, $explanation) = (undef, @_);
+  if ($result =~ /^(?:X?(?:PASS|FAIL)|SKIP|ERROR)/)
+    {
+      $msg = ": $test_script_name";
+      add_test_result $result;
+    }
+  elsif ($result eq "#")
+    {
+      $msg = " $test_script_name:";
+    }
+  else
+    {
+      die "$ME: INTERNAL ERROR"; # NOTREACHED
+    }
+  $msg .= " $explanation" if defined $explanation;
+  $msg .= "\n";
+  # Output on console might be colorized.
+  print OLDOUT decorate_result ($result) . $msg;
+  # Log the result in the log file too, to help debugging (this is
+  # especially true when said result is a TAP error or "Bail out!").
+  print $result . $msg;
+}
+
+sub testsuite_error ($)
+{
+  report "ERROR", "- $_[0]";
+}
+
+sub handle_tap_result ($)
+{
+  $testno++;
+  my $result_obj = shift;
+
+  my $test_result = stringify_result_obj $result_obj;
+  my $string = $result_obj->number;
+
+  my $description = $result_obj->description;
+  $string .= " $description"
+    unless is_null_string $description;
+
+  if ($plan_seen == LATE_PLAN)
+    {
+      $string .= " # AFTER LATE PLAN";
+    }
+  elsif ($result_obj->is_unplanned)
+    {
+      $string .= " # UNPLANNED";
+    }
+  elsif ($result_obj->number != $testno)
+    {
+      $string .= " # OUT-OF-ORDER (expecting $testno)";
+    }
+  elsif (my $directive = $result_obj->directive)
+    {
+      $string .= " # $directive";
+      my $explanation = $result_obj->explanation;
+      $string .= " $explanation"
+        unless is_null_string $explanation;
+    }
+
+  report $test_result, $string;
+}
+
+sub handle_tap_plan ($)
+{
+  my $plan = shift;
+  if ($plan_seen)
+    {
+      # Error, only one plan per stream is acceptable.
+      testsuite_error "multiple test plans";
+      return;
+    }
+  # The TAP plan can come before or after *all* the TAP results; we speak
+  # respectively of an "early" or a "late" plan.  If we see the plan line
+  # after at least one TAP result has been seen, assume we have a late
+  # plan; in this case, any further test result seen after the plan will
+  # be flagged as an error.
+  $plan_seen = ($testno >= 1 ? LATE_PLAN : EARLY_PLAN);
+  # If $testno > 0, we have an error ("too many tests run") that will be
+  # automatically dealt with later, so don't worry about it here.  If
+  # $plan_seen is true, we have an error due to a repeated plan, and that
+  # has already been dealt with above.  Otherwise, we have a valid "plan
+  # with SKIP" specification, and should report it as a particular kind
+  # of SKIP result.
+  if ($plan->directive && $testno == 0)
+    {
+      my $explanation = is_null_string ($plan->explanation) ?
+                        undef : "- " . $plan->explanation;
+      report "SKIP", $explanation;
+    }
+}
+
+sub handle_tap_bailout ($)
+{
+  my ($bailout, $msg) = ($_[0], "Bail out!");
+  $bailed_out = 1;
+  $msg .= " " . $bailout->explanation
+    unless is_null_string $bailout->explanation;
+  testsuite_error $msg;
+}
+
+sub extract_tap_comment ($)
+{
+  my $line = shift;
+  if (index ($line, $diag_string) == 0)
+    {
+      # Strip leading `$diag_string' from `$line'.
+      $line = substr ($line, length ($diag_string));
+      # And strip any leading and trailing whitespace left.
+      $line =~ s/(?:^\s*|\s*$)//g;
+      # Return what is left (if any).
+      return $line;
+    }
+  return "";
+}
+
+sub finish ()
+{
+  write_test_results;
+  close LOG or die "$ME: closing $log_file: $!\n";
+  exit 0;
+}
+
+sub main (@)
+{
+  setup_io;
+  setup_parser @_;
+
+  while (defined (my $cur = $parser->next))
+    {
+      # Verbatim copy any input line into the log file.
+      print $cur->raw . "\n";
+      # Parsing of TAP input should stop after a "Bail out!" directive.
+      next if $bailed_out;
+
+      if ($cur->is_plan)
+        {
+          handle_tap_plan ($cur);
+        }
+      elsif ($cur->is_test)
+        {
+          handle_tap_result ($cur);
+        }
+      elsif ($cur->is_bailout)
+        {
+          handle_tap_bailout ($cur);
+        }
+      elsif ($cfg{comments})
+        {
+          my $comment = extract_tap_comment ($cur->raw);
+          report "#", "$comment" if length $comment;
+       }
+    }
+  # A "Bail out!" directive should cause us to ignore any following TAP
+  # error, as well as a non-zero exit status from the TAP producer.
+  if (!$bailed_out)
+    {
+      if (!$plan_seen)
+        {
+          testsuite_error "missing test plan";
+        }
+      elsif ($parser->tests_planned != $parser->tests_run)
+        {
+          my ($planned, $run) = ($parser->tests_planned, $parser->tests_run);
+          my $bad_amount = $run > $planned ? "many" : "few";
+          testsuite_error (sprintf "too %s tests run (expected %d, got %d)",
+                                   $bad_amount, $planned, $run);
+        }
+      if (!$cfg{"ignore-exit"})
+        {
+          my $msg = get_test_exit_message ();
+          testsuite_error $msg if $msg;
+        }
+    }
+  finish;
+}
+
+# ----------- #
+#  Main code. #
+# ----------- #
+
+main @ARGV;
+
+# Local Variables:
+# perl-indent-level: 2
+# perl-continued-statement-offset: 2
+# perl-continued-brace-offset: 0
+# perl-brace-offset: 0
+# perl-brace-imaginary-offset: 0
+# perl-label-offset: -2
+# cperl-indent-level: 2
+# cperl-brace-offset: 0
+# cperl-continued-brace-offset: 0
+# cperl-label-offset: -2
+# cperl-extra-newline-before-brace: t
+# cperl-merge-trailing-else: nil
+# cperl-continued-statement-offset: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "my $VERSION = "
+# time-stamp-format: "'%:y-%02m-%02d.%02H'"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/tap-driver.sh b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/tap-driver.sh
new file mode 100755
index 0000000..ee61fc1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/tap-driver.sh
@@ -0,0 +1,651 @@
+#! /bin/sh
+# Copyright (C) 2011-2013 Free Software Foundation, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+scriptversion=2013-12-23.17; # UTC
+
+# Make unconditional expansion of undefined variables an error.  This
+# helps a lot in preventing typo-related bugs.
+set -u
+
+me=tap-driver.sh
+
+fatal ()
+{
+  echo "$me: fatal: $*" >&2
+  exit 1
+}
+
+usage_error ()
+{
+  echo "$me: $*" >&2
+  print_usage >&2
+  exit 2
+}
+
+print_usage ()
+{
+  cat <<END
+Usage:
+  tap-driver.sh --test-name=NAME --log-file=PATH --trs-file=PATH
+                [--expect-failure={yes|no}] [--color-tests={yes|no}]
+                [--enable-hard-errors={yes|no}] [--ignore-exit]
+                [--diagnostic-string=STRING] [--merge|--no-merge]
+                [--comments|--no-comments] [--] TEST-COMMAND
+The '--test-name', '--log-file' and '--trs-file' options are mandatory.
+END
+}
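+
+# Illustrative example (not consumed by this script): with the default
+# --expect-failure=no, a TAP stream such as
+#   1..2
+#   ok 1 - first check
+#   not ok 2 - second check # TODO known bug
+# is reported as one PASS and one XFAIL result.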
+
+# TODO: better error handling in option parsing (in particular, ensure
+# TODO: $log_file, $trs_file and $test_name are defined).
+test_name= # Used for reporting.
+log_file=  # Where to save the result and output of the test script.
+trs_file=  # Where to save the metadata of the test run.
+expect_failure=0
+color_tests=0
+merge=0
+ignore_exit=0
+comments=0
+diag_string='#'
+while test $# -gt 0; do
+  case $1 in
+  --help) print_usage; exit $?;;
+  --version) echo "$me $scriptversion"; exit $?;;
+  --test-name) test_name=$2; shift;;
+  --log-file) log_file=$2; shift;;
+  --trs-file) trs_file=$2; shift;;
+  --color-tests) color_tests=$2; shift;;
+  --expect-failure) expect_failure=$2; shift;;
+  --enable-hard-errors) shift;; # No-op.
+  --merge) merge=1;;
+  --no-merge) merge=0;;
+  --ignore-exit) ignore_exit=1;;
+  --comments) comments=1;;
+  --no-comments) comments=0;;
+  --diagnostic-string) diag_string=$2; shift;;
+  --) shift; break;;
+  -*) usage_error "invalid option: '$1'";;
+  esac
+  shift
+done
+
+test $# -gt 0 || usage_error "missing test command"
+
+case $expect_failure in
+  yes) expect_failure=1;;
+    *) expect_failure=0;;
+esac
+
+if test $color_tests = yes; then
+  init_colors='
+    color_map["red"]="" # Red.
+    color_map["grn"]="" # Green.
+    color_map["lgn"]="" # Light green.
+    color_map["blu"]="" # Blue.
+    color_map["mgn"]="" # Magenta.
+    color_map["std"]=""     # No color.
+    color_for_result["ERROR"] = "mgn"
+    color_for_result["PASS"]  = "grn"
+    color_for_result["XPASS"] = "red"
+    color_for_result["FAIL"]  = "red"
+    color_for_result["XFAIL"] = "lgn"
+    color_for_result["SKIP"]  = "blu"'
+else
+  init_colors=''
+fi
+
+# :; is there to work around a bug in bash 3.2 (and earlier) which
+# does not always set '$?' properly on redirection failure.
+# See the Autoconf manual for more details.
+:;{
+  (
+    # Ignore common signals (in this subshell only!), to avoid potential
+    # problems with Korn shells.  Some Korn shells are known to propagate
+    # to themselves signals that have killed a child process they were
+    # waiting for; this is done at least for SIGINT (and usually only for
+    # it, in truth).  Without the `trap' below, such a behaviour could
+    # cause a premature exit in the current subshell, e.g., in case the
+    # test command it runs gets terminated by a SIGINT.  Thus, the awk
+    # script we are piping into would never see the exit status it
+    # expects on its last input line (which is displayed below by the
+    # last `echo $?' statement), and would thus die reporting an internal
+    # error.
+    # For more information, see the Autoconf manual and the threads:
+    # <http://lists.gnu.org/archive/html/bug-autoconf/2011-09/msg00004.html>
+    # <http://mail.opensolaris.org/pipermail/ksh93-integration-discuss/2009-February/004121.html>
+    trap : 1 3 2 13 15
+    if test $merge -gt 0; then
+      exec 2>&1
+    else
+      exec 2>&3
+    fi
+    "$@"
+    echo $?
+  ) | LC_ALL=C ${AM_TAP_AWK-awk} \
+        -v me="$me" \
+        -v test_script_name="$test_name" \
+        -v log_file="$log_file" \
+        -v trs_file="$trs_file" \
+        -v expect_failure="$expect_failure" \
+        -v merge="$merge" \
+        -v ignore_exit="$ignore_exit" \
+        -v comments="$comments" \
+        -v diag_string="$diag_string" \
+'
+# TODO: the usages of "cat >&3" below could be optimized when using
+#       GNU awk, and/or on systems that support /dev/fd/.
+
+# Implementation note: in what follows, `result_obj` will be an
+# associative array that (partly) simulates a TAP result object
+# from the `TAP::Parser` perl module.
+
+## ----------- ##
+##  FUNCTIONS  ##
+## ----------- ##
+
+function fatal(msg)
+{
+  print me ": " msg | "cat >&2"
+  exit 1
+}
+
+function abort(where)
+{
+  fatal("internal error " where)
+}
+
+# Convert a boolean to a "yes"/"no" string.
+function yn(bool)
+{
+  return bool ? "yes" : "no";
+}
+
+function add_test_result(result)
+{
+  if (!test_results_index)
+    test_results_index = 0
+  test_results_list[test_results_index] = result
+  test_results_index += 1
+  test_results_seen[result] = 1;
+}
+
+# Whether the test script should be re-run by "make recheck".
+function must_recheck()
+{
+  for (k in test_results_seen)
+    if (k != "XFAIL" && k != "PASS" && k != "SKIP")
+      return 1
+  return 0
+}
+
+# Whether the content of the log file associated to this test should
+# be copied into the "global" test-suite.log.
+function copy_in_global_log()
+{
+  for (k in test_results_seen)
+    if (k != "PASS")
+      return 1
+  return 0
+}
+
+function get_global_test_result()
+{
+    if ("ERROR" in test_results_seen)
+      return "ERROR"
+    if ("FAIL" in test_results_seen || "XPASS" in test_results_seen)
+      return "FAIL"
+    all_skipped = 1
+    for (k in test_results_seen)
+      if (k != "SKIP")
+        all_skipped = 0
+    if (all_skipped)
+      return "SKIP"
+    return "PASS";
+}
+
+function stringify_result_obj(result_obj)
+{
+  if (result_obj["is_unplanned"] || result_obj["number"] != testno)
+    return "ERROR"
+
+  if (plan_seen == LATE_PLAN)
+    return "ERROR"
+
+  if (result_obj["directive"] == "TODO")
+    return result_obj["is_ok"] ? "XPASS" : "XFAIL"
+
+  if (result_obj["directive"] == "SKIP")
+    return result_obj["is_ok"] ? "SKIP" : COOKED_FAIL;
+
+  if (length(result_obj["directive"]))
+      abort("in function stringify_result_obj()")
+
+  return result_obj["is_ok"] ? COOKED_PASS : COOKED_FAIL
+}
+
+function decorate_result(result)
+{
+  color_name = color_for_result[result]
+  if (color_name)
+    return color_map[color_name] "" result "" color_map["std"]
+  # If we are not using colorized output, or if we do not know how
+  # to colorize the given result, we should return it unchanged.
+  return result
+}
+
+function report(result, details)
+{
+  if (result ~ /^(X?(PASS|FAIL)|SKIP|ERROR)/)
+    {
+      msg = ": " test_script_name
+      add_test_result(result)
+    }
+  else if (result == "#")
+    {
+      msg = " " test_script_name ":"
+    }
+  else
+    {
+      abort("in function report()")
+    }
+  if (length(details))
+    msg = msg " " details
+  # Output on console might be colorized.
+  print decorate_result(result) msg
+  # Log the result in the log file too, to help debugging (this is
+  # especially true when said result is a TAP error or "Bail out!").
+  print result msg | "cat >&3";
+}
+
+function testsuite_error(error_message)
+{
+  report("ERROR", "- " error_message)
+}
+
+function handle_tap_result()
+{
+  details = result_obj["number"];
+  if (length(result_obj["description"]))
+    details = details " " result_obj["description"]
+
+  if (plan_seen == LATE_PLAN)
+    {
+      details = details " # AFTER LATE PLAN";
+    }
+  else if (result_obj["is_unplanned"])
+    {
+       details = details " # UNPLANNED";
+    }
+  else if (result_obj["number"] != testno)
+    {
+       details = sprintf("%s # OUT-OF-ORDER (expecting %d)",
+                         details, testno);
+    }
+  else if (result_obj["directive"])
+    {
+      details = details " # " result_obj["directive"];
+      if (length(result_obj["explanation"]))
+        details = details " " result_obj["explanation"]
+    }
+
+  report(stringify_result_obj(result_obj), details)
+}
+
+# `skip_reason` should be empty whenever planned > 0.
+function handle_tap_plan(planned, skip_reason)
+{
+  planned += 0 # Avoid getting confused if, say, `planned` is "00"
+  if (length(skip_reason) && planned > 0)
+    abort("in function handle_tap_plan()")
+  if (plan_seen)
+    {
+      # Error, only one plan per stream is acceptable.
+      testsuite_error("multiple test plans")
+      return;
+    }
+  planned_tests = planned
+  # The TAP plan can come before or after *all* the TAP results; we speak
+  # respectively of an "early" or a "late" plan.  If we see the plan line
+  # after at least one TAP result has been seen, assume we have a late
+  # plan; in this case, any further test result seen after the plan will
+  # be flagged as an error.
+  plan_seen = (testno >= 1 ? LATE_PLAN : EARLY_PLAN)
+  # If testno > 0, we have an error ("too many tests run") that will be
+  # automatically dealt with later, so do not worry about it here.  If
+  # $plan_seen is true, we have an error due to a repeated plan, and that
+  # has already been dealt with above.  Otherwise, we have a valid "plan
+  # with SKIP" specification, and should report it as a particular kind
+  # of SKIP result.
+  if (planned == 0 && testno == 0)
+    {
+      if (length(skip_reason))
+        skip_reason = "- "  skip_reason;
+      report("SKIP", skip_reason);
+    }
+}
+
+function extract_tap_comment(line)
+{
+  if (index(line, diag_string) == 1)
+    {
+      # Strip leading `diag_string` from `line`.
+      line = substr(line, length(diag_string) + 1)
+      # And strip any leading and trailing whitespace left.
+      sub("^[ \t]*", "", line)
+      sub("[ \t]*$", "", line)
+      # Return what is left (if any).
+      return line;
+    }
+  return "";
+}
+
+# When this function is called, we know that line is a TAP result line,
+# so that it matches the (perl) RE "^(not )?ok\b".
+function setup_result_obj(line)
+{
+  # Get the result, and remove it from the line.
+  result_obj["is_ok"] = (substr(line, 1, 2) == "ok" ? 1 : 0)
+  sub("^(not )?ok[ \t]*", "", line)
+
+  # If the result has an explicit number, get it and strip it; otherwise,
+  # automatically assign the next progressive number to it.
+  if (line ~ /^[0-9]+$/ || line ~ /^[0-9]+[^a-zA-Z0-9_]/)
+    {
+      match(line, "^[0-9]+")
+      # The final `+ 0` is to normalize numbers with leading zeros.
+      result_obj["number"] = substr(line, 1, RLENGTH) + 0
+      line = substr(line, RLENGTH + 1)
+    }
+  else
+    {
+      result_obj["number"] = testno
+    }
+
+  if (plan_seen == LATE_PLAN)
+    # No further test results are acceptable after a "late" TAP plan
+    # has been seen.
+    result_obj["is_unplanned"] = 1
+  else if (plan_seen && testno > planned_tests)
+    result_obj["is_unplanned"] = 1
+  else
+    result_obj["is_unplanned"] = 0
+
+  # Strip trailing and leading whitespace.
+  sub("^[ \t]*", "", line)
+  sub("[ \t]*$", "", line)
+
+  # This will have to be corrected if we have a "TODO"/"SKIP" directive.
+  result_obj["description"] = line
+  result_obj["directive"] = ""
+  result_obj["explanation"] = ""
+
+  if (index(line, "#") == 0)
+    return # No possible directive, nothing more to do.
+
+  # Directives are case-insensitive.
+  rx = "[ \t]*#[ \t]*([tT][oO][dD][oO]|[sS][kK][iI][pP])[ \t]*"
+
+  # See whether we have the directive, and if yes, where.
+  pos = match(line, rx "$")
+  if (!pos)
+    pos = match(line, rx "[^a-zA-Z0-9_]")
+
+  # If there was no TAP directive, we have nothing more to do.
+  if (!pos)
+    return
+
+  # Let`s now see if the TAP directive has been escaped.  For example:
+  #  escaped:     ok \# SKIP
+  #  not escaped: ok \\# SKIP
+  #  escaped:     ok \\\\\# SKIP
+  #  not escaped: ok \ # SKIP
+  if (substr(line, pos, 1) == "#")
+    {
+      bslash_count = 0
+      for (i = pos; i > 1 && substr(line, i - 1, 1) == "\\"; i--)
+        bslash_count += 1
+      if (bslash_count % 2)
+        return # Directive was escaped.
+    }
+
+  # Strip the directive and its explanation (if any) from the test
+  # description.
+  result_obj["description"] = substr(line, 1, pos - 1)
+  # Now remove the test description from the line, that has been dealt
+  # with already.
+  line = substr(line, pos)
+  # Strip the directive, and save its value (normalized to upper case).
+  sub("^[ \t]*#[ \t]*", "", line)
+  result_obj["directive"] = toupper(substr(line, 1, 4))
+  line = substr(line, 5)
+  # Now get the explanation for the directive (if any), with leading
+  # and trailing whitespace removed.
+  sub("^[ \t]*", "", line)
+  sub("[ \t]*$", "", line)
+  result_obj["explanation"] = line
+}
+
+function get_test_exit_message(status)
+{
+  if (status == 0)
+    return ""
+  if (status !~ /^[1-9][0-9]*$/)
+    abort("getting exit status")
+  if (status < 127)
+    exit_details = ""
+  else if (status == 127)
+    exit_details = " (command not found?)"
+  else if (status >= 128 && status <= 255)
+    exit_details = sprintf(" (terminated by signal %d?)", status - 128)
+  else if (status > 256 && status <= 384)
+    # We used to report an "abnormal termination" here, but some Korn
+    # shells, when a child process die due to signal number n, can leave
+    # in $? an exit status of 256+n instead of the more standard 128+n.
+    # Apparently, both behaviours are allowed by POSIX (2008), so be
+    # prepared to handle them both.  See also Austin Group report ID
+    # 0000051 <http://www.austingroupbugs.net/view.php?id=51>
+    exit_details = sprintf(" (terminated by signal %d?)", status - 256)
+  else
+    # Never seen in practice.
+    exit_details = " (abnormal termination)"
+  return sprintf("exited with status %d%s", status, exit_details)
+}
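+
+# Example for get_test_exit_message() above (illustrative): a TAP producer
+# killed by SIGTERM normally leaves status 143 (128 + 15), which is reported
+# as "exited with status 143 (terminated by signal 15?)".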
+
+function write_test_results()
+{
+  print ":global-test-result: " get_global_test_result() > trs_file
+  print ":recheck: "  yn(must_recheck()) > trs_file
+  print ":copy-in-global-log: " yn(copy_in_global_log()) > trs_file
+  for (i = 0; i < test_results_index; i += 1)
+    print ":test-result: " test_results_list[i] > trs_file
+  close(trs_file);
+}
+
+BEGIN {
+
+## ------- ##
+##  SETUP  ##
+## ------- ##
+
+'"$init_colors"'
+
+# Properly initialized once the TAP plan is seen.
+planned_tests = 0
+
+COOKED_PASS = expect_failure ? "XPASS": "PASS";
+COOKED_FAIL = expect_failure ? "XFAIL": "FAIL";
+
+# Enumeration-like constants to remember which kind of plan (if any)
+# has been seen.  It is important that NO_PLAN evaluates "false" as
+# a boolean.
+NO_PLAN = 0
+EARLY_PLAN = 1
+LATE_PLAN = 2
+
+testno = 0     # Number of test results seen so far.
+bailed_out = 0 # Whether a "Bail out!" directive has been seen.
+
+# Whether the TAP plan has been seen or not, and if yes, which kind
+# it is ("early" is seen before any test result, "late" otherwise).
+plan_seen = NO_PLAN
+
+## --------- ##
+##  PARSING  ##
+## --------- ##
+
+is_first_read = 1
+
+while (1)
+  {
+    # Contortions required so that we are able to read the exit status
+    # from the last input line.
+    st = getline
+    if (st < 0) # I/O error.
+      fatal("I/O error while reading from input stream")
+    else if (st == 0) # End-of-input
+      {
+        if (is_first_read)
+          abort("in input loop: only one input line")
+        break
+      }
+    if (is_first_read)
+      {
+        is_first_read = 0
+        nextline = $0
+        continue
+      }
+    else
+      {
+        curline = nextline
+        nextline = $0
+        $0 = curline
+      }
+    # Copy any input line verbatim into the log file.
+    print | "cat >&3"
+    # Parsing of TAP input should stop after a "Bail out!" directive.
+    if (bailed_out)
+      continue
+
+    # TAP test result.
+    if ($0 ~ /^(not )?ok$/ || $0 ~ /^(not )?ok[^a-zA-Z0-9_]/)
+      {
+        testno += 1
+        setup_result_obj($0)
+        handle_tap_result()
+      }
+    # TAP plan (normal or "SKIP" without explanation).
+    else if ($0 ~ /^1\.\.[0-9]+[ \t]*$/)
+      {
+        # The next two lines will put the number of planned tests in $0.
+        sub("^1\\.\\.", "")
+        sub("[^0-9]*$", "")
+        handle_tap_plan($0, "")
+        continue
+      }
+    # TAP "SKIP" plan, with an explanation.
+    else if ($0 ~ /^1\.\.0+[ \t]*#/)
+      {
+        # The next lines will put the skip explanation in $0, stripping
+        # any leading and trailing whitespace.  This is a little more
+        # tricky in truth, since we want to also strip a potential leading
+        # "SKIP" string from the message.
+        sub("^[^#]*#[ \t]*(SKIP[: \t][ \t]*)?", "")
+        sub("[ \t]*$", "");
+        handle_tap_plan(0, $0)
+      }
+    # "Bail out!" magic.
+    # Older versions of prove and TAP::Harness (e.g., 3.17) did not
+    # recognize a "Bail out!" directive when preceded by leading
+    # whitespace, but more modern versions (e.g., 3.23) do.  So we
+    # emulate the latter, "more modern" behaviour.
+    else if ($0 ~ /^[ \t]*Bail out!/)
+      {
+        bailed_out = 1
+        # Get the bailout message (if any), with leading and trailing
+        # whitespace stripped.  The message remains stored in `$0`.
+        sub("^[ \t]*Bail out![ \t]*", "");
+        sub("[ \t]*$", "");
+        # Format the error message for the bailout.
+        bailout_message = "Bail out!"
+        if (length($0))
+          bailout_message = bailout_message " " $0
+        testsuite_error(bailout_message)
+      }
+    # Maybe we have to look for diagnostic comments too.
+    else if (comments != 0)
+      {
+        comment = extract_tap_comment($0);
+        if (length(comment))
+          report("#", comment);
+      }
+  }
+
+## -------- ##
+##  FINISH  ##
+## -------- ##
+
+# A "Bail out!" directive should cause us to ignore any following TAP
+# error, as well as a non-zero exit status from the TAP producer.
+if (!bailed_out)
+  {
+    if (!plan_seen)
+      {
+        testsuite_error("missing test plan")
+      }
+    else if (planned_tests != testno)
+      {
+        bad_amount = testno > planned_tests ? "many" : "few"
+        testsuite_error(sprintf("too %s tests run (expected %d, got %d)",
+                                bad_amount, planned_tests, testno))
+      }
+    if (!ignore_exit)
+      {
+        # Fetch exit status from the last line.
+        exit_message = get_test_exit_message(nextline)
+        if (exit_message)
+          testsuite_error(exit_message)
+      }
+  }
+
+write_test_results()
+
+exit 0
+
+} # End of "BEGIN" block.
+'
+
+# TODO: document that we consume the file descriptor 3 :-(
+} 3>"$log_file"
+
+test $? -eq 0 || fatal "I/O or internal error"
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/test-driver b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/test-driver
new file mode 100755
index 0000000..d306056
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/test-driver
@@ -0,0 +1,139 @@
+#! /bin/sh
+# test-driver - basic testsuite driver script.
+
+scriptversion=2013-07-13.22; # UTC
+
+# Copyright (C) 2011-2013 Free Software Foundation, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+# Make unconditional expansion of undefined variables an error.  This
+# helps a lot in preventing typo-related bugs.
+set -u
+
+usage_error ()
+{
+  echo "$0: $*" >&2
+  print_usage >&2
+  exit 2
+}
+
+print_usage ()
+{
+  cat <<END
+Usage:
+  test-driver --test-name=NAME --log-file=PATH --trs-file=PATH
+              [--expect-failure={yes|no}] [--color-tests={yes|no}]
+              [--enable-hard-errors={yes|no}] [--]
+              TEST-SCRIPT [TEST-SCRIPT-ARGUMENTS]
+The '--test-name', '--log-file' and '--trs-file' options are mandatory.
+END
+}
+
+test_name= # Used for reporting.
+log_file=  # Where to save the output of the test script.
+trs_file=  # Where to save the metadata of the test run.
+expect_failure=no
+color_tests=no
+enable_hard_errors=yes
+while test $# -gt 0; do
+  case $1 in
+  --help) print_usage; exit $?;;
+  --version) echo "test-driver $scriptversion"; exit $?;;
+  --test-name) test_name=$2; shift;;
+  --log-file) log_file=$2; shift;;
+  --trs-file) trs_file=$2; shift;;
+  --color-tests) color_tests=$2; shift;;
+  --expect-failure) expect_failure=$2; shift;;
+  --enable-hard-errors) enable_hard_errors=$2; shift;;
+  --) shift; break;;
+  -*) usage_error "invalid option: '$1'";;
+   *) break;;
+  esac
+  shift
+done
+
+missing_opts=
+test x"$test_name" = x && missing_opts="$missing_opts --test-name"
+test x"$log_file"  = x && missing_opts="$missing_opts --log-file"
+test x"$trs_file"  = x && missing_opts="$missing_opts --trs-file"
+if test x"$missing_opts" != x; then
+  usage_error "the following mandatory options are missing:$missing_opts"
+fi
+
+if test $# -eq 0; then
+  usage_error "missing argument"
+fi
+
+if test $color_tests = yes; then
+  # Keep this in sync with 'lib/am/check.am:$(am__tty_colors)'.
+  red='' # Red.
+  grn='' # Green.
+  lgn='' # Light green.
+  blu='' # Blue.
+  mgn='' # Magenta.
+  std=''     # No color.
+else
+  red= grn= lgn= blu= mgn= std=
+fi
+
+do_exit='rm -f $log_file $trs_file; (exit $st); exit $st'
+trap "st=129; $do_exit" 1
+trap "st=130; $do_exit" 2
+trap "st=141; $do_exit" 13
+trap "st=143; $do_exit" 15
+
+# Test script is run here.
+"$@" >$log_file 2>&1
+estatus=$?
+if test $enable_hard_errors = no && test $estatus -eq 99; then
+  estatus=1
+fi
+
+case $estatus:$expect_failure in
+  0:yes) col=$red res=XPASS recheck=yes gcopy=yes;;
+  0:*)   col=$grn res=PASS  recheck=no  gcopy=no;;
+  77:*)  col=$blu res=SKIP  recheck=no  gcopy=yes;;
+  99:*)  col=$mgn res=ERROR recheck=yes gcopy=yes;;
+  *:yes) col=$lgn res=XFAIL recheck=no  gcopy=yes;;
+  *:*)   col=$red res=FAIL  recheck=yes gcopy=yes;;
+esac
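+
+# For example, a test script that exits 0 is reported as PASS (or XPASS when
+# --expect-failure=yes), exit 77 as SKIP, exit 99 as ERROR (downgraded to an
+# ordinary failure when --enable-hard-errors=no), and any other non-zero
+# status as FAIL (or XFAIL when --expect-failure=yes).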
+
+# Report outcome to console.
+echo "${col}${res}${std}: $test_name"
+
+# Register the test result, and other relevant metadata.
+echo ":test-result: $res" > $trs_file
+echo ":global-test-result: $res" >> $trs_file
+echo ":recheck: $recheck" >> $trs_file
+echo ":copy-in-global-log: $gcopy" >> $trs_file
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/texinfo.tex b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/texinfo.tex
new file mode 100644
index 0000000..85f184c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/texinfo.tex
@@ -0,0 +1,10079 @@
+% texinfo.tex -- TeX macros to handle Texinfo files.
+% 
+% Load plain if necessary, i.e., if running under initex.
+\expandafter\ifx\csname fmtname\endcsname\relax\input plain\fi
+%
+\def\texinfoversion{2013-02-01.11}
+%
+% Copyright 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1994, 1995,
+% 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+% 2007, 2008, 2009, 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
+%
+% This texinfo.tex file is free software: you can redistribute it and/or
+% modify it under the terms of the GNU General Public License as
+% published by the Free Software Foundation, either version 3 of the
+% License, or (at your option) any later version.
+%
+% This texinfo.tex file is distributed in the hope that it will be
+% useful, but WITHOUT ANY WARRANTY; without even the implied warranty
+% of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+% General Public License for more details.
+%
+% You should have received a copy of the GNU General Public License
+% along with this program.  If not, see <http://www.gnu.org/licenses/>.
+%
+% As a special exception, when this file is read by TeX when processing
+% a Texinfo source document, you may use the result without
+% restriction. This Exception is an additional permission under section 7
+% of the GNU General Public License, version 3 ("GPLv3").
+%
+% Please try the latest version of texinfo.tex before submitting bug
+% reports; you can get the latest version from:
+%   http://ftp.gnu.org/gnu/texinfo/ (the Texinfo release area), or
+%   http://ftpmirror.gnu.org/texinfo/ (same, via a mirror), or
+%   http://www.gnu.org/software/texinfo/ (the Texinfo home page)
+% The texinfo.tex in any given distribution could well be out
+% of date, so if that's what you're using, please check.
+%
+% Send bug reports to bug-texinfo@gnu.org.  Please include a
+% complete document in each bug report with which we can reproduce the
+% problem.  Patches are, of course, greatly appreciated.
+%
+% To process a Texinfo manual with TeX, it's most reliable to use the
+% texi2dvi shell script that comes with the distribution.  For a simple
+% manual foo.texi, however, you can get away with this:
+%   tex foo.texi
+%   texindex foo.??
+%   tex foo.texi
+%   tex foo.texi
+%   dvips foo.dvi -o  # or whatever; this makes foo.ps.
+% The extra TeX runs get the cross-reference information correct.
+% Sometimes one run after texindex suffices, and sometimes you need more
+% than two; texi2dvi does it as many times as necessary.
+%
+% It is possible to adapt texinfo.tex for other languages, to some
+% extent.  You can get the existing language-specific files from the
+% full Texinfo distribution.
+%
+% The GNU Texinfo home page is http://www.gnu.org/software/texinfo.
+
+
+\message{Loading texinfo [version \texinfoversion]:}
+
+% If in a .fmt file, print the version number
+% and turn on active characters that we couldn't do earlier because
+% they might have appeared in the input file name.
+\everyjob{\message{[Texinfo version \texinfoversion]}%
+  \catcode`+=\active \catcode`\_=\active}
+
+\chardef\other=12
+
+% We never want plain's \outer definition of \+ in Texinfo.
+% For @tex, we can use \tabalign.
+\let\+ = \relax
+
+% Save some plain tex macros whose names we will redefine.
+\let\ptexb=\b
+\let\ptexbullet=\bullet
+\let\ptexc=\c
+\let\ptexcomma=\,
+\let\ptexdot=\.
+\let\ptexdots=\dots
+\let\ptexend=\end
+\let\ptexequiv=\equiv
+\let\ptexexclam=\!
+\let\ptexfootnote=\footnote
+\let\ptexgtr=>
+\let\ptexhat=^
+\let\ptexi=\i
+\let\ptexindent=\indent
+\let\ptexinsert=\insert
+\let\ptexlbrace=\{
+\let\ptexless=<
+\let\ptexnewwrite\newwrite
+\let\ptexnoindent=\noindent
+\let\ptexplus=+
+\let\ptexraggedright=\raggedright
+\let\ptexrbrace=\}
+\let\ptexslash=\/
+\let\ptexstar=\*
+\let\ptext=\t
+\let\ptextop=\top
+{\catcode`\'=\active \global\let\ptexquoteright'}% active in plain's math mode
+
+% If this character appears in an error message or help string, it
+% starts a new line in the output.
+\newlinechar = `^^J
+
+% Use TeX 3.0's \inputlineno to get the line number, for better error
+% messages, but if we're using an old version of TeX, don't do anything.
+%
+\ifx\inputlineno\thisisundefined
+  \let\linenumber = \empty % Pre-3.0.
+\else
+  \def\linenumber{l.\the\inputlineno:\space}
+\fi
+
+% Set up fixed words for English if not already set.
+\ifx\putwordAppendix\undefined  \gdef\putwordAppendix{Appendix}\fi
+\ifx\putwordChapter\undefined   \gdef\putwordChapter{Chapter}\fi
+\ifx\putworderror\undefined     \gdef\putworderror{error}\fi
+\ifx\putwordfile\undefined      \gdef\putwordfile{file}\fi
+\ifx\putwordin\undefined        \gdef\putwordin{in}\fi
+\ifx\putwordIndexIsEmpty\undefined       \gdef\putwordIndexIsEmpty{(Index is empty)}\fi
+\ifx\putwordIndexNonexistent\undefined   \gdef\putwordIndexNonexistent{(Index is nonexistent)}\fi
+\ifx\putwordInfo\undefined      \gdef\putwordInfo{Info}\fi
+\ifx\putwordInstanceVariableof\undefined \gdef\putwordInstanceVariableof{Instance Variable of}\fi
+\ifx\putwordMethodon\undefined  \gdef\putwordMethodon{Method on}\fi
+\ifx\putwordNoTitle\undefined   \gdef\putwordNoTitle{No Title}\fi
+\ifx\putwordof\undefined        \gdef\putwordof{of}\fi
+\ifx\putwordon\undefined        \gdef\putwordon{on}\fi
+\ifx\putwordpage\undefined      \gdef\putwordpage{page}\fi
+\ifx\putwordsection\undefined   \gdef\putwordsection{section}\fi
+\ifx\putwordSection\undefined   \gdef\putwordSection{Section}\fi
+\ifx\putwordsee\undefined       \gdef\putwordsee{see}\fi
+\ifx\putwordSee\undefined       \gdef\putwordSee{See}\fi
+\ifx\putwordShortTOC\undefined  \gdef\putwordShortTOC{Short Contents}\fi
+\ifx\putwordTOC\undefined       \gdef\putwordTOC{Table of Contents}\fi
+%
+\ifx\putwordMJan\undefined \gdef\putwordMJan{January}\fi
+\ifx\putwordMFeb\undefined \gdef\putwordMFeb{February}\fi
+\ifx\putwordMMar\undefined \gdef\putwordMMar{March}\fi
+\ifx\putwordMApr\undefined \gdef\putwordMApr{April}\fi
+\ifx\putwordMMay\undefined \gdef\putwordMMay{May}\fi
+\ifx\putwordMJun\undefined \gdef\putwordMJun{June}\fi
+\ifx\putwordMJul\undefined \gdef\putwordMJul{July}\fi
+\ifx\putwordMAug\undefined \gdef\putwordMAug{August}\fi
+\ifx\putwordMSep\undefined \gdef\putwordMSep{September}\fi
+\ifx\putwordMOct\undefined \gdef\putwordMOct{October}\fi
+\ifx\putwordMNov\undefined \gdef\putwordMNov{November}\fi
+\ifx\putwordMDec\undefined \gdef\putwordMDec{December}\fi
+%
+\ifx\putwordDefmac\undefined    \gdef\putwordDefmac{Macro}\fi
+\ifx\putwordDefspec\undefined   \gdef\putwordDefspec{Special Form}\fi
+\ifx\putwordDefvar\undefined    \gdef\putwordDefvar{Variable}\fi
+\ifx\putwordDefopt\undefined    \gdef\putwordDefopt{User Option}\fi
+\ifx\putwordDeffunc\undefined   \gdef\putwordDeffunc{Function}\fi
+
+% Since the category of space is not known, we have to be careful.
+\chardef\spacecat = 10
+\def\spaceisspace{\catcode`\ =\spacecat}
+
+% sometimes characters are active, so we need control sequences.
+\chardef\ampChar   = `\&
+\chardef\colonChar = `\:
+\chardef\commaChar = `\,
+\chardef\dashChar  = `\-
+\chardef\dotChar   = `\.
+\chardef\exclamChar= `\!
+\chardef\hashChar  = `\#
+\chardef\lquoteChar= `\`
+\chardef\questChar = `\?
+\chardef\rquoteChar= `\'
+\chardef\semiChar  = `\;
+\chardef\slashChar = `\/
+\chardef\underChar = `\_
+
+% Ignore a token.
+%
+\def\gobble#1{}
+
+% The following is used inside several \edef's.
+\def\makecsname#1{\expandafter\noexpand\csname#1\endcsname}
+
+% Hyphenation fixes.
+\hyphenation{
+  Flor-i-da Ghost-script Ghost-view Mac-OS Post-Script
+  ap-pen-dix bit-map bit-maps
+  data-base data-bases eshell fall-ing half-way long-est man-u-script
+  man-u-scripts mini-buf-fer mini-buf-fers over-view par-a-digm
+  par-a-digms rath-er rec-tan-gu-lar ro-bot-ics se-vere-ly set-up spa-ces
+  spell-ing spell-ings
+  stand-alone strong-est time-stamp time-stamps which-ever white-space
+  wide-spread wrap-around
+}
+
+% Margin to add to right of even pages, to left of odd pages.
+\newdimen\bindingoffset
+\newdimen\normaloffset
+\newdimen\pagewidth \newdimen\pageheight
+
+% For a final copy, take out the rectangles
+% that mark overfull boxes (in case you have decided
+% that the text looks ok even though it passes the margin).
+%
+\def\finalout{\overfullrule=0pt }
+
+% Sometimes it is convenient to have everything in the transcript file
+% and nothing on the terminal.  We don't just call \tracingall here,
+% since that produces some useless output on the terminal.  We also make
+% some effort to order the tracing commands to reduce output in the log
+% file; cf. trace.sty in LaTeX.
+%
+\def\gloggingall{\begingroup \globaldefs = 1 \loggingall \endgroup}%
+\def\loggingall{%
+  \tracingstats2
+  \tracingpages1
+  \tracinglostchars2  % 2 gives us more in etex
+  \tracingparagraphs1
+  \tracingoutput1
+  \tracingmacros2
+  \tracingrestores1
+  \showboxbreadth\maxdimen \showboxdepth\maxdimen
+  \ifx\eTeXversion\thisisundefined\else % etex gives us more logging
+    \tracingscantokens1
+    \tracingifs1
+    \tracinggroups1
+    \tracingnesting2
+    \tracingassigns1
+  \fi
+  \tracingcommands3  % 3 gives us more in etex
+  \errorcontextlines16
+}%
+
+% @errormsg{MSG}.  Do the index-like expansions on MSG, but if things
+% aren't perfect, it's not the end of the world, being an error message,
+% after all.
+% 
+\def\errormsg{\begingroup \indexnofonts \doerrormsg}
+\def\doerrormsg#1{\errmessage{#1}}
+
+% add check for \lastpenalty to plain's definitions.  If the last thing
+% we did was a \nobreak, we don't want to insert more space.
+%
+\def\smallbreak{\ifnum\lastpenalty<10000\par\ifdim\lastskip<\smallskipamount
+  \removelastskip\penalty-50\smallskip\fi\fi}
+\def\medbreak{\ifnum\lastpenalty<10000\par\ifdim\lastskip<\medskipamount
+  \removelastskip\penalty-100\medskip\fi\fi}
+\def\bigbreak{\ifnum\lastpenalty<10000\par\ifdim\lastskip<\bigskipamount
+  \removelastskip\penalty-200\bigskip\fi\fi}
+
+% Do @cropmarks to get crop marks.
+%
+\newif\ifcropmarks
+\let\cropmarks = \cropmarkstrue
+%
+% Dimensions to add cropmarks at corners.
+% Added by P. A. MacKay, 12 Nov. 1986
+%
+\newdimen\outerhsize \newdimen\outervsize % set by the paper size routines
+\newdimen\cornerlong  \cornerlong=1pc
+\newdimen\cornerthick \cornerthick=.3pt
+\newdimen\topandbottommargin \topandbottommargin=.75in
+
+% Output a mark which sets \thischapter, \thissection and \thiscolor.
+% We dump everything together because we only have one kind of mark.
+% This works because we only use \botmark / \topmark, not \firstmark.
+%
+% A mark contains a subexpression of the \ifcase ... \fi construct.
+% \get*marks macros below extract the needed part using \ifcase.
+%
+% Another complication is to let the user choose whether \thischapter
+% (\thissection) refers to the chapter (section) in effect at the top
+% of a page, or that at the bottom of a page.  The solution is
+% described on page 260 of The TeXbook.  It involves outputting two
+% marks for the sectioning macros, one before the section break, and
+% one after.  I won't pretend I can describe this better than DEK...
+\def\domark{%
+  \toks0=\expandafter{\lastchapterdefs}%
+  \toks2=\expandafter{\lastsectiondefs}%
+  \toks4=\expandafter{\prevchapterdefs}%
+  \toks6=\expandafter{\prevsectiondefs}%
+  \toks8=\expandafter{\lastcolordefs}%
+  \mark{%
+                   \the\toks0 \the\toks2
+      \noexpand\or \the\toks4 \the\toks6
+    \noexpand\else \the\toks8
+  }%
+}
+% \topmark doesn't work for the very first chapter (after the title
+% page or the contents), so we use \firstmark there -- this gets us
+% the mark with the chapter defs, unless the user sneaks in, e.g.,
+% @setcolor (or @url, or @link, etc.) between @contents and the very
+% first @chapter.
+\def\gettopheadingmarks{%
+  \ifcase0\topmark\fi
+  \ifx\thischapter\empty \ifcase0\firstmark\fi \fi
+}
+\def\getbottomheadingmarks{\ifcase1\botmark\fi}
+\def\getcolormarks{\ifcase2\topmark\fi}
+
+% Avoid "undefined control sequence" errors.
+\def\lastchapterdefs{}
+\def\lastsectiondefs{}
+\def\prevchapterdefs{}
+\def\prevsectiondefs{}
+\def\lastcolordefs{}
+
+% Main output routine.
+\chardef\PAGE = 255
+\output = {\onepageout{\pagecontents\PAGE}}
+
+\newbox\headlinebox
+\newbox\footlinebox
+
+% \onepageout takes a vbox as an argument.  Note that \pagecontents
+% does insertions, but you have to call it yourself.
+\def\onepageout#1{%
+  \ifcropmarks \hoffset=0pt \else \hoffset=\normaloffset \fi
+  %
+  \ifodd\pageno  \advance\hoffset by \bindingoffset
+  \else \advance\hoffset by -\bindingoffset\fi
+  %
+  % Do this outside of the \shipout so @code etc. will be expanded in
+  % the headline as they should be, not taken literally (outputting ''code).
+  \ifodd\pageno \getoddheadingmarks \else \getevenheadingmarks \fi
+  \setbox\headlinebox = \vbox{\let\hsize=\pagewidth \makeheadline}%
+  \ifodd\pageno \getoddfootingmarks \else \getevenfootingmarks \fi
+  \setbox\footlinebox = \vbox{\let\hsize=\pagewidth \makefootline}%
+  %
+  {%
+    % Have to do this stuff outside the \shipout because we want it to
+    % take effect in \write's, yet the group defined by the \vbox ends
+    % before the \shipout runs.
+    %
+    \indexdummies         % don't expand commands in the output.
+    \normalturnoffactive  % \ in index entries must not stay \, e.g., if
+               % the page break happens to be in the middle of an example.
+               % We don't want .vr (or whatever) entries like this:
+               % \entry{{\tt \indexbackslash }acronym}{32}{\code {\acronym}}
+               % "\acronym" won't work when it's read back in;
+               % it needs to be
+               % {\code {{\tt \backslashcurfont }acronym}
+    \shipout\vbox{%
+      % Do this early so pdf references go to the beginning of the page.
+      \ifpdfmakepagedest \pdfdest name{\the\pageno} xyz\fi
+      %
+      \ifcropmarks \vbox to \outervsize\bgroup
+        \hsize = \outerhsize
+        \vskip-\topandbottommargin
+        \vtop to0pt{%
+          \line{\ewtop\hfil\ewtop}%
+          \nointerlineskip
+          \line{%
+            \vbox{\moveleft\cornerthick\nstop}%
+            \hfill
+            \vbox{\moveright\cornerthick\nstop}%
+          }%
+          \vss}%
+        \vskip\topandbottommargin
+        \line\bgroup
+          \hfil % center the page within the outer (page) hsize.
+          \ifodd\pageno\hskip\bindingoffset\fi
+          \vbox\bgroup
+      \fi
+      %
+      \unvbox\headlinebox
+      \pagebody{#1}%
+      \ifdim\ht\footlinebox > 0pt
+        % Only leave this space if the footline is nonempty.
+        % (We lessened \vsize for it in \oddfootingyyy.)
+        % The \baselineskip=24pt in plain's \makefootline has no effect.
+        \vskip 24pt
+        \unvbox\footlinebox
+      \fi
+      %
+      \ifcropmarks
+          \egroup % end of \vbox\bgroup
+        \hfil\egroup % end of (centering) \line\bgroup
+        \vskip\topandbottommargin plus1fill minus1fill
+        \boxmaxdepth = \cornerthick
+        \vbox to0pt{\vss
+          \line{%
+            \vbox{\moveleft\cornerthick\nsbot}%
+            \hfill
+            \vbox{\moveright\cornerthick\nsbot}%
+          }%
+          \nointerlineskip
+          \line{\ewbot\hfil\ewbot}%
+        }%
+      \egroup % \vbox from first cropmarks clause
+      \fi
+    }% end of \shipout\vbox
+  }% end of group with \indexdummies
+  \advancepageno
+  \ifnum\outputpenalty>-20000 \else\dosupereject\fi
+}
+
+\newinsert\margin \dimen\margin=\maxdimen
+
+\def\pagebody#1{\vbox to\pageheight{\boxmaxdepth=\maxdepth #1}}
+{\catcode`\@ =11
+\gdef\pagecontents#1{\ifvoid\topins\else\unvbox\topins\fi
+% marginal hacks, juha@viisa.uucp (Juha Takala)
+\ifvoid\margin\else % marginal info is present
+  \rlap{\kern\hsize\vbox to\z@{\kern1pt\box\margin \vss}}\fi
+\dimen@=\dp#1\relax \unvbox#1\relax
+\ifvoid\footins\else\vskip\skip\footins\footnoterule \unvbox\footins\fi
+\ifr@ggedbottom \kern-\dimen@ \vfil \fi}
+}
+
+% Here are the rules for the cropmarks.  Note that they are
+% offset so that the space between them is truly \outerhsize or \outervsize
+% (P. A. MacKay, 12 November, 1986)
+%
+\def\ewtop{\vrule height\cornerthick depth0pt width\cornerlong}
+\def\nstop{\vbox
+  {\hrule height\cornerthick depth\cornerlong width\cornerthick}}
+\def\ewbot{\vrule height0pt depth\cornerthick width\cornerlong}
+\def\nsbot{\vbox
+  {\hrule height\cornerlong depth\cornerthick width\cornerthick}}
+
+% Parse an argument, then pass it to #1.  The argument is the rest of
+% the input line (except we remove a trailing comment).  #1 should be a
+% macro which expects an ordinary undelimited TeX argument.
+%
+\def\parsearg{\parseargusing{}}
+\def\parseargusing#1#2{%
+  \def\argtorun{#2}%
+  \begingroup
+    \obeylines
+    \spaceisspace
+    #1%
+    \parseargline\empty% Insert the \empty token, see \finishparsearg below.
+}
+
+{\obeylines %
+  \gdef\parseargline#1^^M{%
+    \endgroup % End of the group started in \parsearg.
+    \argremovecomment #1\comment\ArgTerm%
+  }%
+}
+
+% First remove any @comment, then any @c comment.
+\def\argremovecomment#1\comment#2\ArgTerm{\argremovec #1\c\ArgTerm}
+\def\argremovec#1\c#2\ArgTerm{\argcheckspaces#1\^^M\ArgTerm}
+
+% Each occurrence of `\^^M' or `<space>\^^M' is replaced by a single space.
+%
+% \argremovec might leave us with trailing space, e.g.,
+%    @end itemize  @c foo
+% This space token undergoes the same procedure and is eventually removed
+% by \finishparsearg.
+%
+\def\argcheckspaces#1\^^M{\argcheckspacesX#1\^^M \^^M}
+\def\argcheckspacesX#1 \^^M{\argcheckspacesY#1\^^M}
+\def\argcheckspacesY#1\^^M#2\^^M#3\ArgTerm{%
+  \def\temp{#3}%
+  \ifx\temp\empty
+    % Do not use \next, perhaps the caller of \parsearg uses it; reuse \temp:
+    \let\temp\finishparsearg
+  \else
+    \let\temp\argcheckspaces
+  \fi
+  % Put the space token in:
+  \temp#1 #3\ArgTerm
+}
+
+% If a _delimited_ argument is enclosed in braces, they get stripped; so
+% to get _exactly_ the rest of the line, we have to prevent such a situation.
+% We prepended an \empty token at the very beginning and we expand it now,
+% just before passing control to \argtorun.
+% (Similarly, we have to think about #3 of \argcheckspacesY above: it is
+% either the null string, or it ends with \^^M---thus there is no danger
+% that a pair of braces would be stripped.)
+%
+% But first, we have to remove the trailing space token.
+%
+\def\finishparsearg#1 \ArgTerm{\expandafter\argtorun\expandafter{#1}}
+
+% \parseargdef\foo{...}
+%	is roughly equivalent to
+% \def\foo{\parsearg\Xfoo}
+% \def\Xfoo#1{...}
+%
+% Actually, I use \csname\string\foo\endcsname, i.e., \\foo, as it is my
+% favourite TeX trick.  --kasal, 16nov03
+
+\def\parseargdef#1{%
+  \expandafter \doparseargdef \csname\string#1\endcsname #1%
+}
+\def\doparseargdef#1#2{%
+  \def#2{\parsearg#1}%
+  \def#1##1%
+}
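+%
+% For example, the definition of @sp further below,
+%   \parseargdef\sp{\vskip #1\baselineskip}
+% turns `@sp 2' into a call with #1 = `2' (the rest of the input line).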
+
+% Several utility definitions with active space:
+{
+  \obeyspaces
+  \gdef\obeyedspace{ }
+
+  % Make each space character in the input produce a normal interword
+  % space in the output.  Don't allow a line break at this space, as this
+  % is used only in environments like @example, where each line of input
+  % should produce a line of output anyway.
+  %
+  \gdef\sepspaces{\obeyspaces\let =\tie}
+
+  % If an index command is used in an @example environment, any spaces
+  % therein should become regular spaces in the raw index file, not the
+  % expansion of \tie (\leavevmode \penalty \@M \ ).
+  \gdef\unsepspaces{\let =\space}
+}
+
+
+\def\flushcr{\ifx\par\lisppar \def\next##1{}\else \let\next=\relax \fi \next}
+
+% Define the framework for environments in texinfo.tex.  It's used like this:
+%
+%   \envdef\foo{...}
+%   \def\Efoo{...}
+%
+% It's the responsibility of \envdef to insert \begingroup before the
+% actual body; @end closes the group after calling \Efoo.  \envdef also
+% defines \thisenv, so the current environment is known; @end checks
+% whether the environment name matches.  The \checkenv macro can also be
+% used to check whether the current environment is the one expected.
+%
+% Non-false conditionals (@iftex, @ifset) don't fit into this, so they
+% are not treated as environments; they don't open a group.  (The
+% implementation of @end takes care not to call \endgroup in this
+% special case.)
+
+
+% At run-time, environments start with this:
+\def\startenvironment#1{\begingroup\def\thisenv{#1}}
+% initialize
+\let\thisenv\empty
+
+% ... but they get defined via ``\envdef\foo{...}'':
+\long\def\envdef#1#2{\def#1{\startenvironment#1#2}}
+\def\envparseargdef#1#2{\parseargdef#1{\startenvironment#1#2}}
+
+% Check whether we're in the right environment:
+\def\checkenv#1{%
+  \def\temp{#1}%
+  \ifx\thisenv\temp
+  \else
+    \badenverr
+  \fi
+}
+
+% Environment mismatch, #1 expected:
+\def\badenverr{%
+  \errhelp = \EMsimple
+  \errmessage{This command can appear only \inenvironment\temp,
+    not \inenvironment\thisenv}%
+}
+\def\inenvironment#1{%
+  \ifx#1\empty
+    outside of any environment%
+  \else
+    in environment \expandafter\string#1%
+  \fi
+}
+
+% @end foo executes the definition of \Efoo.
+% But first, it executes a specialized version of \checkenv
+%
+\parseargdef\end{%
+  \if 1\csname iscond.#1\endcsname
+  \else
+    % The general wording of \badenverr may not be ideal.
+    \expandafter\checkenv\csname#1\endcsname
+    \csname E#1\endcsname
+    \endgroup
+  \fi
+}
+
+\newhelp\EMsimple{Press RETURN to continue.}
+
+
+% Be sure we're in horizontal mode when doing a tie, since we make space
+% equivalent to this in @example-like environments.  Otherwise, a space
+% at the beginning of a line would expand to \penalty first -- and
+% since \penalty is valid in vertical mode, we'd end up putting the
+% penalty on the vertical list instead of in the new paragraph.
+{\catcode`@ = 11
+ % Avoid using \@M directly, because that causes trouble
+ % if the definition is written into an index file.
+ \global\let\tiepenalty = \@M
+ \gdef\tie{\leavevmode\penalty\tiepenalty\ }
+}
+
+% @: forces normal size whitespace following.
+\def\:{\spacefactor=1000 }
+
+% @* forces a line break.
+\def\*{\unskip\hfil\break\hbox{}\ignorespaces}
+
+% @/ allows a line break.
+\let\/=\allowbreak
+
+% @. is an end-of-sentence period.
+\def\.{.\spacefactor=\endofsentencespacefactor\space}
+
+% @! is an end-of-sentence bang.
+\def\!{!\spacefactor=\endofsentencespacefactor\space}
+
+% @? is an end-of-sentence query.
+\def\?{?\spacefactor=\endofsentencespacefactor\space}
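+%
+% For illustration, these appear in Texinfo source like this (the text
+% itself is only an example):
+%   foo vs.@: bar              @: keeps a normal interword space after `.'
+%   He works at NASA@.         @. marks a real end of sentence after `NASA'
+%   first line@*second line    @* forces a line break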
+
+% @frenchspacing on|off  says whether to put extra space after punctuation.
+%
+\def\onword{on}
+\def\offword{off}
+%
+\parseargdef\frenchspacing{%
+  \def\temp{#1}%
+  \ifx\temp\onword \plainfrenchspacing
+  \else\ifx\temp\offword \plainnonfrenchspacing
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @frenchspacing option `\temp', must be on|off}%
+  \fi\fi
+}
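+%
+% For example, a manual that wants uniform spacing after punctuation
+% throughout would say, near its beginning:
+%   @frenchspacing on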
+
+% @w prevents a word break.  Without the \leavevmode, @w at the
+% beginning of a paragraph, when TeX is still in vertical mode, would
+% produce a whole line of output instead of starting the paragraph.
+\def\w#1{\leavevmode\hbox{#1}}
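+%
+% For example (text purely illustrative), to keep a version number from
+% being broken across lines:
+%   see @w{version 2.0} of the manual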
+
+% @group ... @end group forces ... to be all on one page, by enclosing
+% it in a TeX vbox.  We use \vtop instead of \vbox to construct the box
+% to keep its height that of a normal line.  According to the rules for
+% \topskip (p.114 of the TeXbook), the glue inserted is
+% max (\topskip - \ht (first item), 0).  If that height is large,
+% therefore, no glue is inserted, and the space between the headline and
+% the text is small, which looks bad.
+%
+% Another complication is that the group might be very large.  This can
+% cause the glue on the previous page to be unduly stretched, because it
+% does not have much material.  In this case, it's better to add an
+% explicit \vfill so that the extra space is at the bottom.  The
+% threshold for doing this is if the group is more than \vfilllimit
+% percent of a page (\vfilllimit can be changed inside of @tex).
+%
+\newbox\groupbox
+\def\vfilllimit{0.7}
+%
+\envdef\group{%
+  \ifnum\catcode`\^^M=\active \else
+    \errhelp = \groupinvalidhelp
+    \errmessage{@group invalid in context where filling is enabled}%
+  \fi
+  \startsavinginserts
+  %
+  \setbox\groupbox = \vtop\bgroup
+    % Do @comment since we are called inside an environment such as
+    % @example, where each end-of-line in the input causes an
+    % end-of-line in the output.  We don't want the end-of-line after
+    % the `@group' to put extra space in the output.  Since @group
+    % should appear on a line by itself (according to the Texinfo
+    % manual), we don't worry about eating any user text.
+    \comment
+}
+%
+% The \vtop produces a box with normal height and large depth; thus, TeX puts
+% \baselineskip glue before it, and (when the next line of text is done)
+% \lineskip glue after it.  Thus, space below is not quite equal to space
+% above.  But it's pretty close.
+\def\Egroup{%
+    % To get correct interline space between the last line of the group
+    % and the first line afterwards, we have to propagate \prevdepth.
+    \endgraf % Not \par, as it may have been set to \lisppar.
+    \global\dimen1 = \prevdepth
+  \egroup           % End the \vtop.
+  % \dimen0 is the vertical size of the group's box.
+  \dimen0 = \ht\groupbox  \advance\dimen0 by \dp\groupbox
+  % \dimen2 is how much space is left on the page (more or less).
+  \dimen2 = \pageheight   \advance\dimen2 by -\pagetotal
+  % if the group doesn't fit on the current page, and it's a big big
+  % group, force a page break.
+  \ifdim \dimen0 > \dimen2
+    \ifdim \pagetotal < \vfilllimit\pageheight
+      \page
+    \fi
+  \fi
+  \box\groupbox
+  \prevdepth = \dimen1
+  \checkinserts
+}
+%
+% TeX puts in an \escapechar (i.e., `@') at the beginning of the help
+% message, so this ends up printing `@group can only ...'.
+%
+\newhelp\groupinvalidhelp{%
+group can only be used in environments such as @example,^^J%
+where each line of input produces a line of output.}
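+%
+% Typical use, with purely illustrative contents:
+%   @example
+%   @group
+%   first line of a display
+%   second line, kept on the same page as the first
+%   @end group
+%   @end example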
+
+% @need space-in-mils
+% forces a page break if there is not space-in-mils remaining.
+
+\newdimen\mil  \mil=0.001in
+
+\parseargdef\need{%
+  % Ensure vertical mode, so we don't make a big box in the middle of a
+  % paragraph.
+  \par
+  %
+  % If the @need value is less than one line space, it's useless.
+  \dimen0 = #1\mil
+  \dimen2 = \ht\strutbox
+  \advance\dimen2 by \dp\strutbox
+  \ifdim\dimen0 > \dimen2
+    %
+    % Do a \strut just to make the height of this box be normal, so the
+    % normal leading is inserted relative to the preceding line.
+    % And a page break here is fine.
+    \vtop to #1\mil{\strut\vfil}%
+    %
+    % TeX does not even consider page breaks if a penalty added to the
+    % main vertical list is 10000 or more.  But in order to see if the
+    % empty box we just added fits on the page, we must make it consider
+    % page breaks.  On the other hand, we don't want to actually break the
+    % page after the empty box.  So we use a penalty of 9999.
+    %
+    % There is an extremely small chance that TeX will actually break the
+    % page at this \penalty, if there are no other feasible breakpoints in
+    % sight.  (If the user is using lots of big @group commands, which
+    % almost-but-not-quite fill up a page, TeX will have a hard time doing
+    % good page breaking, for example.)  However, I could not construct an
+    % example where a page broke at this \penalty; if it happens in a real
+    % document, then we can reconsider our strategy.
+    \penalty9999
+    %
+    % Back up by the size of the box, whether we did a page break or not.
+    \kern -#1\mil
+    %
+    % Do not allow a page break right after this kern.
+    \nobreak
+  \fi
+}
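+%
+% For example, to request roughly 0.8in of free space before starting a
+% new topic (the value, in mils, is only illustrative):
+%   @need 800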
+
+% @br   forces paragraph break (and is undocumented).
+
+\let\br = \par
+
+% @page forces the start of a new page.
+%
+\def\page{\par\vfill\supereject}
+
+% @exdent text....
+% outputs text on a separate line in roman font, starting at the standard page margin.
+
+% This records the amount of indent in the innermost environment.
+% That's how much \exdent should take out.
+\newskip\exdentamount
+
+% This defn is used inside fill environments such as @defun.
+\parseargdef\exdent{\hfil\break\hbox{\kern -\exdentamount{\rm#1}}\hfil\break}
+
+% This defn is used inside nofill environments such as @example.
+\parseargdef\nofillexdent{{\advance \leftskip by -\exdentamount
+  \leftline{\hskip\leftskip{\rm#1}}}}
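+%
+% For example, inside an @example (contents illustrative):
+%   @example
+%   an indented example line
+%   @exdent this line starts at the page margin
+%   @end example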
+
+% @inmargin{WHICH}{TEXT} puts TEXT in the WHICH margin next to the current
+% paragraph.  For more general purposes, use the \margin insertion
+% class.  WHICH is `l' or `r'.  Not documented; written for the gawk manual.
+%
+\newskip\inmarginspacing \inmarginspacing=1cm
+\def\strutdepth{\dp\strutbox}
+%
+\def\doinmargin#1#2{\strut\vadjust{%
+  \nobreak
+  \kern-\strutdepth
+  \vtop to \strutdepth{%
+    \baselineskip=\strutdepth
+    \vss
+    % if you have multiple lines of stuff to put here, you'll need to
+    % make the vbox yourself of the appropriate size.
+    \ifx#1l%
+      \llap{\ignorespaces #2\hskip\inmarginspacing}%
+    \else
+      \rlap{\hskip\hsize \hskip\inmarginspacing \ignorespaces #2}%
+    \fi
+    \null
+  }%
+}}
+\def\inleftmargin{\doinmargin l}
+\def\inrightmargin{\doinmargin r}
+%
+% @inmargin{TEXT [, RIGHT-TEXT]}
+% (if RIGHT-TEXT is given, use TEXT for left page, RIGHT-TEXT for right;
+% else use TEXT for both).
+%
+\def\inmargin#1{\parseinmargin #1,,\finish}
+\def\parseinmargin#1,#2,#3\finish{% not perfect, but better than nothing.
+  \setbox0 = \hbox{\ignorespaces #2}%
+  \ifdim\wd0 > 0pt
+    \def\lefttext{#1}%  have both texts
+    \def\righttext{#2}%
+  \else
+    \def\lefttext{#1}%  have only one text
+    \def\righttext{#1}%
+  \fi
+  %
+  \ifodd\pageno
+    \def\temp{\inrightmargin\righttext}% odd page -> outside is right margin
+  \else
+    \def\temp{\inleftmargin\lefttext}%
+  \fi
+  \temp
+}
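+%
+% Sample uses (text purely illustrative):
+%   @inmargin{NOTE}                              same text in either margin
+%   @inmargin{left-page text, right-page text}   different text per side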
+
+% @| inserts a changebar to the left of the current line.  It should
+% surround any changed text.  This approach does *not* work if the
+% change spans more than two lines of output.  To handle that, we would
+% have to adopt a much more difficult approach (putting marks into the main
+% vertical list for the beginning and end of each change).  This command
+% is not documented, not supported, and doesn't work.
+%
+\def\|{%
+  % \vadjust can only be used in horizontal mode.
+  \leavevmode
+  %
+  % Append this vertical mode material after the current line in the output.
+  \vadjust{%
+    % We want to insert a rule with the height and depth of the current
+    % leading; that is exactly what \strutbox is supposed to record.
+    \vskip-\baselineskip
+    %
+    % \vadjust-items are inserted at the left edge of the type.  So
+    % the \llap here moves out into the left-hand margin.
+    \llap{%
+      %
+      % For a thicker or thinner bar, change the `1pt'.
+      \vrule height\baselineskip width1pt
+      %
+      % This is the space between the bar and the text.
+      \hskip 12pt
+    }%
+  }%
+}
+
+% @include FILE -- \input text of FILE.
+%
+\def\include{\parseargusing\filenamecatcodes\includezzz}
+\def\includezzz#1{%
+  \pushthisfilestack
+  \def\thisfile{#1}%
+  {%
+    \makevalueexpandable  % we want to expand any @value in FILE.
+    \turnoffactive        % and allow special characters in the expansion
+    \indexnofonts         % Allow `@@' and other weird things in file names.
+    \wlog{texinfo.tex: doing @include of #1^^J}%
+    \edef\temp{\noexpand\input #1 }%
+    %
+    % This trickery is to read FILE outside of a group, in case it makes
+    % definitions, etc.
+    \expandafter
+  }\temp
+  \popthisfilestack
+}
+\def\filenamecatcodes{%
+  \catcode`\\=\other
+  \catcode`~=\other
+  \catcode`^=\other
+  \catcode`_=\other
+  \catcode`|=\other
+  \catcode`<=\other
+  \catcode`>=\other
+  \catcode`+=\other
+  \catcode`-=\other
+  \catcode`\`=\other
+  \catcode`\'=\other
+}
+
+\def\pushthisfilestack{%
+  \expandafter\pushthisfilestackX\popthisfilestack\StackTerm
+}
+\def\pushthisfilestackX{%
+  \expandafter\pushthisfilestackY\thisfile\StackTerm
+}
+\def\pushthisfilestackY #1\StackTerm #2\StackTerm {%
+  \gdef\popthisfilestack{\gdef\thisfile{#1}\gdef\popthisfilestack{#2}}%
+}
+
+\def\popthisfilestack{\errthisfilestackempty}
+\def\errthisfilestackempty{\errmessage{Internal error:
+  the stack of filenames is empty.}}
+%
+\def\thisfile{}
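+%
+% @include is used in a manual like this (file name purely illustrative):
+%   @include chapters/intro.texi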
+
+% @center line
+% outputs that line, centered.
+%
+\parseargdef\center{%
+  \ifhmode
+    \let\centersub\centerH
+  \else
+    \let\centersub\centerV
+  \fi
+  \centersub{\hfil \ignorespaces#1\unskip \hfil}%
+  \let\centersub\relax % don't let the definition persist, just in case
+}
+\def\centerH#1{{%
+  \hfil\break
+  \advance\hsize by -\leftskip
+  \advance\hsize by -\rightskip
+  \line{#1}%
+  \break
+}}
+%
+\newcount\centerpenalty
+\def\centerV#1{%
+  % The idea here is the same as in \startdefun, \cartouche, etc.: if
+  % @center is the first thing after a section heading, we need to wipe
+  % out the negative parskip inserted by \sectionheading, but still
+  % prevent a page break here.
+  \centerpenalty = \lastpenalty
+  \ifnum\centerpenalty>10000 \vskip\parskip \fi
+  \ifnum\centerpenalty>9999 \penalty\centerpenalty \fi
+  \line{\kern\leftskip #1\kern\rightskip}%
+}
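+%
+% For example:
+%   @center A Centered Title Line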
+
+% @sp n   outputs n lines of vertical space
+%
+\parseargdef\sp{\vskip #1\baselineskip}
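+%
+% For example, to leave two blank lines:
+%   @sp 2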
+
+% @comment ...line which is ignored...
+% @c is the same as @comment
+% @ignore ... @end ignore  is another way to write a comment
+%
+\def\comment{\begingroup \catcode`\^^M=\other%
+\catcode`\@=\other \catcode`\{=\other \catcode`\}=\other%
+\commentxxx}
+{\catcode`\^^M=\other \gdef\commentxxx#1^^M{\endgroup}}
+%
+\let\c=\comment
+
+% @paragraphindent NCHARS
+% We'll use ems for NCHARS, close enough.
+% NCHARS can also be the word `asis' or `none'.
+% We cannot feasibly implement @paragraphindent asis, though.
+%
+\def\asisword{asis} % no translation, these are keywords
+\def\noneword{none}
+%
+\parseargdef\paragraphindent{%
+  \def\temp{#1}%
+  \ifx\temp\asisword
+  \else
+    \ifx\temp\noneword
+      \defaultparindent = 0pt
+    \else
+      \defaultparindent = #1em
+    \fi
+  \fi
+  \parindent = \defaultparindent
+}
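+%
+% For example (values illustrative):
+%   @paragraphindent 0      no paragraph indentation
+%   @paragraphindent 3      indent paragraphs by about three ems
+%   @paragraphindent asis   leave the indentation alone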
+
+% @exampleindent NCHARS
+% We'll use ems for NCHARS like @paragraphindent.
+% It seems @exampleindent asis isn't necessary, but
+% I preserve it to make it similar to @paragraphindent.
+\parseargdef\exampleindent{%
+  \def\temp{#1}%
+  \ifx\temp\asisword
+  \else
+    \ifx\temp\noneword
+      \lispnarrowing = 0pt
+    \else
+      \lispnarrowing = #1em
+    \fi
+  \fi
+}
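+%
+% For example, to indent example-like environments by about four ems:
+%   @exampleindent 4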
+
+% @firstparagraphindent WORD
+% If WORD is `none', then suppress indentation of the first paragraph
+% after a section heading.  If WORD is `insert', then do indent at such
+% paragraphs.
+%
+% The paragraph indentation is suppressed or not by calling
+% \suppressfirstparagraphindent, which the sectioning commands do.
+% We switch the definition of this back and forth according to WORD.
+% By default, we suppress indentation.
+%
+\def\suppressfirstparagraphindent{\dosuppressfirstparagraphindent}
+\def\insertword{insert}
+%
+\parseargdef\firstparagraphindent{%
+  \def\temp{#1}%
+  \ifx\temp\noneword
+    \let\suppressfirstparagraphindent = \dosuppressfirstparagraphindent
+  \else\ifx\temp\insertword
+    \let\suppressfirstparagraphindent = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @firstparagraphindent option `\temp'}%
+  \fi\fi
+}
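+%
+% For example, to indent the first paragraph after section headings:
+%   @firstparagraphindent insert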
+
+% Here is how we actually suppress indentation.  Redefine \everypar to
+% \kern backwards by \parindent, and then reset itself to empty.
+%
+% We also make \indent itself not actually do anything until the next
+% paragraph.
+%
+\gdef\dosuppressfirstparagraphindent{%
+  \gdef\indent{%
+    \restorefirstparagraphindent
+    \indent
+  }%
+  \gdef\noindent{%
+    \restorefirstparagraphindent
+    \noindent
+  }%
+  \global\everypar = {%
+    \kern -\parindent
+    \restorefirstparagraphindent
+  }%
+}
+
+\gdef\restorefirstparagraphindent{%
+  \global \let \indent = \ptexindent
+  \global \let \noindent = \ptexnoindent
+  \global \everypar = {}%
+}
+
+
+% @refill is a no-op.
+\let\refill=\relax
+
+% If working on a large document in chapters, it is convenient to
+% be able to disable indexing, cross-referencing, and contents, for test runs.
+% This is done with @novalidate (before @setfilename).
+%
+\newif\iflinks \linkstrue % by default we want the aux files.
+\let\novalidate = \linksfalse
+
+% @setfilename is done at the beginning of every texinfo file.
+% So open here the files we need to have open while reading the input.
+% This makes it possible to make a .fmt file for texinfo.
+\def\setfilename{%
+   \fixbackslash  % Turn off hack to swallow `\input texinfo'.
+   \iflinks
+     \tryauxfile
+     % Open the new aux file.  TeX will close it automatically at exit.
+     \immediate\openout\auxfile=\jobname.aux
+   \fi % \openindices needs to do some work in any case.
+   \openindices
+   \let\setfilename=\comment % Ignore extra @setfilename cmds.
+   %
+   % If texinfo.cnf is present on the system, read it.
+   % Useful for site-wide @afourpaper, etc.
+   \openin 1 texinfo.cnf
+   \ifeof 1 \else \input texinfo.cnf \fi
+   \closein 1
+   %
+   \comment % Ignore the actual filename.
+}
+
+% Called from \setfilename.
+%
+\def\openindices{%
+  \newindex{cp}%
+  \newcodeindex{fn}%
+  \newcodeindex{vr}%
+  \newcodeindex{tp}%
+  \newcodeindex{ky}%
+  \newcodeindex{pg}%
+}
+
+% @bye.
+\outer\def\bye{\pagealignmacro\tracingstats=1\ptexend}
+
+
+\message{pdf,}
+% adobe `portable' document format
+\newcount\tempnum
+\newcount\lnkcount
+\newtoks\filename
+\newcount\filenamelength
+\newcount\pgn
+\newtoks\toksA
+\newtoks\toksB
+\newtoks\toksC
+\newtoks\toksD
+\newbox\boxA
+\newcount\countA
+\newif\ifpdf
+\newif\ifpdfmakepagedest
+
+% when pdftex is run in dvi mode, \pdfoutput is defined (so \pdfoutput=1
+% can be set).  So we test for \relax and 0 as well as being undefined.
+\ifx\pdfoutput\thisisundefined
+\else
+  \ifx\pdfoutput\relax
+  \else
+    \ifcase\pdfoutput
+    \else
+      \pdftrue
+    \fi
+  \fi
+\fi
+
+% PDF uses PostScript string constants for the names of xref targets,
+% for display in the outlines, and in other places.  Thus, we have to
+% double any backslashes.  Otherwise, a name like "\node" will be
+% interpreted as a newline (\n), followed by o, d, e.  Not good.
+% 
+% See http://www.ntg.nl/pipermail/ntg-pdftex/2004-July/000654.html and
+% related messages.  The final outcome is that it is up to the TeX user
+% to double the backslashes and otherwise make the string valid, so
+% that's what we do.  pdftex 1.30.0 (ca. 2005) introduced a primitive to
+% do this reliably, so we use it.
+
+% #1 is a control sequence in which to do the replacements,
+% which we \xdef.
+\def\txiescapepdf#1{%
+  \ifx\pdfescapestring\thisisundefined
+    % No primitive available; should we give a warning or log?
+    % Many times it won't matter.
+  \else
+    % The expandable \pdfescapestring primitive escapes parentheses,
+    % backslashes, and other special chars.
+    \xdef#1{\pdfescapestring{#1}}%
+  \fi
+}
+
+\newhelp\nopdfimagehelp{Texinfo supports .png, .jpg, .jpeg, and .pdf images
+with PDF output, and none of those formats could be found.  (.eps cannot
+be supported due to the design of the PDF format; use regular TeX (DVI
+output) for that.)}
+
+\ifpdf
+  %
+  % Color manipulation macros based on pdfcolor.tex,
+  % except using rgb instead of cmyk; the latter is said to render as a
+  % very dark gray on-screen and a very dark halftone in print, instead
+  % of actual black.
+  \def\rgbDarkRed{0.50 0.09 0.12}
+  \def\rgbBlack{0 0 0}
+  %
+  % rg sets the color for filling (usual text, etc.);
+  % RG sets the color for stroking (thin rules, e.g., normal _'s).
+  \def\pdfsetcolor#1{\pdfliteral{#1 rg  #1 RG}}
+  %
+  % Set color, and create a mark which defines \thiscolor accordingly,
+  % so that \makeheadline knows which color to restore.
+  \def\setcolor#1{%
+    \xdef\lastcolordefs{\gdef\noexpand\thiscolor{#1}}%
+    \domark
+    \pdfsetcolor{#1}%
+  }
+  %
+  \def\maincolor{\rgbBlack}
+  \pdfsetcolor{\maincolor}
+  \edef\thiscolor{\maincolor}
+  \def\lastcolordefs{}
+  %
+  \def\makefootline{%
+    \baselineskip24pt
+    \line{\pdfsetcolor{\maincolor}\the\footline}%
+  }
+  %
+  \def\makeheadline{%
+    \vbox to 0pt{%
+      \vskip-22.5pt
+      \line{%
+        \vbox to8.5pt{}%
+        % Extract \thiscolor definition from the marks.
+        \getcolormarks
+        % Typeset the headline with \maincolor, then restore the color.
+        \pdfsetcolor{\maincolor}\the\headline\pdfsetcolor{\thiscolor}%
+      }%
+      \vss
+    }%
+    \nointerlineskip
+  }
+  %
+  %
+  \pdfcatalog{/PageMode /UseOutlines}
+  %
+  % #1 is image name, #2 width (might be empty/whitespace), #3 height (ditto).
+  \def\dopdfimage#1#2#3{%
+    \def\pdfimagewidth{#2}\setbox0 = \hbox{\ignorespaces #2}%
+    \def\pdfimageheight{#3}\setbox2 = \hbox{\ignorespaces #3}%
+    %
+    % pdftex (and the PDF format) support .pdf, .png, .jpg (among
+    % others).  Let's try them in that order, PDF first: if someone has
+    % a scalable image, it is presumably better to use that than a
+    % bitmap.
+    \let\pdfimgext=\empty
+    \begingroup
+      \openin 1 #1.pdf \ifeof 1
+        \openin 1 #1.PDF \ifeof 1
+          \openin 1 #1.png \ifeof 1
+            \openin 1 #1.jpg \ifeof 1
+              \openin 1 #1.jpeg \ifeof 1
+                \openin 1 #1.JPG \ifeof 1
+                  \errhelp = \nopdfimagehelp
+                  \errmessage{Could not find image file #1 for pdf}%
+                \else \gdef\pdfimgext{JPG}%
+                \fi
+              \else \gdef\pdfimgext{jpeg}%
+              \fi
+            \else \gdef\pdfimgext{jpg}%
+            \fi
+          \else \gdef\pdfimgext{png}%
+          \fi
+        \else \gdef\pdfimgext{PDF}%
+        \fi
+      \else \gdef\pdfimgext{pdf}%
+      \fi
+      \closein 1
+    \endgroup
+    %
+    % without \immediate, ancient pdftex seg faults when the same image is
+    % included twice.  (Version 3.14159-pre-1.0-unofficial-20010704.)
+    \ifnum\pdftexversion < 14
+      \immediate\pdfimage
+    \else
+      \immediate\pdfximage
+    \fi
+      \ifdim \wd0 >0pt width \pdfimagewidth \fi
+      \ifdim \wd2 >0pt height \pdfimageheight \fi
+      \ifnum\pdftexversion<13
+         #1.\pdfimgext
+       \else
+         {#1.\pdfimgext}%
+       \fi
+    \ifnum\pdftexversion < 14 \else
+      \pdfrefximage \pdflastximage
+    \fi}
+  %
+  \def\pdfmkdest#1{{%
+    % We have to set dummies so commands such as @code, and characters
+    % such as \, aren't expanded when present in a section title.
+    \indexnofonts
+    \turnoffactive
+    \makevalueexpandable
+    \def\pdfdestname{#1}%
+    \txiescapepdf\pdfdestname
+    \safewhatsit{\pdfdest name{\pdfdestname} xyz}%
+  }}
+  %
+  % used to mark target names; must be expandable.
+  \def\pdfmkpgn#1{#1}
+  %
+  % by default, use a color that is dark enough to print on paper as
+  % nearly black, but still distinguishable for online viewing.
+  \def\urlcolor{\rgbDarkRed}
+  \def\linkcolor{\rgbDarkRed}
+  \def\endlink{\setcolor{\maincolor}\pdfendlink}
+  %
+  % Adding outlines to PDF; macros for calculating structure of outlines
+  % come from Petr Olsak
+  \def\expnumber#1{\expandafter\ifx\csname#1\endcsname\relax 0%
+    \else \csname#1\endcsname \fi}
+  \def\advancenumber#1{\tempnum=\expnumber{#1}\relax
+    \advance\tempnum by 1
+    \expandafter\xdef\csname#1\endcsname{\the\tempnum}}
+  %
+  % #1 is the section text, which is what will be displayed in the
+  % outline by the pdf viewer.  #2 is the pdf expression for the number
+  % of subentries (or empty, for subsubsections).  #3 is the node text,
+  % which might be empty if this toc entry had no corresponding node.
+  % #4 is the page number
+  %
+  \def\dopdfoutline#1#2#3#4{%
+    % Generate a link to the node text if that exists; else, use the
+    % page number.  We could generate a destination for the section
+    % text in the case where a section has no node, but it doesn't
+    % seem worth the trouble, since most documents are normally structured.
+    \edef\pdfoutlinedest{#3}%
+    \ifx\pdfoutlinedest\empty
+      \def\pdfoutlinedest{#4}%
+    \else
+      \txiescapepdf\pdfoutlinedest
+    \fi
+    %
+    % Also escape PDF chars in the display string.
+    \edef\pdfoutlinetext{#1}%
+    \txiescapepdf\pdfoutlinetext
+    %
+    \pdfoutline goto name{\pdfmkpgn{\pdfoutlinedest}}#2{\pdfoutlinetext}%
+  }
+  %
+  \def\pdfmakeoutlines{%
+    \begingroup
+      % Read toc silently, to get counts of subentries for \pdfoutline.
+      \def\partentry##1##2##3##4{}% ignore parts in the outlines
+      \def\numchapentry##1##2##3##4{%
+	\def\thischapnum{##2}%
+	\def\thissecnum{0}%
+	\def\thissubsecnum{0}%
+      }%
+      \def\numsecentry##1##2##3##4{%
+	\advancenumber{chap\thischapnum}%
+	\def\thissecnum{##2}%
+	\def\thissubsecnum{0}%
+      }%
+      \def\numsubsecentry##1##2##3##4{%
+	\advancenumber{sec\thissecnum}%
+	\def\thissubsecnum{##2}%
+      }%
+      \def\numsubsubsecentry##1##2##3##4{%
+	\advancenumber{subsec\thissubsecnum}%
+      }%
+      \def\thischapnum{0}%
+      \def\thissecnum{0}%
+      \def\thissubsecnum{0}%
+      %
+      % use \def rather than \let here because we redefine \chapentry et
+      % al. a second time, below.
+      \def\appentry{\numchapentry}%
+      \def\appsecentry{\numsecentry}%
+      \def\appsubsecentry{\numsubsecentry}%
+      \def\appsubsubsecentry{\numsubsubsecentry}%
+      \def\unnchapentry{\numchapentry}%
+      \def\unnsecentry{\numsecentry}%
+      \def\unnsubsecentry{\numsubsecentry}%
+      \def\unnsubsubsecentry{\numsubsubsecentry}%
+      \readdatafile{toc}%
+      %
+      % Read toc second time, this time actually producing the outlines.
+      % The `-' means take the \expnumber as the absolute number of
+      % subentries, which we calculated on our first read of the .toc above.
+      %
+      % We use the node names as the destinations.
+      \def\numchapentry##1##2##3##4{%
+        \dopdfoutline{##1}{count-\expnumber{chap##2}}{##3}{##4}}%
+      \def\numsecentry##1##2##3##4{%
+        \dopdfoutline{##1}{count-\expnumber{sec##2}}{##3}{##4}}%
+      \def\numsubsecentry##1##2##3##4{%
+        \dopdfoutline{##1}{count-\expnumber{subsec##2}}{##3}{##4}}%
+      \def\numsubsubsecentry##1##2##3##4{% count is always zero
+        \dopdfoutline{##1}{}{##3}{##4}}%
+      %
+      % PDF outlines are displayed using system fonts, instead of
+      % document fonts.  Therefore we cannot use special characters,
+      % since the encoding is unknown.  For example, the eogonek from
+      % Latin 2 (0xea) gets translated to a | character.  Info from
+      % Staszek Wawrykiewicz, 19 Jan 2004 04:09:24 +0100.
+      %
+      % To do this right, we would have to translate 8-bit characters to
+      % their "best" equivalent, based on the @documentencoding.  Too
+      % much work for too little return.  Just use the ASCII equivalents
+      % we use for the index sort strings.
+      % 
+      \indexnofonts
+      \setupdatafile
+      % We can have normal brace characters in the PDF outlines, unlike
+      % Texinfo index files.  So set that up.
+      \def\{{\lbracecharliteral}%
+      \def\}{\rbracecharliteral}%
+      \catcode`\\=\active \otherbackslash
+      \input \tocreadfilename
+    \endgroup
+  }
+  {\catcode`[=1 \catcode`]=2
+   \catcode`{=\other \catcode`}=\other
+   \gdef\lbracecharliteral[{]%
+   \gdef\rbracecharliteral[}]%
+  ]
+  %
+  \def\skipspaces#1{\def\PP{#1}\def\D{|}%
+    \ifx\PP\D\let\nextsp\relax
+    \else\let\nextsp\skipspaces
+      \addtokens{\filename}{\PP}%
+      \advance\filenamelength by 1
+    \fi
+    \nextsp}
+  \def\getfilename#1{%
+    \filenamelength=0
+    % If we don't expand the argument now, \skipspaces will get
+    % snagged on things like "@value{foo}".
+    \edef\temp{#1}%
+    \expandafter\skipspaces\temp|\relax
+  }
+  \ifnum\pdftexversion < 14
+    \let \startlink \pdfannotlink
+  \else
+    \let \startlink \pdfstartlink
+  \fi
+  % make a live url in pdf output.
+  \def\pdfurl#1{%
+    \begingroup
+      % it seems we really need yet another set of dummies; have not
+      % tried to figure out what each command should do in the context
+      % of @url.  for now, just make @/ a no-op, that's the only one
+      % people have actually reported a problem with.
+      %
+      \normalturnoffactive
+      \def\@{@}%
+      \let\/=\empty
+      \makevalueexpandable
+      % do we want to go so far as to use \indexnofonts instead of just
+      % special-casing \var here?
+      \def\var##1{##1}%
+      %
+      \leavevmode\setcolor{\urlcolor}%
+      \startlink attr{/Border [0 0 0]}%
+        user{/Subtype /Link /A << /S /URI /URI (#1) >>}%
+    \endgroup}
+  \def\pdfgettoks#1.{\setbox\boxA=\hbox{\toksA={#1.}\toksB={}\maketoks}}
+  \def\addtokens#1#2{\edef\addtoks{\noexpand#1={\the#1#2}}\addtoks}
+  \def\adn#1{\addtokens{\toksC}{#1}\global\countA=1\let\next=\maketoks}
+  \def\poptoks#1#2|ENDTOKS|{\let\first=#1\toksD={#1}\toksA={#2}}
+  \def\maketoks{%
+    \expandafter\poptoks\the\toksA|ENDTOKS|\relax
+    \ifx\first0\adn0
+    \else\ifx\first1\adn1 \else\ifx\first2\adn2 \else\ifx\first3\adn3
+    \else\ifx\first4\adn4 \else\ifx\first5\adn5 \else\ifx\first6\adn6
+    \else\ifx\first7\adn7 \else\ifx\first8\adn8 \else\ifx\first9\adn9
+    \else
+      \ifnum0=\countA\else\makelink\fi
+      \ifx\first.\let\next=\done\else
+        \let\next=\maketoks
+        \addtokens{\toksB}{\the\toksD}
+        \ifx\first,\addtokens{\toksB}{\space}\fi
+      \fi
+    \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
+    \next}
+  \def\makelink{\addtokens{\toksB}%
+    {\noexpand\pdflink{\the\toksC}}\toksC={}\global\countA=0}
+  \def\pdflink#1{%
+    \startlink attr{/Border [0 0 0]} goto name{\pdfmkpgn{#1}}
+    \setcolor{\linkcolor}#1\endlink}
+  \def\done{\edef\st{\global\noexpand\toksA={\the\toksB}}\st}
+\else
+  % non-pdf mode
+  \let\pdfmkdest = \gobble
+  \let\pdfurl = \gobble
+  \let\endlink = \relax
+  \let\setcolor = \gobble
+  \let\pdfsetcolor = \gobble
+  \let\pdfmakeoutlines = \relax
+\fi  % \ifx\pdfoutput
+
+
+\message{fonts,}
+
+% Change the current font style to #1, remembering it in \curfontstyle.
+% For now, we do not accumulate font styles: @b{@i{foo}} prints foo in
+% italics, not bold italics.
+%
+\def\setfontstyle#1{%
+  \def\curfontstyle{#1}% not as a control sequence, because we are \edef'd.
+  \csname ten#1\endcsname  % change the current font
+}
+
+% Select #1 fonts with the current style.
+%
+\def\selectfonts#1{\csname #1fonts\endcsname \csname\curfontstyle\endcsname}
+
+\def\rm{\fam=0 \setfontstyle{rm}}
+\def\it{\fam=\itfam \setfontstyle{it}}
+\def\sl{\fam=\slfam \setfontstyle{sl}}
+\def\bf{\fam=\bffam \setfontstyle{bf}}\def\bfstylename{bf}
+\def\tt{\fam=\ttfam \setfontstyle{tt}}
+
+% Unfortunately, we have to override this for titles and the like, since
+% in those cases "rm" is bold.  Sigh.
+\def\rmisbold{\rm\def\curfontstyle{bf}}
+
+% Texinfo sort of supports the sans serif font style, which plain TeX does not.
+% So we set up a \sf.
+\newfam\sffam
+\def\sf{\fam=\sffam \setfontstyle{sf}}
+\let\li = \sf % Sometimes we call it \li, not \sf.
+
+% We don't need math for this font style.
+\def\ttsl{\setfontstyle{ttsl}}
+
+
+% Set the baselineskip to #1, and the lineskip and strut size
+% correspondingly.  There is no deep meaning behind these magic numbers
+% used as factors; they just match (closely enough) what Knuth defined.
+%
+\def\lineskipfactor{.08333}
+\def\strutheightpercent{.70833}
+\def\strutdepthpercent {.29167}
+%
+% One can get a sort of poor man's double spacing by redefining this.
+\def\baselinefactor{1}
+%
+\newdimen\textleading
+\def\setleading#1{%
+  \dimen0 = #1\relax
+  \normalbaselineskip = \baselinefactor\dimen0
+  \normallineskip = \lineskipfactor\normalbaselineskip
+  \normalbaselines
+  \setbox\strutbox =\hbox{%
+    \vrule width0pt height\strutheightpercent\baselineskip
+                    depth \strutdepthpercent \baselineskip
+  }%
+}
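+%
+% A sketch only (values illustrative): from inside @tex, a document
+% could loosen the leading by changing \baselinefactor and rerunning
+% \setleading:
+%   \def\baselinefactor{1.15}
+%   \setleading{\textleading}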
+
+% PDF CMaps.  See also LaTeX's t1.cmap.
+%
+% do nothing with this by default.
+\expandafter\let\csname cmapOT1\endcsname\gobble
+\expandafter\let\csname cmapOT1IT\endcsname\gobble
+\expandafter\let\csname cmapOT1TT\endcsname\gobble
+
+% if we are producing pdf, and we have \pdffontattr, then define cmaps.
+% (\pdffontattr was introduced many years ago, but people still run
+% older pdftex's; it's easy to conditionalize, so we do.)
+\ifpdf \ifx\pdffontattr\thisisundefined \else
+  \begingroup
+    \catcode`\^^M=\active \def^^M{^^J}% Output line endings as the ^^J char.
+    \catcode`\%=12 \immediate\pdfobj stream {%!PS-Adobe-3.0 Resource-CMap
+%%DocumentNeededResources: ProcSet (CIDInit)
+%%IncludeResource: ProcSet (CIDInit)
+%%BeginResource: CMap (TeX-OT1-0)
+%%Title: (TeX-OT1-0 TeX OT1 0)
+%%Version: 1.000
+%%EndComments
+/CIDInit /ProcSet findresource begin
+12 dict begin
+begincmap
+/CIDSystemInfo
+<< /Registry (TeX)
+/Ordering (OT1)
+/Supplement 0
+>> def
+/CMapName /TeX-OT1-0 def
+/CMapType 2 def
+1 begincodespacerange
+<00> <7F>
+endcodespacerange
+8 beginbfrange
+<00> <01> <0393>
+<09> <0A> <03A8>
+<23> <26> <0023>
+<28> <3B> <0028>
+<3F> <5B> <003F>
+<5D> <5E> <005D>
+<61> <7A> <0061>
+<7B> <7C> <2013>
+endbfrange
+40 beginbfchar
+<02> <0398>
+<03> <039B>
+<04> <039E>
+<05> <03A0>
+<06> <03A3>
+<07> <03D2>
+<08> <03A6>
+<0B> <00660066>
+<0C> <00660069>
+<0D> <0066006C>
+<0E> <006600660069>
+<0F> <00660066006C>
+<10> <0131>
+<11> <0237>
+<12> <0060>
+<13> <00B4>
+<14> <02C7>
+<15> <02D8>
+<16> <00AF>
+<17> <02DA>
+<18> <00B8>
+<19> <00DF>
+<1A> <00E6>
+<1B> <0153>
+<1C> <00F8>
+<1D> <00C6>
+<1E> <0152>
+<1F> <00D8>
+<21> <0021>
+<22> <201D>
+<27> <2019>
+<3C> <00A1>
+<3D> <003D>
+<3E> <00BF>
+<5C> <201C>
+<5F> <02D9>
+<60> <2018>
+<7D> <02DD>
+<7E> <007E>
+<7F> <00A8>
+endbfchar
+endcmap
+CMapName currentdict /CMap defineresource pop
+end
+end
+%%EndResource
+%%EOF
+    }\endgroup
+  \expandafter\edef\csname cmapOT1\endcsname#1{%
+    \pdffontattr#1{/ToUnicode \the\pdflastobj\space 0 R}%
+  }%
+%
+% \cmapOT1IT
+  \begingroup
+    \catcode`\^^M=\active \def^^M{^^J}% Output line endings as the ^^J char.
+    \catcode`\%=12 \immediate\pdfobj stream {%!PS-Adobe-3.0 Resource-CMap
+%%DocumentNeededResources: ProcSet (CIDInit)
+%%IncludeResource: ProcSet (CIDInit)
+%%BeginResource: CMap (TeX-OT1IT-0)
+%%Title: (TeX-OT1IT-0 TeX OT1IT 0)
+%%Version: 1.000
+%%EndComments
+/CIDInit /ProcSet findresource begin
+12 dict begin
+begincmap
+/CIDSystemInfo
+<< /Registry (TeX)
+/Ordering (OT1IT)
+/Supplement 0
+>> def
+/CMapName /TeX-OT1IT-0 def
+/CMapType 2 def
+1 begincodespacerange
+<00> <7F>
+endcodespacerange
+8 beginbfrange
+<00> <01> <0393>
+<09> <0A> <03A8>
+<25> <26> <0025>
+<28> <3B> <0028>
+<3F> <5B> <003F>
+<5D> <5E> <005D>
+<61> <7A> <0061>
+<7B> <7C> <2013>
+endbfrange
+42 beginbfchar
+<02> <0398>
+<03> <039B>
+<04> <039E>
+<05> <03A0>
+<06> <03A3>
+<07> <03D2>
+<08> <03A6>
+<0B> <00660066>
+<0C> <00660069>
+<0D> <0066006C>
+<0E> <006600660069>
+<0F> <00660066006C>
+<10> <0131>
+<11> <0237>
+<12> <0060>
+<13> <00B4>
+<14> <02C7>
+<15> <02D8>
+<16> <00AF>
+<17> <02DA>
+<18> <00B8>
+<19> <00DF>
+<1A> <00E6>
+<1B> <0153>
+<1C> <00F8>
+<1D> <00C6>
+<1E> <0152>
+<1F> <00D8>
+<21> <0021>
+<22> <201D>
+<23> <0023>
+<24> <00A3>
+<27> <2019>
+<3C> <00A1>
+<3D> <003D>
+<3E> <00BF>
+<5C> <201C>
+<5F> <02D9>
+<60> <2018>
+<7D> <02DD>
+<7E> <007E>
+<7F> <00A8>
+endbfchar
+endcmap
+CMapName currentdict /CMap defineresource pop
+end
+end
+%%EndResource
+%%EOF
+    }\endgroup
+  \expandafter\edef\csname cmapOT1IT\endcsname#1{%
+    \pdffontattr#1{/ToUnicode \the\pdflastobj\space 0 R}%
+  }%
+%
+% \cmapOT1TT
+  \begingroup
+    \catcode`\^^M=\active \def^^M{^^J}% Output line endings as the ^^J char.
+    \catcode`\%=12 \immediate\pdfobj stream {%!PS-Adobe-3.0 Resource-CMap
+%%DocumentNeededResources: ProcSet (CIDInit)
+%%IncludeResource: ProcSet (CIDInit)
+%%BeginResource: CMap (TeX-OT1TT-0)
+%%Title: (TeX-OT1TT-0 TeX OT1TT 0)
+%%Version: 1.000
+%%EndComments
+/CIDInit /ProcSet findresource begin
+12 dict begin
+begincmap
+/CIDSystemInfo
+<< /Registry (TeX)
+/Ordering (OT1TT)
+/Supplement 0
+>> def
+/CMapName /TeX-OT1TT-0 def
+/CMapType 2 def
+1 begincodespacerange
+<00> <7F>
+endcodespacerange
+5 beginbfrange
+<00> <01> <0393>
+<09> <0A> <03A8>
+<21> <26> <0021>
+<28> <5F> <0028>
+<61> <7E> <0061>
+endbfrange
+32 beginbfchar
+<02> <0398>
+<03> <039B>
+<04> <039E>
+<05> <03A0>
+<06> <03A3>
+<07> <03D2>
+<08> <03A6>
+<0B> <2191>
+<0C> <2193>
+<0D> <0027>
+<0E> <00A1>
+<0F> <00BF>
+<10> <0131>
+<11> <0237>
+<12> <0060>
+<13> <00B4>
+<14> <02C7>
+<15> <02D8>
+<16> <00AF>
+<17> <02DA>
+<18> <00B8>
+<19> <00DF>
+<1A> <00E6>
+<1B> <0153>
+<1C> <00F8>
+<1D> <00C6>
+<1E> <0152>
+<1F> <00D8>
+<20> <2423>
+<27> <2019>
+<60> <2018>
+<7F> <00A8>
+endbfchar
+endcmap
+CMapName currentdict /CMap defineresource pop
+end
+end
+%%EndResource
+%%EOF
+    }\endgroup
+  \expandafter\edef\csname cmapOT1TT\endcsname#1{%
+    \pdffontattr#1{/ToUnicode \the\pdflastobj\space 0 R}%
+  }%
+\fi\fi
+
+
+% Set the font macro #1 to the font named \fontprefix#2.
+% #3 is the font's design size, #4 is a scale factor, #5 is the CMap
+% encoding (only OT1, OT1IT and OT1TT are allowed, or empty to omit).
+% Example:
+% #1 = \textrm
+% #2 = \rmshape
+% #3 = 10
+% #4 = \mainmagstep
+% #5 = OT1
+%
+\def\setfont#1#2#3#4#5{%
+  \font#1=\fontprefix#2#3 scaled #4
+  \csname cmap#5\endcsname#1%
+}
+% This is what gets called when #5 of \setfont is empty.
+\let\cmap\gobble
+%
+% (end of cmaps)
+
+% Use cm as the default font prefix.
+% To specify the font prefix, you must define \fontprefix
+% before you read in texinfo.tex.
+\ifx\fontprefix\thisisundefined
+\def\fontprefix{cm}
+\fi
+% Support font families that don't use the same naming scheme as CM.
+\def\rmshape{r}
+\def\rmbshape{bx}               % where the normal face is bold
+\def\bfshape{b}
+\def\bxshape{bx}
+\def\ttshape{tt}
+\def\ttbshape{tt}
+\def\ttslshape{sltt}
+\def\itshape{ti}
+\def\itbshape{bxti}
+\def\slshape{sl}
+\def\slbshape{bxsl}
+\def\sfshape{ss}
+\def\sfbshape{ss}
+\def\scshape{csc}
+\def\scbshape{csc}
+
+% Definitions for a main text size of 11pt.  (The default in Texinfo.)
+%
+\def\definetextfontsizexi{%
+% Text fonts (11.2pt, magstep1).
+\def\textnominalsize{11pt}
+\edef\mainmagstep{\magstephalf}
+\setfont\textrm\rmshape{10}{\mainmagstep}{OT1}
+\setfont\texttt\ttshape{10}{\mainmagstep}{OT1TT}
+\setfont\textbf\bfshape{10}{\mainmagstep}{OT1}
+\setfont\textit\itshape{10}{\mainmagstep}{OT1IT}
+\setfont\textsl\slshape{10}{\mainmagstep}{OT1}
+\setfont\textsf\sfshape{10}{\mainmagstep}{OT1}
+\setfont\textsc\scshape{10}{\mainmagstep}{OT1}
+\setfont\textttsl\ttslshape{10}{\mainmagstep}{OT1TT}
+\font\texti=cmmi10 scaled \mainmagstep
+\font\textsy=cmsy10 scaled \mainmagstep
+\def\textecsize{1095}
+
+% A few fonts for @defun names and args.
+\setfont\defbf\bfshape{10}{\magstep1}{OT1}
+\setfont\deftt\ttshape{10}{\magstep1}{OT1TT}
+\setfont\defttsl\ttslshape{10}{\magstep1}{OT1TT}
+\def\df{\let\tentt=\deftt \let\tenbf = \defbf \let\tenttsl=\defttsl \bf}
+
+% Fonts for indices, footnotes, small examples (9pt).
+\def\smallnominalsize{9pt}
+\setfont\smallrm\rmshape{9}{1000}{OT1}
+\setfont\smalltt\ttshape{9}{1000}{OT1TT}
+\setfont\smallbf\bfshape{10}{900}{OT1}
+\setfont\smallit\itshape{9}{1000}{OT1IT}
+\setfont\smallsl\slshape{9}{1000}{OT1}
+\setfont\smallsf\sfshape{9}{1000}{OT1}
+\setfont\smallsc\scshape{10}{900}{OT1}
+\setfont\smallttsl\ttslshape{10}{900}{OT1TT}
+\font\smalli=cmmi9
+\font\smallsy=cmsy9
+\def\smallecsize{0900}
+
+% Fonts for small examples (8pt).
+\def\smallernominalsize{8pt}
+\setfont\smallerrm\rmshape{8}{1000}{OT1}
+\setfont\smallertt\ttshape{8}{1000}{OT1TT}
+\setfont\smallerbf\bfshape{10}{800}{OT1}
+\setfont\smallerit\itshape{8}{1000}{OT1IT}
+\setfont\smallersl\slshape{8}{1000}{OT1}
+\setfont\smallersf\sfshape{8}{1000}{OT1}
+\setfont\smallersc\scshape{10}{800}{OT1}
+\setfont\smallerttsl\ttslshape{10}{800}{OT1TT}
+\font\smalleri=cmmi8
+\font\smallersy=cmsy8
+\def\smallerecsize{0800}
+
+% Fonts for title page (20.4pt):
+\def\titlenominalsize{20pt}
+\setfont\titlerm\rmbshape{12}{\magstep3}{OT1}
+\setfont\titleit\itbshape{10}{\magstep4}{OT1IT}
+\setfont\titlesl\slbshape{10}{\magstep4}{OT1}
+\setfont\titlett\ttbshape{12}{\magstep3}{OT1TT}
+\setfont\titlettsl\ttslshape{10}{\magstep4}{OT1TT}
+\setfont\titlesf\sfbshape{17}{\magstep1}{OT1}
+\let\titlebf=\titlerm
+\setfont\titlesc\scbshape{10}{\magstep4}{OT1}
+\font\titlei=cmmi12 scaled \magstep3
+\font\titlesy=cmsy10 scaled \magstep4
+\def\titleecsize{2074}
+
+% Chapter (and unnumbered) fonts (17.28pt).
+\def\chapnominalsize{17pt}
+\setfont\chaprm\rmbshape{12}{\magstep2}{OT1}
+\setfont\chapit\itbshape{10}{\magstep3}{OT1IT}
+\setfont\chapsl\slbshape{10}{\magstep3}{OT1}
+\setfont\chaptt\ttbshape{12}{\magstep2}{OT1TT}
+\setfont\chapttsl\ttslshape{10}{\magstep3}{OT1TT}
+\setfont\chapsf\sfbshape{17}{1000}{OT1}
+\let\chapbf=\chaprm
+\setfont\chapsc\scbshape{10}{\magstep3}{OT1}
+\font\chapi=cmmi12 scaled \magstep2
+\font\chapsy=cmsy10 scaled \magstep3
+\def\chapecsize{1728}
+
+% Section fonts (14.4pt).
+\def\secnominalsize{14pt}
+\setfont\secrm\rmbshape{12}{\magstep1}{OT1}
+\setfont\secit\itbshape{10}{\magstep2}{OT1IT}
+\setfont\secsl\slbshape{10}{\magstep2}{OT1}
+\setfont\sectt\ttbshape{12}{\magstep1}{OT1TT}
+\setfont\secttsl\ttslshape{10}{\magstep2}{OT1TT}
+\setfont\secsf\sfbshape{12}{\magstep1}{OT1}
+\let\secbf\secrm
+\setfont\secsc\scbshape{10}{\magstep2}{OT1}
+\font\seci=cmmi12 scaled \magstep1
+\font\secsy=cmsy10 scaled \magstep2
+\def\sececsize{1440}
+
+% Subsection fonts (13.15pt).
+\def\ssecnominalsize{13pt}
+\setfont\ssecrm\rmbshape{12}{\magstephalf}{OT1}
+\setfont\ssecit\itbshape{10}{1315}{OT1IT}
+\setfont\ssecsl\slbshape{10}{1315}{OT1}
+\setfont\ssectt\ttbshape{12}{\magstephalf}{OT1TT}
+\setfont\ssecttsl\ttslshape{10}{1315}{OT1TT}
+\setfont\ssecsf\sfbshape{12}{\magstephalf}{OT1}
+\let\ssecbf\ssecrm
+\setfont\ssecsc\scbshape{10}{1315}{OT1}
+\font\sseci=cmmi12 scaled \magstephalf
+\font\ssecsy=cmsy10 scaled 1315
+\def\ssececsize{1200}
+
+% Reduced fonts for @acro in text (10pt).
+\def\reducednominalsize{10pt}
+\setfont\reducedrm\rmshape{10}{1000}{OT1}
+\setfont\reducedtt\ttshape{10}{1000}{OT1TT}
+\setfont\reducedbf\bfshape{10}{1000}{OT1}
+\setfont\reducedit\itshape{10}{1000}{OT1IT}
+\setfont\reducedsl\slshape{10}{1000}{OT1}
+\setfont\reducedsf\sfshape{10}{1000}{OT1}
+\setfont\reducedsc\scshape{10}{1000}{OT1}
+\setfont\reducedttsl\ttslshape{10}{1000}{OT1TT}
+\font\reducedi=cmmi10
+\font\reducedsy=cmsy10
+\def\reducedecsize{1000}
+
+\textleading = 13.2pt % line spacing for 11pt CM
+\textfonts            % reset the current fonts
+\rm
+} % end of 11pt text font size definitions, \definetextfontsizexi
+
+
+% Definitions to make the main text be 10pt Computer Modern, with
+% section, chapter, etc., sizes following suit.  This is for the GNU
+% Press printing of the Emacs 22 manual.  Maybe other manuals in the
+% future.  Used with @smallbook, which sets the leading to 12pt.
+%
+\def\definetextfontsizex{%
+% Text fonts (10pt).
+\def\textnominalsize{10pt}
+\edef\mainmagstep{1000}
+\setfont\textrm\rmshape{10}{\mainmagstep}{OT1}
+\setfont\texttt\ttshape{10}{\mainmagstep}{OT1TT}
+\setfont\textbf\bfshape{10}{\mainmagstep}{OT1}
+\setfont\textit\itshape{10}{\mainmagstep}{OT1IT}
+\setfont\textsl\slshape{10}{\mainmagstep}{OT1}
+\setfont\textsf\sfshape{10}{\mainmagstep}{OT1}
+\setfont\textsc\scshape{10}{\mainmagstep}{OT1}
+\setfont\textttsl\ttslshape{10}{\mainmagstep}{OT1TT}
+\font\texti=cmmi10 scaled \mainmagstep
+\font\textsy=cmsy10 scaled \mainmagstep
+\def\textecsize{1000}
+
+% A few fonts for @defun names and args.
+\setfont\defbf\bfshape{10}{\magstephalf}{OT1}
+\setfont\deftt\ttshape{10}{\magstephalf}{OT1TT}
+\setfont\defttsl\ttslshape{10}{\magstephalf}{OT1TT}
+\def\df{\let\tentt=\deftt \let\tenbf = \defbf \let\tenttsl=\defttsl \bf}
+
+% Fonts for indices, footnotes, small examples (9pt).
+\def\smallnominalsize{9pt}
+\setfont\smallrm\rmshape{9}{1000}{OT1}
+\setfont\smalltt\ttshape{9}{1000}{OT1TT}
+\setfont\smallbf\bfshape{10}{900}{OT1}
+\setfont\smallit\itshape{9}{1000}{OT1IT}
+\setfont\smallsl\slshape{9}{1000}{OT1}
+\setfont\smallsf\sfshape{9}{1000}{OT1}
+\setfont\smallsc\scshape{10}{900}{OT1}
+\setfont\smallttsl\ttslshape{10}{900}{OT1TT}
+\font\smalli=cmmi9
+\font\smallsy=cmsy9
+\def\smallecsize{0900}
+
+% Fonts for small examples (8pt).
+\def\smallernominalsize{8pt}
+\setfont\smallerrm\rmshape{8}{1000}{OT1}
+\setfont\smallertt\ttshape{8}{1000}{OT1TT}
+\setfont\smallerbf\bfshape{10}{800}{OT1}
+\setfont\smallerit\itshape{8}{1000}{OT1IT}
+\setfont\smallersl\slshape{8}{1000}{OT1}
+\setfont\smallersf\sfshape{8}{1000}{OT1}
+\setfont\smallersc\scshape{10}{800}{OT1}
+\setfont\smallerttsl\ttslshape{10}{800}{OT1TT}
+\font\smalleri=cmmi8
+\font\smallersy=cmsy8
+\def\smallerecsize{0800}
+
+% Fonts for title page (20.74pt):
+\def\titlenominalsize{20pt}
+\setfont\titlerm\rmbshape{12}{\magstep3}{OT1}
+\setfont\titleit\itbshape{10}{\magstep4}{OT1IT}
+\setfont\titlesl\slbshape{10}{\magstep4}{OT1}
+\setfont\titlett\ttbshape{12}{\magstep3}{OT1TT}
+\setfont\titlettsl\ttslshape{10}{\magstep4}{OT1TT}
+\setfont\titlesf\sfbshape{17}{\magstep1}{OT1}
+\let\titlebf=\titlerm
+\setfont\titlesc\scbshape{10}{\magstep4}{OT1}
+\font\titlei=cmmi12 scaled \magstep3
+\font\titlesy=cmsy10 scaled \magstep4
+\def\titleecsize{2074}
+
+% Chapter fonts (14.4pt).
+\def\chapnominalsize{14pt}
+\setfont\chaprm\rmbshape{12}{\magstep1}{OT1}
+\setfont\chapit\itbshape{10}{\magstep2}{OT1IT}
+\setfont\chapsl\slbshape{10}{\magstep2}{OT1}
+\setfont\chaptt\ttbshape{12}{\magstep1}{OT1TT}
+\setfont\chapttsl\ttslshape{10}{\magstep2}{OT1TT}
+\setfont\chapsf\sfbshape{12}{\magstep1}{OT1}
+\let\chapbf\chaprm
+\setfont\chapsc\scbshape{10}{\magstep2}{OT1}
+\font\chapi=cmmi12 scaled \magstep1
+\font\chapsy=cmsy10 scaled \magstep2
+\def\chapecsize{1440}
+
+% Section fonts (12pt).
+\def\secnominalsize{12pt}
+\setfont\secrm\rmbshape{12}{1000}{OT1}
+\setfont\secit\itbshape{10}{\magstep1}{OT1IT}
+\setfont\secsl\slbshape{10}{\magstep1}{OT1}
+\setfont\sectt\ttbshape{12}{1000}{OT1TT}
+\setfont\secttsl\ttslshape{10}{\magstep1}{OT1TT}
+\setfont\secsf\sfbshape{12}{1000}{OT1}
+\let\secbf\secrm
+\setfont\secsc\scbshape{10}{\magstep1}{OT1}
+\font\seci=cmmi12
+\font\secsy=cmsy10 scaled \magstep1
+\def\sececsize{1200}
+
+% Subsection fonts (10pt).
+\def\ssecnominalsize{10pt}
+\setfont\ssecrm\rmbshape{10}{1000}{OT1}
+\setfont\ssecit\itbshape{10}{1000}{OT1IT}
+\setfont\ssecsl\slbshape{10}{1000}{OT1}
+\setfont\ssectt\ttbshape{10}{1000}{OT1TT}
+\setfont\ssecttsl\ttslshape{10}{1000}{OT1TT}
+\setfont\ssecsf\sfbshape{10}{1000}{OT1}
+\let\ssecbf\ssecrm
+\setfont\ssecsc\scbshape{10}{1000}{OT1}
+\font\sseci=cmmi10
+\font\ssecsy=cmsy10
+\def\ssececsize{1000}
+
+% Reduced fonts for @acro in text (9pt).
+\def\reducednominalsize{9pt}
+\setfont\reducedrm\rmshape{9}{1000}{OT1}
+\setfont\reducedtt\ttshape{9}{1000}{OT1TT}
+\setfont\reducedbf\bfshape{10}{900}{OT1}
+\setfont\reducedit\itshape{9}{1000}{OT1IT}
+\setfont\reducedsl\slshape{9}{1000}{OT1}
+\setfont\reducedsf\sfshape{9}{1000}{OT1}
+\setfont\reducedsc\scshape{10}{900}{OT1}
+\setfont\reducedttsl\ttslshape{10}{900}{OT1TT}
+\font\reducedi=cmmi9
+\font\reducedsy=cmsy9
+\def\reducedecsize{0900}
+
+\divide\parskip by 2  % reduce space between paragraphs
+\textleading = 12pt   % line spacing for 10pt CM
+\textfonts            % reset the current fonts
+\rm
+} % end of 10pt text font size definitions, \definetextfontsizex
+
+
+% We provide the user-level command
+%   @fonttextsize 10
+% (or 11) to redefine the text font size.  pt is assumed.
+%
+\def\xiword{11}
+\def\xword{10}
+\def\xwordpt{10pt}
+%
+\parseargdef\fonttextsize{%
+  \def\textsizearg{#1}%
+  %\wlog{doing @fonttextsize \textsizearg}%
+  %
+  % Set \globaldefs so that documents can use this inside @tex, since
+  % makeinfo 4.8 does not support it, but we need it nonetheless.
+  %
+ \begingroup \globaldefs=1
+  \ifx\textsizearg\xword \definetextfontsizex
+  \else \ifx\textsizearg\xiword \definetextfontsizexi
+  \else
+    \errhelp=\EMsimple
+    \errmessage{@fonttextsize only supports `10' or `11', not `\textsizearg'}
+  \fi\fi
+ \endgroup
+}
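+% Usage sketch (illustrative, not from any particular manual): a document
+% destined for @smallbook printing might select the 10pt text size near
+% its start with
+%   @fonttextsize 10
+% Any argument other than 10 or 11 produces the error message above.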
+
+
+% In order for the font changes to affect most math symbols and letters,
+% we have to define the \textfont of the standard families.  Since
+% texinfo doesn't allow for producing subscripts and superscripts except
+% in the main text, we don't bother to reset \scriptfont and
+% \scriptscriptfont (which would also require loading a lot more fonts).
+%
+\def\resetmathfonts{%
+  \textfont0=\tenrm \textfont1=\teni \textfont2=\tensy
+  \textfont\itfam=\tenit \textfont\slfam=\tensl \textfont\bffam=\tenbf
+  \textfont\ttfam=\tentt \textfont\sffam=\tensf
+}
+
+% The font-changing commands redefine the meanings of \tenSTYLE, instead
+% of just \STYLE.  We do this because \STYLE needs to also set the
+% current \fam for math mode.  Our \STYLE (e.g., \rm) commands hardwire
+% \tenSTYLE to set the current font.
+%
+% Each font-changing command also sets the names \lsize (one size lower)
+% and \lllsize (three sizes lower).  These relative commands are used in
+% the LaTeX logo and acronyms.
+%
+% This all needs generalizing, badly.
+%
+\def\textfonts{%
+  \let\tenrm=\textrm \let\tenit=\textit \let\tensl=\textsl
+  \let\tenbf=\textbf \let\tentt=\texttt \let\smallcaps=\textsc
+  \let\tensf=\textsf \let\teni=\texti \let\tensy=\textsy
+  \let\tenttsl=\textttsl
+  \def\curfontsize{text}%
+  \def\lsize{reduced}\def\lllsize{smaller}%
+  \resetmathfonts \setleading{\textleading}}
+\def\titlefonts{%
+  \let\tenrm=\titlerm \let\tenit=\titleit \let\tensl=\titlesl
+  \let\tenbf=\titlebf \let\tentt=\titlett \let\smallcaps=\titlesc
+  \let\tensf=\titlesf \let\teni=\titlei \let\tensy=\titlesy
+  \let\tenttsl=\titlettsl
+  \def\curfontsize{title}%
+  \def\lsize{chap}\def\lllsize{subsec}%
+  \resetmathfonts \setleading{27pt}}
+\def\titlefont#1{{\titlefonts\rmisbold #1}}
+\def\chapfonts{%
+  \let\tenrm=\chaprm \let\tenit=\chapit \let\tensl=\chapsl
+  \let\tenbf=\chapbf \let\tentt=\chaptt \let\smallcaps=\chapsc
+  \let\tensf=\chapsf \let\teni=\chapi \let\tensy=\chapsy
+  \let\tenttsl=\chapttsl
+  \def\curfontsize{chap}%
+  \def\lsize{sec}\def\lllsize{text}%
+  \resetmathfonts \setleading{19pt}}
+\def\secfonts{%
+  \let\tenrm=\secrm \let\tenit=\secit \let\tensl=\secsl
+  \let\tenbf=\secbf \let\tentt=\sectt \let\smallcaps=\secsc
+  \let\tensf=\secsf \let\teni=\seci \let\tensy=\secsy
+  \let\tenttsl=\secttsl
+  \def\curfontsize{sec}%
+  \def\lsize{subsec}\def\lllsize{reduced}%
+  \resetmathfonts \setleading{16pt}}
+\def\subsecfonts{%
+  \let\tenrm=\ssecrm \let\tenit=\ssecit \let\tensl=\ssecsl
+  \let\tenbf=\ssecbf \let\tentt=\ssectt \let\smallcaps=\ssecsc
+  \let\tensf=\ssecsf \let\teni=\sseci \let\tensy=\ssecsy
+  \let\tenttsl=\ssecttsl
+  \def\curfontsize{ssec}%
+  \def\lsize{text}\def\lllsize{small}%
+  \resetmathfonts \setleading{15pt}}
+\let\subsubsecfonts = \subsecfonts
+\def\reducedfonts{%
+  \let\tenrm=\reducedrm \let\tenit=\reducedit \let\tensl=\reducedsl
+  \let\tenbf=\reducedbf \let\tentt=\reducedtt \let\reducedcaps=\reducedsc
+  \let\tensf=\reducedsf \let\teni=\reducedi \let\tensy=\reducedsy
+  \let\tenttsl=\reducedttsl
+  \def\curfontsize{reduced}%
+  \def\lsize{small}\def\lllsize{smaller}%
+  \resetmathfonts \setleading{10.5pt}}
+\def\smallfonts{%
+  \let\tenrm=\smallrm \let\tenit=\smallit \let\tensl=\smallsl
+  \let\tenbf=\smallbf \let\tentt=\smalltt \let\smallcaps=\smallsc
+  \let\tensf=\smallsf \let\teni=\smalli \let\tensy=\smallsy
+  \let\tenttsl=\smallttsl
+  \def\curfontsize{small}%
+  \def\lsize{smaller}\def\lllsize{smaller}%
+  \resetmathfonts \setleading{10.5pt}}
+\def\smallerfonts{%
+  \let\tenrm=\smallerrm \let\tenit=\smallerit \let\tensl=\smallersl
+  \let\tenbf=\smallerbf \let\tentt=\smallertt \let\smallcaps=\smallersc
+  \let\tensf=\smallersf \let\teni=\smalleri \let\tensy=\smallersy
+  \let\tenttsl=\smallerttsl
+  \def\curfontsize{smaller}%
+  \def\lsize{smaller}\def\lllsize{smaller}%
+  \resetmathfonts \setleading{9.5pt}}
+
+% Fonts for short table of contents.
+\setfont\shortcontrm\rmshape{12}{1000}{OT1}
+\setfont\shortcontbf\bfshape{10}{\magstep1}{OT1}  % no cmb12
+\setfont\shortcontsl\slshape{12}{1000}{OT1}
+\setfont\shortconttt\ttshape{12}{1000}{OT1TT}
+
+% Define these just so they can be easily changed for other fonts.
+\def\angleleft{$\langle$}
+\def\angleright{$\rangle$}
+
+% Set the fonts to use with the @small... environments.
+\let\smallexamplefonts = \smallfonts
+
+% About \smallexamplefonts.  If we use \smallfonts (9pt), @smallexample
+% can fit this many characters:
+%   8.5x11=86   smallbook=72  a4=90  a5=69
+% If we use \smallerfonts (8pt), then we can fit this many characters:
+%   8.5x11=90+  smallbook=80  a4=90+  a5=77
+% For me, subjectively, the few extra characters that fit aren't worth
+% the additional smallness of 8pt.  So I'm making the default 9pt.
+%
+% By the way, for comparison, here's what fits with @example (10pt):
+%   8.5x11=71  smallbook=60  a4=75  a5=58
+% --karl, 24jan03.
+
+% Set up the default fonts, so we can use them for creating boxes.
+%
+\definetextfontsizexi
+
+
+\message{markup,}
+
+% Check if we are currently using a typewriter font.  Since all the
+% Computer Modern typewriter fonts have zero interword stretch (and
+% shrink), and it is reasonable to expect all typewriter fonts to have
+% this property, we can check that font parameter.
+%
+\def\ifmonospace{\ifdim\fontdimen3\font=0pt }
+
+% Markup style infrastructure.  \defmarkupstylesetup\INITMACRO will
+% define and register \INITMACRO to be called on markup style changes.
+% \INITMACRO can check \currentmarkupstyle for the innermost
+% style and the set of \ifmarkupSTYLE switches for all styles
+% currently in effect.
+\newif\ifmarkupvar
+\newif\ifmarkupsamp
+\newif\ifmarkupkey
+%\newif\ifmarkupfile % @file == @samp.
+%\newif\ifmarkupoption % @option == @samp.
+\newif\ifmarkupcode
+\newif\ifmarkupkbd
+%\newif\ifmarkupenv % @env == @code.
+%\newif\ifmarkupcommand % @command == @code.
+\newif\ifmarkuptex % @tex (and part of @math, for now).
+\newif\ifmarkupexample
+\newif\ifmarkupverb
+\newif\ifmarkupverbatim
+
+\let\currentmarkupstyle\empty
+
+\def\setupmarkupstyle#1{%
+  \csname markup#1true\endcsname
+  \def\currentmarkupstyle{#1}%
+  \markupstylesetup
+}
+
+\let\markupstylesetup\empty
+
+\def\defmarkupstylesetup#1{%
+  \expandafter\def\expandafter\markupstylesetup
+    \expandafter{\markupstylesetup #1}%
+  \def#1%
+}
+
+% Markup style setup for left and right quotes.
+\defmarkupstylesetup\markupsetuplq{%
+  \expandafter\let\expandafter \temp
+    \csname markupsetuplq\currentmarkupstyle\endcsname
+  \ifx\temp\relax \markupsetuplqdefault \else \temp \fi
+}
+
+\defmarkupstylesetup\markupsetuprq{%
+  \expandafter\let\expandafter \temp
+    \csname markupsetuprq\currentmarkupstyle\endcsname
+  \ifx\temp\relax \markupsetuprqdefault \else \temp \fi
+}
+
+{
+\catcode`\'=\active
+\catcode`\`=\active
+
+\gdef\markupsetuplqdefault{\let`\lq}
+\gdef\markupsetuprqdefault{\let'\rq}
+
+\gdef\markupsetcodequoteleft{\let`\codequoteleft}
+\gdef\markupsetcodequoteright{\let'\codequoteright}
+}
+
+\let\markupsetuplqcode \markupsetcodequoteleft
+\let\markupsetuprqcode \markupsetcodequoteright
+%
+\let\markupsetuplqexample \markupsetcodequoteleft
+\let\markupsetuprqexample \markupsetcodequoteright
+%
+\let\markupsetuplqkbd     \markupsetcodequoteleft
+\let\markupsetuprqkbd     \markupsetcodequoteright
+%
+\let\markupsetuplqsamp \markupsetcodequoteleft
+\let\markupsetuprqsamp \markupsetcodequoteright
+%
+\let\markupsetuplqverb \markupsetcodequoteleft
+\let\markupsetuprqverb \markupsetcodequoteright
+%
+\let\markupsetuplqverbatim \markupsetcodequoteleft
+\let\markupsetuprqverbatim \markupsetcodequoteright
+
+% Allow an option to not use regular directed right quote/apostrophe
+% (char 0x27), but instead the undirected quote from cmtt (char 0x0d).
+% The undirected quote is ugly, so don't make it the default, but it
+% works for pasting with more pdf viewers (at least evince), the
+% lilypond developers report.  xpdf does work with the regular 0x27.
+%
+\def\codequoteright{%
+  \expandafter\ifx\csname SETtxicodequoteundirected\endcsname\relax
+    \expandafter\ifx\csname SETcodequoteundirected\endcsname\relax
+      '%
+    \else \char'15 \fi
+  \else \char'15 \fi
+}
+%
+% and a similar option for the left quote char vs. a grave accent.
+% Modern fonts display ASCII 0x60 as a grave accent, so some people like
+% the code environments to do likewise.
+%
+\def\codequoteleft{%
+  \expandafter\ifx\csname SETtxicodequotebacktick\endcsname\relax
+    \expandafter\ifx\csname SETcodequotebacktick\endcsname\relax
+      % [Knuth] pp. 380,381,391
+      % \relax disables Spanish ligatures ?` and !` of \tt font.
+      \relax`%
+    \else \char'22 \fi
+  \else \char'22 \fi
+}
+
+% Commands to set the quote options.
+% 
+\parseargdef\codequoteundirected{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETtxicodequoteundirected\endcsname
+      = t%
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETtxicodequoteundirected\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @codequoteundirected value `\temp', must be on|off}%
+  \fi\fi
+}
+%
+\parseargdef\codequotebacktick{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETtxicodequotebacktick\endcsname
+      = t%
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETtxicodequotebacktick\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @codequotebacktick value `\temp', must be on|off}%
+  \fi\fi
+}
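+% Usage sketch (illustrative): a manual whose readers copy and paste code
+% from the PDF might enable both options near its start:
+%   @codequoteundirected on
+%   @codequotebacktick on
+% Either command also accepts `off' to restore the default glyphs.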
+
+% [Knuth] pp. 380,381,391, disable Spanish ligatures ?` and !` of \tt font.
+\def\noligaturesquoteleft{\relax\lq}
+
+% Count depth in font-changes, for error checks
+\newcount\fontdepth \fontdepth=0
+
+% Font commands.
+
+% #1 is the font command (\sl or \it), #2 is the text to slant.
+% If we are in a monospaced environment, however, 1) always use \ttsl,
+% and 2) do not add an italic correction.
+\def\dosmartslant#1#2{%
+  \ifusingtt 
+    {{\ttsl #2}\let\next=\relax}%
+    {\def\next{{#1#2}\futurelet\next\smartitaliccorrection}}%
+  \next
+}
+\def\smartslanted{\dosmartslant\sl}
+\def\smartitalic{\dosmartslant\it}
+
+% Output an italic correction unless \next (presumed to be the following
+% character) is a character that does not need one.
+\def\smartitaliccorrection{%
+  \ifx\next,%
+  \else\ifx\next-%
+  \else\ifx\next.%
+  \else\ptexslash
+  \fi\fi\fi
+  \aftersmartic
+}
+
+% Unconditionally use \ttsl, with no italic correction.  @var is set to
+% this for defuns.
+\def\ttslanted#1{{\ttsl #1}}
+
+% @cite is like \smartslanted except unconditionally use \sl.  We never want
+% ttsl for book titles, do we?
+\def\cite#1{{\sl #1}\futurelet\next\smartitaliccorrection}
+
+\def\aftersmartic{}
+\def\var#1{%
+  \let\saveaftersmartic = \aftersmartic
+  \def\aftersmartic{\null\let\aftersmartic=\saveaftersmartic}%
+  \smartslanted{#1}%
+}
+
+\let\i=\smartitalic
+\let\slanted=\smartslanted
+\let\dfn=\smartslanted
+\let\emph=\smartitalic
+
+% Explicit font changes: @r, @sc, undocumented @ii.
+\def\r#1{{\rm #1}}              % roman font
+\def\sc#1{{\smallcaps#1}}       % smallcaps font
+\def\ii#1{{\it #1}}             % italic font
+
+% @b, explicit bold.  Also @strong.
+\def\b#1{{\bf #1}}
+\let\strong=\b
+
+% @sansserif, explicit sans.
+\def\sansserif#1{{\sf #1}}
+
+% We can't just use \exhyphenpenalty, because that only has effect at
+% the end of a paragraph.  Restore normal hyphenation at the end of the
+% group within which \nohyphenation is presumably called.
+%
+\def\nohyphenation{\hyphenchar\font = -1  \aftergroup\restorehyphenation}
+\def\restorehyphenation{\hyphenchar\font = `- }
+
+% Set sfcode to normal for the chars that usually have another value.
+% Can't use plain's \frenchspacing because it uses the `\x notation, and
+% sometimes \x has an active definition that messes things up.
+%
+\catcode`@=11
+  \def\plainfrenchspacing{%
+    \sfcode\dotChar  =\@m \sfcode\questChar=\@m \sfcode\exclamChar=\@m
+    \sfcode\colonChar=\@m \sfcode\semiChar =\@m \sfcode\commaChar =\@m
+    \def\endofsentencespacefactor{1000}% for @. and friends
+  }
+  \def\plainnonfrenchspacing{%
+    \sfcode`\.3000\sfcode`\?3000\sfcode`\!3000
+    \sfcode`\:2000\sfcode`\;1500\sfcode`\,1250
+    \def\endofsentencespacefactor{3000}% for @. and friends
+  }
+\catcode`@=\other
+\def\endofsentencespacefactor{3000}% default
+
+% @t, explicit typewriter.
+\def\t#1{%
+  {\tt \rawbackslash \plainfrenchspacing #1}%
+  \null
+}
+
+% @samp.
+\def\samp#1{{\setupmarkupstyle{samp}\lq\tclose{#1}\rq\null}}
+
+% @indicateurl is \samp, that is, with quotes.
+\let\indicateurl=\samp
+
+% @code (and similar) prints in typewriter, but with spaces the same
+% size as normal in the surrounding text, without hyphenation, etc.
+% This is a subroutine for that.
+\def\tclose#1{%
+  {%
+    % Change normal interword space to be same as for the current font.
+    \spaceskip = \fontdimen2\font
+    %
+    % Switch to typewriter.
+    \tt
+    %
+    % But `\ ' produces the large typewriter interword space.
+    \def\ {{\spaceskip = 0pt{} }}%
+    %
+    % Turn off hyphenation.
+    \nohyphenation
+    %
+    \rawbackslash
+    \plainfrenchspacing
+    #1%
+  }%
+  \null % reset spacefactor to 1000
+}
+
+% We *must* turn on hyphenation at `-' and `_' in @code.
+% Otherwise, it is too hard to avoid overfull hboxes
+% in the Emacs manual, the Library manual, etc.
+%
+% Unfortunately, TeX uses one parameter (\hyphenchar) to control
+% both hyphenation at - and hyphenation within words.
+% We must therefore turn them both off (\tclose does that)
+% and arrange explicitly to hyphenate at a dash.
+%  -- rms.
+{
+  \catcode`\-=\active \catcode`\_=\active
+  \catcode`\'=\active \catcode`\`=\active
+  \global\let'=\rq \global\let`=\lq  % default definitions
+  %
+  \global\def\code{\begingroup
+    \setupmarkupstyle{code}%
+    % The following should really be moved into \setupmarkupstyle handlers.
+    \catcode\dashChar=\active  \catcode\underChar=\active
+    \ifallowcodebreaks
+     \let-\codedash
+     \let_\codeunder
+    \else
+     \let-\normaldash
+     \let_\realunder
+    \fi
+    \codex
+  }
+}
+
+\def\codex #1{\tclose{#1}\endgroup}
+
+\def\normaldash{-}
+\def\codedash{-\discretionary{}{}{}}
+\def\codeunder{%
+  % This is all so @math{@code{var_name}+1} can work.  In math mode, _
+  % is "active" (mathcode"8000) and \normalunderscore (or \char95, etc.)
+  % would therefore expand the active definition of _, which is this very
+  % macro (inside @code, that is), producing an endless loop.
+  \ifusingtt{\ifmmode
+               \mathchar"075F % class 0=ordinary, family 7=ttfam, pos 0x5F=_.
+             \else\normalunderscore \fi
+             \discretionary{}{}{}}%
+            {\_}%
+}
+
+% An additional complication: the above will allow breaks after, e.g.,
+% each of the four underscores in __typeof__.  This is bad.
+% @allowcodebreaks provides a document-level way to turn breaking at -
+% and _ on and off.
+%
+\newif\ifallowcodebreaks  \allowcodebreakstrue
+
+\def\keywordtrue{true}
+\def\keywordfalse{false}
+
+\parseargdef\allowcodebreaks{%
+  \def\txiarg{#1}%
+  \ifx\txiarg\keywordtrue
+    \allowcodebreakstrue
+  \else\ifx\txiarg\keywordfalse
+    \allowcodebreaksfalse
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @allowcodebreaks option `\txiarg', must be true|false}%
+  \fi\fi
+}
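+% Usage sketch (illustrative): a manual full of identifiers such as
+% __typeof__ can suppress the extra break points with
+%   @allowcodebreaks false
+% and restore the default behavior later with
+%   @allowcodebreaks true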
+
+% For @command, @env, @file, @option quotes seem unnecessary,
+% so use \code rather than \samp.
+\let\command=\code
+\let\env=\code
+\let\file=\code
+\let\option=\code
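+% For comparison (argument values are illustrative only): @samp{-o} prints
+% its argument in typewriter surrounded by quotes, while the @code-based
+% commands print it bare, e.g. @option{-o}, @file{foo.c}, @env{PATH},
+% @command{gcc}.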
+
+% @uref (abbreviation for `urlref') takes an optional (comma-separated)
+% second argument specifying the text to display and an optional third
+% arg as text to display instead of (rather than in addition to) the url
+% itself.  First (mandatory) arg is the url.
+% (This \urefnobreak definition isn't used now, leaving it for a while
+% for comparison.)
+\def\urefnobreak#1{\dourefnobreak #1,,,\finish}
+\def\dourefnobreak#1,#2,#3,#4\finish{\begingroup
+  \unsepspaces
+  \pdfurl{#1}%
+  \setbox0 = \hbox{\ignorespaces #3}%
+  \ifdim\wd0 > 0pt
+    \unhbox0 % third arg given, show only that
+  \else
+    \setbox0 = \hbox{\ignorespaces #2}%
+    \ifdim\wd0 > 0pt
+      \ifpdf
+        \unhbox0             % PDF: 2nd arg given, show only it
+      \else
+        \unhbox0\ (\code{#1})% DVI: 2nd arg given, show both it and url
+      \fi
+    \else
+      \code{#1}% only url given, so show it
+    \fi
+  \fi
+  \endlink
+\endgroup}
+
+% This \urefbreak definition is the active one.
+\def\urefbreak{\begingroup \urefcatcodes \dourefbreak}
+\let\uref=\urefbreak
+\def\dourefbreak#1{\urefbreakfinish #1,,,\finish}
+\def\urefbreakfinish#1,#2,#3,#4\finish{% doesn't work in @example
+  \unsepspaces
+  \pdfurl{#1}%
+  \setbox0 = \hbox{\ignorespaces #3}%
+  \ifdim\wd0 > 0pt
+    \unhbox0 % third arg given, show only that
+  \else
+    \setbox0 = \hbox{\ignorespaces #2}%
+    \ifdim\wd0 > 0pt
+      \ifpdf
+        \unhbox0             % PDF: 2nd arg given, show only it
+      \else
+        \unhbox0\ (\urefcode{#1})% DVI: 2nd arg given, show both it and url
+      \fi
+    \else
+      \urefcode{#1}% only url given, so show it
+    \fi
+  \fi
+  \endlink
+\endgroup}
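+% Usage sketch of the three argument forms (the url is just an example):
+%   @uref{http://www.gnu.org/}                    url only
+%   @uref{http://www.gnu.org/, the GNU web site}  url plus displayed text
+%   @uref{http://www.gnu.org/, , gnu.org}         third arg replaces the url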
+
+% Allow line breaks around only a few characters.
+\def\urefcatcodes{%
+  \catcode\ampChar=\active   \catcode\dotChar=\active
+  \catcode\hashChar=\active  \catcode\questChar=\active
+  \catcode\slashChar=\active
+}
+{
+  \urefcatcodes
+  %
+  \global\def\urefcode{\begingroup
+    \setupmarkupstyle{code}%
+    \urefcatcodes
+    \let&\urefcodeamp
+    \let.\urefcodedot
+    \let#\urefcodehash
+    \let?\urefcodequest
+    \let/\urefcodeslash
+    \codex
+  }
+  %
+  % By default, they are just regular characters.
+  \global\def&{\normalamp}
+  \global\def.{\normaldot}
+  \global\def#{\normalhash}
+  \global\def?{\normalquest}
+  \global\def/{\normalslash}
+}
+
+% We put a little stretch before and after the breakable chars, to help
+% line breaking of long urls.  The unequal skips make the result look
+% better in cmtt, at least, especially for dots.
+\def\urefprestretch{\urefprebreak \hskip0pt plus.13em }
+\def\urefpoststretch{\urefpostbreak \hskip0pt plus.1em }
+%
+\def\urefcodeamp{\urefprestretch \&\urefpoststretch}
+\def\urefcodedot{\urefprestretch .\urefpoststretch}
+\def\urefcodehash{\urefprestretch \#\urefpoststretch}
+\def\urefcodequest{\urefprestretch ?\urefpoststretch}
+\def\urefcodeslash{\futurelet\next\urefcodeslashfinish}
+{
+  \catcode`\/=\active
+  \global\def\urefcodeslashfinish{%
+    \urefprestretch \slashChar
+    % Allow line break only after the final / in a sequence of
+    % slashes, to avoid line break between the slashes in http://.
+    \ifx\next/\else \urefpoststretch \fi
+  }
+}
+
+% One more complication: by default we'll break after the special
+% characters, but some people like to break before the special chars, so
+% allow that.  Also allow no breaking at all, for manual control.
+% 
+\parseargdef\urefbreakstyle{%
+  \def\txiarg{#1}%
+  \ifx\txiarg\wordnone
+    \def\urefprebreak{\nobreak}\def\urefpostbreak{\nobreak}
+  \else\ifx\txiarg\wordbefore
+    \def\urefprebreak{\allowbreak}\def\urefpostbreak{\nobreak}
+  \else\ifx\txiarg\wordafter
+    \def\urefprebreak{\nobreak}\def\urefpostbreak{\allowbreak}
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @urefbreakstyle setting `\txiarg'}%
+  \fi\fi\fi
+}
+\def\wordafter{after}
+\def\wordbefore{before}
+\def\wordnone{none}
+
+\urefbreakstyle after
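+% Usage sketch (illustrative): to forbid breaking inside urls altogether,
+% a document can say
+%   @urefbreakstyle none
+% while `before' and `after' move the permitted break point relative to
+% the special characters handled above.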
+
+% @url synonym for @uref, since that's how everyone uses it.
+%
+\let\url=\uref
+
+% rms does not like angle brackets --karl, 17may97.
+% So now @email is just like @uref, unless we are pdf.
+%
+%\def\email#1{\angleleft{\tt #1}\angleright}
+\ifpdf
+  \def\email#1{\doemail#1,,\finish}
+  \def\doemail#1,#2,#3\finish{\begingroup
+    \unsepspaces
+    \pdfurl{mailto:#1}%
+    \setbox0 = \hbox{\ignorespaces #2}%
+    \ifdim\wd0>0pt\unhbox0\else\code{#1}\fi
+    \endlink
+  \endgroup}
+\else
+  \let\email=\uref
+\fi
+
+% @kbdinputstyle -- arg is `distinct' (@kbd uses slanted tty font always),
+%   `example' (@kbd uses ttsl only inside of @example and friends),
+%   or `code' (@kbd uses normal tty font always).
+\parseargdef\kbdinputstyle{%
+  \def\txiarg{#1}%
+  \ifx\txiarg\worddistinct
+    \gdef\kbdexamplefont{\ttsl}\gdef\kbdfont{\ttsl}%
+  \else\ifx\txiarg\wordexample
+    \gdef\kbdexamplefont{\ttsl}\gdef\kbdfont{\tt}%
+  \else\ifx\txiarg\wordcode
+    \gdef\kbdexamplefont{\tt}\gdef\kbdfont{\tt}%
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @kbdinputstyle setting `\txiarg'}%
+  \fi\fi\fi
+}
+\def\worddistinct{distinct}
+\def\wordexample{example}
+\def\wordcode{code}
+
+% Default is `distinct'.
+\kbdinputstyle distinct
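+% Usage sketch (illustrative): a manual that wants @kbd to look exactly
+% like @code everywhere can say
+%   @kbdinputstyle code
+% the other accepted values being `distinct' (the default) and `example'.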
+
+% @kbd is like @code, except that if the argument is just one @key command,
+% then @kbd has no effect.
+\def\kbd#1{{\def\look{#1}\expandafter\kbdsub\look??\par}}
+
+\def\xkey{\key}
+\def\kbdsub#1#2#3\par{%
+  \def\one{#1}\def\three{#3}\def\threex{??}%
+  \ifx\one\xkey\ifx\threex\three \key{#2}%
+  \else{\tclose{\kbdfont\setupmarkupstyle{kbd}\look}}\fi
+  \else{\tclose{\kbdfont\setupmarkupstyle{kbd}\look}}\fi
+}
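+% For example (illustrative input): @kbd{C-x C-s} is set in \kbdfont,
+% while @kbd{@key{RET}} reduces to a plain @key{RET}, as the test above
+% arranges.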
+
+% definition of @key that produces a lozenge.  Doesn't adjust to text size.
+%\setfont\keyrm\rmshape{8}{1000}{OT1}
+%\font\keysy=cmsy9
+%\def\key#1{{\keyrm\textfont2=\keysy \leavevmode\hbox{%
+%  \raise0.4pt\hbox{\angleleft}\kern-.08em\vtop{%
+%    \vbox{\hrule\kern-0.4pt
+%     \hbox{\raise0.4pt\hbox{\vphantom{\angleleft}}#1}}%
+%    \kern-0.4pt\hrule}%
+%  \kern-.06em\raise0.4pt\hbox{\angleright}}}}
+
+% definition of @key with no lozenge.  If the current font is already
+% monospace, don't change it; that way, we respect @kbdinputstyle.  But
+% if it isn't monospace, then use \tt.
+%
+\def\key#1{{\setupmarkupstyle{key}%
+  \nohyphenation
+  \ifmonospace\else\tt\fi
+  #1}\null}
+
+% @clicksequence{File @click{} Open ...}
+\def\clicksequence#1{\begingroup #1\endgroup}
+
+% @clickstyle @arrow   (by default)
+\parseargdef\clickstyle{\def\click{#1}}
+\def\click{\arrow}
+
+% Typeset a dimension, e.g., `in' or `pt'.  The only reason for the
+% argument is to make the input look right: @dmn{pt} instead of @dmn{}pt.
+%
+\def\dmn#1{\thinspace #1}
+
+% @l was never documented to mean ``switch to the Lisp font'',
+% and it is not used as such in any manual I can find.  We need it for
+% Polish suppressed-l.  --karl, 22sep96.
+%\def\l#1{{\li #1}\null}
+
+% @acronym for "FBI", "NATO", and the like.
+% We print this one point size smaller, since it's intended for
+% all-uppercase.
+%
+\def\acronym#1{\doacronym #1,,\finish}
+\def\doacronym#1,#2,#3\finish{%
+  {\selectfonts\lsize #1}%
+  \def\temp{#2}%
+  \ifx\temp\empty \else
+    \space ({\unsepspaces \ignorespaces \temp \unskip})%
+  \fi
+  \null % reset \spacefactor=1000
+}
+
+% @abbr for "Comput. J." and the like.
+% No font change, but don't do end-of-sentence spacing.
+%
+\def\abbr#1{\doabbr #1,,\finish}
+\def\doabbr#1,#2,#3\finish{%
+  {\plainfrenchspacing #1}%
+  \def\temp{#2}%
+  \ifx\temp\empty \else
+    \space ({\unsepspaces \ignorespaces \temp \unskip})%
+  \fi
+  \null % reset \spacefactor=1000
+}
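+% Usage sketch (expansions are illustrative): both commands take an
+% optional second argument which is printed in parentheses, e.g.
+%   @acronym{NATO, North Atlantic Treaty Organization}
+%   @abbr{Comput. J., Computer Journal}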
+
+% @asis just yields its argument.  Used with @table, for example.
+%
+\def\asis#1{#1}
+
+% @math outputs its argument in math mode.
+%
+% One complication: _ usually means subscripts, but it could also mean
+% an actual _ character, as in @math{@var{some_variable} + 1}.  So make
+% _ active, and distinguish by seeing if the current family is \slfam,
+% which is what @var uses.
+{
+  \catcode`\_ = \active
+  \gdef\mathunderscore{%
+    \catcode`\_=\active
+    \def_{\ifnum\fam=\slfam \_\else\sb\fi}%
+  }
+}
+% Another complication: we want \\ (and @\) to output a math (or tt) \.
+% FYI, plain.tex uses \\ as a temporary control sequence (for no
+% particular reason), but this is not advertised and we don't care.
+%
+% The \mathchar is class=0=ordinary, family=7=ttfam, position=5C=\.
+\def\mathbackslash{\ifnum\fam=\ttfam \mathchar"075C \else\backslash \fi}
+%
+\def\math{%
+  \tex
+  \mathunderscore
+  \let\\ = \mathbackslash
+  \mathactive
+  % make the texinfo accent commands work in math mode
+  \let\"=\ddot
+  \let\'=\acute
+  \let\==\bar
+  \let\^=\hat
+  \let\`=\grave
+  \let\u=\breve
+  \let\v=\check
+  \let\~=\tilde
+  \let\dotaccent=\dot
+  $\finishmath
+}
+\def\finishmath#1{#1$\endgroup}  % Close the group opened by \tex.
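+% For example (illustrative input): @math{(a + b)^2} is typeset in math
+% mode, and @math{@var{some_variable} + 1} works because the active
+% underscore above prints a literal _ inside @var (slanted family) and a
+% subscript otherwise.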
+
+% Some active characters (such as <) are spaced differently in math.
+% We have to reset their definitions in case the @math was an argument
+% to a command which sets the catcodes (such as @item or @section).
+%
+{
+  \catcode`^ = \active
+  \catcode`< = \active
+  \catcode`> = \active
+  \catcode`+ = \active
+  \catcode`' = \active
+  \gdef\mathactive{%
+    \let^ = \ptexhat
+    \let< = \ptexless
+    \let> = \ptexgtr
+    \let+ = \ptexplus
+    \let' = \ptexquoteright
+  }
+}
+
+% ctrl is no longer a Texinfo command, but leave this definition for fun.
+\def\ctrl #1{{\tt \rawbackslash \hat}#1}
+
+% @inlinefmt{FMTNAME,PROCESSED-TEXT} and @inlineraw{FMTNAME,RAW-TEXT}.
+% Ignore unless FMTNAME == tex; then it is like @iftex and @tex,
+% except specified as a normal braced arg, so no newlines to worry about.
+% 
+\def\outfmtnametex{tex}
+%
+\long\def\inlinefmt#1{\doinlinefmt #1,\finish}
+\long\def\doinlinefmt#1,#2,\finish{%
+  \def\inlinefmtname{#1}%
+  \ifx\inlinefmtname\outfmtnametex \ignorespaces #2\fi
+}
+% For raw, must switch into @tex before parsing the argument, to avoid
+% setting catcodes prematurely.  Doing it this way means that, for
+% example, @inlineraw{html, foo{bar} gets a parse error instead of being
+% ignored.  But this isn't important because if people want a literal
+% *right* brace they would have to use a command anyway, so they may as
+% well use a command to get a left brace too.  We could re-use the
+% delimiter character idea from \verb, but it seems like overkill.
+% 
+\long\def\inlineraw{\tex \doinlineraw}
+\long\def\doinlineraw#1{\doinlinerawtwo #1,\finish}
+\def\doinlinerawtwo#1,#2,\finish{%
+  \def\inlinerawname{#1}%
+  \ifx\inlinerawname\outfmtnametex \ignorespaces #2\fi
+  \endgroup % close group opened by \tex.
+}
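+% Usage sketch (illustrative):
+%   @inlinefmt{tex, this phrase appears in @TeX{} output only}
+%   @inlineraw{tex, $x^2$}
+% Only the `tex' format name is honored here; any other name makes the
+% second argument be ignored.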
+
+
+\message{glyphs,}
+% and logos.
+
+% @@ prints an @, as does @atchar{}.
+\def\@{\char64 }
+\let\atchar=\@
+
+% @{ @} @lbracechar{} @rbracechar{} all generate brace characters.
+% Unless we're in typewriter, use \ecfont because the CM text fonts do
+% not have braces, and we don't want to switch into math.
+\def\mylbrace{{\ifmonospace\else\ecfont\fi \char123}}
+\def\myrbrace{{\ifmonospace\else\ecfont\fi \char125}}
+\let\{=\mylbrace \let\lbracechar=\{
+\let\}=\myrbrace \let\rbracechar=\}
+\begingroup
+  % Definitions to produce \{ and \} commands for indices,
+  % and @{ and @} for the aux/toc files.
+  \catcode`\{ = \other \catcode`\} = \other
+  \catcode`\[ = 1 \catcode`\] = 2
+  \catcode`\! = 0 \catcode`\\ = \other
+  !gdef!lbracecmd[\{]%
+  !gdef!rbracecmd[\}]%
+  !gdef!lbraceatcmd[@{]%
+  !gdef!rbraceatcmd[@}]%
+!endgroup
+
+% @comma{} to avoid , parsing problems.
+\let\comma = ,
+
+% Accents: @, @dotaccent @ringaccent @ubaraccent @udotaccent
+% Others are defined by plain TeX: @` @' @" @^ @~ @= @u @v @H.
+\let\, = \ptexc
+\let\dotaccent = \ptexdot
+\def\ringaccent#1{{\accent23 #1}}
+\let\tieaccent = \ptext
+\let\ubaraccent = \ptexb
+\let\udotaccent = \d
+
+% Other special characters: @questiondown @exclamdown @ordf @ordm
+% Plain TeX defines: @AA @AE @O @OE @L (plus lowercase versions) @ss.
+\def\questiondown{?`}
+\def\exclamdown{!`}
+\def\ordf{\leavevmode\raise1ex\hbox{\selectfonts\lllsize \underbar{a}}}
+\def\ordm{\leavevmode\raise1ex\hbox{\selectfonts\lllsize \underbar{o}}}
+
+% Dotless i and dotless j, used for accents.
+\def\imacro{i}
+\def\jmacro{j}
+\def\dotless#1{%
+  \def\temp{#1}%
+  \ifx\temp\imacro \ifmmode\imath \else\ptexi \fi
+  \else\ifx\temp\jmacro \ifmmode\jmath \else\j \fi
+  \else \errmessage{@dotless can be used only with i or j}%
+  \fi\fi
+}
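+% For example (illustrative): @dotless{i} and @dotless{j} are normally
+% combined with an accent command, as in @'{@dotless{i}}; any other
+% argument triggers the error message above.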
+
+% The \TeX{} logo, as in plain, but resetting the spacing so that a
+% period following counts as ending a sentence.  (Idea found in latex.)
+%
+\edef\TeX{\TeX \spacefactor=1000 }
+
+% @LaTeX{} logo.  Not quite the same results as the definition in
+% latex.ltx, since we use a different font for the raised A; it's most
+% convenient for us to use an explicitly smaller font, rather than using
+% the \scriptstyle font (since we don't reset \scriptstyle and
+% \scriptscriptstyle).
+%
+\def\LaTeX{%
+  L\kern-.36em
+  {\setbox0=\hbox{T}%
+   \vbox to \ht0{\hbox{%
+     \ifx\textnominalsize\xwordpt
+       % for 10pt running text, \lllsize (8pt) is too small for the A in LaTeX.
+       % Revert to plain's \scriptsize, which is 7pt.
+       \count255=\the\fam $\fam\count255 \scriptstyle A$%
+     \else
+       % For 11pt, we can use our lllsize.
+       \selectfonts\lllsize A%
+     \fi
+     }%
+     \vss
+  }}%
+  \kern-.15em
+  \TeX
+}
+
+% Some math mode symbols.
+\def\bullet{$\ptexbullet$}
+\def\geq{\ifmmode \ge\else $\ge$\fi}
+\def\leq{\ifmmode \le\else $\le$\fi}
+\def\minus{\ifmmode -\else $-$\fi}
+
+% @dots{} outputs an ellipsis using the current font.
+% We do .5em per period so that it has the same spacing in the cm
+% typewriter fonts as three actual period characters; on the other hand,
+% in other typewriter fonts three periods are wider than 1.5em.  So do
+% whichever is larger.
+%
+\def\dots{%
+  \leavevmode
+  \setbox0=\hbox{...}% get width of three periods
+  \ifdim\wd0 > 1.5em
+    \dimen0 = \wd0
+  \else
+    \dimen0 = 1.5em
+  \fi
+  \hbox to \dimen0{%
+    \hskip 0pt plus.25fil
+    .\hskip 0pt plus1fil
+    .\hskip 0pt plus1fil
+    .\hskip 0pt plus.5fil
+  }%
+}
+
+% @enddots{} is an end-of-sentence ellipsis.
+%
+\def\enddots{%
+  \dots
+  \spacefactor=\endofsentencespacefactor
+}
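+% For example (illustrative): `see chapters 2, 3, @dots{}' uses the plain
+% ellipsis, while `and so on@enddots{}' also sets the end-of-sentence
+% space factor so the following space is sized like the end of a sentence.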
+
+% @point{}, @result{}, @expansion{}, @print{}, @equiv{}.
+%
+% Since these characters are used in examples, they should be an even number of
+% \tt widths. Each \tt character is 1en, so two makes it 1em.
+%
+\def\point{$\star$}
+\def\arrow{\leavevmode\raise.05ex\hbox to 1em{\hfil$\rightarrow$\hfil}}
+\def\result{\leavevmode\raise.05ex\hbox to 1em{\hfil$\Rightarrow$\hfil}}
+\def\expansion{\leavevmode\hbox to 1em{\hfil$\mapsto$\hfil}}
+\def\print{\leavevmode\lower.1ex\hbox to 1em{\hfil$\dashv$\hfil}}
+\def\equiv{\leavevmode\hbox to 1em{\hfil$\ptexequiv$\hfil}}
+
+% The @error{} command.
+% Adapted from the TeXbook's \boxit.
+%
+\newbox\errorbox
+%
+{\tentt \global\dimen0 = 3em}% Width of the box.
+\dimen2 = .55pt % Thickness of rules
+% The text. (`r' is open on the right, `e' somewhat less so on the left.)
+\setbox0 = \hbox{\kern-.75pt \reducedsf \putworderror\kern-1.5pt}
+%
+\setbox\errorbox=\hbox to \dimen0{\hfil
+   \hsize = \dimen0 \advance\hsize by -5.8pt % Space to left+right.
+   \advance\hsize by -2\dimen2 % Rules.
+   \vbox{%
+      \hrule height\dimen2
+      \hbox{\vrule width\dimen2 \kern3pt          % Space to left of text.
+         \vtop{\kern2.4pt \box0 \kern2.4pt}% Space above/below.
+         \kern3pt\vrule width\dimen2}% Space to right.
+      \hrule height\dimen2}
+    \hfil}
+%
+\def\error{\leavevmode\lower.7ex\copy\errorbox}
+
+% @pounds{} is a sterling sign, which Knuth put in the CM italic font.
+%
+\def\pounds{{\it\$}}
+
+% @euro{} comes from a separate font, depending on the current style.
+% We use the free feym* fonts from the eurosym package by Henrik
+% Theiling, which support regular, slanted, bold and bold slanted (and
+% "outlined" (blackboard board, sort of) versions, which we don't need).
+% It is available from http://www.ctan.org/tex-archive/fonts/eurosym.
+%
+% Although only regular is the truly official Euro symbol, we ignore
+% that.  The Euro is designed to be slightly taller than the regular
+% font height.
+%
+% feymr - regular
+% feymo - slanted
+% feybr - bold
+% feybo - bold slanted
+%
+% There is no good (free) typewriter version, to my knowledge.
+% A feymr10 euro is ~7.3pt wide, while a normal cmtt10 char is ~5.25pt wide.
+% Hmm.
+%
+% Also doesn't work in math.  Do we need to do math with euro symbols?
+% Hope not.
+%
+%
+\def\euro{{\eurofont e}}
+\def\eurofont{%
+  % We set the font at each command, rather than predefining it in
+  % \textfonts and the other font-switching commands, so that
+  % installations which never need the symbol don't have to have the
+  % font installed.
+  %
+  % There is only one designed size (nominal 10pt), so we always scale
+  % that to the current nominal size.
+  %
+  % By the way, simply using "at 1em" works for cmr10 and the like, but
+  % does not work for cmbx10 and other extended/shrunken fonts.
+  %
+  \def\eurosize{\csname\curfontsize nominalsize\endcsname}%
+  %
+  \ifx\curfontstyle\bfstylename
+    % bold:
+    \font\thiseurofont = \ifusingit{feybo10}{feybr10} at \eurosize
+  \else
+    % regular:
+    \font\thiseurofont = \ifusingit{feymo10}{feymr10} at \eurosize
+  \fi
+  \thiseurofont
+}
+
+% Glyphs from the EC fonts.  We don't use \let for the aliases, because
+% sometimes we redefine the original macro, and the alias should reflect
+% the redefinition.
+%
+% Use LaTeX names for the Icelandic letters.
+\def\DH{{\ecfont \char"D0}} % Eth
+\def\dh{{\ecfont \char"F0}} % eth
+\def\TH{{\ecfont \char"DE}} % Thorn
+\def\th{{\ecfont \char"FE}} % thorn
+%
+\def\guillemetleft{{\ecfont \char"13}}
+\def\guillemotleft{\guillemetleft}
+\def\guillemetright{{\ecfont \char"14}}
+\def\guillemotright{\guillemetright}
+\def\guilsinglleft{{\ecfont \char"0E}}
+\def\guilsinglright{{\ecfont \char"0F}}
+\def\quotedblbase{{\ecfont \char"12}}
+\def\quotesinglbase{{\ecfont \char"0D}}
+%
+% This positioning is not perfect (see the ogonek LaTeX package), but
+% we have the precomposed glyphs for the most common cases.  We put the
+% tests to use those glyphs in the single \ogonek macro so we have fewer
+% dummy definitions to worry about for index entries, etc.
+%
+% ogonek is also used with other letters in Lithuanian (IOU), but using
+% the precomposed glyphs for those is not so easy since they aren't in
+% the same EC font.
+\def\ogonek#1{{%
+  \def\temp{#1}%
+  \ifx\temp\macrocharA\Aogonek
+  \else\ifx\temp\macrochara\aogonek
+  \else\ifx\temp\macrocharE\Eogonek
+  \else\ifx\temp\macrochare\eogonek
+  \else
+    \ecfont \setbox0=\hbox{#1}%
+    \ifdim\ht0=1ex\accent"0C #1%
+    \else\ooalign{\unhbox0\crcr\hidewidth\char"0C \hidewidth}%
+    \fi
+  \fi\fi\fi\fi
+  }%
+}
+\def\Aogonek{{\ecfont \char"81}}\def\macrocharA{A}
+\def\aogonek{{\ecfont \char"A1}}\def\macrochara{a}
+\def\Eogonek{{\ecfont \char"86}}\def\macrocharE{E}
+\def\eogonek{{\ecfont \char"A6}}\def\macrochare{e}
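+% For example (illustrative): @ogonek{a} and @ogonek{e} use the
+% precomposed EC glyphs above, while other arguments fall back to the
+% \accent-based construction.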
+%
+% Use the ec* fonts (cm-super in outline format) for non-CM glyphs.
+\def\ecfont{%
+  % We can't distinguish serif/sans and italic/slanted, but this
+  % is used for crude hacks anyway (like adding French and German
+  % quotes to documents typeset with CM, where we lose kerning), so
+  % hopefully nobody will notice/care.
+  \edef\ecsize{\csname\curfontsize ecsize\endcsname}%
+  \edef\nominalsize{\csname\curfontsize nominalsize\endcsname}%
+  \ifmonospace
+    % typewriter:
+    \font\thisecfont = ectt\ecsize \space at \nominalsize
+  \else
+    \ifx\curfontstyle\bfstylename
+      % bold:
+      \font\thisecfont = ecb\ifusingit{i}{x}\ecsize \space at \nominalsize
+    \else
+      % regular:
+      \font\thisecfont = ec\ifusingit{ti}{rm}\ecsize \space at \nominalsize
+    \fi
+  \fi
+  \thisecfont
+}
+
+% @registeredsymbol - R in a circle.  The font for the R should really
+% be smaller yet, but lllsize is the best we can do for now.
+% Adapted from the plain.tex definition of \copyright.
+%
+\def\registeredsymbol{%
+  $^{{\ooalign{\hfil\raise.07ex\hbox{\selectfonts\lllsize R}%
+               \hfil\crcr\Orb}}%
+    }$%
+}
+
+% @textdegree - the normal degrees sign.
+%
+\def\textdegree{$^\circ$}
+
+% Laurent Siebenmann reports \Orb undefined with:
+%  Textures 1.7.7 (preloaded format=plain 93.10.14)  (68K)  16 APR 2004 02:38
+% so we'll define it if necessary.
+%
+\ifx\Orb\thisisundefined
+\def\Orb{\mathhexbox20D}
+\fi
+
+% Quotes.
+\chardef\quotedblleft="5C
+\chardef\quotedblright=`\"
+\chardef\quoteleft=`\`
+\chardef\quoteright=`\'
+
+
+\message{page headings,}
+
+\newskip\titlepagetopglue \titlepagetopglue = 1.5in
+\newskip\titlepagebottomglue \titlepagebottomglue = 2pc
+
+% First the title page.  Must do @settitle before @titlepage.
+\newif\ifseenauthor
+\newif\iffinishedtitlepage
+
+% Do an implicit @contents or @shortcontents after @end titlepage if the
+% user says @setcontentsaftertitlepage or @setshortcontentsaftertitlepage.
+%
+\newif\ifsetcontentsaftertitlepage
+ \let\setcontentsaftertitlepage = \setcontentsaftertitlepagetrue
+\newif\ifsetshortcontentsaftertitlepage
+ \let\setshortcontentsaftertitlepage = \setshortcontentsaftertitlepagetrue
+
+\parseargdef\shorttitlepage{%
+  \begingroup \hbox{}\vskip 1.5in \chaprm \centerline{#1}%
+  \endgroup\page\hbox{}\page}
+
+\envdef\titlepage{%
+  % Open one extra group, as we want to close it in the middle of \Etitlepage.
+  \begingroup
+    \parindent=0pt \textfonts
+    % Leave some space at the very top of the page.
+    \vglue\titlepagetopglue
+    % No rule at page bottom unless we print one at the top with @title.
+    \finishedtitlepagetrue
+    %
+    % Most title ``pages'' are actually two pages long, with space
+    % at the top of the second.  We don't want the ragged left on the second.
+    \let\oldpage = \page
+    \def\page{%
+      \iffinishedtitlepage\else
+	 \finishtitlepage
+      \fi
+      \let\page = \oldpage
+      \page
+      \null
+    }%
+}
+
+\def\Etitlepage{%
+    \iffinishedtitlepage\else
+	\finishtitlepage
+    \fi
+    % It is important to do the page break before ending the group,
+    % because the headline and footline are only empty inside the group.
+    % If we use the new definition of \page, we always get a blank page
+    % after the title page, which we certainly don't want.
+    \oldpage
+  \endgroup
+  %
+  % Need this before the \...aftertitlepage checks so that if they are
+  % in effect the toc pages will come out with page numbers.
+  \HEADINGSon
+  %
+  % If they want short, they certainly want long too.
+  \ifsetshortcontentsaftertitlepage
+    \shortcontents
+    \contents
+    \global\let\shortcontents = \relax
+    \global\let\contents = \relax
+  \fi
+  %
+  \ifsetcontentsaftertitlepage
+    \contents
+    \global\let\contents = \relax
+    \global\let\shortcontents = \relax
+  \fi
+}
+
+\def\finishtitlepage{%
+  \vskip4pt \hrule height 2pt width \hsize
+  \vskip\titlepagebottomglue
+  \finishedtitlepagetrue
+}
+
+% Settings used for typesetting titles: no hyphenation, no indentation,
+% don't worry much about spacing, ragged right.  This should be used
+% inside a \vbox, and fonts need to be set appropriately first.  Because
+% it is always used for titles, nothing else, we call \rmisbold.  \par
+% should be specified before the end of the \vbox, since a vbox is a group.
+% 
+\def\raggedtitlesettings{%
+  \rmisbold
+  \hyphenpenalty=10000
+  \parindent=0pt
+  \tolerance=5000
+  \ptexraggedright
+}
+
+% Macros to be used within @titlepage:
+
+\let\subtitlerm=\tenrm
+\def\subtitlefont{\subtitlerm \normalbaselineskip = 13pt \normalbaselines}
+
+\parseargdef\title{%
+  \checkenv\titlepage
+  \vbox{\titlefonts \raggedtitlesettings #1\par}%
+  % print a rule at the page bottom also.
+  \finishedtitlepagefalse
+  \vskip4pt \hrule height 4pt width \hsize \vskip4pt
+}
+
+\parseargdef\subtitle{%
+  \checkenv\titlepage
+  {\subtitlefont \rightline{#1}}%
+}
+
+% @author should come last, but may come many times.
+% It can also be used inside @quotation.
+%
+\parseargdef\author{%
+  \def\temp{\quotation}%
+  \ifx\thisenv\temp
+    \def\quotationauthor{#1}% printed in \Equotation.
+  \else
+    \checkenv\titlepage
+    \ifseenauthor\else \vskip 0pt plus 1filll \seenauthortrue \fi
+    {\secfonts\rmisbold \leftline{#1}}%
+  \fi
+}
+
+
+% Set up page headings and footings.
+
+\let\thispage=\folio
+
+\newtoks\evenheadline    % headline on even pages
+\newtoks\oddheadline     % headline on odd pages
+\newtoks\evenfootline    % footline on even pages
+\newtoks\oddfootline     % footline on odd pages
+
+% Now make TeX use those variables
+\headline={{\textfonts\rm \ifodd\pageno \the\oddheadline
+                            \else \the\evenheadline \fi}}
+\footline={{\textfonts\rm \ifodd\pageno \the\oddfootline
+                            \else \the\evenfootline \fi}\HEADINGShook}
+\let\HEADINGShook=\relax
+
+% Commands to set those variables.
+% For example, this is what  @headings on  does
+% @evenheading @thistitle|@thispage|@thischapter
+% @oddheading @thischapter|@thispage|@thistitle
+% @evenfooting @thisfile||
+% @oddfooting ||@thisfile
+
+
+\def\evenheading{\parsearg\evenheadingxxx}
+\def\evenheadingxxx #1{\evenheadingyyy #1\|\|\|\|\finish}
+\def\evenheadingyyy #1\|#2\|#3\|#4\finish{%
+\global\evenheadline={\rlap{\centerline{#2}}\line{#1\hfil#3}}}
+
+\def\oddheading{\parsearg\oddheadingxxx}
+\def\oddheadingxxx #1{\oddheadingyyy #1\|\|\|\|\finish}
+\def\oddheadingyyy #1\|#2\|#3\|#4\finish{%
+\global\oddheadline={\rlap{\centerline{#2}}\line{#1\hfil#3}}}
+
+\parseargdef\everyheading{\oddheadingxxx{#1}\evenheadingxxx{#1}}%
+
+\def\evenfooting{\parsearg\evenfootingxxx}
+\def\evenfootingxxx #1{\evenfootingyyy #1\|\|\|\|\finish}
+\def\evenfootingyyy #1\|#2\|#3\|#4\finish{%
+\global\evenfootline={\rlap{\centerline{#2}}\line{#1\hfil#3}}}
+
+\def\oddfooting{\parsearg\oddfootingxxx}
+\def\oddfootingxxx #1{\oddfootingyyy #1\|\|\|\|\finish}
+\def\oddfootingyyy #1\|#2\|#3\|#4\finish{%
+  \global\oddfootline = {\rlap{\centerline{#2}}\line{#1\hfil#3}}%
+  %
+  % Leave some space for the footline.  Hopefully ok to assume
+  % @evenfooting will not be used by itself.
+  \global\advance\pageheight by -12pt
+  \global\advance\vsize by -12pt
+}
+
+\parseargdef\everyfooting{\oddfootingxxx{#1}\evenfootingxxx{#1}}
+
+% @evenheadingmarks top     \thischapter <- chapter at the top of a page
+% @evenheadingmarks bottom  \thischapter <- chapter at the bottom of a page
+%
+% The same set of arguments for:
+%
+% @oddheadingmarks
+% @evenfootingmarks
+% @oddfootingmarks
+% @everyheadingmarks
+% @everyfootingmarks
+
+\def\evenheadingmarks{\headingmarks{even}{heading}}
+\def\oddheadingmarks{\headingmarks{odd}{heading}}
+\def\evenfootingmarks{\headingmarks{even}{footing}}
+\def\oddfootingmarks{\headingmarks{odd}{footing}}
+\def\everyheadingmarks#1 {\headingmarks{even}{heading}{#1}
+                          \headingmarks{odd}{heading}{#1} }
+\def\everyfootingmarks#1 {\headingmarks{even}{footing}{#1}
+                          \headingmarks{odd}{footing}{#1} }
+% #1 = even/odd, #2 = heading/footing, #3 = top/bottom.
+\def\headingmarks#1#2#3 {%
+  \expandafter\let\expandafter\temp \csname get#3headingmarks\endcsname
+  \global\expandafter\let\csname get#1#2marks\endcsname \temp
+}
+
+\everyheadingmarks bottom
+\everyfootingmarks bottom
+
+% @headings double      turns headings on for double-sided printing.
+% @headings single      turns headings on for single-sided printing.
+% @headings off         turns them off.
+% @headings on          same as @headings double, retained for compatibility.
+% @headings after       turns on double-sided headings after this page.
+% @headings doubleafter turns on double-sided headings after this page.
+% @headings singleafter turns on single-sided headings after this page.
+% By default, they are off at the start of a document,
+% and turned `on' after @end titlepage.
+
+\def\headings #1 {\csname HEADINGS#1\endcsname}
+
+\def\headingsoff{% non-global headings elimination
+  \evenheadline={\hfil}\evenfootline={\hfil}%
+   \oddheadline={\hfil}\oddfootline={\hfil}%
+}
+
+\def\HEADINGSoff{{\globaldefs=1 \headingsoff}} % global setting
+\HEADINGSoff  % it's the default
+
+% When we turn headings on, set the page number to 1.
+% For double-sided printing, put current file name in lower left corner,
+% chapter name on inside top of right hand pages, document
+% title on inside top of left hand pages, and page numbers on outside top
+% edge of all pages.
+\def\HEADINGSdouble{%
+\global\pageno=1
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\folio\hfil\thistitle}}
+\global\oddheadline={\line{\thischapter\hfil\folio}}
+\global\let\contentsalignmacro = \chapoddpage
+}
+\let\contentsalignmacro = \chappager
+
+% For single-sided printing, chapter title goes across top left of page,
+% page number on top right.
+\def\HEADINGSsingle{%
+\global\pageno=1
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\thischapter\hfil\folio}}
+\global\oddheadline={\line{\thischapter\hfil\folio}}
+\global\let\contentsalignmacro = \chappager
+}
+\def\HEADINGSon{\HEADINGSdouble}
+
+\def\HEADINGSafter{\let\HEADINGShook=\HEADINGSdoublex}
+\let\HEADINGSdoubleafter=\HEADINGSafter
+\def\HEADINGSdoublex{%
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\folio\hfil\thistitle}}
+\global\oddheadline={\line{\thischapter\hfil\folio}}
+\global\let\contentsalignmacro = \chapoddpage
+}
+
+\def\HEADINGSsingleafter{\let\HEADINGShook=\HEADINGSsinglex}
+\def\HEADINGSsinglex{%
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\thischapter\hfil\folio}}
+\global\oddheadline={\line{\thischapter\hfil\folio}}
+\global\let\contentsalignmacro = \chappager
+}
+
+% Subroutines used in generating headings
+% This produces Day Month Year style of output.
+% Only define if not already defined, in case a txi-??.tex file has set
+% up a different format (e.g., txi-cs.tex does this).
+\ifx\today\thisisundefined
+\def\today{%
+  \number\day\space
+  \ifcase\month
+  \or\putwordMJan\or\putwordMFeb\or\putwordMMar\or\putwordMApr
+  \or\putwordMMay\or\putwordMJun\or\putwordMJul\or\putwordMAug
+  \or\putwordMSep\or\putwordMOct\or\putwordMNov\or\putwordMDec
+  \fi
+  \space\number\year}
+\fi
+
+% @settitle line...  specifies the title of the document, for headings.
+% It generates no output of its own.
+\def\thistitle{\putwordNoTitle}
+\def\settitle{\parsearg{\gdef\thistitle}}
+
+
+\message{tables,}
+% Tables -- @table, @ftable, @vtable, @item(x).
+
+% default indentation of table text
+\newdimen\tableindent \tableindent=.8in
+% default indentation of @itemize and @enumerate text
+\newdimen\itemindent  \itemindent=.3in
+% margin between end of table item and start of table text.
+\newdimen\itemmargin  \itemmargin=.1in
+
+% used internally for \itemindent minus \itemmargin
+\newdimen\itemmax
+
+% Note @table, @ftable, and @vtable define @item, @itemx, etc., with these
+% defs.  They also define \itemindex to index the item name in whatever
+% manner is desired (perhaps none).
+
+\newif\ifitemxneedsnegativevskip
+
+\def\itemxpar{\par\ifitemxneedsnegativevskip\nobreak\vskip-\parskip\nobreak\fi}
+
+\def\internalBitem{\smallbreak \parsearg\itemzzz}
+\def\internalBitemx{\itemxpar \parsearg\itemzzz}
+
+\def\itemzzz #1{\begingroup %
+  \advance\hsize by -\rightskip
+  \advance\hsize by -\tableindent
+  \setbox0=\hbox{\itemindicate{#1}}%
+  \itemindex{#1}%
+  \nobreak % This prevents a break before @itemx.
+  %
+  % If the item text does not fit in the space we have, put it on a line
+  % by itself, and do not allow a page break either before or after that
+  % line.  We do not start a paragraph here because then if the next
+  % command is, e.g., @kindex, the whatsit would get put into the
+  % horizontal list on a line by itself, resulting in extra blank space.
+  \ifdim \wd0>\itemmax
+    %
+    % Make this a paragraph so we get the \parskip glue and wrapping,
+    % but leave it ragged-right.
+    \begingroup
+      \advance\leftskip by-\tableindent
+      \advance\hsize by\tableindent
+      \advance\rightskip by0pt plus1fil\relax
+      \leavevmode\unhbox0\par
+    \endgroup
+    %
+    % We're going to be starting a paragraph, but we don't want the
+    % \parskip glue -- logically it's part of the @item we just started.
+    \nobreak \vskip-\parskip
+    %
+    % Stop a page break at the \parskip glue coming up.  However, if
+    % what follows is an environment such as @example, there will be no
+    % \parskip glue; then the negative vskip we just inserted would
+    % cause the example and the item to crash together.  So we use this
+    % bizarre value of 10001 as a signal to \aboveenvbreak to insert
+    % \parskip glue after all.  Section titles are handled this way also.
+    %
+    \penalty 10001
+    \endgroup
+    \itemxneedsnegativevskipfalse
+  \else
+    % The item text fits into the space.  Start a paragraph, so that the
+    % following text (if any) will end up on the same line.
+    \noindent
+    % Do this with kerns and \unhbox so that if there is a footnote in
+    % the item text, it can migrate to the main vertical list and
+    % eventually be printed.
+    \nobreak\kern-\tableindent
+    \dimen0 = \itemmax  \advance\dimen0 by \itemmargin \advance\dimen0 by -\wd0
+    \unhbox0
+    \nobreak\kern\dimen0
+    \endgroup
+    \itemxneedsnegativevskiptrue
+  \fi
+}
+
+\def\item{\errmessage{@item while not in a list environment}}
+\def\itemx{\errmessage{@itemx while not in a list environment}}
+
+% @table, @ftable, @vtable.
+\envdef\table{%
+  \let\itemindex\gobble
+  \tablecheck{table}%
+}
+\envdef\ftable{%
+  \def\itemindex ##1{\doind {fn}{\code{##1}}}%
+  \tablecheck{ftable}%
+}
+\envdef\vtable{%
+  \def\itemindex ##1{\doind {vr}{\code{##1}}}%
+  \tablecheck{vtable}%
+}
+\def\tablecheck#1{%
+  \ifnum \the\catcode`\^^M=\active
+    \endgroup
+    \errmessage{This command won't work in this context; perhaps the problem is
+      that we are \inenvironment\thisenv}%
+    \def\next{\doignore{#1}}%
+  \else
+    \let\next\tablex
+  \fi
+  \next
+}
+\def\tablex#1{%
+  \def\itemindicate{#1}%
+  \parsearg\tabley
+}
+\def\tabley#1{%
+  {%
+    \makevalueexpandable
+    \edef\temp{\noexpand\tablez #1\space\space\space}%
+    \expandafter
+  }\temp \endtablez
+}
+\def\tablez #1 #2 #3 #4\endtablez{%
+  \aboveenvbreak
+  \ifnum 0#1>0 \advance \leftskip by #1\mil \fi
+  \ifnum 0#2>0 \tableindent=#2\mil \fi
+  \ifnum 0#3>0 \advance \rightskip by #3\mil \fi
+  \itemmax=\tableindent
+  \advance \itemmax by -\itemmargin
+  \advance \leftskip by \tableindent
+  \exdentamount=\tableindent
+  \parindent = 0pt
+  \parskip = \smallskipamount
+  \ifdim \parskip=0pt \parskip=2pt \fi
+  \let\item = \internalBitem
+  \let\itemx = \internalBitemx
+}
+\def\Etable{\endgraf\afterenvbreak}
+\let\Eftable\Etable
+\let\Evtable\Etable
+\let\Eitemize\Etable
+\let\Eenumerate\Etable
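+% Usage sketch (item names are illustrative):
+%   @table @code
+%   @item foo
+%   Description of foo.
+%   @end table
+% @ftable and @vtable are used the same way, but additionally enter each
+% item into the function or variable index via \itemindex above.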
+
+% This is the counter used by @enumerate, which is implemented on top of @itemize.
+
+\newcount \itemno
+
+\envdef\itemize{\parsearg\doitemize}
+
+\def\doitemize#1{%
+  \aboveenvbreak
+  \itemmax=\itemindent
+  \advance\itemmax by -\itemmargin
+  \advance\leftskip by \itemindent
+  \exdentamount=\itemindent
+  \parindent=0pt
+  \parskip=\smallskipamount
+  \ifdim\parskip=0pt \parskip=2pt \fi
+  %
+  % Try typesetting the item mark so that if the document erroneously says
+  % something like @itemize @samp (intending @table), there's an error
+  % right away at the @itemize.  It's not the best error message in the
+  % world, but it's better than leaving it to the @item.  This means if
+  % the user wants an empty mark, they have to say @w{} not just @w.
+  \def\itemcontents{#1}%
+  \setbox0 = \hbox{\itemcontents}%
+  %
+  % @itemize with no arg is equivalent to @itemize @bullet.
+  \ifx\itemcontents\empty\def\itemcontents{\bullet}\fi
+  %
+  \let\item=\itemizeitem
+}
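+%
+% A minimal @itemize usage sketch (Texinfo source, for illustration):
+%   @itemize @bullet
+%   @item
+%   First point.
+%   @item
+%   Second point.
+%   @end itemize
+% With no argument, @itemize behaves like @itemize @bullet, per the
+% \itemcontents check above.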
+
+% Definition of @item while inside @itemize and @enumerate.
+%
+\def\itemizeitem{%
+  \advance\itemno by 1  % for enumerations
+  {\let\par=\endgraf \smallbreak}% reasonable place to break
+  {%
+   % If the document has an @itemize directly after a section title, a
+   % \nobreak will be last on the list, and \sectionheading will have
+   % done a \vskip-\parskip.  In that case, we don't want to zero
+   % parskip, or the item text will crash with the heading.  On the
+   % other hand, when there is normal text preceding the item (as there
+   % usually is), we do want to zero parskip, or there would be too much
+   % space.  In that case, we won't have a \nobreak before.  At least
+   % that's the theory.
+   \ifnum\lastpenalty<10000 \parskip=0in \fi
+   \noindent
+   \hbox to 0pt{\hss \itemcontents \kern\itemmargin}%
+   %
+   \vadjust{\penalty 1200}}% not good to break after first line of item.
+  \flushcr
+}
+
+% \splitoff TOKENS\endmark defines \first to be the first token in
+% TOKENS, and \rest to be the remainder.
+%
+\def\splitoff#1#2\endmark{\def\first{#1}\def\rest{#2}}%
+
+% Allow an optional argument of an uppercase letter, lowercase letter,
+% or number, to specify the first label in the enumerated list.  No
+% argument is the same as `1'.
+%
+\envparseargdef\enumerate{\enumeratey #1  \endenumeratey}
+\def\enumeratey #1 #2\endenumeratey{%
+  % If we were given no argument, pretend we were given `1'.
+  \def\thearg{#1}%
+  \ifx\thearg\empty \def\thearg{1}\fi
+  %
+  % Detect if the argument is a single token.  If so, it might be a
+  % letter.  Otherwise, the only valid thing it can be is a number.
+  % (We will always have one token, because of the test we just made.
+  % This is a good thing, since \splitoff doesn't work given nothing at
+  % all -- the first parameter is undelimited.)
+  \expandafter\splitoff\thearg\endmark
+  \ifx\rest\empty
+    % Only one token in the argument.  It could still be anything.
+    % A ``lowercase letter'' is one whose \lccode is nonzero.
+    % An ``uppercase letter'' is one whose \lccode is both nonzero, and
+    %   not equal to itself.
+    % Otherwise, we assume it's a number.
+    %
+    % We need the \relax at the end of the \ifnum lines to stop TeX from
+    % continuing to look for a <number>.
+    %
+    \ifnum\lccode\expandafter`\thearg=0\relax
+      \numericenumerate % a number (we hope)
+    \else
+      % It's a letter.
+      \ifnum\lccode\expandafter`\thearg=\expandafter`\thearg\relax
+        \lowercaseenumerate % lowercase letter
+      \else
+        \uppercaseenumerate % uppercase letter
+      \fi
+    \fi
+  \else
+    % Multiple tokens in the argument.  We hope it's a number.
+    \numericenumerate
+  \fi
+}
+
+% An @enumerate whose labels are integers.  The starting integer is
+% given in \thearg.
+%
+\def\numericenumerate{%
+  \itemno = \thearg
+  \startenumeration{\the\itemno}%
+}
+
+% The starting (lowercase) letter is in \thearg.
+\def\lowercaseenumerate{%
+  \itemno = \expandafter`\thearg
+  \startenumeration{%
+    % Be sure we're not beyond the end of the alphabet.
+    \ifnum\itemno=0
+      \errmessage{No more lowercase letters in @enumerate; get a bigger
+                  alphabet}%
+    \fi
+    \char\lccode\itemno
+  }%
+}
+
+% The starting (uppercase) letter is in \thearg.
+\def\uppercaseenumerate{%
+  \itemno = \expandafter`\thearg
+  \startenumeration{%
+    % Be sure we're not beyond the end of the alphabet.
+    \ifnum\itemno=0
+      \errmessage{No more uppercase letters in @enumerate; get a bigger
+                  alphabet}
+    \fi
+    \char\uccode\itemno
+  }%
+}
+
+% Call \doitemize, adding a period to the argument so that the labels
+% come out as `1.', `2.', etc.  Also subtract one from the initial value
+% in \itemno, since @item increments \itemno.
+%
+\def\startenumeration#1{%
+  \advance\itemno by -1
+  \doitemize{#1.}\flushcr
+}
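+%
+% A minimal @enumerate usage sketch (Texinfo source, for illustration):
+%   @enumerate 3
+%   @item
+%   This item is labeled 3.
+%   @item
+%   This one is labeled 4.
+%   @end enumerate
+% Likewise `@enumerate a' or `@enumerate A' starts an alphabetic list at
+% that letter; with no argument the labels start at 1.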
+
+% @alphaenumerate and @capsenumerate are abbreviations for giving an arg
+% to @enumerate.
+%
+\def\alphaenumerate{\enumerate{a}}
+\def\capsenumerate{\enumerate{A}}
+\def\Ealphaenumerate{\Eenumerate}
+\def\Ecapsenumerate{\Eenumerate}
+
+
+% @multitable macros
+% Amy Hendrickson, 8/18/94, 3/6/96
+%
+% @multitable ... @end multitable will make as many columns as desired.
+% Contents of each column will wrap at width given in preamble.  Width
+% can be specified either with sample text given in a template line,
+% or in percent of \hsize, the current width of text on page.
+
+% Table can continue over pages but will only break between lines.
+
+% To make preamble:
+%
+% Either define widths of columns in terms of percent of \hsize:
+%   @multitable @columnfractions .25 .3 .45
+%   @item ...
+%
+%   Numbers following @columnfractions are the percent of the total
+%   current hsize to be used for each column. You may use as many
+%   columns as desired.
+
+
+% Or use a template:
+%   @multitable {Column 1 template} {Column 2 template} {Column 3 template}
+%   @item ...
+%   using the widest term desired in each column.
+
+% Each new table line starts with @item, each subsequent new column
+% starts with @tab.  Empty columns may be produced by supplying @tab's
+% with nothing between them, as many times as empty columns are needed;
+% i.e., @tab@tab@tab will produce two empty columns.
+
+% @item, @tab do not need to be on their own lines, but it will not hurt
+% if they are.
+
+% Sample multitable:
+
+%   @multitable {Column 1 template} {Column 2 template} {Column 3 template}
+%   @item first col stuff @tab second col stuff @tab third col
+%   @item
+%   first col stuff
+%   @tab
+%   second col stuff
+%   @tab
+%   third col
+%   @item first col stuff @tab second col stuff
+%   @tab Many paragraphs of text may be used in any column.
+%
+%         They will wrap at the width determined by the template.
+%   @item@tab@tab This will be in third column.
+%   @end multitable
+
+% Default dimensions may be reset by the user.
+% @multitableparskip is vertical space between paragraphs in table.
+% @multitableparindent is paragraph indent in table.
+% @multitablecolmargin is horizontal space to be left between columns.
+% @multitablelinespace is space to leave between table items, baseline
+%                                                            to baseline.
+%   0pt means it depends on current normal line spacing.
+%
+\newskip\multitableparskip
+\newskip\multitableparindent
+\newdimen\multitablecolspace
+\newskip\multitablelinespace
+\multitableparskip=0pt
+\multitableparindent=6pt
+\multitablecolspace=12pt
+\multitablelinespace=0pt
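+%
+% For example, a manual could widen the gutter between columns with
+% something like the following (a sketch only; any dimension will do,
+% and it must appear inside @tex ... @end tex since these are plain TeX
+% registers, not Texinfo commands):
+%   @tex
+%   \global\multitablecolspace=18pt
+%   @end tex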
+
+% Macros used to set up halign preamble:
+%
+\let\endsetuptable\relax
+\def\xendsetuptable{\endsetuptable}
+\let\columnfractions\relax
+\def\xcolumnfractions{\columnfractions}
+\newif\ifsetpercent
+
+% #1 is the @columnfraction, usually a decimal number like .5, but might
+% be just 1.  We just use it, whatever it is.
+%
+\def\pickupwholefraction#1 {%
+  \global\advance\colcount by 1
+  \expandafter\xdef\csname col\the\colcount\endcsname{#1\hsize}%
+  \setuptable
+}
+
+\newcount\colcount
+\def\setuptable#1{%
+  \def\firstarg{#1}%
+  \ifx\firstarg\xendsetuptable
+    \let\go = \relax
+  \else
+    \ifx\firstarg\xcolumnfractions
+      \global\setpercenttrue
+    \else
+      \ifsetpercent
+         \let\go\pickupwholefraction
+      \else
+         \global\advance\colcount by 1
+         \setbox0=\hbox{#1\unskip\space}% Add a normal word space as a
+                   % separator; typically that is always in the input, anyway.
+         \expandafter\xdef\csname col\the\colcount\endcsname{\the\wd0}%
+      \fi
+    \fi
+    \ifx\go\pickupwholefraction
+      % Put the argument back for the \pickupwholefraction call, so
+      % we'll always have a period there to be parsed.
+      \def\go{\pickupwholefraction#1}%
+    \else
+      \let\go = \setuptable
+    \fi%
+  \fi
+  \go
+}
+
+% multitable-only commands.
+%
+% @headitem starts a heading row, which we typeset in bold.
+% Assignments have to be global since we are inside the implicit group
+% of an alignment entry.  \everycr resets \everytab so we don't have to
+% undo it ourselves.
+\def\headitemfont{\b}% for people to use in the template row; not changeable
+\def\headitem{%
+  \checkenv\multitable
+  \crcr
+  \global\everytab={\bf}% can't use \headitemfont since the parsing differs
+  \the\everytab % for the first item
+}%
+%
+% A \tab used to include \hskip1sp.  But then the space in a template
+% line is not enough.  That is bad.  So let's go back to just `&' until
+% we again encounter the problem the 1sp was intended to solve.
+%					--karl, nathan@acm.org, 20apr99.
+\def\tab{\checkenv\multitable &\the\everytab}%
+
+% @multitable ... @end multitable definitions:
+%
+\newtoks\everytab  % insert after every tab.
+%
+\envdef\multitable{%
+  \vskip\parskip
+  \startsavinginserts
+  %
+  % @item within a multitable starts a normal row.
+  % We use \def instead of \let so that if one of the multitable entries
+  % contains an @itemize, we don't choke on the \item (seen as \crcr aka
+  % \endtemplate) expanding \doitemize.
+  \def\item{\crcr}%
+  %
+  \tolerance=9500
+  \hbadness=9500
+  \setmultitablespacing
+  \parskip=\multitableparskip
+  \parindent=\multitableparindent
+  \overfullrule=0pt
+  \global\colcount=0
+  %
+  \everycr = {%
+    \noalign{%
+      \global\everytab={}%
+      \global\colcount=0 % Reset the column counter.
+      % Check for saved footnotes, etc.
+      \checkinserts
+      % Keeps underfull box messages off when table breaks over pages.
+      %\filbreak
+	% Maybe so, but it also creates really weird page breaks when the
+	% table breaks over pages. Wouldn't \vfil be better?  Wait until the
+	% problem manifests itself, so it can be fixed for real --karl.
+    }%
+  }%
+  %
+  \parsearg\domultitable
+}
+\def\domultitable#1{%
+  % To parse everything between @multitable and @item:
+  \setuptable#1 \endsetuptable
+  %
+  % This preamble sets up a generic column definition, which will
+  % be used as many times as user calls for columns.
+  % \vtop will set a single line and will also let text wrap and
+  % continue for many paragraphs if desired.
+  \halign\bgroup &%
+    \global\advance\colcount by 1
+    \multistrut
+    \vtop{%
+      % Use the current \colcount to find the correct column width:
+      \hsize=\expandafter\csname col\the\colcount\endcsname
+      %
+      % In order to keep entries from bumping into each other
+      % we will add a \leftskip of \multitablecolspace to all columns after
+      % the first one.
+      %
+      % If a template has been used, we will add \multitablecolspace
+      % to the width of each template entry.
+      %
+      % If the user has set preamble in terms of percent of \hsize we will
+      % use that dimension as the width of the column, and the \leftskip
+      % will keep entries from bumping into each other.  Table will start at
+      % left margin and final column will justify at right margin.
+      %
+      % Make sure we don't inherit \rightskip from the outer environment.
+      \rightskip=0pt
+      \ifnum\colcount=1
+	% The first column will be indented with the surrounding text.
+	\advance\hsize by\leftskip
+      \else
+	\ifsetpercent \else
+	  % If user has not set preamble in terms of percent of \hsize
+	  % we will advance \hsize by \multitablecolspace.
+	  \advance\hsize by \multitablecolspace
+	\fi
+       % In either case we will make \leftskip=\multitablecolspace:
+      \leftskip=\multitablecolspace
+      \fi
+      % Ignoring space at the beginning and end avoids an occasional spurious
+      % blank line, when TeX decides to break the line at the space before the
+      % box from the multistrut, so the strut ends up on a line by itself.
+      % For example:
+      % @multitable @columnfractions .11 .89
+      % @item @code{#}
+      % @tab Legal holiday which is valid in major parts of the whole country.
+      % Is automatically provided with highlighting sequences respectively
+      % marking characters.
+      \noindent\ignorespaces##\unskip\multistrut
+    }\cr
+}
+\def\Emultitable{%
+  \crcr
+  \egroup % end the \halign
+  \global\setpercentfalse
+}
+
+\def\setmultitablespacing{%
+  \def\multistrut{\strut}% just use the standard line spacing
+  %
+  % Compute \multitablelinespace (if not defined by user) for use in
+  % \multitableparskip calculation.  We used to define \multistrut based on
+  % this, but (ironically) that caused the spacing to be off.
+  % See bug-texinfo report from Werner Lemberg, 31 Oct 2004 12:52:20 +0100.
+  \ifdim\multitablelinespace=0pt
+    \setbox0=\vbox{X}\global\multitablelinespace=\the\baselineskip
+    \global\advance\multitablelinespace by-\ht0
+  \fi
+  % Test whether \multitableparskip is larger than the space between
+  % lines of the table.  If not, do nothing; if so, set it to the same
+  % dimension as \multitablelinespace (less a little, below).
+  \ifdim\multitableparskip>\multitablelinespace
+    \global\multitableparskip=\multitablelinespace
+    \global\advance\multitableparskip-7pt % to keep parskip somewhat smaller
+                                          % than skip between lines in the table.
+  \fi%
+  \ifdim\multitableparskip=0pt
+    \global\multitableparskip=\multitablelinespace
+    \global\advance\multitableparskip-7pt % to keep parskip somewhat smaller
+                                          % than skip between lines in the table.
+  \fi}
+
+
+\message{conditionals,}
+
+% @iftex, @ifnotdocbook, @ifnothtml, @ifnotinfo, @ifnotplaintext,
+% @ifnotxml always succeed.  They currently do nothing; we don't
+% attempt to check whether the conditionals are properly nested.  But we
+% have to remember that they are conditionals, so that @end doesn't
+% attempt to close an environment group.
+%
+\def\makecond#1{%
+  \expandafter\let\csname #1\endcsname = \relax
+  \expandafter\let\csname iscond.#1\endcsname = 1
+}
+\makecond{iftex}
+\makecond{ifnotdocbook}
+\makecond{ifnothtml}
+\makecond{ifnotinfo}
+\makecond{ifnotplaintext}
+\makecond{ifnotxml}
+
+% Ignore @ignore, @ifhtml, @ifinfo, and the like.
+%
+\def\direntry{\doignore{direntry}}
+\def\documentdescription{\doignore{documentdescription}}
+\def\docbook{\doignore{docbook}}
+\def\html{\doignore{html}}
+\def\ifdocbook{\doignore{ifdocbook}}
+\def\ifhtml{\doignore{ifhtml}}
+\def\ifinfo{\doignore{ifinfo}}
+\def\ifnottex{\doignore{ifnottex}}
+\def\ifplaintext{\doignore{ifplaintext}}
+\def\ifxml{\doignore{ifxml}}
+\def\ignore{\doignore{ignore}}
+\def\menu{\doignore{menu}}
+\def\xml{\doignore{xml}}
+
+% Ignore text until a line `@end #1', keeping track of nested conditionals.
+%
+% A count to remember the depth of nesting.
+\newcount\doignorecount
+
+\def\doignore#1{\begingroup
+  % Scan in ``verbatim'' mode:
+  \obeylines
+  \catcode`\@ = \other
+  \catcode`\{ = \other
+  \catcode`\} = \other
+  %
+  % Make sure that spaces turn into tokens that match what \doignoretext wants.
+  \spaceisspace
+  %
+  % Count number of #1's that we've seen.
+  \doignorecount = 0
+  %
+  % Swallow text until we reach the matching `@end #1'.
+  \dodoignore{#1}%
+}
+
+{ \catcode`_=11 % We want to use \_STOP_ which cannot appear in texinfo source.
+  \obeylines %
+  %
+  \gdef\dodoignore#1{%
+    % #1 contains the command name as a string, e.g., `ifinfo'.
+    %
+    % Define a command to find the next `@end #1'.
+    \long\def\doignoretext##1^^M@end #1{%
+      \doignoretextyyy##1^^M@#1\_STOP_}%
+    %
+    % And this command to find another #1 command, at the beginning of a
+    % line.  (Otherwise, we would consider a line `@c @ifset', for
+    % example, to count as an @ifset for nesting.)
+    \long\def\doignoretextyyy##1^^M@#1##2\_STOP_{\doignoreyyy{##2}\_STOP_}%
+    %
+    % And now expand that command.
+    \doignoretext ^^M%
+  }%
+}
+
+\def\doignoreyyy#1{%
+  \def\temp{#1}%
+  \ifx\temp\empty			% Nothing found.
+    \let\next\doignoretextzzz
+  \else					% Found a nested condition, ...
+    \advance\doignorecount by 1
+    \let\next\doignoretextyyy		% ..., look for another.
+    % If we're here, #1 ends with ^^M\ifinfo (for example).
+  \fi
+  \next #1% the token \_STOP_ is present just after this macro.
+}
+
+% We have to swallow the remaining "\_STOP_".
+%
+\def\doignoretextzzz#1{%
+  \ifnum\doignorecount = 0	% We have just found the outermost @end.
+    \let\next\enddoignore
+  \else				% Still inside a nested condition.
+    \advance\doignorecount by -1
+    \let\next\doignoretext      % Look for the next @end.
+  \fi
+  \next
+}
+
+% Finish off ignored text.
+{ \obeylines%
+  % Ignore anything after the last `@end #1'; this matters in verbatim
+  % environments, where otherwise the newline after an ignored conditional
+  % would result in a blank line in the output.
+  \gdef\enddoignore#1^^M{\endgroup\ignorespaces}%
+}
+
+
+% @set VAR sets the variable VAR to an empty value.
+% @set VAR REST-OF-LINE sets VAR to the value REST-OF-LINE.
+%
+% Since we want to separate VAR from REST-OF-LINE (which might be
+% empty), we can't just use \parsearg; we have to insert a space of our
+% own to delimit the rest of the line, and then take it out again if we
+% didn't need it.
+% We rely on the fact that \parsearg sets \catcode`\ =10.
+%
+\parseargdef\set{\setyyy#1 \endsetyyy}
+\def\setyyy#1 #2\endsetyyy{%
+  {%
+    \makevalueexpandable
+    \def\temp{#2}%
+    \edef\next{\gdef\makecsname{SET#1}}%
+    \ifx\temp\empty
+      \next{}%
+    \else
+      \setzzz#2\endsetzzz
+    \fi
+  }%
+}
+% Remove the trailing space that \set inserted.
+\def\setzzz#1 \endsetzzz{\next{#1}}
+
+% @clear VAR clears (i.e., unsets) the variable VAR.
+%
+\parseargdef\clear{%
+  {%
+    \makevalueexpandable
+    \global\expandafter\let\csname SET#1\endcsname=\relax
+  }%
+}
+
+% @value{foo} gets the text saved in variable foo.
+\def\value{\begingroup\makevalueexpandable\valuexxx}
+\def\valuexxx#1{\expandablevalue{#1}\endgroup}
+{
+  \catcode`\- = \active \catcode`\_ = \active
+  %
+  \gdef\makevalueexpandable{%
+    \let\value = \expandablevalue
+    % We don't want these characters active, ...
+    \catcode`\-=\other \catcode`\_=\other
+    % ..., but we might end up with active ones in the argument if
+    % we're called from @code, as @code{@value{foo-bar_}}, though.
+    % So \let them to their normal equivalents.
+    \let-\normaldash \let_\normalunderscore
+  }
+}
+
+% We have this subroutine so that we can handle at least some @value's
+% properly in indexes (we call \makevalueexpandable in \indexdummies).
+% The command has to be fully expandable (if the variable is set), since
+% the result winds up in the index file.  This means that if the
+% variable's value contains other Texinfo commands, it's almost certain
+% it will fail (although perhaps we could fix that with sufficient work
+% to do a one-level expansion on the result, instead of complete).
+%
+\def\expandablevalue#1{%
+  \expandafter\ifx\csname SET#1\endcsname\relax
+    {[No value for ``#1'']}%
+    \message{Variable `#1', used in @value, is not set.}%
+  \else
+    \csname SET#1\endcsname
+  \fi
+}
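+%
+% A small usage sketch (Texinfo source; VERSION is an arbitrary name):
+%   @set VERSION 2.1
+%   This manual documents version @value{VERSION}.
+%   @clear VERSION
+% After the @clear, @value{VERSION} produces the ``No value for'' text
+% and a warning message, as defined just above.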
+
+% @ifset VAR ... @end ifset reads the `...' iff VAR has been defined
+% with @set.
+%
+% To get special treatment of `@end ifset,' call \makecond and then redefine.
+%
+\makecond{ifset}
+\def\ifset{\parsearg{\doifset{\let\next=\ifsetfail}}}
+\def\doifset#1#2{%
+  {%
+    \makevalueexpandable
+    \let\next=\empty
+    \expandafter\ifx\csname SET#2\endcsname\relax
+      #1% If not set, redefine \next.
+    \fi
+    \expandafter
+  }\next
+}
+\def\ifsetfail{\doignore{ifset}}
+
+% @ifclear VAR ... @end executes the `...' iff VAR has never been
+% defined with @set, or has been undefined with @clear.
+%
+% The `\else' inside the `\doifset' parameter is a trick to reuse the
+% above code: if the variable is not set, do nothing, if it is set,
+% then redefine \next to \ifclearfail.
+%
+\makecond{ifclear}
+\def\ifclear{\parsearg{\doifset{\else \let\next=\ifclearfail}}}
+\def\ifclearfail{\doignore{ifclear}}
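+%
+% Usage sketch (Texinfo source; FLAG is an arbitrary variable name):
+%   @set FLAG
+%   @ifset FLAG
+%   This text is processed only when FLAG has been @set.
+%   @end ifset
+%   @ifclear FLAG
+%   This text is processed only when FLAG is not set.
+%   @end ifclear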
+
+% @ifcommandisdefined CMD ... @end executes the `...' if CMD (written
+% without the @) is in fact defined.  We can only feasibly check at the
+% TeX level, so something like `mathcode' is going to be considered
+% defined even though it is not a Texinfo command.
+% 
+\makecond{ifcommanddefined}
+\def\ifcommanddefined{\parsearg{\doifcmddefined{\let\next=\ifcmddefinedfail}}}
+%
+\def\doifcmddefined#1#2{{%
+    \makevalueexpandable
+    \let\next=\empty
+    \expandafter\ifx\csname #2\endcsname\relax
+      #1% If not defined, \let\next as above.
+    \fi
+    \expandafter
+  }\next
+}
+\def\ifcmddefinedfail{\doignore{ifcommanddefined}}
+
+% @ifcommandnotdefined CMD ... handled similar to @ifclear above.
+\makecond{ifcommandnotdefined}
+\def\ifcommandnotdefined{%
+  \parsearg{\doifcmddefined{\else \let\next=\ifcmdnotdefinedfail}}}
+\def\ifcmdnotdefinedfail{\doignore{ifcommandnotdefined}}
+
+% Set the `txicommandconditionals' variable, so documents have a way to
+% test if the @ifcommand...defined conditionals are available.
+\set txicommandconditionals
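+%
+% Usage sketch (Texinfo source):
+%   @ifcommanddefined node
+%   Processed, since @node is defined.
+%   @end ifcommanddefined
+%   @ifcommandnotdefined frobnicate
+%   Processed, assuming no @frobnicate command exists.
+%   @end ifcommandnotdefined
+% As noted above, the check happens at the TeX level, so plain TeX names
+% such as `mathcode' also count as defined.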
+
+% @dircategory CATEGORY  -- specify a category of the dir file
+% which this file should belong to.  Ignore this in TeX.
+\let\dircategory=\comment
+
+% @defininfoenclose.
+\let\definfoenclose=\comment
+
+
+\message{indexing,}
+% Index generation facilities
+
+% Define \newwrite to be identical to plain tex's \newwrite
+% except not \outer, so it can be used within macros and \if's.
+\edef\newwrite{\makecsname{ptexnewwrite}}
+
+% \newindex {foo} defines an index named foo.
+% It automatically defines \fooindex such that
+% \fooindex ...rest of line... puts an entry in the index foo.
+% It also defines \fooindfile to be the number of the output channel for
+% the file that accumulates this index.  The file's extension is foo.
+% The name of an index should be no more than 2 characters long
+% for the sake of VMS.
+%
+\def\newindex#1{%
+  \iflinks
+    \expandafter\newwrite \csname#1indfile\endcsname
+    \openout \csname#1indfile\endcsname \jobname.#1 % Open the file
+  \fi
+  \expandafter\xdef\csname#1index\endcsname{%     % Define @#1index
+    \noexpand\doindex{#1}}
+}
+
+% @defindex foo  ==  \newindex{foo}
+%
+\def\defindex{\parsearg\newindex}
+
+% Define @defcodeindex, like @defindex except put all entries in @code.
+%
+\def\defcodeindex{\parsearg\newcodeindex}
+%
+\def\newcodeindex#1{%
+  \iflinks
+    \expandafter\newwrite \csname#1indfile\endcsname
+    \openout \csname#1indfile\endcsname \jobname.#1
+  \fi
+  \expandafter\xdef\csname#1index\endcsname{%
+    \noexpand\docodeindex{#1}}%
+}
+
+
+% @synindex foo bar    makes index foo feed into index bar.
+% Do this instead of @defindex foo if you don't want it as a separate index.
+%
+% @syncodeindex foo bar   similar, but put all entries made for index foo
+% inside @code.
+%
+\def\synindex#1 #2 {\dosynindex\doindex{#1}{#2}}
+\def\syncodeindex#1 #2 {\dosynindex\docodeindex{#1}{#2}}
+
+% #1 is \doindex or \docodeindex, #2 the index getting redefined (foo),
+% #3 the target index (bar).
+\def\dosynindex#1#2#3{%
+  % Only do \closeout if we haven't already done it, else we'll end up
+  % closing the target index.
+  \expandafter \ifx\csname donesynindex#2\endcsname \relax
+    % The \closeout helps reduce unnecessary open files; the limit on the
+    % Acorn RISC OS is a mere 16 files.
+    \expandafter\closeout\csname#2indfile\endcsname
+    \expandafter\let\csname donesynindex#2\endcsname = 1
+  \fi
+  % redefine \fooindfile:
+  \expandafter\let\expandafter\temp\expandafter=\csname#3indfile\endcsname
+  \expandafter\let\csname#2indfile\endcsname=\temp
+  % redefine \fooindex:
+  \expandafter\xdef\csname#2index\endcsname{\noexpand#1{#3}}%
+}
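+%
+% Usage sketch (Texinfo source; `au' is an arbitrary two-letter name):
+%   @defindex au
+%   @auindex Knuth, Donald
+%   @syncodeindex au cp
+% After the @syncodeindex line, further `au' entries are written to the
+% concept (cp) index, wrapped in @code.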
+
+% Define \doindex, the driver for all \fooindex macros.
+% Argument #1 is generated by the calling \fooindex macro,
+%  and it is "foo", the name of the index.
+
+% \doindex just uses \parsearg; it calls \doind for the actual work.
+% This is because \doind is more useful to call from other macros.
+
+% There is also \dosubind {index}{topic}{subtopic}
+% which makes an entry in a two-level index such as the operation index.
+
+\def\doindex#1{\edef\indexname{#1}\parsearg\singleindexer}
+\def\singleindexer #1{\doind{\indexname}{#1}}
+
+% Like the previous two, but these put @code around the argument.
+\def\docodeindex#1{\edef\indexname{#1}\parsearg\singlecodeindexer}
+\def\singlecodeindexer #1{\doind{\indexname}{\code{#1}}}
+
+% Take care of Texinfo commands that can appear in an index entry.
+% Since there are some commands we want to expand, and others we don't,
+% we have to laboriously prevent expansion for those that we don't.
+%
+\def\indexdummies{%
+  \escapechar = `\\     % use backslash in output files.
+  \def\@{@}% change to @@ when we switch to @ as escape char in index files.
+  \def\ {\realbackslash\space }%
+  %
+  % Need these unexpandable (because we define \tt as a dummy)
+  % definitions when @{ or @} appear in index entry text.  Also, more
+  % complicated, when \tex is in effect and \{ is a \delimiter again.
+  % We can't use \lbracecmd and \rbracecmd because texindex assumes
+  % braces and backslashes are used only as delimiters.  Perhaps we
+  % should define @lbrace and @rbrace commands a la @comma.
+  \def\{{{\tt\char123}}%
+  \def\}{{\tt\char125}}%
+  %
+  % I don't entirely understand this, but when an index entry is
+  % generated from a macro call, the \endinput which \scanmacro inserts
+  % causes processing to be prematurely terminated.  This is,
+  % apparently, because \indexsorttmp is fully expanded, and \endinput
+  % is an expandable command.  The redefinition below makes \endinput
+  % disappear altogether for that purpose -- although logging shows that
+  % processing continues to some further point.  On the other hand, it
+  % seems \endinput does not hurt in the printed index arg, since that
+  % is still getting written without apparent harm.
+  %
+  % Sample source (mac-idx3.tex, reported by Graham Percival to
+  % help-texinfo, 22may06):
+  % @macro funindex {WORD}
+  % @findex xyz
+  % @end macro
+  % ...
+  % @funindex commtest
+  %
+  % The above is not enough to reproduce the bug, but it gives the flavor.
+  %
+  % Sample whatsit resulting:
+  % .@write3{\entry{xyz}{@folio }{@code {xyz@endinput }}}
+  %
+  % So:
+  \let\endinput = \empty
+  %
+  % Do the redefinitions.
+  \commondummies
+}
+
+% For the aux and toc files, @ is the escape character.  So we want to
+% redefine everything using @ as the escape character (instead of
+% \realbackslash, still used for index files).  When everything uses @,
+% this will be simpler.
+%
+\def\atdummies{%
+  \def\@{@@}%
+  \def\ {@ }%
+  \let\{ = \lbraceatcmd
+  \let\} = \rbraceatcmd
+  %
+  % Do the redefinitions.
+  \commondummies
+  \otherbackslash
+}
+
+% Called from \indexdummies and \atdummies.
+%
+\def\commondummies{%
+  %
+  % \definedummyword defines \#1 as \string\#1\space, thus effectively
+  % preventing its expansion.  This is used only for control words,
+  % not control letters, because the \space would be incorrect for
+  % control characters, but is needed to separate the control word
+  % from whatever follows.
+  %
+  % For control letters, we have \definedummyletter, which omits the
+  % space.
+  %
+  % These can be used both for control words that take an argument and
+  % those that do not.  If it is followed by {arg} in the input, then
+  % that will dutifully get written to the index (or wherever).
+  %
+  \def\definedummyword  ##1{\def##1{\string##1\space}}%
+  \def\definedummyletter##1{\def##1{\string##1}}%
+  \let\definedummyaccent\definedummyletter
+  %
+  \commondummiesnofonts
+  %
+  \definedummyletter\_%
+  \definedummyletter\-%
+  %
+  % Non-English letters.
+  \definedummyword\AA
+  \definedummyword\AE
+  \definedummyword\DH
+  \definedummyword\L
+  \definedummyword\O
+  \definedummyword\OE
+  \definedummyword\TH
+  \definedummyword\aa
+  \definedummyword\ae
+  \definedummyword\dh
+  \definedummyword\exclamdown
+  \definedummyword\l
+  \definedummyword\o
+  \definedummyword\oe
+  \definedummyword\ordf
+  \definedummyword\ordm
+  \definedummyword\questiondown
+  \definedummyword\ss
+  \definedummyword\th
+  %
+  % Although these internal commands shouldn't show up, sometimes they do.
+  \definedummyword\bf
+  \definedummyword\gtr
+  \definedummyword\hat
+  \definedummyword\less
+  \definedummyword\sf
+  \definedummyword\sl
+  \definedummyword\tclose
+  \definedummyword\tt
+  %
+  \definedummyword\LaTeX
+  \definedummyword\TeX
+  %
+  % Assorted special characters.
+  \definedummyword\arrow
+  \definedummyword\bullet
+  \definedummyword\comma
+  \definedummyword\copyright
+  \definedummyword\registeredsymbol
+  \definedummyword\dots
+  \definedummyword\enddots
+  \definedummyword\entrybreak
+  \definedummyword\equiv
+  \definedummyword\error
+  \definedummyword\euro
+  \definedummyword\expansion
+  \definedummyword\geq
+  \definedummyword\guillemetleft
+  \definedummyword\guillemetright
+  \definedummyword\guilsinglleft
+  \definedummyword\guilsinglright
+  \definedummyword\lbracechar
+  \definedummyword\leq
+  \definedummyword\minus
+  \definedummyword\ogonek
+  \definedummyword\pounds
+  \definedummyword\point
+  \definedummyword\print
+  \definedummyword\quotedblbase
+  \definedummyword\quotedblleft
+  \definedummyword\quotedblright
+  \definedummyword\quoteleft
+  \definedummyword\quoteright
+  \definedummyword\quotesinglbase
+  \definedummyword\rbracechar
+  \definedummyword\result
+  \definedummyword\textdegree
+  %
+  % We want to disable all macros so that they are not expanded by \write.
+  \macrolist
+  %
+  \normalturnoffactive
+  %
+  % Handle some cases of @value -- where it does not contain any
+  % (non-fully-expandable) commands.
+  \makevalueexpandable
+}
+
+% \commondummiesnofonts: common to \commondummies and \indexnofonts.
+%
+\def\commondummiesnofonts{%
+  % Control letters and accents.
+  \definedummyletter\!%
+  \definedummyaccent\"%
+  \definedummyaccent\'%
+  \definedummyletter\*%
+  \definedummyaccent\,%
+  \definedummyletter\.%
+  \definedummyletter\/%
+  \definedummyletter\:%
+  \definedummyaccent\=%
+  \definedummyletter\?%
+  \definedummyaccent\^%
+  \definedummyaccent\`%
+  \definedummyaccent\~%
+  \definedummyword\u
+  \definedummyword\v
+  \definedummyword\H
+  \definedummyword\dotaccent
+  \definedummyword\ogonek
+  \definedummyword\ringaccent
+  \definedummyword\tieaccent
+  \definedummyword\ubaraccent
+  \definedummyword\udotaccent
+  \definedummyword\dotless
+  %
+  % Texinfo font commands.
+  \definedummyword\b
+  \definedummyword\i
+  \definedummyword\r
+  \definedummyword\sansserif
+  \definedummyword\sc
+  \definedummyword\slanted
+  \definedummyword\t
+  %
+  % Commands that take arguments.
+  \definedummyword\abbr
+  \definedummyword\acronym
+  \definedummyword\anchor
+  \definedummyword\cite
+  \definedummyword\code
+  \definedummyword\command
+  \definedummyword\dfn
+  \definedummyword\dmn
+  \definedummyword\email
+  \definedummyword\emph
+  \definedummyword\env
+  \definedummyword\file
+  \definedummyword\image
+  \definedummyword\indicateurl
+  \definedummyword\inforef
+  \definedummyword\kbd
+  \definedummyword\key
+  \definedummyword\math
+  \definedummyword\option
+  \definedummyword\pxref
+  \definedummyword\ref
+  \definedummyword\samp
+  \definedummyword\strong
+  \definedummyword\tie
+  \definedummyword\uref
+  \definedummyword\url
+  \definedummyword\var
+  \definedummyword\verb
+  \definedummyword\w
+  \definedummyword\xref
+}
+
+% \indexnofonts is used when outputting the strings to sort the index
+% by, and when constructing control sequence names.  It eliminates all
+% control sequences and just writes whatever the best ASCII sort string
+% would be for a given command (usually its argument).
+%
+\def\indexnofonts{%
+  % Accent commands should become @asis.
+  \def\definedummyaccent##1{\let##1\asis}%
+  % We can just ignore other control letters.
+  \def\definedummyletter##1{\let##1\empty}%
+  % All control words become @asis by default; overrides below.
+  \let\definedummyword\definedummyaccent
+  %
+  \commondummiesnofonts
+  %
+  % Don't no-op \tt, since it isn't a user-level command
+  % and is used in the definitions of the active chars like <, >, |, etc.
+  % Likewise with the other plain tex font commands.
+  %\let\tt=\asis
+  %
+  \def\ { }%
+  \def\@{@}%
+  \def\_{\normalunderscore}%
+  \def\-{}% @- shouldn't affect sorting
+  %
+  % Unfortunately, texindex is not prepared to handle braces in the
+  % content at all.  So for index sorting, we map @{ and @} to strings
+  % starting with |, since that ASCII character is between ASCII { and }.
+  \def\{{|a}%
+  \def\lbracechar{|a}%
+  %
+  \def\}{|b}%
+  \def\rbracechar{|b}%
+  %
+  % Non-English letters.
+  \def\AA{AA}%
+  \def\AE{AE}%
+  \def\DH{DZZ}%
+  \def\L{L}%
+  \def\OE{OE}%
+  \def\O{O}%
+  \def\TH{ZZZ}%
+  \def\aa{aa}%
+  \def\ae{ae}%
+  \def\dh{dzz}%
+  \def\exclamdown{!}%
+  \def\l{l}%
+  \def\oe{oe}%
+  \def\ordf{a}%
+  \def\ordm{o}%
+  \def\o{o}%
+  \def\questiondown{?}%
+  \def\ss{ss}%
+  \def\th{zzz}%
+  %
+  \def\LaTeX{LaTeX}%
+  \def\TeX{TeX}%
+  %
+  % Assorted special characters.
+  % (The following {} will end up in the sort string, but that's ok.)
+  \def\arrow{->}%
+  \def\bullet{bullet}%
+  \def\comma{,}%
+  \def\copyright{copyright}%
+  \def\dots{...}%
+  \def\enddots{...}%
+  \def\equiv{==}%
+  \def\error{error}%
+  \def\euro{euro}%
+  \def\expansion{==>}%
+  \def\geq{>=}%
+  \def\guillemetleft{<<}%
+  \def\guillemetright{>>}%
+  \def\guilsinglleft{<}%
+  \def\guilsinglright{>}%
+  \def\leq{<=}%
+  \def\minus{-}%
+  \def\point{.}%
+  \def\pounds{pounds}%
+  \def\print{-|}%
+  \def\quotedblbase{"}%
+  \def\quotedblleft{"}%
+  \def\quotedblright{"}%
+  \def\quoteleft{`}%
+  \def\quoteright{'}%
+  \def\quotesinglbase{,}%
+  \def\registeredsymbol{R}%
+  \def\result{=>}%
+  \def\textdegree{o}%
+  %
+  \expandafter\ifx\csname SETtxiindexlquoteignore\endcsname\relax
+  \else \indexlquoteignore \fi
+  %
+  % We need to get rid of all macros, leaving only the arguments (if present).
+  % Of course this is not nearly correct, but it is the best we can do for now.
+  % makeinfo does not expand macros in the argument to @deffn, which ends up
+  % writing an index entry, and texindex isn't prepared for an index sort entry
+  % that starts with \.
+  %
+  % Since macro invocations are followed by braces, we can just redefine them
+  % to take a single TeX argument.  The case of a macro invocation that
+  % goes to end-of-line is not handled.
+  %
+  \macrolist
+}
+
+% Undocumented (for FSFS 2nd ed.): @set txiindexlquoteignore makes us
+% ignore left quotes in the sort term.
+{\catcode`\`=\active
+ \gdef\indexlquoteignore{\let`=\empty}}
+
+\let\indexbackslash=0  %overridden during \printindex.
+\let\SETmarginindex=\relax % put index entries in margin (undocumented)?
+
+% Most index entries go through here, but \dosubind is the general case.
+% #1 is the index name, #2 is the entry text.
+\def\doind#1#2{\dosubind{#1}{#2}{}}
+
+% Workhorse for all \fooindexes.
+% #1 is name of index, #2 is stuff to put there, #3 is subentry --
+% empty if called from \doind, as we usually are (the main exception
+% is with most defuns, which call us directly).
+%
+\def\dosubind#1#2#3{%
+  \iflinks
+  {%
+    % Store the main index entry text (including the third arg).
+    \toks0 = {#2}%
+    % If third arg is present, precede it with a space.
+    \def\thirdarg{#3}%
+    \ifx\thirdarg\empty \else
+      \toks0 = \expandafter{\the\toks0 \space #3}%
+    \fi
+    %
+    \edef\writeto{\csname#1indfile\endcsname}%
+    %
+    \safewhatsit\dosubindwrite
+  }%
+  \fi
+}
+
+% Write the entry in \toks0 to the index file:
+%
+\def\dosubindwrite{%
+  % Put the index entry in the margin if desired.
+  \ifx\SETmarginindex\relax\else
+    \insert\margin{\hbox{\vrule height8pt depth3pt width0pt \the\toks0}}%
+  \fi
+  %
+  % Remember, we are within a group.
+  \indexdummies % Must do this here, since \bf, etc expand at this stage
+  \def\backslashcurfont{\indexbackslash}% \indexbackslash isn't defined now
+      % so it will be output as is; and it will print as backslash.
+  %
+  % Process the index entry with all font commands turned off, to
+  % get the string to sort by.
+  {\indexnofonts
+   \edef\temp{\the\toks0}% need full expansion
+   \xdef\indexsorttmp{\temp}%
+  }%
+  %
+  % Set up the complete index entry, with both the sort key and
+  % the original text, including any font commands.  We write
+  % three arguments to \entry to the .?? file (four in the
+  % subentry case), texindex reduces to two when writing the .??s
+  % sorted result.
+  \edef\temp{%
+    \write\writeto{%
+      \string\entry{\indexsorttmp}{\noexpand\folio}{\the\toks0}}%
+  }%
+  \temp
+}
+
+% Take care of unwanted page breaks/skips around a whatsit:
+%
+% If a skip is the last thing on the list now, preserve it
+% by backing up by \lastskip, doing the \write, then inserting
+% the skip again.  Otherwise, the whatsit generated by the
+% \write or \pdfdest will make \lastskip zero.  The result is that
+% sequences like this:
+% @end defun
+% @tindex whatever
+% @defun ...
+% will have extra space inserted, because the \medbreak in the
+% start of the @defun won't see the skip inserted by the @end of
+% the previous defun.
+%
+% But don't do any of this if we're not in vertical mode.  We
+% don't want to do a \vskip and prematurely end a paragraph.
+%
+% Avoid page breaks due to these extra skips, too.
+%
+% But wait, there is a catch there:
+% We'll have to check whether \lastskip is zero skip.  \ifdim is not
+% sufficient for this purpose, as it ignores stretch and shrink parts
+% of the skip.  The only way seems to be to check the textual
+% representation of the skip.
+%
+% The following is almost like \def\zeroskipmacro{0.0pt} except that
+% the ``p'' and ``t'' characters have catcode \other, not 11 (letter).
+%
+\edef\zeroskipmacro{\expandafter\the\csname z@skip\endcsname}
+%
+\newskip\whatsitskip
+\newcount\whatsitpenalty
+%
+% ..., ready, GO:
+%
+\def\safewhatsit#1{\ifhmode
+  #1%
+ \else
+  % \lastskip and \lastpenalty cannot both be nonzero simultaneously.
+  \whatsitskip = \lastskip
+  \edef\lastskipmacro{\the\lastskip}%
+  \whatsitpenalty = \lastpenalty
+  %
+  % If \lastskip is nonzero, that means the last item was a
+  % skip.  And since a skip is discardable, that means this
+  % -\whatsitskip glue we're inserting is preceded by a
+  % non-discardable item, therefore it is not a potential
+  % breakpoint, therefore no \nobreak needed.
+  \ifx\lastskipmacro\zeroskipmacro
+  \else
+    \vskip-\whatsitskip
+  \fi
+  %
+  #1%
+  %
+  \ifx\lastskipmacro\zeroskipmacro
+    % If \lastskip was zero, perhaps the last item was a penalty, and
+    % perhaps it was >=10000, e.g., a \nobreak.  In that case, we want
+    % to re-insert the same penalty (values >10000 are used for various
+    % signals); since we just inserted a non-discardable item, any
+    % following glue (such as a \parskip) would be a breakpoint.  For example:
+    %   @deffn deffn-whatever
+    %   @vindex index-whatever
+    %   Description.
+    % would allow a break between the index-whatever whatsit
+    % and the "Description." paragraph.
+    \ifnum\whatsitpenalty>9999 \penalty\whatsitpenalty \fi
+  \else
+    % On the other hand, if we had a nonzero \lastskip,
+    % this make-up glue would be preceded by a non-discardable item
+    % (the whatsit from the \write), so we must insert a \nobreak.
+    \nobreak\vskip\whatsitskip
+  \fi
+\fi}
+
+% The index entry written in the file actually looks like
+%  \entry {sortstring}{page}{topic}
+% or
+%  \entry {sortstring}{page}{topic}{subtopic}
+% The texindex program reads in these files and writes files
+% containing these kinds of lines:
+%  \initial {c}
+%     before the first topic whose initial is c
+%  \entry {topic}{pagelist}
+%     for a topic that is used without subtopics
+%  \primary {topic}
+%     for the beginning of a topic that is used with subtopics
+%  \secondary {subtopic}{pagelist}
+%     for each subtopic.
+
+% Define the user-accessible indexing commands
+% @findex, @vindex, @kindex, @cindex.
+
+\def\findex {\fnindex}
+\def\kindex {\kyindex}
+\def\cindex {\cpindex}
+\def\vindex {\vrindex}
+\def\tindex {\tpindex}
+\def\pindex {\pgindex}
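+%
+% Usage sketch (Texinfo source):
+%   @cindex initialization
+%   @findex texinfo-format-buffer
+% Each of these takes the rest of the line as the entry text and files
+% it in the corresponding predefined index (cp, fn, ky, vr, tp, pg).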
+
+\def\cindexsub {\begingroup\obeylines\cindexsub}
+{\obeylines %
+\gdef\cindexsub "#1" #2^^M{\endgroup %
+\dosubind{cp}{#2}{#1}}}
+
+% Define the macros used in formatting output of the sorted index material.
+
+% @printindex causes a particular index (the ??s file) to get printed.
+% It does not print any chapter heading (usually an @unnumbered).
+%
+\parseargdef\printindex{\begingroup
+  \dobreak \chapheadingskip{10000}%
+  %
+  \smallfonts \rm
+  \tolerance = 9500
+  \plainfrenchspacing
+  \everypar = {}% don't want the \kern -\parindent from indentation suppression.
+  %
+  % See if the index file exists and is nonempty.
+  % Change catcode of @ here so that if the index file contains
+  % \initial {@}
+  % as its first line, TeX doesn't complain about mismatched braces
+  % (because it thinks @} is a control sequence).
+  \catcode`\@ = 11
+  \openin 1 \jobname.#1s
+  \ifeof 1
+    % \enddoublecolumns gets confused if there is no text in the index,
+    % and it loses the chapter title and the aux file entries for the
+    % index.  The easiest way to prevent this problem is to make sure
+    % there is some text.
+    \putwordIndexNonexistent
+  \else
+    %
+    % If the index file exists but is empty, then \openin leaves \ifeof
+    % false.  We have to make TeX try to read something from the file, so
+    % it can discover if there is anything in it.
+    \read 1 to \temp
+    \ifeof 1
+      \putwordIndexIsEmpty
+    \else
+      % Index files are almost Texinfo source, but we use \ as the escape
+      % character.  It would be better to use @, but that's too big a change
+      % to make right now.
+      \def\indexbackslash{\backslashcurfont}%
+      \catcode`\\ = 0
+      \escapechar = `\\
+      \begindoublecolumns
+      \input \jobname.#1s
+      \enddoublecolumns
+    \fi
+  \fi
+  \closein 1
+\endgroup}
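+%
+% Usage sketch (Texinfo source), typically at the end of a manual:
+%   @node Concept Index
+%   @unnumbered Concept Index
+%   @printindex cp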
+
+% These macros are used by the sorted index file itself.
+% Change them to control the appearance of the index.
+
+\def\initial#1{{%
+  % Some minor font changes for the special characters.
+  \let\tentt=\sectt \let\tt=\sectt \let\sf=\sectt
+  %
+  % Remove any glue we may have, we'll be inserting our own.
+  \removelastskip
+  %
+  % We like breaks before the index initials, so insert a bonus.
+  \nobreak
+  \vskip 0pt plus 3\baselineskip
+  \penalty 0
+  \vskip 0pt plus -3\baselineskip
+  %
+  % Typeset the initial.  Making this add up to a whole number of
+  % baselineskips increases the chance of the dots lining up from column
+  % to column.  It still won't often be perfect, because of the stretch
+  % we need before each entry, but it's better.
+  %
+  % No shrink because it confuses \balancecolumns.
+  \vskip 1.67\baselineskip plus .5\baselineskip
+  \leftline{\secbf #1}%
+  % Do our best not to break after the initial.
+  \nobreak
+  \vskip .33\baselineskip plus .1\baselineskip
+}}
+
+% \entry typesets a paragraph consisting of the text (#1), dot leaders, and
+% then page number (#2) flushed to the right margin.  It is used for index
+% and table of contents entries.  The paragraph is indented by \leftskip.
+%
+% A straightforward implementation would start like this:
+%	\def\entry#1#2{...
+% But this freezes the catcodes in the argument, and can cause problems to
+% @code, which sets - active.  This problem was fixed by a kludge---
+% ``-'' was active throughout the whole index, but this isn't really right.
+% The right solution is to prevent \entry from swallowing the whole text.
+%                                 --kasal, 21nov03
+\def\entry{%
+  \begingroup
+    %
+    % Start a new paragraph if necessary, so our assignments below can't
+    % affect previous text.
+    \par
+    %
+    % Do not fill out the last line with white space.
+    \parfillskip = 0in
+    %
+    % No extra space above this paragraph.
+    \parskip = 0in
+    %
+    % Do not prefer a separate line ending with a hyphen to fewer lines.
+    \finalhyphendemerits = 0
+    %
+    % \hangindent is only relevant when the entry text and page number
+    % don't both fit on one line.  In that case, bob suggests starting the
+    % dots pretty far over on the line.  Unfortunately, a large
+    % indentation looks wrong when the entry text itself is broken across
+    % lines.  So we use a small indentation and put up with long leaders.
+    %
+    % \hangafter is reset to 1 (which is the value we want) at the start
+    % of each paragraph, so we need not do anything with that.
+    \hangindent = 2em
+    %
+    % When the entry text needs to be broken, just fill out the first line
+    % with blank space.
+    \rightskip = 0pt plus1fil
+    %
+    % A bit of stretch before each entry for the benefit of balancing
+    % columns.
+    \vskip 0pt plus1pt
+    %
+    % When reading the text of entry, convert explicit line breaks
+    % from @* into spaces.  The user might give these in long section
+    % titles, for instance.
+    \def\*{\unskip\space\ignorespaces}%
+    \def\entrybreak{\hfil\break}%
+    %
+    % Swallow the left brace of the text (first parameter):
+    \afterassignment\doentry
+    \let\temp =
+}
+\def\entrybreak{\unskip\space\ignorespaces}%
+\def\doentry{%
+    \bgroup % Instead of the swallowed brace.
+      \noindent
+      \aftergroup\finishentry
+      % And now comes the text of the entry.
+}
+\def\finishentry#1{%
+    % #1 is the page number.
+    %
+    % The following is kludged to not output a line of dots in the index if
+    % there are no page numbers.  The next person who breaks this will be
+    % cursed by a Unix daemon.
+    \setbox\boxA = \hbox{#1}%
+    \ifdim\wd\boxA = 0pt
+      \ %
+    \else
+      %
+      % If we must, put the page number on a line of its own, and fill out
+      % this line with blank space.  (The \hfil is overwhelmed with the
+      % fill leaders glue in \indexdotfill if the page number does fit.)
+      \hfil\penalty50
+      \null\nobreak\indexdotfill % Have leaders before the page number.
+      %
+      % The `\ ' here is removed by the implicit \unskip that TeX does as
+      % part of (the primitive) \par.  Without it, a spurious underfull
+      % \hbox ensues.
+      \ifpdf
+	\pdfgettoks#1.%
+	\ \the\toksA
+      \else
+	\ #1%
+      \fi
+    \fi
+    \par
+  \endgroup
+}
+
+% Like plain.tex's \dotfill, except uses up at least 1 em.
+\def\indexdotfill{\cleaders
+  \hbox{$\mathsurround=0pt \mkern1.5mu.\mkern1.5mu$}\hskip 1em plus 1fill}
+
+\def\primary #1{\line{#1\hfil}}
+
+\newskip\secondaryindent \secondaryindent=0.5cm
+\def\secondary#1#2{{%
+  \parfillskip=0in
+  \parskip=0in
+  \hangindent=1in
+  \hangafter=1
+  \noindent\hskip\secondaryindent\hbox{#1}\indexdotfill
+  \ifpdf
+    \pdfgettoks#2.\ \the\toksA % The page number ends the paragraph.
+  \else
+    #2
+  \fi
+  \par
+}}
+
+% Define two-column mode, which we use to typeset indexes.
+% Adapted from the TeXbook, page 416, which is to say,
+% the manmac.tex format used to print the TeXbook itself.
+\catcode`\@=11
+
+\newbox\partialpage
+\newdimen\doublecolumnhsize
+
+\def\begindoublecolumns{\begingroup % ended by \enddoublecolumns
+  % Grab any single-column material above us.
+  \output = {%
+    %
+    % Here is a possibility not foreseen in manmac: if we accumulate a
+    % whole lot of material, we might end up calling this \output
+    % routine twice in a row (see the doublecol-lose test, which is
+    % essentially a couple of indexes with @setchapternewpage off).  In
+    % that case we just ship out what is in \partialpage with the normal
+    % output routine.  Generally, \partialpage will be empty when this
+    % runs and this will be a no-op.  See the indexspread.tex test case.
+    \ifvoid\partialpage \else
+      \onepageout{\pagecontents\partialpage}%
+    \fi
+    %
+    \global\setbox\partialpage = \vbox{%
+      % Unvbox the main output page.
+      \unvbox\PAGE
+      \kern-\topskip \kern\baselineskip
+    }%
+  }%
+  \eject % run that output routine to set \partialpage
+  %
+  % Use the double-column output routine for subsequent pages.
+  \output = {\doublecolumnout}%
+  %
+  % Change the page size parameters.  We could do this once outside this
+  % routine, in each of @smallbook, @afourpaper, and the default 8.5x11
+  % format, but then we repeat the same computation.  Repeating a couple
+  % of assignments once per index is clearly meaningless for the
+  % execution time, so we may as well do it in one place.
+  %
+  % First we halve the line length, less a little for the gutter between
+  % the columns.  We compute the gutter based on the line length, so it
+  % changes automatically with the paper format.  The magic constant
+  % below is chosen so that the gutter has the same value (well, +-<1pt)
+  % as it did when we hard-coded it.
+  %
+  % We put the result in a separate register, \doublecolumhsize, so we
+  % can restore it in \pagesofar, after \hsize itself has (potentially)
+  % been clobbered.
+  %
+  \doublecolumnhsize = \hsize
+    \advance\doublecolumnhsize by -.04154\hsize
+    \divide\doublecolumnhsize by 2
+  \hsize = \doublecolumnhsize
+  %
+  % Double the \vsize as well.  (We don't need a separate register here,
+  % since nobody clobbers \vsize.)
+  \vsize = 2\vsize
+}
+
+% The double-column output routine for all double-column pages except
+% the last.
+%
+\def\doublecolumnout{%
+  \splittopskip=\topskip \splitmaxdepth=\maxdepth
+  % Get the available space for the double columns -- the normal
+  % (undoubled) page height minus any material left over from the
+  % previous page.
+  \dimen@ = \vsize
+  \divide\dimen@ by 2
+  \advance\dimen@ by -\ht\partialpage
+  %
+  % box0 will be the left-hand column, box2 the right.
+  \setbox0=\vsplit255 to\dimen@ \setbox2=\vsplit255 to\dimen@
+  \onepageout\pagesofar
+  \unvbox255
+  \penalty\outputpenalty
+}
+%
+% Re-output the contents of the output page -- any previous material,
+% followed by the two boxes we just split, in box0 and box2.
+\def\pagesofar{%
+  \unvbox\partialpage
+  %
+  \hsize = \doublecolumnhsize
+  \wd0=\hsize \wd2=\hsize
+  \hbox to\pagewidth{\box0\hfil\box2}%
+}
+%
+% All done with double columns.
+\def\enddoublecolumns{%
+  % The following penalty ensures that the page builder is exercised
+  % _before_ we change the output routine.  This is necessary in the
+  % following situation:
+  %
+  % The last section of the index consists only of a single entry.
+  % Before this section, \pagetotal is less than \pagegoal, so no
+  % break occurs before the last section starts.  However, the last
+  % section, consisting of \initial and the single \entry, does not
+  % fit on the page and has to be broken off.  Without the following
+  % penalty the page builder will not be exercised until \eject
+  % below, and by that time we'll already have changed the output
+  % routine to the \balancecolumns version, so the next-to-last
+  % double-column page will be processed with \balancecolumns, which
+  % is wrong:  The two columns will go to the main vertical list, with
+  % the broken-off section in the recent contributions.  As soon as
+  % the output routine finishes, TeX starts reconsidering the page
+  % break.  The two columns and the broken-off section both fit on the
+  % page, because the two columns now take up only half of the page
+  % goal.  When TeX sees \eject from below which follows the final
+  % section, it invokes the new output routine that we've set after
+  % \balancecolumns below; \onepageout will try to fit the two columns
+  % and the final section into the vbox of \pageheight (see
+  % \pagebody), causing an overfull box.
+  %
+  % Note that glue won't work here, because glue does not exercise the
+  % page builder, unlike penalties (see The TeXbook, pp. 280-281).
+  \penalty0
+  %
+  \output = {%
+    % Split the last of the double-column material.  Leave it on the
+    % current page, no automatic page break.
+    \balancecolumns
+    %
+    % If we end up splitting too much material for the current page,
+    % though, there will be another page break right after this \output
+    % invocation ends.  Having called \balancecolumns once, we do not
+    % want to call it again.  Therefore, reset \output to its normal
+    % definition right away.  (We hope \balancecolumns will never be
+    % called on to balance too much material, but if it is, this makes
+    % the output somewhat more palatable.)
+    \global\output = {\onepageout{\pagecontents\PAGE}}%
+  }%
+  \eject
+  \endgroup % started in \begindoublecolumns
+  %
+  % \pagegoal was set to the doubled \vsize above, since we restarted
+  % the current page.  We're now back to normal single-column
+  % typesetting, so reset \pagegoal to the normal \vsize (after the
+  % \endgroup where \vsize got restored).
+  \pagegoal = \vsize
+}
+%
+% Called at the end of the double column material.
+\def\balancecolumns{%
+  \setbox0 = \vbox{\unvbox255}% like \box255 but more efficient, see p.120.
+  \dimen@ = \ht0
+  \advance\dimen@ by \topskip
+  \advance\dimen@ by-\baselineskip
+  \divide\dimen@ by 2 % target to split to
+  %debug\message{final 2-column material height=\the\ht0, target=\the\dimen@.}%
+  \splittopskip = \topskip
+  % Loop until we get a decent breakpoint.
+  {%
+    \vbadness = 10000
+    \loop
+      \global\setbox3 = \copy0
+      \global\setbox1 = \vsplit3 to \dimen@
+    \ifdim\ht3>\dimen@
+      \global\advance\dimen@ by 1pt
+    \repeat
+  }%
+  %debug\message{split to \the\dimen@, column heights: \the\ht1, \the\ht3.}%
+  \setbox0=\vbox to\dimen@{\unvbox1}%
+  \setbox2=\vbox to\dimen@{\unvbox3}%
+  %
+  \pagesofar
+}
+\catcode`\@ = \other
+
+
+\message{sectioning,}
+% Chapters, sections, etc.
+
+% Let's start with @part.
+\outer\parseargdef\part{\partzzz{#1}}
+\def\partzzz#1{%
+  \chapoddpage
+  \null
+  \vskip.3\vsize  % move it down on the page a bit
+  \begingroup
+    \noindent \titlefonts\rmisbold #1\par % the text
+    \let\lastnode=\empty      % no node to associate with
+    \writetocentry{part}{#1}{}% but put it in the toc
+    \headingsoff              % no headline or footline on the part page
+    \chapoddpage
+  \endgroup
+}
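+%
+% Usage sketch (Texinfo source):
+%   @part The First Part
+% This produces a separate part page and a toc entry, but associates no
+% node with it, as noted above.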
+
+% \unnumberedno is an oxymoron.  But we count the unnumbered
+% sections so that we can refer to them unambiguously in the pdf
+% outlines by their "section number".  We avoid collisions with chapter
+% numbers by starting them at 10000.  (If a document ever has 10000
+% chapters, we're in trouble anyway, I'm sure.)
+\newcount\unnumberedno \unnumberedno = 10000
+\newcount\chapno
+\newcount\secno        \secno=0
+\newcount\subsecno     \subsecno=0
+\newcount\subsubsecno  \subsubsecno=0
+
+% This counter is funny since it counts through charcodes of letters A, B, ...
+\newcount\appendixno  \appendixno = `\@
+%
+% \def\appendixletter{\char\the\appendixno}
+% We do the following ugly conditional instead of the above simple
+% construct for the sake of pdftex, which needs the actual
+% letter in the expansion, not just typeset.
+%
+\def\appendixletter{%
+  \ifnum\appendixno=`A A%
+  \else\ifnum\appendixno=`B B%
+  \else\ifnum\appendixno=`C C%
+  \else\ifnum\appendixno=`D D%
+  \else\ifnum\appendixno=`E E%
+  \else\ifnum\appendixno=`F F%
+  \else\ifnum\appendixno=`G G%
+  \else\ifnum\appendixno=`H H%
+  \else\ifnum\appendixno=`I I%
+  \else\ifnum\appendixno=`J J%
+  \else\ifnum\appendixno=`K K%
+  \else\ifnum\appendixno=`L L%
+  \else\ifnum\appendixno=`M M%
+  \else\ifnum\appendixno=`N N%
+  \else\ifnum\appendixno=`O O%
+  \else\ifnum\appendixno=`P P%
+  \else\ifnum\appendixno=`Q Q%
+  \else\ifnum\appendixno=`R R%
+  \else\ifnum\appendixno=`S S%
+  \else\ifnum\appendixno=`T T%
+  \else\ifnum\appendixno=`U U%
+  \else\ifnum\appendixno=`V V%
+  \else\ifnum\appendixno=`W W%
+  \else\ifnum\appendixno=`X X%
+  \else\ifnum\appendixno=`Y Y%
+  \else\ifnum\appendixno=`Z Z%
+  % The \the is necessary, despite appearances, because \appendixletter is
+  % expanded while writing the .toc file.  \char\appendixno is not
+  % expandable, thus it is written literally, thus all appendixes come out
+  % with the same letter (or @) in the toc without it.
+  \else\char\the\appendixno
+  \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
+  \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi}
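+% For illustration, a worked example of the counter trick above:
+% \appendixno starts at `\@ (character code 64); the first @appendix
+% advances it to 65 = `A, so \appendixletter yields "A", the second
+% yields "B", and so on.  Past "Z" the fallback \char\the\appendixno
+% takes over, and the \the makes the number expand at the time the toc
+% line is written, as the comment above explains.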
+
+% Each @chapter defines these (using marks) as the number+name, number
+% and name of the chapter.  Page headings and footings can use
+% these.  @section does likewise.
+\def\thischapter{}
+\def\thischapternum{}
+\def\thischaptername{}
+\def\thissection{}
+\def\thissectionnum{}
+\def\thissectionname{}
+
+\newcount\absseclevel % used to calculate proper heading level
+\newcount\secbase\secbase=0 % @raisesections/@lowersections modify this count
+
+% @raisesections: treat @section as chapter, @subsection as section, etc.
+\def\raisesections{\global\advance\secbase by -1}
+\let\up=\raisesections % original BFox name
+
+% @lowersections: treat @chapter as section, @section as subsection, etc.
+\def\lowersections{\global\advance\secbase by 1}
+\let\down=\lowersections % original BFox name
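+% For illustration, a small worked example of \secbase: with the default
+% \secbase=0, @section maps to absolute level 1.  After one
+% @lowersections, \secbase=1, so @chapter (level 0) lands at absolute
+% level 1 and is typeset as a numbered section; a matching
+% @raisesections restores \secbase to 0.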
+
+% We only go as deep as subsubsections, so the deepest level is 3.
+\chardef\maxseclevel = 3
+%
+% A numbered section within an unnumbered changes to unnumbered too.
+% To achieve this, remember the "biggest" unnum. sec. we are currently in:
+\chardef\unnlevel = \maxseclevel
+%
+% Trace whether the current chapter is an appendix or not:
+% \chapheadtype is "N" or "A", unnumbered chapters are ignored.
+\def\chapheadtype{N}
+
+% Choose a heading macro
+% #1 is heading type
+% #2 is heading level
+% #3 is text for heading
+\def\genhead#1#2#3{%
+  % Compute the abs. sec. level:
+  \absseclevel=#2
+  \advance\absseclevel by \secbase
+  % Make sure \absseclevel doesn't fall outside the range:
+  \ifnum \absseclevel < 0
+    \absseclevel = 0
+  \else
+    \ifnum \absseclevel > 3
+      \absseclevel = 3
+    \fi
+  \fi
+  % The heading type:
+  \def\headtype{#1}%
+  \if \headtype U%
+    \ifnum \absseclevel < \unnlevel
+      \chardef\unnlevel = \absseclevel
+    \fi
+  \else
+    % Check for appendix sections:
+    \ifnum \absseclevel = 0
+      \edef\chapheadtype{\headtype}%
+    \else
+      \if \headtype A\if \chapheadtype N%
+	\errmessage{@appendix... within a non-appendix chapter}%
+      \fi\fi
+    \fi
+    % Check for numbered within unnumbered:
+    \ifnum \absseclevel > \unnlevel
+      \def\headtype{U}%
+    \else
+      \chardef\unnlevel = 3
+    \fi
+  \fi
+  % Now print the heading:
+  \if \headtype U%
+    \ifcase\absseclevel
+	\unnumberedzzz{#3}%
+    \or \unnumberedseczzz{#3}%
+    \or \unnumberedsubseczzz{#3}%
+    \or \unnumberedsubsubseczzz{#3}%
+    \fi
+  \else
+    \if \headtype A%
+      \ifcase\absseclevel
+	  \appendixzzz{#3}%
+      \or \appendixsectionzzz{#3}%
+      \or \appendixsubseczzz{#3}%
+      \or \appendixsubsubseczzz{#3}%
+      \fi
+    \else
+      \ifcase\absseclevel
+	  \chapterzzz{#3}%
+      \or \seczzz{#3}%
+      \or \numberedsubseczzz{#3}%
+      \or \numberedsubsubseczzz{#3}%
+      \fi
+    \fi
+  \fi
+  \suppressfirstparagraphindent
+}
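+% For illustration, an example of how the checks above interact: after
+% @unnumbered (type U, level 0), \unnlevel becomes 0; a following
+% @section (type N, level 1) has \absseclevel > \unnlevel, so its type
+% is switched to U and it is typeset by \unnumberedseczzz.  The clamping
+% at the top also keeps over-raised or over-lowered headings within
+% levels 0..3.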
+
+% an interface:
+\def\numhead{\genhead N}
+\def\apphead{\genhead A}
+\def\unnmhead{\genhead U}
+
+% @chapter, @appendix, @unnumbered.  Increment top-level counter, reset
+% all lower-level sectioning counters to zero.
+%
+% Also set \chaplevelprefix, which we prepend to @float sequence numbers
+% (e.g., figures), q.v.  By default (before any chapter), that is empty.
+\let\chaplevelprefix = \empty
+%
+\outer\parseargdef\chapter{\numhead0{#1}} % normally numhead0 calls chapterzzz
+\def\chapterzzz#1{%
+  % section resetting is \global in case the chapter is in a group, such
+  % as an @include file.
+  \global\secno=0 \global\subsecno=0 \global\subsubsecno=0
+    \global\advance\chapno by 1
+  %
+  % Used for \float.
+  \gdef\chaplevelprefix{\the\chapno.}%
+  \resetallfloatnos
+  %
+  % \putwordChapter can contain complex things in translations.
+  \toks0=\expandafter{\putwordChapter}%
+  \message{\the\toks0 \space \the\chapno}%
+  %
+  % Write the actual heading.
+  \chapmacro{#1}{Ynumbered}{\the\chapno}%
+  %
+  % So @section and the like are numbered underneath this chapter.
+  \global\let\section = \numberedsec
+  \global\let\subsection = \numberedsubsec
+  \global\let\subsubsection = \numberedsubsubsec
+}
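+% For illustration (made-up numbers): at the third @chapter this sets
+% \chapno=3 and \chaplevelprefix to "3.", and \resetallfloatnos restarts
+% the per-chapter float sequences, so @float numbers in that chapter
+% come out prefixed "3." (3.1, 3.2, ...).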
+
+\outer\parseargdef\appendix{\apphead0{#1}} % normally calls appendixzzz
+%
+\def\appendixzzz#1{%
+  \global\secno=0 \global\subsecno=0 \global\subsubsecno=0
+    \global\advance\appendixno by 1
+  \gdef\chaplevelprefix{\appendixletter.}%
+  \resetallfloatnos
+  %
+  % \putwordAppendix can contain complex things in translations.
+  \toks0=\expandafter{\putwordAppendix}%
+  \message{\the\toks0 \space \appendixletter}%
+  %
+  \chapmacro{#1}{Yappendix}{\appendixletter}%
+  %
+  \global\let\section = \appendixsec
+  \global\let\subsection = \appendixsubsec
+  \global\let\subsubsection = \appendixsubsubsec
+}
+
+% normally unnmhead0 calls unnumberedzzz:
+\outer\parseargdef\unnumbered{\unnmhead0{#1}}
+\def\unnumberedzzz#1{%
+  \global\secno=0 \global\subsecno=0 \global\subsubsecno=0
+    \global\advance\unnumberedno by 1
+  %
+  % Since an unnumbered has no number, no prefix for figures.
+  \global\let\chaplevelprefix = \empty
+  \resetallfloatnos
+  %
+  % This used to be simply \message{#1}, but TeX fully expands the
+  % argument to \message.  Therefore, if #1 contained @-commands, TeX
+  % expanded them.  For example, in `@unnumbered The @cite{Book}', TeX
+  % expanded @cite (which turns out to cause errors because \cite is meant
+  % to be executed, not expanded).
+  %
+  % Anyway, we don't want the fully-expanded definition of @cite to appear
+  % as a result of the \message, we just want `@cite' itself.  We use
+  % \the<toks register> to achieve this: TeX expands \the<toks> only once,
+  % simply yielding the contents of <toks register>.  (We also do this for
+  % the toc entries.)
+  \toks0 = {#1}%
+  \message{(\the\toks0)}%
+  %
+  \chapmacro{#1}{Ynothing}{\the\unnumberedno}%
+  %
+  \global\let\section = \unnumberedsec
+  \global\let\subsection = \unnumberedsubsec
+  \global\let\subsubsection = \unnumberedsubsubsec
+}
+
+% @centerchap is like @unnumbered, but the heading is centered.
+\outer\parseargdef\centerchap{%
+  % Well, we could do the following in a group, but that would break
+  % an assumption that \chapmacro is called at the outermost level.
+  % Thus we are safer this way:		--kasal, 24feb04
+  \let\centerparametersmaybe = \centerparameters
+  \unnmhead0{#1}%
+  \let\centerparametersmaybe = \relax
+}
+
+% @top is like @unnumbered.
+\let\top\unnumbered
+
+% Sections.
+% 
+\outer\parseargdef\numberedsec{\numhead1{#1}} % normally calls seczzz
+\def\seczzz#1{%
+  \global\subsecno=0 \global\subsubsecno=0  \global\advance\secno by 1
+  \sectionheading{#1}{sec}{Ynumbered}{\the\chapno.\the\secno}%
+}
+
+% normally calls appendixsectionzzz:
+\outer\parseargdef\appendixsection{\apphead1{#1}}
+\def\appendixsectionzzz#1{%
+  \global\subsecno=0 \global\subsubsecno=0  \global\advance\secno by 1
+  \sectionheading{#1}{sec}{Yappendix}{\appendixletter.\the\secno}%
+}
+\let\appendixsec\appendixsection
+
+% normally calls unnumberedseczzz:
+\outer\parseargdef\unnumberedsec{\unnmhead1{#1}}
+\def\unnumberedseczzz#1{%
+  \global\subsecno=0 \global\subsubsecno=0  \global\advance\secno by 1
+  \sectionheading{#1}{sec}{Ynothing}{\the\unnumberedno.\the\secno}%
+}
+
+% Subsections.
+% 
+% normally calls numberedsubseczzz:
+\outer\parseargdef\numberedsubsec{\numhead2{#1}}
+\def\numberedsubseczzz#1{%
+  \global\subsubsecno=0  \global\advance\subsecno by 1
+  \sectionheading{#1}{subsec}{Ynumbered}{\the\chapno.\the\secno.\the\subsecno}%
+}
+
+% normally calls appendixsubseczzz:
+\outer\parseargdef\appendixsubsec{\apphead2{#1}}
+\def\appendixsubseczzz#1{%
+  \global\subsubsecno=0  \global\advance\subsecno by 1
+  \sectionheading{#1}{subsec}{Yappendix}%
+                 {\appendixletter.\the\secno.\the\subsecno}%
+}
+
+% normally calls unnumberedsubseczzz:
+\outer\parseargdef\unnumberedsubsec{\unnmhead2{#1}}
+\def\unnumberedsubseczzz#1{%
+  \global\subsubsecno=0  \global\advance\subsecno by 1
+  \sectionheading{#1}{subsec}{Ynothing}%
+                 {\the\unnumberedno.\the\secno.\the\subsecno}%
+}
+
+% Subsubsections.
+% 
+% normally numberedsubsubseczzz:
+\outer\parseargdef\numberedsubsubsec{\numhead3{#1}}
+\def\numberedsubsubseczzz#1{%
+  \global\advance\subsubsecno by 1
+  \sectionheading{#1}{subsubsec}{Ynumbered}%
+                 {\the\chapno.\the\secno.\the\subsecno.\the\subsubsecno}%
+}
+
+% normally appendixsubsubseczzz:
+\outer\parseargdef\appendixsubsubsec{\apphead3{#1}}
+\def\appendixsubsubseczzz#1{%
+  \global\advance\subsubsecno by 1
+  \sectionheading{#1}{subsubsec}{Yappendix}%
+                 {\appendixletter.\the\secno.\the\subsecno.\the\subsubsecno}%
+}
+
+% normally unnumberedsubsubseczzz:
+\outer\parseargdef\unnumberedsubsubsec{\unnmhead3{#1}}
+\def\unnumberedsubsubseczzz#1{%
+  \global\advance\subsubsecno by 1
+  \sectionheading{#1}{subsubsec}{Ynothing}%
+                 {\the\unnumberedno.\the\secno.\the\subsecno.\the\subsubsecno}%
+}
+
+% These macros control what the section commands do, according
+% to what kind of chapter we are in (ordinary, appendix, or unnumbered).
+% Define them by default for a numbered chapter.
+\let\section = \numberedsec
+\let\subsection = \numberedsubsec
+\let\subsubsection = \numberedsubsubsec
+
+% Define @majorheading, @heading and @subheading
+
+\def\majorheading{%
+  {\advance\chapheadingskip by 10pt \chapbreak }%
+  \parsearg\chapheadingzzz
+}
+
+\def\chapheading{\chapbreak \parsearg\chapheadingzzz}
+\def\chapheadingzzz#1{%
+  \vbox{\chapfonts \raggedtitlesettings #1\par}%
+  \nobreak\bigskip \nobreak
+  \suppressfirstparagraphindent
+}
+
+% @heading, @subheading, @subsubheading.
+\parseargdef\heading{\sectionheading{#1}{sec}{Yomitfromtoc}{}
+  \suppressfirstparagraphindent}
+\parseargdef\subheading{\sectionheading{#1}{subsec}{Yomitfromtoc}{}
+  \suppressfirstparagraphindent}
+\parseargdef\subsubheading{\sectionheading{#1}{subsubsec}{Yomitfromtoc}{}
+  \suppressfirstparagraphindent}
+
+% These macros generate a chapter, section, etc. heading only
+% (including whitespace, linebreaking, etc. around it),
+% given all the information in convenient, parsed form.
+
+% Args are the skip and penalty (usually negative)
+\def\dobreak#1#2{\par\ifdim\lastskip<#1\removelastskip\penalty#2\vskip#1\fi}
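+% For example, \dobreak\chapheadingskip{-4000} ends the current
+% paragraph and, if the glue last added to the page is smaller than
+% \chapheadingskip, removes it, inserts penalty -4000 (strongly
+% encouraging a page break here) and then adds \chapheadingskip; if
+% enough glue is already there, nothing more is added.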
+
+% Parameter controlling skip before chapter headings (if needed)
+\newskip\chapheadingskip
+
+% Define plain chapter starts, and page on/off switching for it.
+\def\chapbreak{\dobreak \chapheadingskip {-4000}}
+\def\chappager{\par\vfill\supereject}
+% Because \domark is called before \chapoddpage, the filler page will
+% get the headings for the next chapter, which is wrong.  But we don't
+% care -- we just disable all headings on the filler page.
+\def\chapoddpage{%
+  \chappager
+  \ifodd\pageno \else
+    \begingroup
+      \headingsoff
+      \null
+      \chappager
+    \endgroup
+  \fi
+}
+
+\def\setchapternewpage #1 {\csname CHAPPAG#1\endcsname}
+
+\def\CHAPPAGoff{%
+\global\let\contentsalignmacro = \chappager
+\global\let\pchapsepmacro=\chapbreak
+\global\let\pagealignmacro=\chappager}
+
+\def\CHAPPAGon{%
+\global\let\contentsalignmacro = \chappager
+\global\let\pchapsepmacro=\chappager
+\global\let\pagealignmacro=\chappager
+\global\def\HEADINGSon{\HEADINGSsingle}}
+
+\def\CHAPPAGodd{%
+\global\let\contentsalignmacro = \chapoddpage
+\global\let\pchapsepmacro=\chapoddpage
+\global\let\pagealignmacro=\chapoddpage
+\global\def\HEADINGSon{\HEADINGSdouble}}
+
+\CHAPPAGon
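+% For illustration, how the dispatch above is used: "@setchapternewpage
+% odd" expands \csname CHAPPAGodd\endcsname, i.e. \CHAPPAGodd, so
+% chapters and the contents start on odd pages and @headings on becomes
+% \HEADINGSdouble; "off" lets chapters run on via \chapbreak; the
+% default, selected just above, is \CHAPPAGon.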
+
+% Chapter opening.
+%
+% #1 is the text, #2 is the section type (Ynumbered, Ynothing,
+% Yappendix, Yomitfromtoc), #3 the chapter number.
+%
+% To test against our argument.
+\def\Ynothingkeyword{Ynothing}
+\def\Yomitfromtockeyword{Yomitfromtoc}
+\def\Yappendixkeyword{Yappendix}
+%
+\def\chapmacro#1#2#3{%
+  % Insert the first mark before the heading break (see notes for \domark).
+  \let\prevchapterdefs=\lastchapterdefs
+  \let\prevsectiondefs=\lastsectiondefs
+  \gdef\lastsectiondefs{\gdef\thissectionname{}\gdef\thissectionnum{}%
+                        \gdef\thissection{}}%
+  %
+  \def\temptype{#2}%
+  \ifx\temptype\Ynothingkeyword
+    \gdef\lastchapterdefs{\gdef\thischaptername{#1}\gdef\thischapternum{}%
+                          \gdef\thischapter{\thischaptername}}%
+  \else\ifx\temptype\Yomitfromtockeyword
+    \gdef\lastchapterdefs{\gdef\thischaptername{#1}\gdef\thischapternum{}%
+                          \gdef\thischapter{}}%
+  \else\ifx\temptype\Yappendixkeyword
+    \toks0={#1}%
+    \xdef\lastchapterdefs{%
+      \gdef\noexpand\thischaptername{\the\toks0}%
+      \gdef\noexpand\thischapternum{\appendixletter}%
+      % \noexpand\putwordAppendix avoids expanding indigestible
+      % commands in some of the translations.
+      \gdef\noexpand\thischapter{\noexpand\putwordAppendix{}
+                                 \noexpand\thischapternum:
+                                 \noexpand\thischaptername}%
+    }%
+  \else
+    \toks0={#1}%
+    \xdef\lastchapterdefs{%
+      \gdef\noexpand\thischaptername{\the\toks0}%
+      \gdef\noexpand\thischapternum{\the\chapno}%
+      % \noexpand\putwordChapter avoids expanding indigestible
+      % commands in some of the translations.
+      \gdef\noexpand\thischapter{\noexpand\putwordChapter{}
+                                 \noexpand\thischapternum:
+                                 \noexpand\thischaptername}%
+    }%
+  \fi\fi\fi
+  %
+  % Output the mark.  Pass it through \safewhatsit, to take care of
+  % the preceding space.
+  \safewhatsit\domark
+  %
+  % Insert the chapter heading break.
+  \pchapsepmacro
+  %
+  % Now the second mark, after the heading break.  No break points
+  % between here and the heading.
+  \let\prevchapterdefs=\lastchapterdefs
+  \let\prevsectiondefs=\lastsectiondefs
+  \domark
+  %
+  {%
+    \chapfonts \rmisbold
+    %
+    % Have to define \lastsection before calling \donoderef, because the
+    % xref code eventually uses it.  On the other hand, it has to be called
+    % after \pchapsepmacro, or the headline will change too soon.
+    \gdef\lastsection{#1}%
+    %
+    % Only insert the separating space if we have a chapter/appendix
+    % number, and don't print the unnumbered ``number''.
+    \ifx\temptype\Ynothingkeyword
+      \setbox0 = \hbox{}%
+      \def\toctype{unnchap}%
+    \else\ifx\temptype\Yomitfromtockeyword
+      \setbox0 = \hbox{}% contents like unnumbered, but no toc entry
+      \def\toctype{omit}%
+    \else\ifx\temptype\Yappendixkeyword
+      \setbox0 = \hbox{\putwordAppendix{} #3\enspace}%
+      \def\toctype{app}%
+    \else
+      \setbox0 = \hbox{#3\enspace}%
+      \def\toctype{numchap}%
+    \fi\fi\fi
+    %
+    % Write the toc entry for this chapter.  Must come before the
+    % \donoderef, because we include the current node name in the toc
+    % entry, and \donoderef resets it to empty.
+    \writetocentry{\toctype}{#1}{#3}%
+    %
+    % For pdftex, we have to write out the node definition (aka, make
+    % the pdfdest) after any page break, but before the actual text has
+    % been typeset.  If the destination for the pdf outline is after the
+    % text, then jumping from the outline may wind up with the text not
+    % being visible, for instance under high magnification.
+    \donoderef{#2}%
+    %
+    % Typeset the actual heading.
+    \nobreak % Avoid page breaks at the interline glue.
+    \vbox{\raggedtitlesettings \hangindent=\wd0 \centerparametersmaybe
+          \unhbox0 #1\par}%
+  }%
+  \nobreak\bigskip % no page break after a chapter title
+  \nobreak
+}
+
+% @centerchap -- centered and unnumbered.
+\let\centerparametersmaybe = \relax
+\def\centerparameters{%
+  \advance\rightskip by 3\rightskip
+  \leftskip = \rightskip
+  \parfillskip = 0pt
+}
+
+
+% I don't think this chapter style is supported any more, so I'm not
+% updating it with the new noderef stuff.  We'll see.  --karl, 11aug03.
+%
+\def\setchapterstyle #1 {\csname CHAPF#1\endcsname}
+%
+\def\unnchfopen #1{%
+  \chapoddpage
+  \vbox{\chapfonts \raggedtitlesettings #1\par}%
+  \nobreak\bigskip\nobreak
+}
+\def\chfopen #1#2{\chapoddpage {\chapfonts
+\vbox to 3in{\vfil \hbox to\hsize{\hfil #2} \hbox to\hsize{\hfil #1} \vfil}}%
+\par\penalty 5000 %
+}
+\def\centerchfopen #1{%
+  \chapoddpage
+  \vbox{\chapfonts \raggedtitlesettings \hfill #1\hfill}%
+  \nobreak\bigskip \nobreak
+}
+\def\CHAPFopen{%
+  \global\let\chapmacro=\chfopen
+  \global\let\centerchapmacro=\centerchfopen}
+
+
+% Section titles.  These macros combine the section number parts and
+% call the generic \sectionheading to do the printing.
+%
+\newskip\secheadingskip
+\def\secheadingbreak{\dobreak \secheadingskip{-1000}}
+
+% Subsection titles.
+\newskip\subsecheadingskip
+\def\subsecheadingbreak{\dobreak \subsecheadingskip{-500}}
+
+% Subsubsection titles.
+\def\subsubsecheadingskip{\subsecheadingskip}
+\def\subsubsecheadingbreak{\subsecheadingbreak}
+
+
+% Print any size, any type, section title.
+%
+% #1 is the text, #2 is the section level (sec/subsec/subsubsec), #3 is
+% the section type for xrefs (Ynumbered, Ynothing, Yappendix), #4 is the
+% section number.
+%
+\def\seckeyword{sec}
+%
+\def\sectionheading#1#2#3#4{%
+  {%
+    \checkenv{}% should not be in an environment.
+    %
+    % Switch to the right set of fonts.
+    \csname #2fonts\endcsname \rmisbold
+    %
+    \def\sectionlevel{#2}%
+    \def\temptype{#3}%
+    %
+    % Insert first mark before the heading break (see notes for \domark).
+    \let\prevsectiondefs=\lastsectiondefs
+    \ifx\temptype\Ynothingkeyword
+      \ifx\sectionlevel\seckeyword
+        \gdef\lastsectiondefs{\gdef\thissectionname{#1}\gdef\thissectionnum{}%
+                              \gdef\thissection{\thissectionname}}%
+      \fi
+    \else\ifx\temptype\Yomitfromtockeyword
+      % Don't redefine \thissection.
+    \else\ifx\temptype\Yappendixkeyword
+      \ifx\sectionlevel\seckeyword
+        \toks0={#1}%
+        \xdef\lastsectiondefs{%
+          \gdef\noexpand\thissectionname{\the\toks0}%
+          \gdef\noexpand\thissectionnum{#4}%
+          % \noexpand\putwordSection avoids expanding indigestible
+          % commands in some of the translations.
+          \gdef\noexpand\thissection{\noexpand\putwordSection{}
+                                     \noexpand\thissectionnum:
+                                     \noexpand\thissectionname}%
+        }%
+      \fi
+    \else
+      \ifx\sectionlevel\seckeyword
+        \toks0={#1}%
+        \xdef\lastsectiondefs{%
+          \gdef\noexpand\thissectionname{\the\toks0}%
+          \gdef\noexpand\thissectionnum{#4}%
+          % \noexpand\putwordSection avoids expanding indigestible
+          % commands in some of the translations.
+          \gdef\noexpand\thissection{\noexpand\putwordSection{}
+                                     \noexpand\thissectionnum:
+                                     \noexpand\thissectionname}%
+        }%
+      \fi
+    \fi\fi\fi
+    %
+    % Go into vertical mode.  Usually we'll already be there, but we
+    % don't want the following whatsit to end up in a preceding paragraph
+    % if the document didn't happen to have a blank line.
+    \par
+    %
+    % Output the mark.  Pass it through \safewhatsit, to take care of
+    % the preceding space.
+    \safewhatsit\domark
+    %
+    % Insert space above the heading.
+    \csname #2headingbreak\endcsname
+    %
+    % Now the second mark, after the heading break.  No break points
+    % between here and the heading.
+    \let\prevsectiondefs=\lastsectiondefs
+    \domark
+    %
+    % Only insert the space after the number if we have a section number.
+    \ifx\temptype\Ynothingkeyword
+      \setbox0 = \hbox{}%
+      \def\toctype{unn}%
+      \gdef\lastsection{#1}%
+    \else\ifx\temptype\Yomitfromtockeyword
+      % for @headings -- no section number, don't include in toc,
+      % and don't redefine \lastsection.
+      \setbox0 = \hbox{}%
+      \def\toctype{omit}%
+      \let\sectionlevel=\empty
+    \else\ifx\temptype\Yappendixkeyword
+      \setbox0 = \hbox{#4\enspace}%
+      \def\toctype{app}%
+      \gdef\lastsection{#1}%
+    \else
+      \setbox0 = \hbox{#4\enspace}%
+      \def\toctype{num}%
+      \gdef\lastsection{#1}%
+    \fi\fi\fi
+    %
+    % Write the toc entry (before \donoderef).  See comments in \chapmacro.
+    \writetocentry{\toctype\sectionlevel}{#1}{#4}%
+    %
+    % Write the node reference (= pdf destination for pdftex).
+    % Again, see comments in \chapmacro.
+    \donoderef{#3}%
+    %
+    % Interline glue will be inserted when the vbox is completed.
+    % That glue will be a valid breakpoint for the page, since it'll be
+    % preceded by a whatsit (usually from the \donoderef, or from the
+    % \writetocentry if there was no node).  We don't want to allow that
+    % break, since then the whatsits could end up on page n while the
+    % section is on page n+1, thus toc/etc. are wrong.  Debian bug 276000.
+    \nobreak
+    %
+    % Output the actual section heading.
+    \vbox{\hyphenpenalty=10000 \tolerance=5000 \parindent=0pt \ptexraggedright
+          \hangindent=\wd0  % zero if no section number
+          \unhbox0 #1}%
+  }%
+  % Add extra space after the heading -- half of whatever came above it.
+  % Don't allow stretch, though.
+  \kern .5 \csname #2headingskip\endcsname
+  %
+  % Do not let the kern be a potential breakpoint, as it would be if it
+  % was followed by glue.
+  \nobreak
+  %
+  % We'll almost certainly start a paragraph next, so don't let that
+  % glue accumulate.  (Not a breakpoint because it's preceded by a
+  % discardable item.)  However, when a paragraph is not started next
+  % (\startdefun, \cartouche, \center, etc.), this needs to be wiped out
+  % or the negative glue will cause weirdly wrong output, typically
+  % obscuring the section heading with something else.
+  \vskip-\parskip
+  %
+  % This is so the last item on the main vertical list is a known
+  % \penalty > 10000, so \startdefun, etc., can recognize the situation
+  % and do the needful.
+  \penalty 10001
+}
+
+
+\message{toc,}
+% Table of contents.
+\newwrite\tocfile
+
+% Write an entry to the toc file, opening it if necessary.
+% Called from @chapter, etc.
+%
+% Example usage: \writetocentry{sec}{Section Name}{\the\chapno.\the\secno}
+% We append the current node name (if any) and page number as additional
+% arguments for the \{chap,sec,...}entry macros which will eventually
+% read this.  The node name is used in the pdf outlines as the
+% destination to jump to.
+%
+% We open the .toc file for writing here instead of at @setfilename (or
+% any other fixed time) so that @contents can be anywhere in the document.
+% But if #1 is `omit', then we don't do anything.  This is used for the
+% table of contents chapter openings themselves.
+%
+\newif\iftocfileopened
+\def\omitkeyword{omit}%
+%
+\def\writetocentry#1#2#3{%
+  \edef\writetoctype{#1}%
+  \ifx\writetoctype\omitkeyword \else
+    \iftocfileopened\else
+      \immediate\openout\tocfile = \jobname.toc
+      \global\tocfileopenedtrue
+    \fi
+    %
+    \iflinks
+      {\atdummies
+       \edef\temp{%
+         \write\tocfile{@#1entry{#2}{#3}{\lastnode}{\noexpand\folio}}}%
+       \temp
+      }%
+    \fi
+  \fi
+  %
+  % Tell \shipout to create a pdf destination on each page, if we're
+  % writing pdf.  These are used in the table of contents.  We can't
+  % just write one on every page because the title pages are numbered
+  % 1 and 2 (the page numbers aren't printed), and so are the first
+  % two pages of the document.  Thus, we'd have two destinations named
+  % `1', and two named `2'.
+  \ifpdf \global\pdfmakepagedesttrue \fi
+}
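+% For illustration of what lands in \jobname.toc (the chapter name, node
+% and page number below are made up): for a numbered chapter "Overview"
+% that is chapter 3, lives in node "Overview", and falls on page 27, the
+% \write above produces a line like
+%   @numchapentry{Overview}{3}{Overview}{27}
+% which \readtocfile later hands to the \numchapentry macro defined
+% below.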
+
+
+% These characters do not print properly in the Computer Modern roman
+% fonts, so we must take special care.  This is more or less redundant
+% with the Texinfo input format setup at the end of this file.
+%
+\def\activecatcodes{%
+  \catcode`\"=\active
+  \catcode`\$=\active
+  \catcode`\<=\active
+  \catcode`\>=\active
+  \catcode`\\=\active
+  \catcode`\^=\active
+  \catcode`\_=\active
+  \catcode`\|=\active
+  \catcode`\~=\active
+}
+
+
+% Read the toc file, which is essentially Texinfo input.
+\def\readtocfile{%
+  \setupdatafile
+  \activecatcodes
+  \input \tocreadfilename
+}
+
+\newskip\contentsrightmargin \contentsrightmargin=1in
+\newcount\savepageno
+\newcount\lastnegativepageno \lastnegativepageno = -1
+
+% Prepare to read what we've written to \tocfile.
+%
+\def\startcontents#1{%
+  % If @setchapternewpage on, and @headings double, the contents should
+  % start on an odd page, unlike chapters.  Thus, we maintain
+  % \contentsalignmacro in parallel with \pagealignmacro.
+  % From: Torbjorn Granlund <tege@matematik.su.se>
+  \contentsalignmacro
+  \immediate\closeout\tocfile
+  %
+  % Don't need to put `Contents' or `Short Contents' in the headline.
+  % It is abundantly clear what they are.
+  \chapmacro{#1}{Yomitfromtoc}{}%
+  %
+  \savepageno = \pageno
+  \begingroup                  % Set up to handle contents files properly.
+    \raggedbottom              % Worry more about breakpoints than the bottom.
+    \advance\hsize by -\contentsrightmargin % Don't use the full line length.
+    %
+    % Roman numerals for page numbers.
+    \ifnum \pageno>0 \global\pageno = \lastnegativepageno \fi
+}
+
+% redefined for the two-volume lispref.  We always output on
+% \jobname.toc even if this is redefined.
+%
+\def\tocreadfilename{\jobname.toc}
+
+% Normal (long) toc.
+%
+\def\contents{%
+  \startcontents{\putwordTOC}%
+    \openin 1 \tocreadfilename\space
+    \ifeof 1 \else
+      \readtocfile
+    \fi
+    \vfill \eject
+    \contentsalignmacro % in case @setchapternewpage odd is in effect
+    \ifeof 1 \else
+      \pdfmakeoutlines
+    \fi
+    \closein 1
+  \endgroup
+  \lastnegativepageno = \pageno
+  \global\pageno = \savepageno
+}
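+% For illustration: the contents pages come out in roman numerals
+% because \pageno is made negative (\lastnegativepageno starts at -1)
+% and plain TeX's \folio prints negative page numbers in roman.  Saving
+% \pageno back into \lastnegativepageno lets a later @shortcontents or
+% @contents continue the roman sequence (e.g. at v if the first run used
+% i-iv), while \global\pageno=\savepageno restores arabic numbering for
+% the body.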
+
+% And just the chapters.
+\def\summarycontents{%
+  \startcontents{\putwordShortTOC}%
+    %
+    \let\partentry = \shortpartentry
+    \let\numchapentry = \shortchapentry
+    \let\appentry = \shortchapentry
+    \let\unnchapentry = \shortunnchapentry
+    % We want a true roman here for the page numbers.
+    \secfonts
+    \let\rm=\shortcontrm \let\bf=\shortcontbf
+    \let\sl=\shortcontsl \let\tt=\shortconttt
+    \rm
+    \hyphenpenalty = 10000
+    \advance\baselineskip by 1pt % Open it up a little.
+    \def\numsecentry##1##2##3##4{}
+    \let\appsecentry = \numsecentry
+    \let\unnsecentry = \numsecentry
+    \let\numsubsecentry = \numsecentry
+    \let\appsubsecentry = \numsecentry
+    \let\unnsubsecentry = \numsecentry
+    \let\numsubsubsecentry = \numsecentry
+    \let\appsubsubsecentry = \numsecentry
+    \let\unnsubsubsecentry = \numsecentry
+    \openin 1 \tocreadfilename\space
+    \ifeof 1 \else
+      \readtocfile
+    \fi
+    \closein 1
+    \vfill \eject
+    \contentsalignmacro % in case @setchapternewpage odd is in effect
+  \endgroup
+  \lastnegativepageno = \pageno
+  \global\pageno = \savepageno
+}
+\let\shortcontents = \summarycontents
+
+% Typeset the label for a chapter or appendix for the short contents.
+% The arg is, e.g., `A' for an appendix, or `3' for a chapter.
+%
+\def\shortchaplabel#1{%
+  % This space should be enough, since a single number is .5em, and the
+  % widest letter (M) is 1em, at least in the Computer Modern fonts.
+  % But use \hss just in case.
+  % (This space doesn't include the extra space that gets added after
+  % the label; that gets put in by \shortchapentry above.)
+  %
+  % We'd like to right-justify chapter numbers, but that looks strange
+  % with appendix letters.  And right-justifying numbers and
+  % left-justifying letters looks strange when there is less than 10
+  % chapters.  Have to read the whole toc once to know how many chapters
+  % there are before deciding ...
+  \hbox to 1em{#1\hss}%
+}
+
+% These macros generate individual entries in the table of contents.
+% The first argument is the chapter or section name.
+% The last argument is the page number.
+% The arguments in between are the chapter number, section number, ...
+
+% Parts, in the main contents.  Replace the part number, which doesn't
+% exist, with an empty box.  Let's hope all the numbers have the same width.
+% Also ignore the page number, which is conventionally not printed.
+\def\numeralbox{\setbox0=\hbox{8}\hbox to \wd0{\hfil}}
+\def\partentry#1#2#3#4{\dochapentry{\numeralbox\labelspace#1}{}}
+%
+% Parts, in the short toc.
+\def\shortpartentry#1#2#3#4{%
+  \penalty-300
+  \vskip.5\baselineskip plus.15\baselineskip minus.1\baselineskip
+  \shortchapentry{{\bf #1}}{\numeralbox}{}{}%
+}
+
+% Chapters, in the main contents.
+\def\numchapentry#1#2#3#4{\dochapentry{#2\labelspace#1}{#4}}
+%
+% Chapters, in the short toc.
+% See comments in \dochapentry re vbox and related settings.
+\def\shortchapentry#1#2#3#4{%
+  \tocentry{\shortchaplabel{#2}\labelspace #1}{\doshortpageno\bgroup#4\egroup}%
+}
+
+% Appendices, in the main contents.
+% Need the word Appendix, and a fixed-size box.
+%
+\def\appendixbox#1{%
+  % We use M since it's probably the widest letter.
+  \setbox0 = \hbox{\putwordAppendix{} M}%
+  \hbox to \wd0{\putwordAppendix{} #1\hss}}
+%
+\def\appentry#1#2#3#4{\dochapentry{\appendixbox{#2}\labelspace#1}{#4}}
+
+% Unnumbered chapters.
+\def\unnchapentry#1#2#3#4{\dochapentry{#1}{#4}}
+\def\shortunnchapentry#1#2#3#4{\tocentry{#1}{\doshortpageno\bgroup#4\egroup}}
+
+% Sections.
+\def\numsecentry#1#2#3#4{\dosecentry{#2\labelspace#1}{#4}}
+\let\appsecentry=\numsecentry
+\def\unnsecentry#1#2#3#4{\dosecentry{#1}{#4}}
+
+% Subsections.
+\def\numsubsecentry#1#2#3#4{\dosubsecentry{#2\labelspace#1}{#4}}
+\let\appsubsecentry=\numsubsecentry
+\def\unnsubsecentry#1#2#3#4{\dosubsecentry{#1}{#4}}
+
+% And subsubsections.
+\def\numsubsubsecentry#1#2#3#4{\dosubsubsecentry{#2\labelspace#1}{#4}}
+\let\appsubsubsecentry=\numsubsubsecentry
+\def\unnsubsubsecentry#1#2#3#4{\dosubsubsecentry{#1}{#4}}
+
+% This parameter controls the indentation of the various levels.
+% Same as \defaultparindent.
+\newdimen\tocindent \tocindent = 15pt
+
+% Now for the actual typesetting. In all these, #1 is the text and #2 is the
+% page number.
+%
+% If the toc has to be broken over pages, we want it to be at chapters
+% if at all possible; hence the \penalty.
+\def\dochapentry#1#2{%
+   \penalty-300 \vskip1\baselineskip plus.33\baselineskip minus.25\baselineskip
+   \begingroup
+     \chapentryfonts
+     \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+   \endgroup
+   \nobreak\vskip .25\baselineskip plus.1\baselineskip
+}
+
+\def\dosecentry#1#2{\begingroup
+  \secentryfonts \leftskip=\tocindent
+  \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+\endgroup}
+
+\def\dosubsecentry#1#2{\begingroup
+  \subsecentryfonts \leftskip=2\tocindent
+  \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+\endgroup}
+
+\def\dosubsubsecentry#1#2{\begingroup
+  \subsubsecentryfonts \leftskip=3\tocindent
+  \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+\endgroup}
+
+% We use the same \entry macro as for the index entries.
+\let\tocentry = \entry
+
+% Space between chapter (or whatever) number and the title.
+\def\labelspace{\hskip1em \relax}
+
+\def\dopageno#1{{\rm #1}}
+\def\doshortpageno#1{{\rm #1}}
+
+\def\chapentryfonts{\secfonts \rm}
+\def\secentryfonts{\textfonts}
+\def\subsecentryfonts{\textfonts}
+\def\subsubsecentryfonts{\textfonts}
+
+
+\message{environments,}
+% @foo ... @end foo.
+
+% @tex ... @end tex    escapes into raw TeX temporarily.
+% One exception: @ is still an escape character, so that @end tex works.
+% But \@ or @@ will get a plain @ character.
+
+\envdef\tex{%
+  \setupmarkupstyle{tex}%
+  \catcode `\\=0 \catcode `\{=1 \catcode `\}=2
+  \catcode `\$=3 \catcode `\&=4 \catcode `\#=6
+  \catcode `\^=7 \catcode `\_=8 \catcode `\~=\active \let~=\tie
+  \catcode `\%=14
+  \catcode `\+=\other
+  \catcode `\"=\other
+  \catcode `\|=\other
+  \catcode `\<=\other
+  \catcode `\>=\other
+  \catcode`\`=\other
+  \catcode`\'=\other
+  \escapechar=`\\
+  %
+  % ' is active in math mode (mathcode"8000).  So reset it, and all our
+  % other math active characters (just in case), to plain's definitions.
+  \mathactive
+  %
+  \let\b=\ptexb
+  \let\bullet=\ptexbullet
+  \let\c=\ptexc
+  \let\,=\ptexcomma
+  \let\.=\ptexdot
+  \let\dots=\ptexdots
+  \let\equiv=\ptexequiv
+  \let\!=\ptexexclam
+  \let\i=\ptexi
+  \let\indent=\ptexindent
+  \let\noindent=\ptexnoindent
+  \let\{=\ptexlbrace
+  \let\+=\tabalign
+  \let\}=\ptexrbrace
+  \let\/=\ptexslash
+  \let\*=\ptexstar
+  \let\t=\ptext
+  \expandafter \let\csname top\endcsname=\ptextop  % outer
+  \let\frenchspacing=\plainfrenchspacing
+  %
+  \def\endldots{\mathinner{\ldots\ldots\ldots\ldots}}%
+  \def\enddots{\relax\ifmmode\endldots\else$\mathsurround=0pt \endldots\,$\fi}%
+  \def\@{@}%
+}
+% There is no need to define \Etex.
+
+% Define @lisp ... @end lisp.
+% @lisp environment forms a group so it can rebind things,
+% including the definition of @end lisp (which normally is erroneous).
+
+% Amount to narrow the margins by for @lisp.
+\newskip\lispnarrowing \lispnarrowing=0.4in
+
+% This is the definition that ^^M gets inside @lisp, @example, and other
+% such environments.  \null is better than a space, since it doesn't
+% have any width.
+\def\lisppar{\null\endgraf}
+
+% This space is always present above and below environments.
+\newskip\envskipamount \envskipamount = 0pt
+
+% Make spacing above and below the environment symmetrical.  We use \parskip here
+% to help in doing that, since in @example-like environments \parskip
+% is reset to zero; thus the \afterenvbreak inserts no space -- but the
+% start of the next paragraph will insert \parskip.
+%
+\def\aboveenvbreak{{%
+  % =10000 instead of <10000 because of a special case in \itemzzz and
+  % \sectionheading, q.v.
+  \ifnum \lastpenalty=10000 \else
+    \advance\envskipamount by \parskip
+    \endgraf
+    \ifdim\lastskip<\envskipamount
+      \removelastskip
+      % it's not a good place to break if the last penalty was \nobreak
+      % or better ...
+      \ifnum\lastpenalty<10000 \penalty-50 \fi
+      \vskip\envskipamount
+    \fi
+  \fi
+}}
+
+\let\afterenvbreak = \aboveenvbreak
+
+% \nonarrowing is a flag.  If "set", @lisp etc don't narrow margins; they
+% will also clear the flag, so that their embedded environments do the
+% narrowing again.
+\let\nonarrowing=\relax
+
+% @cartouche ... @end cartouche: draw rectangle w/rounded corners around
+% environment contents.
+\font\circle=lcircle10
+\newdimen\circthick
+\newdimen\cartouter\newdimen\cartinner
+\newskip\normbskip\newskip\normpskip\newskip\normlskip
+\circthick=\fontdimen8\circle
+%
+\def\ctl{{\circle\char'013\hskip -6pt}}% 6pt from pl file: 1/2charwidth
+\def\ctr{{\hskip 6pt\circle\char'010}}
+\def\cbl{{\circle\char'012\hskip -6pt}}
+\def\cbr{{\hskip 6pt\circle\char'011}}
+\def\carttop{\hbox to \cartouter{\hskip\lskip
+        \ctl\leaders\hrule height\circthick\hfil\ctr
+        \hskip\rskip}}
+\def\cartbot{\hbox to \cartouter{\hskip\lskip
+        \cbl\leaders\hrule height\circthick\hfil\cbr
+        \hskip\rskip}}
+%
+\newskip\lskip\newskip\rskip
+
+\envdef\cartouche{%
+  \ifhmode\par\fi  % can't be in the midst of a paragraph.
+  \startsavinginserts
+  \lskip=\leftskip \rskip=\rightskip
+  \leftskip=0pt\rightskip=0pt % we want these *outside*.
+  \cartinner=\hsize \advance\cartinner by-\lskip
+  \advance\cartinner by-\rskip
+  \cartouter=\hsize
+  \advance\cartouter by 18.4pt	% allow for 3pt kerns on either
+				% side, and for 6pt waste from
+				% each corner char, and rule thickness
+  \normbskip=\baselineskip \normpskip=\parskip \normlskip=\lineskip
+  % Flag to tell @lisp, etc., not to narrow margin.
+  \let\nonarrowing = t%
+  %
+  % If this cartouche directly follows a sectioning command, we need the
+  % \parskip glue (backspaced over by default) or the cartouche can
+  % collide with the section heading.
+  \ifnum\lastpenalty>10000 \vskip\parskip \penalty\lastpenalty \fi
+  %
+  \vbox\bgroup
+      \baselineskip=0pt\parskip=0pt\lineskip=0pt
+      \carttop
+      \hbox\bgroup
+	  \hskip\lskip
+	  \vrule\kern3pt
+	  \vbox\bgroup
+	      \kern3pt
+	      \hsize=\cartinner
+	      \baselineskip=\normbskip
+	      \lineskip=\normlskip
+	      \parskip=\normpskip
+	      \vskip -\parskip
+	      \comment % For explanation, see the end of def\group.
+}
+\def\Ecartouche{%
+              \ifhmode\par\fi
+	      \kern3pt
+	  \egroup
+	  \kern3pt\vrule
+	  \hskip\rskip
+      \egroup
+      \cartbot
+  \egroup
+  \checkinserts
+}
+
+
+% This macro is called at the beginning of all the @example variants,
+% inside a group.
+\newdimen\nonfillparindent
+\def\nonfillstart{%
+  \aboveenvbreak
+  \hfuzz = 12pt % Don't be fussy
+  \sepspaces % Make spaces be word-separators rather than space tokens.
+  \let\par = \lisppar % don't ignore blank lines
+  \obeylines % each line of input is a line of output
+  \parskip = 0pt
+  % Turn off paragraph indentation but redefine \indent to emulate
+  % the normal \indent.
+  \nonfillparindent=\parindent
+  \parindent = 0pt
+  \let\indent\nonfillindent
+  %
+  \emergencystretch = 0pt % don't try to avoid overfull boxes
+  \ifx\nonarrowing\relax
+    \advance \leftskip by \lispnarrowing
+    \exdentamount=\lispnarrowing
+  \else
+    \let\nonarrowing = \relax
+  \fi
+  \let\exdent=\nofillexdent
+}
+
+\begingroup
+\obeyspaces
+% We want to swallow spaces (but not other tokens) after the fake
+% @indent in our nonfill-environments, where spaces are normally
+% active and set to @tie, resulting in them not being ignored after
+% @indent.
+\gdef\nonfillindent{\futurelet\temp\nonfillindentcheck}%
+\gdef\nonfillindentcheck{%
+\ifx\temp %
+\expandafter\nonfillindentgobble%
+\else%
+\leavevmode\nonfillindentbox%
+\fi%
+}%
+\endgroup
+\def\nonfillindentgobble#1{\nonfillindent}
+\def\nonfillindentbox{\hbox to \nonfillparindent{\hss}}
+
+% If you want all examples etc. small: @set dispenvsize small.
+% If you want even small examples the full size: @set dispenvsize nosmall.
+% This affects the following displayed environments:
+%    @example, @display, @format, @lisp
+%
+\def\smallword{small}
+\def\nosmallword{nosmall}
+\let\SETdispenvsize\relax
+\def\setnormaldispenv{%
+  \ifx\SETdispenvsize\smallword
+    % end paragraph for sake of leading, in case document has no blank
+    % line.  This is redundant with what happens in \aboveenvbreak, but
+    % we need to do it before changing the fonts, and it's inconvenient
+    % to change the fonts afterward.
+    \ifnum \lastpenalty=10000 \else \endgraf \fi
+    \smallexamplefonts \rm
+  \fi
+}
+\def\setsmalldispenv{%
+  \ifx\SETdispenvsize\nosmallword
+  \else
+    \ifnum \lastpenalty=10000 \else \endgraf \fi
+    \smallexamplefonts \rm
+  \fi
+}
+
+% We often define two environments, @foo and @smallfoo.
+% Let's do it in one command.  #1 is the env name, #2 the definition.
+\def\makedispenvdef#1#2{%
+  \expandafter\envdef\csname#1\endcsname {\setnormaldispenv #2}%
+  \expandafter\envdef\csname small#1\endcsname {\setsmalldispenv #2}%
+  \expandafter\let\csname E#1\endcsname \afterenvbreak
+  \expandafter\let\csname Esmall#1\endcsname \afterenvbreak
+}
+
+% Define two environment synonyms (#1 and #2) for an environment.
+\def\maketwodispenvdef#1#2#3{%
+  \makedispenvdef{#1}{#3}%
+  \makedispenvdef{#2}{#3}%
+}
+%
+% @lisp: indented, narrowed, typewriter font;
+% @example: same as @lisp.
+%
+% @smallexample and @smalllisp: use smaller fonts.
+% Originally contributed by Pavel@xerox.
+%
+\maketwodispenvdef{lisp}{example}{%
+  \nonfillstart
+  \tt\setupmarkupstyle{example}%
+  \let\kbdfont = \kbdexamplefont % Allow @kbd to do something special.
+  \gobble % eat return
+}
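+% For illustration: the single call above defines four environments at
+% once -- @lisp, @smalllisp, @example and @smallexample -- plus \Elisp,
+% \Esmalllisp, \Eexample and \Esmallexample, all equal to
+% \afterenvbreak.  The small variants go through \setsmalldispenv and so
+% use \smallexamplefonts unless the document says
+% "@set dispenvsize nosmall".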
+% @display/@smalldisplay: same as @lisp except keep current font.
+%
+\makedispenvdef{display}{%
+  \nonfillstart
+  \gobble
+}
+
+% @format/@smallformat: same as @display except don't narrow margins.
+%
+\makedispenvdef{format}{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \gobble
+}
+
+% @flushleft: same as @format, but doesn't obey \SETdispenvsize.
+\envdef\flushleft{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \gobble
+}
+\let\Eflushleft = \afterenvbreak
+
+% @flushright.
+%
+\envdef\flushright{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \advance\leftskip by 0pt plus 1fill\relax
+  \gobble
+}
+\let\Eflushright = \afterenvbreak
+
+
+% @raggedright does more-or-less normal line breaking but no right
+% justification.  From plain.tex.
+\envdef\raggedright{%
+  \rightskip0pt plus2em \spaceskip.3333em \xspaceskip.5em\relax
+}
+\let\Eraggedright\par
+
+\envdef\raggedleft{%
+  \parindent=0pt \leftskip0pt plus2em
+  \spaceskip.3333em \xspaceskip.5em \parfillskip=0pt
+  \hbadness=10000 % Last line will usually be underfull, so turn off
+                  % badness reporting.
+}
+\let\Eraggedleft\par
+
+\envdef\raggedcenter{%
+  \parindent=0pt \rightskip0pt plus1em \leftskip0pt plus1em
+  \spaceskip.3333em \xspaceskip.5em \parfillskip=0pt
+  \hbadness=10000 % Last line will usually be underfull, so turn off
+                  % badness reporting.
+}
+\let\Eraggedcenter\par
+
+
+% @quotation does normal linebreaking (hence we can't use \nonfillstart)
+% and narrows the margins.  We keep \parskip nonzero in general, since
+% we're doing normal filling.  So, when using \aboveenvbreak and
+% \afterenvbreak, temporarily make \parskip 0.
+%
+\makedispenvdef{quotation}{\quotationstart}
+%
+\def\quotationstart{%
+  \indentedblockstart % same as \indentedblock, but increase right margin too.
+  \ifx\nonarrowing\relax
+    \advance\rightskip by \lispnarrowing
+  \fi
+  \parsearg\quotationlabel
+}
+
+% We have retained a nonzero parskip for the environment, since we're
+% doing normal filling.
+%
+\def\Equotation{%
+  \par
+  \ifx\quotationauthor\thisisundefined\else
+    % indent a bit.
+    \leftline{\kern 2\leftskip \sl ---\quotationauthor}%
+  \fi
+  {\parskip=0pt \afterenvbreak}%
+}
+\def\Esmallquotation{\Equotation}
+
+% If we're given an argument, typeset it in bold with a colon after.
+\def\quotationlabel#1{%
+  \def\temp{#1}%
+  \ifx\temp\empty \else
+    {\bf #1: }%
+  \fi
+}
+
+% @indentedblock is like @quotation, but indents only on the left and
+% has no optional argument.
+% 
+\makedispenvdef{indentedblock}{\indentedblockstart}
+%
+\def\indentedblockstart{%
+  {\parskip=0pt \aboveenvbreak}% because \aboveenvbreak inserts \parskip
+  \parindent=0pt
+  %
+  % @cartouche defines \nonarrowing to inhibit narrowing at next level down.
+  \ifx\nonarrowing\relax
+    \advance\leftskip by \lispnarrowing
+    \exdentamount = \lispnarrowing
+  \else
+    \let\nonarrowing = \relax
+  \fi
+}
+
+% Keep a nonzero parskip for the environment, since we're doing normal filling.
+%
+\def\Eindentedblock{%
+  \par
+  {\parskip=0pt \afterenvbreak}%
+}
+\def\Esmallindentedblock{\Eindentedblock}
+
+
+% LaTeX-like @verbatim...@end verbatim and @verb{<char>...<char>}
+% If we want to allow any <char> as delimiter,
+% we need the curly braces so that makeinfo sees the @verb command, eg:
+% `@verbx...x' would look like the '@verbx' command.  --janneke@gnu.org
+%
+% [Knuth]: Donald Ervin Knuth, 1996.  The TeXbook.
+%
+% [Knuth] p.344; only we need to do the other characters Texinfo sets
+% active too.  Otherwise, they get lost as the first character on a
+% verbatim line.
+\def\dospecials{%
+  \do\ \do\\\do\{\do\}\do\$\do\&%
+  \do\#\do\^\do\^^K\do\_\do\^^A\do\%\do\~%
+  \do\<\do\>\do\|\do\@\do+\do\"%
+  % Don't do the quotes -- if we do, @set txicodequoteundirected and
+  % @set txicodequotebacktick will not have effect on @verb and
+  % @verbatim, and ?` and !` ligatures won't get disabled.
+  %\do\`\do\'%
+}
+%
+% [Knuth] p. 380
+\def\uncatcodespecials{%
+  \def\do##1{\catcode`##1=\other}\dospecials}
+%
+% Setup for the @verb command.
+%
+% Eight spaces for a tab
+\begingroup
+  \catcode`\^^I=\active
+  \gdef\tabeightspaces{\catcode`\^^I=\active\def^^I{\ \ \ \ \ \ \ \ }}
+\endgroup
+%
+\def\setupverb{%
+  \tt  % easiest (and conventionally used) font for verbatim
+  \def\par{\leavevmode\endgraf}%
+  \setupmarkupstyle{verb}%
+  \tabeightspaces
+  % Respect line breaks,
+  % print special symbols as themselves, and
+  % make each space count
+  % must do in this order:
+  \obeylines \uncatcodespecials \sepspaces
+}
+
+% Setup for the @verbatim environment
+%
+% Real tab expansion.
+\newdimen\tabw \setbox0=\hbox{\tt\space} \tabw=8\wd0 % tab amount
+%
+% We typeset each line of the verbatim in an \hbox, so we can handle
+% tabs.  The \global is in case the verbatim line starts with an accent,
+% or some other command that starts with a begin-group.  Otherwise, the
+% entire \verbbox would disappear at the corresponding end-group, before
+% it is typeset.  Meanwhile, we can't have nested verbatim commands
+% (can we?), so the \global won't be overwriting itself.
+\newbox\verbbox
+\def\starttabbox{\global\setbox\verbbox=\hbox\bgroup}
+%
+\begingroup
+  \catcode`\^^I=\active
+  \gdef\tabexpand{%
+    \catcode`\^^I=\active
+    \def^^I{\leavevmode\egroup
+      \dimen\verbbox=\wd\verbbox % the width so far, or since the previous tab
+      \divide\dimen\verbbox by\tabw
+      \multiply\dimen\verbbox by\tabw % compute previous multiple of \tabw
+      \advance\dimen\verbbox by\tabw  % advance to next multiple of \tabw
+      \wd\verbbox=\dimen\verbbox \box\verbbox \starttabbox
+    }%
+  }
+\endgroup
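+% For illustration, a worked example of the tab arithmetic above (the
+% widths are made up): \tabw is eight \tt space widths.  If the line so
+% far is 13 space widths wide, dividing by \tabw gives 1, multiplying
+% back gives 8 space widths (the last tab stop passed), and adding \tabw
+% gives 16, so the finished box is padded out to the next 8-column tab
+% stop before a fresh \verbbox is started.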
+
+% start the verbatim environment.
+\def\setupverbatim{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \tt % easiest (and conventionally used) font for verbatim
+  % The \leavevmode here is for blank lines.  Otherwise, we would
+  % never \starttabbox and the \egroup would end verbatim mode.
+  \def\par{\leavevmode\egroup\box\verbbox\endgraf}%
+  \tabexpand
+  \setupmarkupstyle{verbatim}%
+  % Respect line breaks,
+  % print special symbols as themselves, and
+  % make each space count.
+  % Must do in this order:
+  \obeylines \uncatcodespecials \sepspaces
+  \everypar{\starttabbox}%
+}
+
+% Do the @verb magic: verbatim text is quoted by unique
+% delimiter characters.  Before first delimiter expect a
+% right brace, after last delimiter expect closing brace:
+%
+%    \def\doverb'{'<char>#1<char>'}'{#1}
+%
+% [Knuth] p. 382; only eat outer {}
+\begingroup
+  \catcode`[=1\catcode`]=2\catcode`\{=\other\catcode`\}=\other
+  \gdef\doverb{#1[\def\next##1#1}[##1\endgroup]\next]
+\endgroup
+%
+\def\verb{\begingroup\setupverb\doverb}
+%
+%
+% Do the @verbatim magic: define the macro \doverbatim so that
+% the (first) argument ends when '@end verbatim' is reached, ie:
+%
+%     \def\doverbatim#1@end verbatim{#1}
+%
+% For Texinfo it's a lot easier than for LaTeX,
+% because texinfo's \verbatim doesn't stop at '\end{verbatim}':
+% we need not redefine '\', '{' and '}'.
+%
+% Inspired by LaTeX's verbatim command set [latex.ltx]
+%
+\begingroup
+  \catcode`\ =\active
+  \obeylines %
+  % ignore everything up to the first ^^M, that's the newline at the end
+  % of the @verbatim input line itself.  Otherwise we get an extra blank
+  % line in the output.
+  \xdef\doverbatim#1^^M#2@end verbatim{#2\noexpand\end\gobble verbatim}%
+  % We really want {...\end verbatim} in the body of the macro, but
+  % without the active space; thus we have to use \xdef and \gobble.
+\endgroup
+%
+\envdef\verbatim{%
+    \setupverbatim\doverbatim
+}
+\let\Everbatim = \afterenvbreak
+
+
+% @verbatiminclude FILE - insert text of file in verbatim environment.
+%
+\def\verbatiminclude{\parseargusing\filenamecatcodes\doverbatiminclude}
+%
+\def\doverbatiminclude#1{%
+  {%
+    \makevalueexpandable
+    \setupverbatim
+    \indexnofonts       % Allow `@@' and other weird things in file names.
+    \wlog{texinfo.tex: doing @verbatiminclude of #1^^J}%
+    \input #1
+    \afterenvbreak
+  }%
+}
+
+% @copying ... @end copying.
+% Save the text away for @insertcopying later.
+%
+% We save the uninterpreted tokens, rather than creating a box.
+% Saving the text in a box would be much easier, but then all the
+% typesetting commands (@smallbook, font changes, etc.) have to be done
+% beforehand -- and a) we want @copying to be done first in the source
+% file; b) letting users define the frontmatter in as flexible an order as
+% possible is very desirable.
+%
+\def\copying{\checkenv{}\begingroup\scanargctxt\docopying}
+\def\docopying#1@end copying{\endgroup\def\copyingtext{#1}}
+%
+\def\insertcopying{%
+  \begingroup
+    \parindent = 0pt  % paragraph indentation looks wrong on title page
+    \scanexp\copyingtext
+  \endgroup
+}
+
+
+\message{defuns,}
+% @defun etc.
+
+\newskip\defbodyindent \defbodyindent=.4in
+\newskip\defargsindent \defargsindent=50pt
+\newskip\deflastargmargin \deflastargmargin=18pt
+\newcount\defunpenalty
+
+% Start the processing of @deffn:
+\def\startdefun{%
+  \ifnum\lastpenalty<10000
+    \medbreak
+    \defunpenalty=10003 % Will keep this @deffn together with the
+                        % following @def command, see below.
+  \else
+    % If there are two @def commands in a row, we'll have a \nobreak,
+    % which is there to keep the function description together with its
+    % header.  But if there's nothing but headers, we need to allow a
+    % break somewhere.  Check specifically for penalty 10002, inserted
+    % by \printdefunline, instead of 10000, since the sectioning
+    % commands also insert a nobreak penalty, and we don't want to allow
+    % a break between a section heading and a defun.
+    %
+    % As a further refinement, we avoid "club" headers by signalling
+    % with penalty of 10003 after the very first @deffn in the
+    % sequence (see above), and penalty of 10002 after any following
+    % @def command.
+    \ifnum\lastpenalty=10002 \penalty2000 \else \defunpenalty=10002 \fi
+    %
+    % Similarly, after a section heading, do not allow a break.
+    % But do insert the glue.
+    \medskip  % preceded by discardable penalty, so not a breakpoint
+  \fi
+  %
+  \parindent=0in
+  \advance\leftskip by \defbodyindent
+  \exdentamount=\defbodyindent
+}
+
+\def\dodefunx#1{%
+  % First, check whether we are in the right environment:
+  \checkenv#1%
+  %
+  % As above, allow line break if we have multiple x headers in a row.
+  % It's not a great place, though.
+  \ifnum\lastpenalty=10002 \penalty3000 \else \defunpenalty=10002 \fi
+  %
+  % And now, it's time to reuse the body of the original defun:
+  \expandafter\gobbledefun#1%
+}
+\def\gobbledefun#1\startdefun{}
+
+% \printdefunline \deffnheader{text}
+%
+\def\printdefunline#1#2{%
+  \begingroup
+    % call \deffnheader:
+    #1#2 \endheader
+    % common ending:
+    \interlinepenalty = 10000
+    \advance\rightskip by 0pt plus 1fil\relax
+    \endgraf
+    \nobreak\vskip -\parskip
+    \penalty\defunpenalty  % signal to \startdefun and \dodefunx
+    % Some of the @defun-type tags do not enable magic parentheses,
+    % rendering the following check redundant.  But we don't optimize.
+    \checkparencounts
+  \endgroup
+}
+
+\def\Edefun{\endgraf\medbreak}
+
+% \makedefun{deffn} creates \deffn, \deffnx and \Edeffn;
+% the only thing remaining is to define \deffnheader.
+%
+\def\makedefun#1{%
+  \expandafter\let\csname E#1\endcsname = \Edefun
+  \edef\temp{\noexpand\domakedefun
+    \makecsname{#1}\makecsname{#1x}\makecsname{#1header}}%
+  \temp
+}
+
+% \domakedefun \deffn \deffnx \deffnheader
+%
+% Define \deffn and \deffnx, without parameters.
+% \deffnheader has to be defined explicitly.
+%
+\def\domakedefun#1#2#3{%
+  \envdef#1{%
+    \startdefun
+    \doingtypefnfalse    % distinguish typed functions from all else
+    \parseargusing\activeparens{\printdefunline#3}%
+  }%
+  \def#2{\dodefunx#1}%
+  \def#3%
+}
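+% For illustration, what \makedefun{deffn} ends up doing, in effect:
+%   \let\Edeffn = \Edefun
+%   \envdef\deffn{\startdefun \doingtypefnfalse
+%                 \parseargusing\activeparens{\printdefunline\deffnheader}}
+%   \def\deffnx{\dodefunx\deffn}
+%   \def\deffnheader ...  % the trailing \def#3 picks up whatever
+%                         % parameter text and body follow the
+%                         % \makedefun call, so
+%                         % \makedefun{deffn}{\deffngeneral{}} gives
+%                         % \def\deffnheader{\deffngeneral{}}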
+
+\newif\ifdoingtypefn       % doing typed function?
+\newif\ifrettypeownline    % typeset return type on its own line?
+
+% @deftypefnnewline on|off says whether the return type of typed functions
+% are printed on their own line.  This affects @deftypefn, @deftypefun,
+% @deftypeop, and @deftypemethod.
+% 
+\parseargdef\deftypefnnewline{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETtxideftypefnnl\endcsname
+      = \empty
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETtxideftypefnnl\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @txideftypefnnl value `\temp',
+                must be on|off}%
+  \fi\fi
+}
+
+% Untyped functions:
+
+% @deffn category name args
+\makedefun{deffn}{\deffngeneral{}}
+
+% @deffn category class name args
+\makedefun{defop}#1 {\defopon{#1\ \putwordon}}
+
+% \defopon {category on}class name args
+\def\defopon#1#2 {\deffngeneral{\putwordon\ \code{#2}}{#1\ \code{#2}} }
+
+% \deffngeneral {subind}category name args
+%
+\def\deffngeneral#1#2 #3 #4\endheader{%
+  % Remember that \dosubind{fn}{foo}{} is equivalent to \doind{fn}{foo}.
+  \dosubind{fn}{\code{#3}}{#1}%
+  \defname{#2}{}{#3}\magicamp\defunargs{#4\unskip}%
+}
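+% For illustration, how the delimited arguments above split a header
+% line.  For the (made-up) Texinfo input
+%   @deffn Command forward-word count
+% \deffnheader sees "Command forward-word count \endheader", so
+% #2="Command", #3="forward-word", #4="count": the name is indexed as
+% @code{forward-word}, and \defname typesets the name and args with the
+% category "[Command]" set at the right margin.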
+
+% Typed functions:
+
+% @deftypefn category type name args
+\makedefun{deftypefn}{\deftypefngeneral{}}
+
+% @deftypeop category class type name args
+\makedefun{deftypeop}#1 {\deftypeopon{#1\ \putwordon}}
+
+% \deftypeopon {category on}class type name args
+\def\deftypeopon#1#2 {\deftypefngeneral{\putwordon\ \code{#2}}{#1\ \code{#2}} }
+
+% \deftypefngeneral {subind}category type name args
+%
+\def\deftypefngeneral#1#2 #3 #4 #5\endheader{%
+  \dosubind{fn}{\code{#4}}{#1}%
+  \doingtypefntrue
+  \defname{#2}{#3}{#4}\defunargs{#5\unskip}%
+}
+
+% Typed variables:
+
+% @deftypevr category type var args
+\makedefun{deftypevr}{\deftypecvgeneral{}}
+
+% @deftypecv category class type var args
+\makedefun{deftypecv}#1 {\deftypecvof{#1\ \putwordof}}
+
+% \deftypecvof {category of}class type var args
+\def\deftypecvof#1#2 {\deftypecvgeneral{\putwordof\ \code{#2}}{#1\ \code{#2}} }
+
+% \deftypecvgeneral {subind}category type var args
+%
+\def\deftypecvgeneral#1#2 #3 #4 #5\endheader{%
+  \dosubind{vr}{\code{#4}}{#1}%
+  \defname{#2}{#3}{#4}\defunargs{#5\unskip}%
+}
+
+% Untyped variables:
+
+% @defvr category var args
+\makedefun{defvr}#1 {\deftypevrheader{#1} {} }
+
+% @defcv category class var args
+\makedefun{defcv}#1 {\defcvof{#1\ \putwordof}}
+
+% \defcvof {category of}class var args
+\def\defcvof#1#2 {\deftypecvof{#1}#2 {} }
+
+% Types:
+
+% @deftp category name args
+\makedefun{deftp}#1 #2 #3\endheader{%
+  \doind{tp}{\code{#2}}%
+  \defname{#1}{}{#2}\defunargs{#3\unskip}%
+}
+
+% Remaining @defun-like shortcuts:
+\makedefun{defun}{\deffnheader{\putwordDeffunc} }
+\makedefun{defmac}{\deffnheader{\putwordDefmac} }
+\makedefun{defspec}{\deffnheader{\putwordDefspec} }
+\makedefun{deftypefun}{\deftypefnheader{\putwordDeffunc} }
+\makedefun{defvar}{\defvrheader{\putwordDefvar} }
+\makedefun{defopt}{\defvrheader{\putwordDefopt} }
+\makedefun{deftypevar}{\deftypevrheader{\putwordDefvar} }
+\makedefun{defmethod}{\defopon\putwordMethodon}
+\makedefun{deftypemethod}{\deftypeopon\putwordMethodon}
+\makedefun{defivar}{\defcvof\putwordInstanceVariableof}
+\makedefun{deftypeivar}{\deftypecvof\putwordInstanceVariableof}
+
+% \defname, which formats the name of the @def (not the args).
+% #1 is the category, such as "Function".
+% #2 is the return type, if any.
+% #3 is the function name.
+%
+% We are followed by (but not passed) the arguments, if any.
+%
+\def\defname#1#2#3{%
+  \par
+  % Get the values of \leftskip and \rightskip as they were outside the @def...
+  \advance\leftskip by -\defbodyindent
+  %
+  % Determine if we are typesetting the return type of a typed function
+  % on a line by itself.
+  \rettypeownlinefalse
+  \ifdoingtypefn  % doing a typed function specifically?
+    % then check user option for putting return type on its own line:
+    \expandafter\ifx\csname SETtxideftypefnnl\endcsname\relax \else
+      \rettypeownlinetrue
+    \fi
+  \fi
+  %
+  % How we'll format the category name.  Putting it in brackets helps
+  % distinguish it from the body text that may end up on the next line
+  % just below it.
+  \def\temp{#1}%
+  \setbox0=\hbox{\kern\deflastargmargin \ifx\temp\empty\else [\rm\temp]\fi}
+  %
+  % Figure out line sizes for the paragraph shape.  We'll always have at
+  % least two.
+  \tempnum = 2
+  %
+  % The first line needs space for \box0; but if \rightskip is nonzero,
+  % we need only space for the part of \box0 which exceeds it:
+  \dimen0=\hsize  \advance\dimen0 by -\wd0  \advance\dimen0 by \rightskip
+  %
+  % If doing a return type on its own line, we'll have another line.
+  \ifrettypeownline
+    \advance\tempnum by 1
+    \def\maybeshapeline{0in \hsize}%
+  \else
+    \def\maybeshapeline{}%
+  \fi
+  %
+  % The continuations:
+  \dimen2=\hsize  \advance\dimen2 by -\defargsindent
+  %
+  % The final paragraph shape:
+  \parshape \tempnum  0in \dimen0  \maybeshapeline  \defargsindent \dimen2
+  %
+  % Put the category name at the right margin.
+  \noindent
+  \hbox to 0pt{%
+    \hfil\box0 \kern-\hsize
+    % \hsize has to be shortened this way:
+    \kern\leftskip
+    % Intentionally do not respect \rightskip, since we need the space.
+  }%
+  %
+  % Allow all lines to be underfull without complaint:
+  \tolerance=10000 \hbadness=10000
+  \exdentamount=\defbodyindent
+  {%
+    % defun fonts. We use typewriter by default (used to be bold) because:
+    % . we're printing identifiers, they should be in tt in principle.
+    % . in languages with many accents, such as Czech or French, it's
+    %   common to leave accents off identifiers.  The result looks ok in
+    %   tt, but exceedingly strange in rm.
+    % . we don't want -- and --- to be treated as ligatures.
+    % . this still does not fix the ?` and !` ligatures, but so far no
+    %   one has made identifiers using them :).
+    \df \tt
+    \def\temp{#2}% text of the return type
+    \ifx\temp\empty\else
+      \tclose{\temp}% typeset the return type
+      \ifrettypeownline
+        % put return type on its own line; prohibit line break following:
+        \hfil\vadjust{\nobreak}\break  
+      \else
+        \space  % type on same line, so just followed by a space
+      \fi
+    \fi           % no return type
+    #3% output function name
+  }%
+  {\rm\enskip}% hskip 0.5 em of \tenrm
+  %
+  \boldbrax
+  % arguments will be output next, if any.
+}
+
+% Print arguments in slanted roman (not ttsl), inconsistently with using
+% tt for the name.  This is because literal text is sometimes needed in
+% the argument list (groff manual), and ttsl and tt are not very
+% distinguishable.  Prevent hyphenation at `-' chars.
+%
+\def\defunargs#1{%
+  % use sl by default (not ttsl),
+  % tt for the names.
+  \df \sl \hyphenchar\font=0
+  %
+  % On the other hand, if an argument has two dashes (for instance), we
+  % want a way to get ttsl.  We used to recommend @var for that, so
+  % leave the code in, but it's strange for @var to lead to typewriter.
+  % Nowadays we recommend @code, since the difference between a ttsl hyphen
+  % and a tt hyphen is pretty tiny.  @code also disables ?` !`.
+  \def\var##1{{\setupmarkupstyle{var}\ttslanted{##1}}}%
+  #1%
+  \sl\hyphenchar\font=45
+}
+
+% We want ()&[] to print specially on the defun line.
+%
+\def\activeparens{%
+  \catcode`\(=\active \catcode`\)=\active
+  \catcode`\[=\active \catcode`\]=\active
+  \catcode`\&=\active
+}
+
+% Make control sequences which act like normal parenthesis chars.
+\let\lparen = ( \let\rparen = )
+
+% Be sure that we always have a definition for `(', etc.  For example,
+% if the fn name has parens in it, \boldbrax will not be in effect yet,
+% so TeX would otherwise complain about undefined control sequence.
+{
+  \activeparens
+  \global\let(=\lparen \global\let)=\rparen
+  \global\let[=\lbrack \global\let]=\rbrack
+  \global\let& = \&
+
+  \gdef\boldbrax{\let(=\opnr\let)=\clnr\let[=\lbrb\let]=\rbrb}
+  \gdef\magicamp{\let&=\amprm}
+}
+
+\newcount\parencount
+
+% If we encounter &foo, then turn on ()-hacking afterwards
+\newif\ifampseen
+\def\amprm#1 {\ampseentrue{\bf\&#1 }}
+
+\def\parenfont{%
+  \ifampseen
+    % At the first level, print parens in roman,
+    % otherwise use the default font.
+    \ifnum \parencount=1 \rm \fi
+  \else
+    % The \sf parens (in \boldbrax) actually are a little bolder than
+    % the contained text.  This is especially needed for [ and ] .
+    \sf
+  \fi
+}
+\def\infirstlevel#1{%
+  \ifampseen
+    \ifnum\parencount=1
+      #1%
+    \fi
+  \fi
+}
+\def\bfafterword#1 {#1 \bf}
+
+\def\opnr{%
+  \global\advance\parencount by 1
+  {\parenfont(}%
+  \infirstlevel \bfafterword
+}
+\def\clnr{%
+  {\parenfont)}%
+  \infirstlevel \sl
+  \global\advance\parencount by -1
+}
+
+\newcount\brackcount
+\def\lbrb{%
+  \global\advance\brackcount by 1
+  {\bf[}%
+}
+\def\rbrb{%
+  {\bf]}%
+  \global\advance\brackcount by -1
+}
+
+\def\checkparencounts{%
+  \ifnum\parencount=0 \else \badparencount \fi
+  \ifnum\brackcount=0 \else \badbrackcount \fi
+}
+% these should not use \errmessage; the glibc manual, at least, actually
+% has such constructs (when documenting function pointers).
+\def\badparencount{%
+  \message{Warning: unbalanced parentheses in @def...}%
+  \global\parencount=0
+}
+\def\badbrackcount{%
+  \message{Warning: unbalanced square brackets in @def...}%
+  \global\brackcount=0
+}
+
+
+\message{macros,}
+% @macro.
+
+% To do this right we need a feature of e-TeX, \scantokens,
+% which we arrange to emulate with a temporary file in ordinary TeX.
+\ifx\eTeXversion\thisisundefined
+  \newwrite\macscribble
+  \def\scantokens#1{%
+    \toks0={#1}%
+    \immediate\openout\macscribble=\jobname.tmp
+    \immediate\write\macscribble{\the\toks0}%
+    \immediate\closeout\macscribble
+    \input \jobname.tmp
+  }
+\fi
+
+\def\scanmacro#1{\begingroup
+  \newlinechar`\^^M
+  \let\xeatspaces\eatspaces
+  %
+  % Undo catcode changes of \startcontents and \doprintindex
+  % When called from @insertcopying or (short)caption, we need active
+  % backslash to get it printed correctly.  Previously, we had
+  % \catcode`\\=\other instead.  We'll see whether a problem appears
+  % with macro expansion.				--kasal, 19aug04
+  \catcode`\@=0 \catcode`\\=\active \escapechar=`\@
+  %
+  % ... and for \example:
+  \spaceisspace
+  %
+  % The \empty here causes a following catcode 5 newline to be eaten as
+  % part of reading whitespace after a control sequence.  It does not
+  % eat a catcode 13 newline.  There's no good way to handle the two
+  % cases (untried: maybe e-TeX's \everyeof could help, though plain TeX
+  % would then have different behavior).  See the Macro Details node in
+  % the manual for the workaround we recommend for macros and
+  % line-oriented commands.
+  % 
+  \scantokens{#1\empty}%
+\endgroup}
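+
+% A rough illustration: if a macro body has been stored as the plain
+% character string "@emph{hello}", \scanmacro re-reads that string with
+% @ as the escape character, so the @emph command really executes.
+% Under plain TeX the temporary file \jobname.tmp (above) provides the
+% re-tokenization that e-TeX's \scantokens gives directly.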
+
+\def\scanexp#1{%
+  \edef\temp{\noexpand\scanmacro{#1}}%
+  \temp
+}
+
+\newcount\paramno   % Count of parameters
+\newtoks\macname    % Macro name
+\newif\ifrecursive  % Is it recursive?
+
+% List of all defined macros in the form
+%    \definedummyword\macro1\definedummyword\macro2...
+% Currently it also contains all @aliases; the list can be split
+% if there is a need.
+\def\macrolist{}
+
+% Add the macro to \macrolist
+\def\addtomacrolist#1{\expandafter \addtomacrolistxxx \csname#1\endcsname}
+\def\addtomacrolistxxx#1{%
+     \toks0 = \expandafter{\macrolist\definedummyword#1}%
+     \xdef\macrolist{\the\toks0}%
+}
+
+% Utility routines.
+% This does \let #1 = #2, with \csnames; that is,
+%   \let \csname#1\endcsname = \csname#2\endcsname
+% (except of course we have to play expansion games).
+%
+\def\cslet#1#2{%
+  \expandafter\let
+  \csname#1\expandafter\endcsname
+  \csname#2\endcsname
+}
+
+% Trim leading and trailing spaces off a string.
+% Concepts from aro-bend problem 15 (see CTAN).
+{\catcode`\@=11
+\gdef\eatspaces #1{\expandafter\trim@\expandafter{#1 }}
+\gdef\trim@ #1{\trim@@ @#1 @ #1 @ @@}
+\gdef\trim@@ #1@ #2@ #3@@{\trim@@@\empty #2 @}
+\def\unbrace#1{#1}
+\unbrace{\gdef\trim@@@ #1 } #2@{#1}
+}
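+
+% Roughly: \eatspaces{ foo } yields "foo".  It is used below to tidy up
+% macro and argument names that may carry stray spaces.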
+
+% Trim a single trailing ^^M off a string.
+{\catcode`\^^M=\other \catcode`\Q=3%
+\gdef\eatcr #1{\eatcra #1Q^^MQ}%
+\gdef\eatcra#1^^MQ{\eatcrb#1Q}%
+\gdef\eatcrb#1Q#2Q{#1}%
+}
+
+% Macro bodies are absorbed as an argument in a context where
+% all characters are catcode 10, 11 or 12, except \ which is active
+% (as in normal texinfo). It is necessary to change the definition of \
+% to recognize macro arguments; this is the job of \mbodybackslash.
+%
+% Non-ASCII encodings make 8-bit characters active, so un-activate
+% them to avoid their expansion.  Must do this non-globally, to
+% confine the change to the current group.
+%
+% It's necessary to have hard CRs when the macro is executed. This is
+% done by making ^^M (\endlinechar) catcode 12 when reading the macro
+% body, and then making it the \newlinechar in \scanmacro.
+%
+\def\scanctxt{% used as subroutine
+  \catcode`\"=\other
+  \catcode`\+=\other
+  \catcode`\<=\other
+  \catcode`\>=\other
+  \catcode`\@=\other
+  \catcode`\^=\other
+  \catcode`\_=\other
+  \catcode`\|=\other
+  \catcode`\~=\other
+  \ifx\declaredencoding\ascii \else \setnonasciicharscatcodenonglobal\other \fi
+}
+
+\def\scanargctxt{% used for copying and captions, not macros.
+  \scanctxt
+  \catcode`\\=\other
+  \catcode`\^^M=\other
+}
+
+\def\macrobodyctxt{% used for @macro definitions
+  \scanctxt
+  \catcode`\{=\other
+  \catcode`\}=\other
+  \catcode`\^^M=\other
+  \usembodybackslash
+}
+
+\def\macroargctxt{% used when scanning invocations
+  \scanctxt
+  \catcode`\\=0
+}
+% why catcode 0 for \ in the above?  To recognize \\ \{ \} as "escapes"
+% for the single characters \ { }.  Thus, we end up with the "commands"
+% that would be written @\ @{ @} in a Texinfo document.
+% 
+% We already have @{ and @}.  For @\, we define it here, and only for
+% this purpose, to produce a typewriter backslash (so, the @\ that we
+% define for @math can't be used with @macro calls):
+%
+\def\\{\normalbackslash}%
+% 
+% We would like to do this for \, too, since that is what makeinfo does.
+% But it is not possible, because Texinfo already has a command @, for a
+% cedilla accent.  Documents must use @comma{} instead.
+%
+% \anythingelse will almost certainly be an error of some kind.
+
+
+% \mbodybackslash is the definition of \ in @macro bodies.
+% It maps \foo\ => \csname macarg.foo\endcsname => #N
+% where N is the macro parameter number.
+% We define \csname macarg.\endcsname to be \realbackslash, so
+% \\ in macro replacement text gets you a backslash.
+%
+{\catcode`@=0 @catcode`@\=@active
+ @gdef@usembodybackslash{@let\=@mbodybackslash}
+ @gdef@mbodybackslash#1\{@csname macarg.#1@endcsname}
+}
+\expandafter\def\csname macarg.\endcsname{\realbackslash}
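+
+% So, inside an @macro body, writing \arg\ produces the value of the
+% parameter named "arg" (via \csname macarg.arg\endcsname), and writing
+% \\ produces a literal backslash.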
+
+\def\margbackslash#1{\char`\#1 }
+
+\def\macro{\recursivefalse\parsearg\macroxxx}
+\def\rmacro{\recursivetrue\parsearg\macroxxx}
+
+\def\macroxxx#1{%
+  \getargs{#1}% now \macname is the macname and \argl the arglist
+  \ifx\argl\empty       % no arguments
+     \paramno=0\relax
+  \else
+     \expandafter\parsemargdef \argl;%
+     \ifnum\paramno>256\relax
+       \ifx\eTeXversion\thisisundefined
+         \errhelp = \EMsimple
+         \errmessage{You need eTeX to compile a file with macros with more than 256 arguments}
+       \fi
+     \fi
+  \fi
+  \if1\csname ismacro.\the\macname\endcsname
+     \message{Warning: redefining \the\macname}%
+  \else
+     \expandafter\ifx\csname \the\macname\endcsname \relax
+     \else \errmessage{Macro name \the\macname\space already defined}\fi
+     \global\cslet{macsave.\the\macname}{\the\macname}%
+     \global\expandafter\let\csname ismacro.\the\macname\endcsname=1%
+     \addtomacrolist{\the\macname}%
+  \fi
+  \begingroup \macrobodyctxt
+  \ifrecursive \expandafter\parsermacbody
+  \else \expandafter\parsemacbody
+  \fi}
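+
+% A small illustration of what the code above parses (made-up macro name):
+%
+%   @macro greet{name}
+%   Hello, \name\!
+%   @end macro
+%
+%   @greet{world}
+%
+% \macroxxx stores "greet" in \macname and "name" in \argl, records the
+% macro in \macrolist, and then reads the body up to @end macro (or
+% @end rmacro for @rmacro).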
+
+\parseargdef\unmacro{%
+  \if1\csname ismacro.#1\endcsname
+    \global\cslet{#1}{macsave.#1}%
+    \global\expandafter\let \csname ismacro.#1\endcsname=0%
+    % Remove the macro name from \macrolist:
+    \begingroup
+      \expandafter\let\csname#1\endcsname \relax
+      \let\definedummyword\unmacrodo
+      \xdef\macrolist{\macrolist}%
+    \endgroup
+  \else
+    \errmessage{Macro #1 not defined}%
+  \fi
+}
+
+% Called by \do from \dounmacro on each macro.  The idea is to omit any
+% macro definitions that have been changed to \relax.
+%
+\def\unmacrodo#1{%
+  \ifx #1\relax
+    % remove this
+  \else
+    \noexpand\definedummyword \noexpand#1%
+  \fi
+}
+
+% This makes use of the obscure feature that if the last token of a
+% <parameter list> is #, then the preceding argument is delimited by
+% an opening brace, and that opening brace is not consumed.
+\def\getargs#1{\getargsxxx#1{}}
+\def\getargsxxx#1#{\getmacname #1 \relax\getmacargs}
+\def\getmacname#1 #2\relax{\macname={#1}}
+\def\getmacargs#1{\def\argl{#1}}
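+
+% For example, \getargs{greet{name}} leaves the opening brace unconsumed
+% (that is the point of the #1# parameter text), so \getmacname sets
+% \macname={greet} and \getmacargs then picks up {name}, defining \argl
+% as "name".  The {} appended by \getargs covers macros declared with no
+% argument list at all.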
+
+% For macro processing, make @ a letter so that we can construct private Texinfo macro names.
+\edef\texiatcatcode{\the\catcode`\@}
+\catcode `@=11\relax
+
+% Parse the optional {params} list.  Set up \paramno and \paramlist
+% so \defmacro knows what to do.  Define \macarg.BLAH for each BLAH
+% in the params list to some hook where the argument is to be expanded.  If
+% there are fewer than 10 arguments, that hook is replaced by ##N, where N
+% is the position in that list; that is to say, the macro arguments are
+% defined a la TeX in the macro body.
+%
+% That gets used by \mbodybackslash (above).
+%
+% We need to get `macro parameter char #' into several definitions.
+% The technique used is stolen from LaTeX: let \hash be something
+% unexpandable, insert that wherever you need a #, and then redefine
+% it to # just before using the token list produced.
+%
+% The same technique is used to protect \eatspaces till just before
+% the macro is used.
+%
+% If there are 10 or more arguments, a different technique is used, where the
+% hook remains in the body, and when the macro is to be expanded the body is
+% processed again to replace the arguments.
+%
+% In that case, the hook is \the\toks N-1, and we simply set \toks N-1 to the
+% argument N value and then \edef the body (nothing else will expand because of
+% the catcode regime under which the body was input).
+%
+% If you compile with TeX (not eTeX) and use macros with 10 or more
+% arguments, then no macro may take more than 256 arguments; otherwise an
+% error is produced.
+\def\parsemargdef#1;{%
+  \paramno=0\def\paramlist{}%
+  \let\hash\relax
+  \let\xeatspaces\relax
+  \parsemargdefxxx#1,;,%
+  % If there are 10 or more arguments, we parse the argument list again to
+  % set new definitions for the \macarg.BLAH macros corresponding to each
+  % BLAH argument.  The list had to be parsed once anyway in order to count
+  % the arguments, and since macros with at most 9 arguments are by far more
+  % frequent than macros with 10 or more, defining the \macarg.BLAH macros
+  % twice does not cost too much processing power.
+  \ifnum\paramno<10\relax\else
+    \paramno0\relax
+    \parsemmanyargdef@@#1,;,% 10 or more arguments
+  \fi
+}
+\def\parsemargdefxxx#1,{%
+  \if#1;\let\next=\relax
+  \else \let\next=\parsemargdefxxx
+    \advance\paramno by 1
+    \expandafter\edef\csname macarg.\eatspaces{#1}\endcsname
+        {\xeatspaces{\hash\the\paramno}}%
+    \edef\paramlist{\paramlist\hash\the\paramno,}%
+  \fi\next}
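+
+% Roughly, for an argument list "a, b" the two macros above leave
+% \paramno=2, make \paramlist hold the placeholders for #1 and #2, and
+% set up \macarg.a and \macarg.b as hooks that eventually deliver the
+% first and second argument value.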
+
+\def\parsemmanyargdef@@#1,{%
+  \if#1;\let\next=\relax
+  \else 
+    \let\next=\parsemmanyargdef@@
+    \edef\tempb{\eatspaces{#1}}%
+    \expandafter\def\expandafter\tempa
+       \expandafter{\csname macarg.\tempb\endcsname}%
+    % Note that we need some extra \noexpand\noexpand: we don't want \the
+    % to be expanded in \parsermacbody, as it uses an \xdef.
+    \expandafter\edef\tempa
+      {\noexpand\noexpand\noexpand\the\toks\the\paramno}%
+    \advance\paramno by 1\relax
+  \fi\next}
+
+% These two commands read recursive and nonrecursive macro bodies.
+% (They're different since rec and nonrec macros end differently.)
+%
+
+\catcode `\@\texiatcatcode
+\long\def\parsemacbody#1@end macro%
+{\xdef\temp{\eatcr{#1}}\endgroup\defmacro}%
+\long\def\parsermacbody#1@end rmacro%
+{\xdef\temp{\eatcr{#1}}\endgroup\defmacro}%
+\catcode `\@=11\relax
+
+\let\endargs@\relax
+\let\nil@\relax
+\def\nilm@{\nil@}%
+\long\def\nillm@{\nil@}%
+
+% This macro is expanded during the Texinfo macro expansion, not during its
+% definition.  It gets all the argument values and assigns them to the
+% macros \macarg.ARGNAME.
+%
+% #1 is the macro name
+% #2 is the list of argument names
+% #3 is the list of argument values
+\def\getargvals@#1#2#3{%
+  \def\macargdeflist@{}%
+  \def\saveparamlist@{#2}% Need to keep a copy for parameter expansion.
+  \def\paramlist{#2,\nil@}%
+  \def\macroname{#1}%
+  \begingroup
+  \macroargctxt
+  \def\argvaluelist{#3,\nil@}%
+  \def\@tempa{#3}%
+  \ifx\@tempa\empty
+    \setemptyargvalues@
+  \else
+    \getargvals@@
+  \fi
+}
+
+% 
+\def\getargvals@@{%
+  \ifx\paramlist\nilm@
+      % Some sanity check needed here that \argvaluelist is also empty.
+      \ifx\argvaluelist\nillm@
+      \else
+        \errhelp = \EMsimple
+        \errmessage{Too many arguments in macro `\macroname'!}%
+      \fi
+      \let\next\macargexpandinbody@
+  \else
+    \ifx\argvaluelist\nillm@
+       % No more argument values passed to the macro.  Set remaining named-arg
+       % macros to empty.
+       \let\next\setemptyargvalues@
+    \else
+      % pop current arg name into \@tempb
+      \def\@tempa##1{\pop@{\@tempb}{\paramlist}##1\endargs@}%
+      \expandafter\@tempa\expandafter{\paramlist}%
+       % pop current argument value into \@tempc
+      \def\@tempa##1{\longpop@{\@tempc}{\argvaluelist}##1\endargs@}%
+      \expandafter\@tempa\expandafter{\argvaluelist}%
+       % Here \@tempb is the current arg name and \@tempc is the current arg value.
+       % First place the new argument macro definition into \@tempd
+       \expandafter\macname\expandafter{\@tempc}%
+       \expandafter\let\csname macarg.\@tempb\endcsname\relax
+       \expandafter\def\expandafter\@tempe\expandafter{%
+         \csname macarg.\@tempb\endcsname}%
+       \edef\@tempd{\long\def\@tempe{\the\macname}}%
+       \push@\@tempd\macargdeflist@
+       \let\next\getargvals@@
+    \fi
+  \fi
+  \next
+}
+
+\def\push@#1#2{%
+  \expandafter\expandafter\expandafter\def
+  \expandafter\expandafter\expandafter#2%
+  \expandafter\expandafter\expandafter{%
+  \expandafter#1#2}%
+}
+
+% Replace arguments by their values in the macro body, and place the result
+% in macro \@tempa
+\def\macvalstoargs@{%
+  % To do this we use the property that token registers that are \the'ed
+  % within an \edef expand only once.  So we are going to place all argument
+  % values into their respective token registers.
+  %
+  % First we save the token context, and initialize argument numbering.
+  \begingroup
+    \paramno0\relax
+    % Then, for each argument number #N, we place the corresponding argument
+    % value into a new token list register \toks#N
+    \expandafter\putargsintokens@\saveparamlist@,;,%
+    % Then, we expand the body so that arguments are replaced by their
+    % values.  The trick that keeps the values from being expanded themselves
+    % is that they sit in token registers, which expand only once in an \edef.
+    \edef\@tempc{\csname mac.\macroname .body\endcsname}%
+    % Now we restore the token stack pointer to free the token list registers
+    % which we have used, but we make sure that the expanded body is saved
+    % after the group ends.
+    \expandafter
+  \endgroup
+  \expandafter\def\expandafter\@tempa\expandafter{\@tempc}%
+  }
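+
+% Minimal illustration of the property used above:
+%   \toks0={\emph{x}}   \edef\test{\the\toks0 }
+% leaves \test containing "\emph{x}" with \emph unexpanded, because
+% tokens delivered by \the from a token register are not expanded
+% further inside an \edef.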
+
+\def\macargexpandinbody@{% 
+  %% Define the named-macro outside of this group and then close this group. 
+  \expandafter
+  \endgroup
+  \macargdeflist@
+  % First we replace the macro arguments in the body by their values; the
+  % result is in \@tempa.
+  \macvalstoargs@
+  % Then we point at the \norecurse or \gobble (for recursive) macro value
+  % with \@tempb .
+  \expandafter\let\expandafter\@tempb\csname mac.\macroname .recurse\endcsname
+  % Depending on whether it is recursive or not, we need a trailing
+  % \egroup.
+  \ifx\@tempb\gobble
+     \let\@tempc\relax
+  \else
+     \let\@tempc\egroup
+  \fi
+  % And now we do the real job:
+  \edef\@tempd{\noexpand\@tempb{\macroname}\noexpand\scanmacro{\@tempa}\@tempc}%
+  \@tempd
+}
+
+\def\putargsintokens@#1,{%
+  \if#1;\let\next\relax
+  \else
+    \let\next\putargsintokens@
+    % First we allocate the new token list register, and give it a temporary
+    % alias \@tempb .
+    \toksdef\@tempb\the\paramno
+    % Then we place the argument value into that token list register.
+    \expandafter\let\expandafter\@tempa\csname macarg.#1\endcsname
+    \expandafter\@tempb\expandafter{\@tempa}%
+    \advance\paramno by 1\relax
+  \fi
+  \next
+}
+
+% Save the token stack pointer into macro #1
+\def\texisavetoksstackpoint#1{\edef#1{\the\@cclvi}}
+% Restore the token stack pointer from number in macro #1
+\def\texirestoretoksstackpoint#1{\expandafter\mathchardef\expandafter\@cclvi#1\relax}
+% newtoks that can be used non \outer .
+\def\texinonouternewtoks{\alloc@ 5\toks \toksdef \@cclvi}
+
+% Trailing missing arguments are set to empty.
+\def\setemptyargvalues@{%
+  \ifx\paramlist\nilm@
+    \let\next\macargexpandinbody@
+  \else
+    \expandafter\setemptyargvaluesparser@\paramlist\endargs@
+    \let\next\setemptyargvalues@
+  \fi
+  \next
+}
+
+\def\setemptyargvaluesparser@#1,#2\endargs@{%
+  \expandafter\def\expandafter\@tempa\expandafter{%
+    \expandafter\def\csname macarg.#1\endcsname{}}%
+  \push@\@tempa\macargdeflist@
+  \def\paramlist{#2}%
+}
+
+% #1 is the element target macro
+% #2 is the list macro
+% #3,#4\endargs@ is the list value
+\def\pop@#1#2#3,#4\endargs@{%
+   \def#1{#3}%
+   \def#2{#4}%
+}
+\long\def\longpop@#1#2#3,#4\endargs@{%
+   \long\def#1{#3}%
+   \long\def#2{#4}%
+}
+
+% This defines a Texinfo @macro. There are eight cases: recursive and
+% nonrecursive macros of zero, one, up to nine, and many arguments.
+% Much magic with \expandafter here.
+% \xdef is used so that macro definitions will survive the file
+% they're defined in; @include reads the file inside a group.
+%
+\def\defmacro{%
+  \let\hash=##% convert placeholders to macro parameter chars
+  \ifrecursive
+    \ifcase\paramno
+    % 0
+      \expandafter\xdef\csname\the\macname\endcsname{%
+        \noexpand\scanmacro{\temp}}%
+    \or % 1
+      \expandafter\xdef\csname\the\macname\endcsname{%
+         \bgroup\noexpand\macroargctxt
+         \noexpand\braceorline
+         \expandafter\noexpand\csname\the\macname xxx\endcsname}%
+      \expandafter\xdef\csname\the\macname xxx\endcsname##1{%
+         \egroup\noexpand\scanmacro{\temp}}%
+    \else
+      \ifnum\paramno<10\relax % at most 9
+        \expandafter\xdef\csname\the\macname\endcsname{%
+           \bgroup\noexpand\macroargctxt
+           \noexpand\csname\the\macname xx\endcsname}%
+        \expandafter\xdef\csname\the\macname xx\endcsname##1{%
+            \expandafter\noexpand\csname\the\macname xxx\endcsname ##1,}%
+        \expandafter\expandafter
+        \expandafter\xdef
+        \expandafter\expandafter
+          \csname\the\macname xxx\endcsname
+            \paramlist{\egroup\noexpand\scanmacro{\temp}}%
+      \else % 10 or more
+        \expandafter\xdef\csname\the\macname\endcsname{%
+          \noexpand\getargvals@{\the\macname}{\argl}%
+        }%    
+        \global\expandafter\let\csname mac.\the\macname .body\endcsname\temp
+        \global\expandafter\let\csname mac.\the\macname .recurse\endcsname\gobble
+      \fi
+    \fi
+  \else
+    \ifcase\paramno
+    % 0
+      \expandafter\xdef\csname\the\macname\endcsname{%
+        \noexpand\norecurse{\the\macname}%
+        \noexpand\scanmacro{\temp}\egroup}%
+    \or % 1
+      \expandafter\xdef\csname\the\macname\endcsname{%
+         \bgroup\noexpand\macroargctxt
+         \noexpand\braceorline
+         \expandafter\noexpand\csname\the\macname xxx\endcsname}%
+      \expandafter\xdef\csname\the\macname xxx\endcsname##1{%
+        \egroup
+        \noexpand\norecurse{\the\macname}%
+        \noexpand\scanmacro{\temp}\egroup}%
+    \else % at most 9
+      \ifnum\paramno<10\relax
+        \expandafter\xdef\csname\the\macname\endcsname{%
+           \bgroup\noexpand\macroargctxt
+           \expandafter\noexpand\csname\the\macname xx\endcsname}%
+        \expandafter\xdef\csname\the\macname xx\endcsname##1{%
+            \expandafter\noexpand\csname\the\macname xxx\endcsname ##1,}%
+        \expandafter\expandafter
+        \expandafter\xdef
+        \expandafter\expandafter
+        \csname\the\macname xxx\endcsname
+        \paramlist{%
+            \egroup
+            \noexpand\norecurse{\the\macname}%
+            \noexpand\scanmacro{\temp}\egroup}%
+      \else % 10 or more:
+        \expandafter\xdef\csname\the\macname\endcsname{%
+          \noexpand\getargvals@{\the\macname}{\argl}%
+        }%
+        \global\expandafter\let\csname mac.\the\macname .body\endcsname\temp
+        \global\expandafter\let\csname mac.\the\macname .recurse\endcsname\norecurse
+      \fi
+    \fi
+  \fi}
+
+\catcode `\@\texiatcatcode\relax
+
+\def\norecurse#1{\bgroup\cslet{#1}{macsave.#1}}
+
+% \braceorline decides whether the next nonwhitespace character is a
+% {.  If so it reads up to the closing }, if not, it reads the whole
+% line.  Whatever was read is then fed to the next control sequence
+% as an argument (by \parsebrace or \parsearg).
+% 
+\def\braceorline#1{\let\macnamexxx=#1\futurelet\nchar\braceorlinexxx}
+\def\braceorlinexxx{%
+  \ifx\nchar\bgroup\else
+    \expandafter\parsearg
+  \fi \macnamexxx}
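+
+% So a one-argument Texinfo macro (say, a made-up @mymac) can be called
+% either as
+%   @mymac{some argument}
+% or as
+%   @mymac some argument running to the end of the line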
+
+
+% @alias.
+% We need some trickery to remove the optional spaces around the equal
+% sign.  Make them active and then expand them all to nothing.
+%
+\def\alias{\parseargusing\obeyspaces\aliasxxx}
+\def\aliasxxx #1{\aliasyyy#1\relax}
+\def\aliasyyy #1=#2\relax{%
+  {%
+    \expandafter\let\obeyedspace=\empty
+    \addtomacrolist{#1}%
+    \xdef\next{\global\let\makecsname{#1}=\makecsname{#2}}%
+  }%
+  \next
+}
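+
+% For example (made-up alias name):
+%   @alias warn = strong
+% makes @warn behave exactly like @strong; spaces around the equal sign
+% are allowed and are discarded by the \obeyspaces trick above.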
+
+
+\message{cross references,}
+
+\newwrite\auxfile
+\newif\ifhavexrefs    % True if xref values are known.
+\newif\ifwarnedxrefs  % True if we warned once that they aren't known.
+
+% @inforef is relatively simple.
+\def\inforef #1{\inforefzzz #1,,,,**}
+\def\inforefzzz #1,#2,#3,#4**{%
+  \putwordSee{} \putwordInfo{} \putwordfile{} \file{\ignorespaces #3{}},
+  node \samp{\ignorespaces#1{}}}
+
+% @node's only job in TeX is to define \lastnode, which is used in
+% cross-references.  The @node line might or might not have commas, and
+% might or might not have spaces before the first comma, like:
+% @node foo , bar , ...
+% We don't want such trailing spaces in the node name.
+%
+\parseargdef\node{\checkenv{}\donode #1 ,\finishnodeparse}
+%
+% also remove a trailing comma, in case of something like this:
+% @node Help-Cross,  ,  , Cross-refs
+\def\donode#1 ,#2\finishnodeparse{\dodonode #1,\finishnodeparse}
+\def\dodonode#1,#2\finishnodeparse{\gdef\lastnode{#1}}
+
+\let\nwnode=\node
+\let\lastnode=\empty
+
+% Write a cross-reference definition for the current node.  #1 is the
+% type (Ynumbered, Yappendix, Ynothing).
+%
+\def\donoderef#1{%
+  \ifx\lastnode\empty\else
+    \setref{\lastnode}{#1}%
+    \global\let\lastnode=\empty
+  \fi
+}
+
+% @anchor{NAME} -- define xref target at arbitrary point.
+%
+\newcount\savesfregister
+%
+\def\savesf{\relax \ifhmode \savesfregister=\spacefactor \fi}
+\def\restoresf{\relax \ifhmode \spacefactor=\savesfregister \fi}
+\def\anchor#1{\savesf \setref{#1}{Ynothing}\restoresf \ignorespaces}
+
+% \setref{NAME}{SNT} defines a cross-reference point NAME (a node or an
+% anchor), which consists of three parts:
+% 1) NAME-title - the current sectioning name taken from \lastsection,
+%                 or the anchor name.
+% 2) NAME-snt   - section number and type, passed as the SNT arg, or
+%                 empty for anchors.
+% 3) NAME-pg    - the page number.
+%
+% This is called from \donoderef, \anchor, and \dofloat.  In the case of
+% floats, there is an additional part, which is not written here:
+% 4) NAME-lof   - the text as it should appear in a @listoffloats.
+%
+\def\setref#1#2{%
+  \pdfmkdest{#1}%
+  \iflinks
+    {%
+      \atdummies  % preserve commands, but don't expand them
+      \edef\writexrdef##1##2{%
+	\write\auxfile{@xrdef{#1-% #1 of \setref, expanded by the \edef
+	  ##1}{##2}}% these are parameters of \writexrdef
+      }%
+      \toks0 = \expandafter{\lastsection}%
+      \immediate \writexrdef{title}{\the\toks0 }%
+      \immediate \writexrdef{snt}{\csname #2\endcsname}% \Ynumbered etc.
+      \safewhatsit{\writexrdef{pg}{\folio}}% will be written later, at \shipout
+    }%
+  \fi
+}
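+
+% For a node or anchor named "Some Node" with SNT \Ynumbered, the aux
+% file ends up with lines roughly like
+%   @xrdef{Some Node-title}{<current section title>}
+%   @xrdef{Some Node-snt}{<section number and type>}
+%   @xrdef{Some Node-pg}{<page number>}
+% (the -pg entry is deferred via \safewhatsit so it records the page the
+% text finally lands on).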
+
+% @xrefautosectiontitle on|off says whether @section(ing) names are used
+% automatically in xrefs, if the third arg is not explicitly specified.
+% This was provided as a "secret" @set xref-automatic-section-title
+% variable, now it's official.
+% 
+\parseargdef\xrefautomaticsectiontitle{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETxref-automatic-section-title\endcsname
+      = \empty
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETxref-automatic-section-title\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @xrefautomaticsectiontitle value `\temp',
+                must be on|off}%
+  \fi\fi
+}
+
+% 
+% @xref, @pxref, and @ref generate cross-references.  For \xrefX, #1 is
+% the node name, #2 the name of the Info cross-reference, #3 the printed
+% node name, #4 the name of the Info file, #5 the name of the printed
+% manual.  All but the node name can be omitted.
+%
+\def\pxref#1{\putwordsee{} \xrefX[#1,,,,,,,]}
+\def\xref#1{\putwordSee{} \xrefX[#1,,,,,,,]}
+\def\ref#1{\xrefX[#1,,,,,,,]}
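+
+% For illustration, a full five-argument cross reference might read
+%   @xref{Some Node, Cross Ref Name, Printed Node Title, some-info-file, The Other Manual}
+% while the common case is simply @xref{Some Node} or @pxref{Some Node}.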
+%
+\newbox\toprefbox
+\newbox\printedrefnamebox
+\newbox\infofilenamebox
+\newbox\printedmanualbox
+%
+\def\xrefX[#1,#2,#3,#4,#5,#6]{\begingroup
+  \unsepspaces
+  %
+  % Get args without leading/trailing spaces.
+  \def\printedrefname{\ignorespaces #3}%
+  \setbox\printedrefnamebox = \hbox{\printedrefname\unskip}%
+  %
+  \def\infofilename{\ignorespaces #4}%
+  \setbox\infofilenamebox = \hbox{\infofilename\unskip}%
+  %
+  \def\printedmanual{\ignorespaces #5}%
+  \setbox\printedmanualbox  = \hbox{\printedmanual\unskip}%
+  %
+  % If the printed reference name (arg #3) was not explicitly given in
+  % the @xref, figure out what we want to use.
+  \ifdim \wd\printedrefnamebox = 0pt
+    % No printed node name was explicitly given.
+    \expandafter\ifx\csname SETxref-automatic-section-title\endcsname \relax
+      % Not auto section-title: use node name inside the square brackets.
+      \def\printedrefname{\ignorespaces #1}%
+    \else
+      % Auto section-title: use chapter/section title inside
+      % the square brackets if we have it.
+      \ifdim \wd\printedmanualbox > 0pt
+        % It is in another manual, so we don't have it; use node name.
+        \def\printedrefname{\ignorespaces #1}%
+      \else
+        \ifhavexrefs
+          % We (should) know the real title if we have the xref values.
+          \def\printedrefname{\refx{#1-title}{}}%
+        \else
+          % Otherwise just copy the Info node name.
+          \def\printedrefname{\ignorespaces #1}%
+        \fi%
+      \fi
+    \fi
+  \fi
+  %
+  % Make link in pdf output.
+  \ifpdf
+    {\indexnofonts
+     \turnoffactive
+     \makevalueexpandable
+     % This expands tokens, so do it after making catcode changes, so _
+     % etc. don't get their TeX definitions.  This ignores all spaces in
+     % #4, including (wrongly) those in the middle of the filename.
+     \getfilename{#4}%
+     %
+     % This (wrongly) does not take account of leading or trailing
+     % spaces in #1, which should be ignored.
+     \edef\pdfxrefdest{#1}%
+     \ifx\pdfxrefdest\empty
+       \def\pdfxrefdest{Top}% no empty targets
+     \else
+       \txiescapepdf\pdfxrefdest  % escape PDF special chars
+     \fi
+     %
+     \leavevmode
+     \startlink attr{/Border [0 0 0]}%
+     \ifnum\filenamelength>0
+       goto file{\the\filename.pdf} name{\pdfxrefdest}%
+     \else
+       goto name{\pdfmkpgn{\pdfxrefdest}}%
+     \fi
+    }%
+    \setcolor{\linkcolor}%
+  \fi
+  %
+  % Float references are printed completely differently: "Figure 1.2"
+  % instead of "[somenode], p.3".  We distinguish them by the
+  % LABEL-title being set to a magic string.
+  {%
+    % Have to otherify everything special to allow the \csname to
+    % include an _ in the xref name, etc.
+    \indexnofonts
+    \turnoffactive
+    \expandafter\global\expandafter\let\expandafter\Xthisreftitle
+      \csname XR#1-title\endcsname
+  }%
+  \iffloat\Xthisreftitle
+    % If the user specified the print name (third arg) to the ref,
+    % print it instead of our usual "Figure 1.2".
+    \ifdim\wd\printedrefnamebox = 0pt
+      \refx{#1-snt}{}%
+    \else
+      \printedrefname
+    \fi
+    %
+    % If the user also gave the printed manual name (fifth arg), append
+    % "in MANUALNAME".
+    \ifdim \wd\printedmanualbox > 0pt
+      \space \putwordin{} \cite{\printedmanual}%
+    \fi
+  \else
+    % node/anchor (non-float) references.
+    % 
+    % If we use \unhbox to print the node names, TeX does not insert
+    % empty discretionaries after hyphens, which means that it will not
+    % find a line break at a hyphen in node names.  Since some manuals
+    % are best written with fairly long node names, containing hyphens,
+    % this is a loss.  Therefore, we give the text of the node name
+    % again, so it is as if TeX is seeing it for the first time.
+    % 
+    \ifdim \wd\printedmanualbox > 0pt
+      % Cross-manual reference with a printed manual name.
+      % 
+      \crossmanualxref{\cite{\printedmanual\unskip}}%
+    %
+    \else\ifdim \wd\infofilenamebox > 0pt
+      % Cross-manual reference with only an info filename (arg 4), no
+      % printed manual name (arg 5).  This is essentially the same as
+      % the case above; we output the filename, since we have nothing else.
+      % 
+      \crossmanualxref{\code{\infofilename\unskip}}%
+    %
+    \else
+      % Reference within this manual.
+      %
+      % _ (for example) has to be the character _ for the purposes of the
+      % control sequence corresponding to the node, but it has to expand
+      % into the usual \leavevmode...\vrule stuff for purposes of
+      % printing. So we \turnoffactive for the \refx-snt, back on for the
+      % printing, back off for the \refx-pg.
+      {\turnoffactive
+       % Only output a following space if the -snt ref is nonempty; for
+       % @unnumbered and @anchor, it won't be.
+       \setbox2 = \hbox{\ignorespaces \refx{#1-snt}{}}%
+       \ifdim \wd2 > 0pt \refx{#1-snt}\space\fi
+      }%
+      % output the `[mynode]' via the macro below so it can be overridden.
+      \xrefprintnodename\printedrefname
+      %
+      % But we always want a comma and a space:
+      ,\space
+      %
+      % output the `page 3'.
+      \turnoffactive \putwordpage\tie\refx{#1-pg}{}%
+    \fi\fi
+  \fi
+  \endlink
+\endgroup}
+
+% Output a cross-manual xref to #1.  Used just above (twice).
+% 
+% Only include the text "Section ``foo'' in" if the foo is neither
+% missing nor Top.  Thus, @xref{,,,foo,The Foo Manual} outputs simply
+% "see The Foo Manual", the idea being to refer to the whole manual.
+% 
+% But, this being TeX, we can't easily compare our node name against the
+% string "Top" while ignoring the possible spaces before and after in
+% the input.  By adding the arbitrary 7sp below, we make it much less
+% likely that a real node name would have the same width as "Top" (e.g.,
+% in a monospaced font).  Hopefully it will never happen in practice.
+% 
+% For the same basic reason, we retypeset the "Top" at every
+% reference, since the current font is indeterminate.
+% 
+\def\crossmanualxref#1{%
+  \setbox\toprefbox = \hbox{Top\kern7sp}%
+  \setbox2 = \hbox{\ignorespaces \printedrefname \unskip \kern7sp}%
+  \ifdim \wd2 > 7sp  % nonempty?
+    \ifdim \wd2 = \wd\toprefbox \else  % same as Top?
+      \putwordSection{} ``\printedrefname'' \putwordin{}\space
+    \fi
+  \fi
+  #1%
+}
+
+% This macro is called from \xrefX for the `[nodename]' part of xref
+% output.  It's a separate macro only so it can be changed more easily,
+% since square brackets don't work well in some documents.  Particularly
+% one that Bob is working on :).
+%
+\def\xrefprintnodename#1{[#1]}
+
+% Things referred to by \setref.
+%
+\def\Ynothing{}
+\def\Yomitfromtoc{}
+\def\Ynumbered{%
+  \ifnum\secno=0
+    \putwordChapter@tie \the\chapno
+  \else \ifnum\subsecno=0
+    \putwordSection@tie \the\chapno.\the\secno
+  \else \ifnum\subsubsecno=0
+    \putwordSection@tie \the\chapno.\the\secno.\the\subsecno
+  \else
+    \putwordSection@tie \the\chapno.\the\secno.\the\subsecno.\the\subsubsecno
+  \fi\fi\fi
+}
+\def\Yappendix{%
+  \ifnum\secno=0
+     \putwordAppendix@tie @char\the\appendixno{}%
+  \else \ifnum\subsecno=0
+     \putwordSection@tie @char\the\appendixno.\the\secno
+  \else \ifnum\subsubsecno=0
+    \putwordSection@tie @char\the\appendixno.\the\secno.\the\subsecno
+  \else
+    \putwordSection@tie
+      @char\the\appendixno.\the\secno.\the\subsecno.\the\subsubsecno
+  \fi\fi\fi
+}
+
+% Define \refx{NAME}{SUFFIX} to reference a cross-reference string named NAME.
+% If its value is nonempty, SUFFIX is output afterward.
+%
+\def\refx#1#2{%
+  {%
+    \indexnofonts
+    \otherbackslash
+    \expandafter\global\expandafter\let\expandafter\thisrefX
+      \csname XR#1\endcsname
+  }%
+  \ifx\thisrefX\relax
+    % If not defined, say something at least.
+    \angleleft un\-de\-fined\angleright
+    \iflinks
+      \ifhavexrefs
+        {\toks0 = {#1}% avoid expansion of possibly-complex value
+         \message{\linenumber Undefined cross reference `\the\toks0'.}}%
+      \else
+        \ifwarnedxrefs\else
+          \global\warnedxrefstrue
+          \message{Cross reference values unknown; you must run TeX again.}%
+        \fi
+      \fi
+    \fi
+  \else
+    % It's defined, so just use it.
+    \thisrefX
+  \fi
+  #2% Output the suffix in any case.
+}
+
+% This is the macro invoked by entries in the aux file.  Usually it's
+% just a \def (we prepend XR to the control sequence name to avoid
+% collisions).  But if this is a float type, we have more work to do.
+%
+\def\xrdef#1#2{%
+  {% The node name might contain 8-bit characters, which in our current
+   % implementation are changed to commands like @'e.  Don't let these
+   % mess up the control sequence name.
+    \indexnofonts
+    \turnoffactive
+    \xdef\safexrefname{#1}%
+  }%
+  %
+  \expandafter\gdef\csname XR\safexrefname\endcsname{#2}% remember this xref
+  %
+  % Was that xref control sequence that we just defined for a float?
+  \expandafter\iffloat\csname XR\safexrefname\endcsname
+    % it was a float, and we have the (safe) float type in \iffloattype.
+    \expandafter\let\expandafter\floatlist
+      \csname floatlist\iffloattype\endcsname
+    %
+    % Is this the first time we've seen this float type?
+    \expandafter\ifx\floatlist\relax
+      \toks0 = {\do}% yes, so just \do
+    \else
+      % had it before, so preserve previous elements in list.
+      \toks0 = \expandafter{\floatlist\do}%
+    \fi
+    %
+    % Remember this xref in the control sequence \floatlistFLOATTYPE,
+    % for later use in \listoffloats.
+    \expandafter\xdef\csname floatlist\iffloattype\endcsname{\the\toks0
+      {\safexrefname}}%
+  \fi
+}
+
+% Read the last existing aux file, if any.  No error if none exists.
+%
+\def\tryauxfile{%
+  \openin 1 \jobname.aux
+  \ifeof 1 \else
+    \readdatafile{aux}%
+    \global\havexrefstrue
+  \fi
+  \closein 1
+}
+
+\def\setupdatafile{%
+  \catcode`\^^@=\other
+  \catcode`\^^A=\other
+  \catcode`\^^B=\other
+  \catcode`\^^C=\other
+  \catcode`\^^D=\other
+  \catcode`\^^E=\other
+  \catcode`\^^F=\other
+  \catcode`\^^G=\other
+  \catcode`\^^H=\other
+  \catcode`\^^K=\other
+  \catcode`\^^L=\other
+  \catcode`\^^N=\other
+  \catcode`\^^P=\other
+  \catcode`\^^Q=\other
+  \catcode`\^^R=\other
+  \catcode`\^^S=\other
+  \catcode`\^^T=\other
+  \catcode`\^^U=\other
+  \catcode`\^^V=\other
+  \catcode`\^^W=\other
+  \catcode`\^^X=\other
+  \catcode`\^^Z=\other
+  \catcode`\^^[=\other
+  \catcode`\^^\=\other
+  \catcode`\^^]=\other
+  \catcode`\^^^=\other
+  \catcode`\^^_=\other
+  % It was suggested to set the catcode of ^ to 7, which would allow ^^e4 etc.
+  % in xref tags, i.e., node names.  But since ^^e4 notation isn't
+  % supported in the main text, it doesn't seem desirable.  Furthermore,
+  % that is not enough: for node names that actually contain a ^
+  % character, we would end up writing a line like this: 'xrdef {'hat
+  % b-title}{'hat b} and \xrdef does a \csname...\endcsname on the first
+  % argument, and \hat is not an expandable control sequence.  It could
+  % all be worked out, but why?  Either we support ^^ or we don't.
+  %
+  % The other change necessary for this was to define \auxhat:
+  % \def\auxhat{\def^{'hat }}% extra space so ok if followed by letter
+  % and then to call \auxhat in \setq.
+  %
+  \catcode`\^=\other
+  %
+  % Special characters.  Should be turned off anyway, but...
+  \catcode`\~=\other
+  \catcode`\[=\other
+  \catcode`\]=\other
+  \catcode`\"=\other
+  \catcode`\_=\other
+  \catcode`\|=\other
+  \catcode`\<=\other
+  \catcode`\>=\other
+  \catcode`\$=\other
+  \catcode`\#=\other
+  \catcode`\&=\other
+  \catcode`\%=\other
+  \catcode`+=\other % avoid \+ for paranoia even though we've turned it off
+  %
+  % This is to support \ in node names and titles, since the \
+  % characters end up in a \csname.  It's easier than
+  % leaving it active and making its active definition an actual \
+  % character.  What I don't understand is why it works in the *value*
+  % of the xrdef.  Seems like it should be a catcode12 \, and that
+  % should not typeset properly.  But it works, so I'm moving on for
+  % now.  --karl, 15jan04.
+  \catcode`\\=\other
+  %
+  % Make the characters 128-255 be printing characters.
+  {%
+    \count1=128
+    \def\loop{%
+      \catcode\count1=\other
+      \advance\count1 by 1
+      \ifnum \count1<256 \loop \fi
+    }%
+  }%
+  %
+  % @ is our escape character in .aux files, and we need braces.
+  \catcode`\{=1
+  \catcode`\}=2
+  \catcode`\@=0
+}
+
+\def\readdatafile#1{%
+\begingroup
+  \setupdatafile
+  \input\jobname.#1
+\endgroup}
+
+
+\message{insertions,}
+% including footnotes.
+
+\newcount \footnoteno
+
+% The trailing space in the following definition for supereject is
+% vital for proper filling; pages come out unaligned when you do a
+% pagealignmacro call if that space before the closing brace is
+% removed. (Generally, numeric constants should always be followed by a
+% space to prevent strange expansion errors.)
+\def\supereject{\par\penalty -20000\footnoteno =0 }
+
+% @footnotestyle is meaningful for Info output only.
+\let\footnotestyle=\comment
+
+{\catcode `\@=11
+%
+% Auto-number footnotes.  Otherwise like plain.
+\gdef\footnote{%
+  \let\indent=\ptexindent
+  \let\noindent=\ptexnoindent
+  \global\advance\footnoteno by \@ne
+  \edef\thisfootno{$^{\the\footnoteno}$}%
+  %
+  % In case the footnote comes at the end of a sentence, preserve the
+  % extra spacing after we do the footnote number.
+  \let\@sf\empty
+  \ifhmode\edef\@sf{\spacefactor\the\spacefactor}\ptexslash\fi
+  %
+  % Remove inadvertent blank space before typesetting the footnote number.
+  \unskip
+  \thisfootno\@sf
+  \dofootnote
+}%
+
+% Don't bother with the trickery in plain.tex to not require the
+% footnote text as a parameter.  Our footnotes don't need to be so general.
+%
+% Oh yes, they do; otherwise, @ifset (and anything else that uses
+% \parseargline) fails inside footnotes because the tokens are fixed when
+% the footnote is read.  --karl, 16nov96.
+%
+\gdef\dofootnote{%
+  \insert\footins\bgroup
+  % We want to typeset this text as a normal paragraph, even if the
+  % footnote reference occurs in (for example) a display environment.
+  % So reset some parameters.
+  \hsize=\pagewidth
+  \interlinepenalty\interfootnotelinepenalty
+  \splittopskip\ht\strutbox % top baseline for broken footnotes
+  \splitmaxdepth\dp\strutbox
+  \floatingpenalty\@MM
+  \leftskip\z@skip
+  \rightskip\z@skip
+  \spaceskip\z@skip
+  \xspaceskip\z@skip
+  \parindent\defaultparindent
+  %
+  \smallfonts \rm
+  %
+  % Because we use hanging indentation in footnotes, a @noindent appears
+  % to exdent this text, so make it be a no-op.  makeinfo does not use
+  % hanging indentation so @noindent can still be needed within footnote
+  % text after an @example or the like (not that this is good style).
+  \let\noindent = \relax
+  %
+  % Hang the footnote text off the number.  Use \everypar in case the
+  % footnote extends for more than one paragraph.
+  \everypar = {\hang}%
+  \textindent{\thisfootno}%
+  %
+  % Don't crash into the line above the footnote text.  Since this
+  % expands into a box, it must come within the paragraph, lest it
+  % provide a place where TeX can split the footnote.
+  \footstrut
+  %
+  % Invoke rest of plain TeX footnote routine.
+  \futurelet\next\fo@t
+}
+}%end \catcode `\@=11
+
+% In case a @footnote appears in a vbox, save the footnote text and create
+% the real \insert just after the vbox finished.  Otherwise, the insertion
+% would be lost.
+% Similarly, if a @footnote appears inside an alignment, save the footnote
+% text to a box and make the \insert when a row of the table is finished.
+% And the same can be done for other insert classes.  --kasal, 16nov03.
+
+% Replace the \insert primitive by a cheating macro.
+% Deeper inside, just make sure that the saved insertions are not spilled
+% out prematurely.
+%
+\def\startsavinginserts{%
+  \ifx \insert\ptexinsert
+    \let\insert\saveinsert
+  \else
+    \let\checkinserts\relax
+  \fi
+}
+
+% This \insert replacement works for both \insert\footins{foo} and
+% \insert\footins\bgroup foo\egroup, but it doesn't work for \insert27{foo}.
+%
+\def\saveinsert#1{%
+  \edef\next{\noexpand\savetobox \makeSAVEname#1}%
+  \afterassignment\next
+  % swallow the left brace
+  \let\temp =
+}
+\def\makeSAVEname#1{\makecsname{SAVE\expandafter\gobble\string#1}}
+\def\savetobox#1{\global\setbox#1 = \vbox\bgroup \unvbox#1}
+
+\def\checksaveins#1{\ifvoid#1\else \placesaveins#1\fi}
+
+\def\placesaveins#1{%
+  \ptexinsert \csname\expandafter\gobblesave\string#1\endcsname
+    {\box#1}%
+}
+
+% eat @SAVE -- beware, all of them have catcode \other:
+{
+  \def\dospecials{\do S\do A\do V\do E} \uncatcodespecials  %  ;-)
+  \gdef\gobblesave @SAVE{}
+}
+
+% initialization:
+\def\newsaveins #1{%
+  \edef\next{\noexpand\newsaveinsX \makeSAVEname#1}%
+  \next
+}
+\def\newsaveinsX #1{%
+  \csname newbox\endcsname #1%
+  \expandafter\def\expandafter\checkinserts\expandafter{\checkinserts
+    \checksaveins #1}%
+}
+
+% initialize:
+\let\checkinserts\empty
+\newsaveins\footins
+\newsaveins\margin
+
+
+% @image.  We use the macros from epsf.tex to support this.
+% If epsf.tex is not installed and @image is used, we complain.
+%
+% Check for and read epsf.tex up front.  If we read it only at @image
+% time, we might be inside a group, and then its definitions would get
+% undone and the next image would fail.
+\openin 1 = epsf.tex
+\ifeof 1 \else
+  % Do not bother showing banner with epsf.tex v2.7k (available in
+  % doc/epsf.tex and on ctan).
+  \def\epsfannounce{\toks0 = }%
+  \input epsf.tex
+\fi
+\closein 1
+%
+% We will only complain once about lack of epsf.tex.
+\newif\ifwarnednoepsf
+\newhelp\noepsfhelp{epsf.tex must be installed for images to
+  work.  It is also included in the Texinfo distribution, or you can get
+  it from ftp://tug.org/tex/epsf.tex.}
+%
+\def\image#1{%
+  \ifx\epsfbox\thisisundefined
+    \ifwarnednoepsf \else
+      \errhelp = \noepsfhelp
+      \errmessage{epsf.tex not found, images will be ignored}%
+      \global\warnednoepsftrue
+    \fi
+  \else
+    \imagexxx #1,,,,,\finish
+  \fi
+}
+%
+% Arguments to @image:
+% #1 is (mandatory) image filename; we tack on .eps extension.
+% #2 is (optional) width, #3 is (optional) height.
+% #4 is (ignored optional) html alt text.
+% #5 is (ignored optional) extension.
+% #6 is just the usual extra ignored arg for parsing stuff.
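+%
+% For example (made-up file name):
+%   @image{figures/flowchart,3in,,flow chart of the build,.png}
+% Only the file name is mandatory; the other arguments may be left
+% empty, as in @image{figures/flowchart}.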
+\newif\ifimagevmode
+\def\imagexxx#1,#2,#3,#4,#5,#6\finish{\begingroup
+  \catcode`\^^M = 5     % in case we're inside an example
+  \normalturnoffactive  % allow _ et al. in names
+  % If the image is by itself, center it.
+  \ifvmode
+    \imagevmodetrue
+  \else \ifx\centersub\centerV
+    % for @center @image, we need a vbox so we can have our vertical space
+    \imagevmodetrue
+    \vbox\bgroup % vbox has better behavior than vtop here
+  \fi\fi
+  %
+  \ifimagevmode
+    \nobreak\medskip
+    % Usually we'll have text after the image which will insert
+    % \parskip glue, so insert it here too to equalize the space
+    % above and below.
+    \nobreak\vskip\parskip
+    \nobreak
+  \fi
+  %
+  % Leave vertical mode so that indentation from an enclosing
+  %  environment such as @quotation is respected.
+  % However, if we're at the top level, we don't want the
+  %  normal paragraph indentation.
+  % On the other hand, if we are in the case of @center @image, we don't
+  %  want to start a paragraph, which would create an \hsize-width box and
+  %  eradicate the centering.
+  \ifx\centersub\centerV\else \noindent \fi
+  %
+  % Output the image.
+  \ifpdf
+    \dopdfimage{#1}{#2}{#3}%
+  \else
+    % \epsfbox itself resets \epsf?size at each figure.
+    \setbox0 = \hbox{\ignorespaces #2}\ifdim\wd0 > 0pt \epsfxsize=#2\relax \fi
+    \setbox0 = \hbox{\ignorespaces #3}\ifdim\wd0 > 0pt \epsfysize=#3\relax \fi
+    \epsfbox{#1.eps}%
+  \fi
+  %
+  \ifimagevmode
+    \medskip  % space after a standalone image
+  \fi  
+  \ifx\centersub\centerV \egroup \fi
+\endgroup}
+
+
+% @float FLOATTYPE,LABEL,LOC ... @end float for displayed figures, tables,
+% etc.  We don't actually implement floating yet, we always include the
+% float "here".  But it seemed the best name for the future.
+%
+\envparseargdef\float{\eatcommaspace\eatcommaspace\dofloat#1, , ,\finish}
+
+% There may be a space before second and/or third parameter; delete it.
+\def\eatcommaspace#1, {#1,}
+
+% #1 is the optional FLOATTYPE, the text label for this float, typically
+% "Figure", "Table", "Example", etc.  Can't contain commas.  If omitted,
+% this float will not be numbered and cannot be referred to.
+%
+% #2 is the optional xref label.  Also must be present for the float to
+% be referable.
+%
+% #3 is the optional positioning argument; for now, it is ignored.  It
+% will somehow specify the positions allowed to float to (here, top, bottom).
+%
+% We keep a separate counter for each FLOATTYPE, which we reset at each
+% chapter-level command.
+\let\resetallfloatnos=\empty
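+
+% For illustration, a labeled float in a Texinfo source (made-up names):
+%
+%   @float Figure,fig:overview
+%   @image{figures/overview}
+%   @caption{Overview of the build pipeline.}
+%   @shortcaption{Build overview.}
+%   @end float
+%
+% @ref{fig:overview} then prints something like "Figure 1.1", and
+% @listoffloats Figure collects all such floats, as implemented below.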
+%
+\def\dofloat#1,#2,#3,#4\finish{%
+  \let\thiscaption=\empty
+  \let\thisshortcaption=\empty
+  %
+  % don't lose footnotes inside @float.
+  %
+  % BEWARE: when the floats start floating, we have to issue a warning whenever an
+  % insert appears inside a float which could possibly float. --kasal, 26may04
+  %
+  \startsavinginserts
+  %
+  % We can't be used inside a paragraph.
+  \par
+  %
+  \vtop\bgroup
+    \def\floattype{#1}%
+    \def\floatlabel{#2}%
+    \def\floatloc{#3}% we do nothing with this yet.
+    %
+    \ifx\floattype\empty
+      \let\safefloattype=\empty
+    \else
+      {%
+        % the floattype might have accents or other special characters,
+        % but we need to use it in a control sequence name.
+        \indexnofonts
+        \turnoffactive
+        \xdef\safefloattype{\floattype}%
+      }%
+    \fi
+    %
+    % If label is given but no type, we handle that as the empty type.
+    \ifx\floatlabel\empty \else
+      % We want each FLOATTYPE to be numbered separately (Figure 1,
+      % Table 1, Figure 2, ...).  (And if no label, no number.)
+      %
+      \expandafter\getfloatno\csname\safefloattype floatno\endcsname
+      \global\advance\floatno by 1
+      %
+      {%
+        % This magic value for \lastsection is output by \setref as the
+        % XREFLABEL-title value.  \xrefX uses it to distinguish float
+        % labels (which have a completely different output format) from
+        % node and anchor labels.  And \xrdef uses it to construct the
+        % lists of floats.
+        %
+        \edef\lastsection{\floatmagic=\safefloattype}%
+        \setref{\floatlabel}{Yfloat}%
+      }%
+    \fi
+    %
+    % start with \parskip glue, I guess.
+    \vskip\parskip
+    %
+    % Don't suppress indentation if a float happens to start a section.
+    \restorefirstparagraphindent
+}
+
+% we have these possibilities:
+% @float Foo,lbl & @caption{Cap}: Foo 1.1: Cap
+% @float Foo,lbl & no caption:    Foo 1.1
+% @float Foo & @caption{Cap}:     Foo: Cap
+% @float Foo & no caption:        Foo
+% @float ,lbl & Caption{Cap}:     1.1: Cap
+% @float ,lbl & no caption:       1.1
+% @float & @caption{Cap}:         Cap
+% @float & no caption:
+%
+\def\Efloat{%
+    \let\floatident = \empty
+    %
+    % In all cases, if we have a float type, it comes first.
+    \ifx\floattype\empty \else \def\floatident{\floattype}\fi
+    %
+    % If we have an xref label, the number comes next.
+    \ifx\floatlabel\empty \else
+      \ifx\floattype\empty \else % if also had float type, need tie first.
+        \appendtomacro\floatident{\tie}%
+      \fi
+      % the number.
+      \appendtomacro\floatident{\chaplevelprefix\the\floatno}%
+    \fi
+    %
+    % Start the printed caption with what we've constructed in
+    % \floatident, but keep it separate; we need \floatident again.
+    \let\captionline = \floatident
+    %
+    \ifx\thiscaption\empty \else
+      \ifx\floatident\empty \else
+	\appendtomacro\captionline{: }% had ident, so need a colon between
+      \fi
+      %
+      % caption text.
+      \appendtomacro\captionline{\scanexp\thiscaption}%
+    \fi
+    %
+    % If we have anything to print, print it, with space before.
+    % Eventually this needs to become an \insert.
+    \ifx\captionline\empty \else
+      \vskip.5\parskip
+      \captionline
+      %
+      % Space below caption.
+      \vskip\parskip
+    \fi
+    %
+    % If have an xref label, write the list of floats info.  Do this
+    % after the caption, to avoid chance of it being a breakpoint.
+    \ifx\floatlabel\empty \else
+      % Write the text that goes in the lof to the aux file as
+      % \floatlabel-lof.  Besides \floatident, we include the short
+      % caption if specified, else the full caption if specified, else nothing.
+      {%
+        \atdummies
+        %
+        % since we read the caption text in the macro world, where ^^M
+        % is turned into a normal character, we have to scan it back, so
+        % we don't write the literal three characters "^^M" into the aux file.
+	\scanexp{%
+	  \xdef\noexpand\gtemp{%
+	    \ifx\thisshortcaption\empty
+	      \thiscaption
+	    \else
+	      \thisshortcaption
+	    \fi
+	  }%
+	}%
+        \immediate\write\auxfile{@xrdef{\floatlabel-lof}{\floatident
+	  \ifx\gtemp\empty \else : \gtemp \fi}}%
+      }%
+    \fi
+  \egroup  % end of \vtop
+  %
+  % place the captured inserts
+  %
+  % BEWARE: when the floats start floating, we have to issue warning
+  % whenever an insert appears inside a float which could possibly
+  % float. --kasal, 26may04
+  %
+  \checkinserts
+}
+
+% Append the tokens #2 to the definition of macro #1, not expanding either.
+%
+\def\appendtomacro#1#2{%
+  \expandafter\def\expandafter#1\expandafter{#1#2}%
+}
+
+% @caption, @shortcaption
+%
+\def\caption{\docaption\thiscaption}
+\def\shortcaption{\docaption\thisshortcaption}
+\def\docaption{\checkenv\float \bgroup\scanargctxt\defcaption}
+\def\defcaption#1#2{\egroup \def#1{#2}}
+
+% The parameter is the control sequence identifying the counter we are
+% going to use.  Create it if it doesn't exist and assign it to \floatno.
+\def\getfloatno#1{%
+  \ifx#1\relax
+      % Haven't seen this figure type before.
+      \csname newcount\endcsname #1%
+      %
+      % Remember to reset this floatno at the next chap.
+      \expandafter\gdef\expandafter\resetallfloatnos
+        \expandafter{\resetallfloatnos #1=0 }%
+  \fi
+  \let\floatno#1%
+}
+
+% \setref calls this to get the XREFLABEL-snt value.  We want an @xref
+% to the FLOATLABEL to expand to "Figure 3.1".  We call \setref when we
+% first read the @float command.
+%
+\def\Yfloat{\floattype@tie \chaplevelprefix\the\floatno}%
+
+% Magic string used for the XREFLABEL-title value, so \xrefX can
+% distinguish floats from other xref types.
+\def\floatmagic{!!float!!}
+
+% #1 is the control sequence we are passed; we expand into a conditional
+% which is true if #1 represents a float ref.  That is, the magic
+% \lastsection value which we \setref above.
+%
+\def\iffloat#1{\expandafter\doiffloat#1==\finish}
+%
+% #1 is (maybe) the \floatmagic string.  If so, #2 will be the
+% (safe) float type for this float.  We set \iffloattype to #2.
+%
+\def\doiffloat#1=#2=#3\finish{%
+  \def\temp{#1}%
+  \def\iffloattype{#2}%
+  \ifx\temp\floatmagic
+}
+
+% @listoffloats FLOATTYPE - print a list of floats like a table of contents.
+%
+\parseargdef\listoffloats{%
+  \def\floattype{#1}% floattype
+  {%
+    % the floattype might have accents or other special characters,
+    % but we need to use it in a control sequence name.
+    \indexnofonts
+    \turnoffactive
+    \xdef\safefloattype{\floattype}%
+  }%
+  %
+  % \xrdef saves the floats as a \do-list in \floatlistSAFEFLOATTYPE.
+  \expandafter\ifx\csname floatlist\safefloattype\endcsname \relax
+    \ifhavexrefs
+      % if the user said @listoffloats foo but never @float foo.
+      \message{\linenumber No `\safefloattype' floats to list.}%
+    \fi
+  \else
+    \begingroup
+      \leftskip=\tocindent  % indent these entries like a toc
+      \let\do=\listoffloatsdo
+      \csname floatlist\safefloattype\endcsname
+    \endgroup
+  \fi
+}
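+
+% For example, a manual that defines floats with
+%   @float Figure,fig:sample
+%   ...
+%   @end float
+% can print a table-of-contents-like list of all of them with
+%   @listoffloats Figure
+% (`Figure' and `fig:sample' are only illustrative names).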
+
+% This is called on each entry in a list of floats.  We're passed the
+% xref label, in the form LABEL-title, which is how we save it in the
+% aux file.  We strip off the -title and look up \XRLABEL-lof, which
+% has the text we're supposed to typeset here.
+%
+% Figures without xref labels will not be included in the list (since
+% they won't appear in the aux file).
+%
+\def\listoffloatsdo#1{\listoffloatsdoentry#1\finish}
+\def\listoffloatsdoentry#1-title\finish{{%
+  % Can't fully expand XR#1-lof because it can contain anything.  Just
+  % pass the control sequence.  On the other hand, XR#1-pg is just the
+  % page number, and we want to fully expand that so we can get a link
+  % in pdf output.
+  \toksA = \expandafter{\csname XR#1-lof\endcsname}%
+  %
+  % use the same \entry macro we use to generate the TOC and index.
+  \edef\writeentry{\noexpand\entry{\the\toksA}{\csname XR#1-pg\endcsname}}%
+  \writeentry
+}}
+
+
+\message{localization,}
+
+% For single-language documents, @documentlanguage is usually given very
+% early, just after @documentencoding.  Single argument is the language
+% (de) or locale (de_DE) abbreviation.
+%
+{
+  \catcode`\_ = \active
+  \globaldefs=1
+\parseargdef\documentlanguage{\begingroup
+  \let_=\normalunderscore  % normal _ character for filenames
+  \tex % read txi-??.tex file in plain TeX.
+    % Read the file by the name they passed if it exists.
+    \openin 1 txi-#1.tex
+    \ifeof 1
+      \documentlanguagetrywithoutunderscore{#1_\finish}%
+    \else
+      \globaldefs = 1  % everything in the txi-LL files needs to persist
+      \input txi-#1.tex
+    \fi
+    \closein 1
+  \endgroup % end raw TeX
+\endgroup}
+%
+% If they passed de_DE, and txi-de_DE.tex doesn't exist,
+% try txi-de.tex.
+%
+\gdef\documentlanguagetrywithoutunderscore#1_#2\finish{%
+  \openin 1 txi-#1.tex
+  \ifeof 1
+    \errhelp = \nolanghelp
+    \errmessage{Cannot read language file txi-#1.tex}%
+  \else
+    \globaldefs = 1  % everything in the txi-LL files needs to persist
+    \input txi-#1.tex
+  \fi
+  \closein 1
+}
+}% end of special _ catcode
+%
+\newhelp\nolanghelp{The given language definition file cannot be found or
+is empty.  Maybe you need to install it?  Putting it in the current
+directory should work if nowhere else does.}
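+
+% For example, a German manual would say
+%   @documentlanguage de
+% (or `de_DE' for a specific locale, illustratively); the txi-de.tex
+% file read here then supplies translated versions of the fixed strings
+% used in headings and cross references.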
+
+% This macro is called from txi-??.tex files; the first argument is the
+% \language name to set (without the "\lang@" prefix), the second and
+% third args are \{left,right}hyphenmin.
+%
+% The language names to pass are determined when the format is built.
+% See the etex.log file created at that time, e.g.,
+% /usr/local/texlive/2008/texmf-var/web2c/pdftex/etex.log.
+%
+% With TeX Live 2008, etex now includes hyphenation patterns for all
+% available languages.  This means we can support hyphenation in
+% Texinfo, at least to some extent.  (This still doesn't solve the
+% accented characters problem.)
+%
+\catcode`@=11
+\def\txisetlanguage#1#2#3{%
+  % do not set the language if the name is undefined in the current TeX.
+  \expandafter\ifx\csname lang@#1\endcsname \relax
+    \message{no patterns for #1}%
+  \else
+    \global\language = \csname lang@#1\endcsname
+  \fi
+  % but there is no harm in adjusting the hyphenmin values regardless.
+  \global\lefthyphenmin = #2\relax
+  \global\righthyphenmin = #3\relax
+}
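+
+% For instance, a txi-LL.tex file might contain a line such as
+%   \txisetlanguage{german}{2}{2}
+% to select the `german' hyphenation patterns (if the running format
+% knows them) and to set both hyphenmin values to 2.  (The pattern name
+% and the values here are only illustrative.)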
+
+% Helpers for encodings.
+% Set the catcode of characters 128 through 255 to the specified number.
+%
+\def\setnonasciicharscatcode#1{%
+   \count255=128
+   \loop\ifnum\count255<256
+      \global\catcode\count255=#1\relax
+      \advance\count255 by 1
+   \repeat
+}
+
+\def\setnonasciicharscatcodenonglobal#1{%
+   \count255=128
+   \loop\ifnum\count255<256
+      \catcode\count255=#1\relax
+      \advance\count255 by 1
+   \repeat
+}
+
+% @documentencoding sets the definition of non-ASCII characters
+% according to the specified encoding.
+%
+\parseargdef\documentencoding{%
+  % Encoding being declared for the document.
+  \def\declaredencoding{\csname #1.enc\endcsname}%
+  %
+  % Supported encodings: names converted to tokens in order to be able
+  % to compare them with \ifx.
+  \def\ascii{\csname US-ASCII.enc\endcsname}%
+  \def\latnine{\csname ISO-8859-15.enc\endcsname}%
+  \def\latone{\csname ISO-8859-1.enc\endcsname}%
+  \def\lattwo{\csname ISO-8859-2.enc\endcsname}%
+  \def\utfeight{\csname UTF-8.enc\endcsname}%
+  %
+  \ifx \declaredencoding \ascii
+     \asciichardefs
+  %
+  \else \ifx \declaredencoding \lattwo
+     \setnonasciicharscatcode\active
+     \lattwochardefs
+  %
+  \else \ifx \declaredencoding \latone
+     \setnonasciicharscatcode\active
+     \latonechardefs
+  %
+  \else \ifx \declaredencoding \latnine
+     \setnonasciicharscatcode\active
+     \latninechardefs
+  %
+  \else \ifx \declaredencoding \utfeight
+     \setnonasciicharscatcode\active
+     \utfeightchardefs
+  %
+  \else
+    \message{Unknown document encoding #1, ignoring.}%
+  %
+  \fi % utfeight
+  \fi % latnine
+  \fi % latone
+  \fi % lattwo
+  \fi % ascii
+}
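+
+% For example, a manual beginning with
+%   @documentencoding UTF-8
+% gets the bytes 128-255 made active and the \utfeightchardefs
+% definitions below, so multibyte UTF-8 sequences in the input are
+% decoded into the corresponding accent commands.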
+
+% A message to be logged when using a character that isn't available in
+% the default font encoding (OT1).
+%
+\def\missingcharmsg#1{\message{Character missing in OT1 encoding: #1.}}
+
+% Take account of \c (plain) vs. \, (Texinfo) difference.
+\def\cedilla#1{\ifx\c\ptexc\c{#1}\else\,{#1}\fi}
+
+% First, make active non-ASCII characters in order for them to be
+% correctly categorized when TeX reads the replacement text of
+% macros containing the character definitions.
+\setnonasciicharscatcode\active
+%
+% Latin1 (ISO-8859-1) character definitions.
+\def\latonechardefs{%
+  \gdef^^a0{\tie}
+  \gdef^^a1{\exclamdown}
+  \gdef^^a2{\missingcharmsg{CENT SIGN}}
+  \gdef^^a3{{\pounds}}
+  \gdef^^a4{\missingcharmsg{CURRENCY SIGN}}
+  \gdef^^a5{\missingcharmsg{YEN SIGN}}
+  \gdef^^a6{\missingcharmsg{BROKEN BAR}}
+  \gdef^^a7{\S}
+  \gdef^^a8{\"{}}
+  \gdef^^a9{\copyright}
+  \gdef^^aa{\ordf}
+  \gdef^^ab{\guillemetleft}
+  \gdef^^ac{$\lnot$}
+  \gdef^^ad{\-}
+  \gdef^^ae{\registeredsymbol}
+  \gdef^^af{\={}}
+  %
+  \gdef^^b0{\textdegree}
+  \gdef^^b1{$\pm$}
+  \gdef^^b2{$^2$}
+  \gdef^^b3{$^3$}
+  \gdef^^b4{\'{}}
+  \gdef^^b5{$\mu$}
+  \gdef^^b6{\P}
+  %
+  \gdef^^b7{$^.$}
+  \gdef^^b8{\cedilla\ }
+  \gdef^^b9{$^1$}
+  \gdef^^ba{\ordm}
+  %
+  \gdef^^bb{\guillemetright}
+  \gdef^^bc{$1\over4$}
+  \gdef^^bd{$1\over2$}
+  \gdef^^be{$3\over4$}
+  \gdef^^bf{\questiondown}
+  %
+  \gdef^^c0{\`A}
+  \gdef^^c1{\'A}
+  \gdef^^c2{\^A}
+  \gdef^^c3{\~A}
+  \gdef^^c4{\"A}
+  \gdef^^c5{\ringaccent A}
+  \gdef^^c6{\AE}
+  \gdef^^c7{\cedilla C}
+  \gdef^^c8{\`E}
+  \gdef^^c9{\'E}
+  \gdef^^ca{\^E}
+  \gdef^^cb{\"E}
+  \gdef^^cc{\`I}
+  \gdef^^cd{\'I}
+  \gdef^^ce{\^I}
+  \gdef^^cf{\"I}
+  %
+  \gdef^^d0{\DH}
+  \gdef^^d1{\~N}
+  \gdef^^d2{\`O}
+  \gdef^^d3{\'O}
+  \gdef^^d4{\^O}
+  \gdef^^d5{\~O}
+  \gdef^^d6{\"O}
+  \gdef^^d7{$\times$}
+  \gdef^^d8{\O}
+  \gdef^^d9{\`U}
+  \gdef^^da{\'U}
+  \gdef^^db{\^U}
+  \gdef^^dc{\"U}
+  \gdef^^dd{\'Y}
+  \gdef^^de{\TH}
+  \gdef^^df{\ss}
+  %
+  \gdef^^e0{\`a}
+  \gdef^^e1{\'a}
+  \gdef^^e2{\^a}
+  \gdef^^e3{\~a}
+  \gdef^^e4{\"a}
+  \gdef^^e5{\ringaccent a}
+  \gdef^^e6{\ae}
+  \gdef^^e7{\cedilla c}
+  \gdef^^e8{\`e}
+  \gdef^^e9{\'e}
+  \gdef^^ea{\^e}
+  \gdef^^eb{\"e}
+  \gdef^^ec{\`{\dotless i}}
+  \gdef^^ed{\'{\dotless i}}
+  \gdef^^ee{\^{\dotless i}}
+  \gdef^^ef{\"{\dotless i}}
+  %
+  \gdef^^f0{\dh}
+  \gdef^^f1{\~n}
+  \gdef^^f2{\`o}
+  \gdef^^f3{\'o}
+  \gdef^^f4{\^o}
+  \gdef^^f5{\~o}
+  \gdef^^f6{\"o}
+  \gdef^^f7{$\div$}
+  \gdef^^f8{\o}
+  \gdef^^f9{\`u}
+  \gdef^^fa{\'u}
+  \gdef^^fb{\^u}
+  \gdef^^fc{\"u}
+  \gdef^^fd{\'y}
+  \gdef^^fe{\th}
+  \gdef^^ff{\"y}
+}
+
+% Latin9 (ISO-8859-15) encoding character definitions.
+\def\latninechardefs{%
+  % Encoding is almost identical to Latin1.
+  \latonechardefs
+  %
+  \gdef^^a4{\euro}
+  \gdef^^a6{\v S}
+  \gdef^^a8{\v s}
+  \gdef^^b4{\v Z}
+  \gdef^^b8{\v z}
+  \gdef^^bc{\OE}
+  \gdef^^bd{\oe}
+  \gdef^^be{\"Y}
+}
+
+% Latin2 (ISO-8859-2) character definitions.
+\def\lattwochardefs{%
+  \gdef^^a0{\tie}
+  \gdef^^a1{\ogonek{A}}
+  \gdef^^a2{\u{}}
+  \gdef^^a3{\L}
+  \gdef^^a4{\missingcharmsg{CURRENCY SIGN}}
+  \gdef^^a5{\v L}
+  \gdef^^a6{\'S}
+  \gdef^^a7{\S}
+  \gdef^^a8{\"{}}
+  \gdef^^a9{\v S}
+  \gdef^^aa{\cedilla S}
+  \gdef^^ab{\v T}
+  \gdef^^ac{\'Z}
+  \gdef^^ad{\-}
+  \gdef^^ae{\v Z}
+  \gdef^^af{\dotaccent Z}
+  %
+  \gdef^^b0{\textdegree}
+  \gdef^^b1{\ogonek{a}}
+  \gdef^^b2{\ogonek{ }}
+  \gdef^^b3{\l}
+  \gdef^^b4{\'{}}
+  \gdef^^b5{\v l}
+  \gdef^^b6{\'s}
+  \gdef^^b7{\v{}}
+  \gdef^^b8{\cedilla\ }
+  \gdef^^b9{\v s}
+  \gdef^^ba{\cedilla s}
+  \gdef^^bb{\v t}
+  \gdef^^bc{\'z}
+  \gdef^^bd{\H{}}
+  \gdef^^be{\v z}
+  \gdef^^bf{\dotaccent z}
+  %
+  \gdef^^c0{\'R}
+  \gdef^^c1{\'A}
+  \gdef^^c2{\^A}
+  \gdef^^c3{\u A}
+  \gdef^^c4{\"A}
+  \gdef^^c5{\'L}
+  \gdef^^c6{\'C}
+  \gdef^^c7{\cedilla C}
+  \gdef^^c8{\v C}
+  \gdef^^c9{\'E}
+  \gdef^^ca{\ogonek{E}}
+  \gdef^^cb{\"E}
+  \gdef^^cc{\v E}
+  \gdef^^cd{\'I}
+  \gdef^^ce{\^I}
+  \gdef^^cf{\v D}
+  %
+  \gdef^^d0{\DH}
+  \gdef^^d1{\'N}
+  \gdef^^d2{\v N}
+  \gdef^^d3{\'O}
+  \gdef^^d4{\^O}
+  \gdef^^d5{\H O}
+  \gdef^^d6{\"O}
+  \gdef^^d7{$\times$}
+  \gdef^^d8{\v R}
+  \gdef^^d9{\ringaccent U}
+  \gdef^^da{\'U}
+  \gdef^^db{\H U}
+  \gdef^^dc{\"U}
+  \gdef^^dd{\'Y}
+  \gdef^^de{\cedilla T}
+  \gdef^^df{\ss}
+  %
+  \gdef^^e0{\'r}
+  \gdef^^e1{\'a}
+  \gdef^^e2{\^a}
+  \gdef^^e3{\u a}
+  \gdef^^e4{\"a}
+  \gdef^^e5{\'l}
+  \gdef^^e6{\'c}
+  \gdef^^e7{\cedilla c}
+  \gdef^^e8{\v c}
+  \gdef^^e9{\'e}
+  \gdef^^ea{\ogonek{e}}
+  \gdef^^eb{\"e}
+  \gdef^^ec{\v e}
+  \gdef^^ed{\'{\dotless{i}}}
+  \gdef^^ee{\^{\dotless{i}}}
+  \gdef^^ef{\v d}
+  %
+  \gdef^^f0{\dh}
+  \gdef^^f1{\'n}
+  \gdef^^f2{\v n}
+  \gdef^^f3{\'o}
+  \gdef^^f4{\^o}
+  \gdef^^f5{\H o}
+  \gdef^^f6{\"o}
+  \gdef^^f7{$\div$}
+  \gdef^^f8{\v r}
+  \gdef^^f9{\ringaccent u}
+  \gdef^^fa{\'u}
+  \gdef^^fb{\H u}
+  \gdef^^fc{\"u}
+  \gdef^^fd{\'y}
+  \gdef^^fe{\cedilla t}
+  \gdef^^ff{\dotaccent{}}
+}
+
+% UTF-8 character definitions.
+%
+% This code to support UTF-8 is based on LaTeX's utf8.def, with some
+% changes for Texinfo conventions.  It is included here under the GPL by
+% permission from Frank Mittelbach and the LaTeX team.
+%
+\newcount\countUTFx
+\newcount\countUTFy
+\newcount\countUTFz
+
+\gdef\UTFviiiTwoOctets#1#2{\expandafter
+   \UTFviiiDefined\csname u8:#1\string #2\endcsname}
+%
+\gdef\UTFviiiThreeOctets#1#2#3{\expandafter
+   \UTFviiiDefined\csname u8:#1\string #2\string #3\endcsname}
+%
+\gdef\UTFviiiFourOctets#1#2#3#4{\expandafter
+   \UTFviiiDefined\csname u8:#1\string #2\string #3\string #4\endcsname}
+
+\gdef\UTFviiiDefined#1{%
+  \ifx #1\relax
+    \message{\linenumber Unicode char \string #1 not defined for Texinfo}%
+  \else
+    \expandafter #1%
+  \fi
+}
+
+\begingroup
+  \catcode`\~13
+  \catcode`\"12
+
+  \def\UTFviiiLoop{%
+    \global\catcode\countUTFx\active
+    \uccode`\~\countUTFx
+    \uppercase\expandafter{\UTFviiiTmp}%
+    \advance\countUTFx by 1
+    \ifnum\countUTFx < \countUTFy
+      \expandafter\UTFviiiLoop
+    \fi}
+
+  \countUTFx = "C2
+  \countUTFy = "E0
+  \def\UTFviiiTmp{%
+    \xdef~{\noexpand\UTFviiiTwoOctets\string~}}
+  \UTFviiiLoop
+
+  \countUTFx = "E0
+  \countUTFy = "F0
+  \def\UTFviiiTmp{%
+    \xdef~{\noexpand\UTFviiiThreeOctets\string~}}
+  \UTFviiiLoop
+
+  \countUTFx = "F0
+  \countUTFy = "F4
+  \def\UTFviiiTmp{%
+    \xdef~{\noexpand\UTFviiiFourOctets\string~}}
+  \UTFviiiLoop
+\endgroup
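+
+% After these loops, each lead byte is active; e.g. ^^c3 expands to
+% \UTFviiiTwoOctets followed by itself, so the two-byte sequence
+% ^^c3^^a9 (UTF-8 for U+00E9) ends up looking up the control sequence
+% named u8:^^c3^^a9, which \DeclareUnicodeCharacter below defines.
+% (Lead bytes C2-DF take two octets, E0-EF three, F0-F3 four.)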
+
+\begingroup
+  \catcode`\"=12
+  \catcode`\<=12
+  \catcode`\.=12
+  \catcode`\,=12
+  \catcode`\;=12
+  \catcode`\!=12
+  \catcode`\~=13
+
+  \gdef\DeclareUnicodeCharacter#1#2{%
+    \countUTFz = "#1\relax
+    %\wlog{\space\space defining Unicode char U+#1 (decimal \the\countUTFz)}%
+    \begingroup
+      \parseXMLCharref
+      \def\UTFviiiTwoOctets##1##2{%
+        \csname u8:##1\string ##2\endcsname}%
+      \def\UTFviiiThreeOctets##1##2##3{%
+        \csname u8:##1\string ##2\string ##3\endcsname}%
+      \def\UTFviiiFourOctets##1##2##3##4{%
+        \csname u8:##1\string ##2\string ##3\string ##4\endcsname}%
+      \expandafter\expandafter\expandafter\expandafter
+       \expandafter\expandafter\expandafter
+       \gdef\UTFviiiTmp{#2}%
+    \endgroup}
+
+  \gdef\parseXMLCharref{%
+    \ifnum\countUTFz < "A0\relax
+      \errhelp = \EMsimple
+      \errmessage{Cannot define Unicode char value < 00A0}%
+    \else\ifnum\countUTFz < "800\relax
+      \parseUTFviiiA,%
+      \parseUTFviiiB C\UTFviiiTwoOctets.,%
+    \else\ifnum\countUTFz < "10000\relax
+      \parseUTFviiiA;%
+      \parseUTFviiiA,%
+      \parseUTFviiiB E\UTFviiiThreeOctets.{,;}%
+    \else
+      \parseUTFviiiA;%
+      \parseUTFviiiA,%
+      \parseUTFviiiA!%
+      \parseUTFviiiB F\UTFviiiFourOctets.{!,;}%
+    \fi\fi\fi
+  }
+
+  \gdef\parseUTFviiiA#1{%
+    \countUTFx = \countUTFz
+    \divide\countUTFz by 64
+    \countUTFy = \countUTFz
+    \multiply\countUTFz by 64
+    \advance\countUTFx by -\countUTFz
+    \advance\countUTFx by 128
+    \uccode `#1\countUTFx
+    \countUTFz = \countUTFy}
+
+  \gdef\parseUTFviiiB#1#2#3#4{%
+    \advance\countUTFz by "#10\relax
+    \uccode `#3\countUTFz
+    \uppercase{\gdef\UTFviiiTmp{#2#3#4}}}
+\endgroup
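+
+% Worked example (for illustration): for U+00E9, \countUTFz starts at
+% "E9 = 233.  Since 233 < "800, one \parseUTFviiiA pass yields
+% 233 mod 64 + 128 = 169 = "A9 (the trailing octet) and leaves
+% \countUTFz = 233 div 64 = 3; \parseUTFviiiB then adds "C0, giving
+% 195 = "C3 (the lead octet).  So the definition is filed under the
+% byte pair C3 A9, which is exactly the UTF-8 encoding of U+00E9.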
+
+\def\utfeightchardefs{%
+  \DeclareUnicodeCharacter{00A0}{\tie}
+  \DeclareUnicodeCharacter{00A1}{\exclamdown}
+  \DeclareUnicodeCharacter{00A3}{\pounds}
+  \DeclareUnicodeCharacter{00A8}{\"{ }}
+  \DeclareUnicodeCharacter{00A9}{\copyright}
+  \DeclareUnicodeCharacter{00AA}{\ordf}
+  \DeclareUnicodeCharacter{00AB}{\guillemetleft}
+  \DeclareUnicodeCharacter{00AD}{\-}
+  \DeclareUnicodeCharacter{00AE}{\registeredsymbol}
+  \DeclareUnicodeCharacter{00AF}{\={ }}
+
+  \DeclareUnicodeCharacter{00B0}{\ringaccent{ }}
+  \DeclareUnicodeCharacter{00B4}{\'{ }}
+  \DeclareUnicodeCharacter{00B8}{\cedilla{ }}
+  \DeclareUnicodeCharacter{00BA}{\ordm}
+  \DeclareUnicodeCharacter{00BB}{\guillemetright}
+  \DeclareUnicodeCharacter{00BF}{\questiondown}
+
+  \DeclareUnicodeCharacter{00C0}{\`A}
+  \DeclareUnicodeCharacter{00C1}{\'A}
+  \DeclareUnicodeCharacter{00C2}{\^A}
+  \DeclareUnicodeCharacter{00C3}{\~A}
+  \DeclareUnicodeCharacter{00C4}{\"A}
+  \DeclareUnicodeCharacter{00C5}{\AA}
+  \DeclareUnicodeCharacter{00C6}{\AE}
+  \DeclareUnicodeCharacter{00C7}{\cedilla{C}}
+  \DeclareUnicodeCharacter{00C8}{\`E}
+  \DeclareUnicodeCharacter{00C9}{\'E}
+  \DeclareUnicodeCharacter{00CA}{\^E}
+  \DeclareUnicodeCharacter{00CB}{\"E}
+  \DeclareUnicodeCharacter{00CC}{\`I}
+  \DeclareUnicodeCharacter{00CD}{\'I}
+  \DeclareUnicodeCharacter{00CE}{\^I}
+  \DeclareUnicodeCharacter{00CF}{\"I}
+
+  \DeclareUnicodeCharacter{00D0}{\DH}
+  \DeclareUnicodeCharacter{00D1}{\~N}
+  \DeclareUnicodeCharacter{00D2}{\`O}
+  \DeclareUnicodeCharacter{00D3}{\'O}
+  \DeclareUnicodeCharacter{00D4}{\^O}
+  \DeclareUnicodeCharacter{00D5}{\~O}
+  \DeclareUnicodeCharacter{00D6}{\"O}
+  \DeclareUnicodeCharacter{00D8}{\O}
+  \DeclareUnicodeCharacter{00D9}{\`U}
+  \DeclareUnicodeCharacter{00DA}{\'U}
+  \DeclareUnicodeCharacter{00DB}{\^U}
+  \DeclareUnicodeCharacter{00DC}{\"U}
+  \DeclareUnicodeCharacter{00DD}{\'Y}
+  \DeclareUnicodeCharacter{00DE}{\TH}
+  \DeclareUnicodeCharacter{00DF}{\ss}
+
+  \DeclareUnicodeCharacter{00E0}{\`a}
+  \DeclareUnicodeCharacter{00E1}{\'a}
+  \DeclareUnicodeCharacter{00E2}{\^a}
+  \DeclareUnicodeCharacter{00E3}{\~a}
+  \DeclareUnicodeCharacter{00E4}{\"a}
+  \DeclareUnicodeCharacter{00E5}{\aa}
+  \DeclareUnicodeCharacter{00E6}{\ae}
+  \DeclareUnicodeCharacter{00E7}{\cedilla{c}}
+  \DeclareUnicodeCharacter{00E8}{\`e}
+  \DeclareUnicodeCharacter{00E9}{\'e}
+  \DeclareUnicodeCharacter{00EA}{\^e}
+  \DeclareUnicodeCharacter{00EB}{\"e}
+  \DeclareUnicodeCharacter{00EC}{\`{\dotless{i}}}
+  \DeclareUnicodeCharacter{00ED}{\'{\dotless{i}}}
+  \DeclareUnicodeCharacter{00EE}{\^{\dotless{i}}}
+  \DeclareUnicodeCharacter{00EF}{\"{\dotless{i}}}
+
+  \DeclareUnicodeCharacter{00F0}{\dh}
+  \DeclareUnicodeCharacter{00F1}{\~n}
+  \DeclareUnicodeCharacter{00F2}{\`o}
+  \DeclareUnicodeCharacter{00F3}{\'o}
+  \DeclareUnicodeCharacter{00F4}{\^o}
+  \DeclareUnicodeCharacter{00F5}{\~o}
+  \DeclareUnicodeCharacter{00F6}{\"o}
+  \DeclareUnicodeCharacter{00F8}{\o}
+  \DeclareUnicodeCharacter{00F9}{\`u}
+  \DeclareUnicodeCharacter{00FA}{\'u}
+  \DeclareUnicodeCharacter{00FB}{\^u}
+  \DeclareUnicodeCharacter{00FC}{\"u}
+  \DeclareUnicodeCharacter{00FD}{\'y}
+  \DeclareUnicodeCharacter{00FE}{\th}
+  \DeclareUnicodeCharacter{00FF}{\"y}
+
+  \DeclareUnicodeCharacter{0100}{\=A}
+  \DeclareUnicodeCharacter{0101}{\=a}
+  \DeclareUnicodeCharacter{0102}{\u{A}}
+  \DeclareUnicodeCharacter{0103}{\u{a}}
+  \DeclareUnicodeCharacter{0104}{\ogonek{A}}
+  \DeclareUnicodeCharacter{0105}{\ogonek{a}}
+  \DeclareUnicodeCharacter{0106}{\'C}
+  \DeclareUnicodeCharacter{0107}{\'c}
+  \DeclareUnicodeCharacter{0108}{\^C}
+  \DeclareUnicodeCharacter{0109}{\^c}
+  \DeclareUnicodeCharacter{010A}{\dotaccent{C}}
+  \DeclareUnicodeCharacter{010B}{\dotaccent{c}}
+  \DeclareUnicodeCharacter{010C}{\v{C}}
+  \DeclareUnicodeCharacter{010D}{\v{c}}
+  \DeclareUnicodeCharacter{010E}{\v{D}}
+  \DeclareUnicodeCharacter{0118}{\ogonek{E}}
+  \DeclareUnicodeCharacter{0119}{\ogonek{e}}
+
+  \DeclareUnicodeCharacter{0112}{\=E}
+  \DeclareUnicodeCharacter{0113}{\=e}
+  \DeclareUnicodeCharacter{0114}{\u{E}}
+  \DeclareUnicodeCharacter{0115}{\u{e}}
+  \DeclareUnicodeCharacter{0116}{\dotaccent{E}}
+  \DeclareUnicodeCharacter{0117}{\dotaccent{e}}
+  \DeclareUnicodeCharacter{011A}{\v{E}}
+  \DeclareUnicodeCharacter{011B}{\v{e}}
+  \DeclareUnicodeCharacter{011C}{\^G}
+  \DeclareUnicodeCharacter{011D}{\^g}
+  \DeclareUnicodeCharacter{011E}{\u{G}}
+  \DeclareUnicodeCharacter{011F}{\u{g}}
+
+  \DeclareUnicodeCharacter{0120}{\dotaccent{G}}
+  \DeclareUnicodeCharacter{0121}{\dotaccent{g}}
+  \DeclareUnicodeCharacter{0124}{\^H}
+  \DeclareUnicodeCharacter{0125}{\^h}
+  \DeclareUnicodeCharacter{0128}{\~I}
+  \DeclareUnicodeCharacter{0129}{\~{\dotless{i}}}
+  \DeclareUnicodeCharacter{012A}{\=I}
+  \DeclareUnicodeCharacter{012B}{\={\dotless{i}}}
+  \DeclareUnicodeCharacter{012C}{\u{I}}
+  \DeclareUnicodeCharacter{012D}{\u{\dotless{i}}}
+
+  \DeclareUnicodeCharacter{0130}{\dotaccent{I}}
+  \DeclareUnicodeCharacter{0131}{\dotless{i}}
+  \DeclareUnicodeCharacter{0132}{IJ}
+  \DeclareUnicodeCharacter{0133}{ij}
+  \DeclareUnicodeCharacter{0134}{\^J}
+  \DeclareUnicodeCharacter{0135}{\^{\dotless{j}}}
+  \DeclareUnicodeCharacter{0139}{\'L}
+  \DeclareUnicodeCharacter{013A}{\'l}
+
+  \DeclareUnicodeCharacter{0141}{\L}
+  \DeclareUnicodeCharacter{0142}{\l}
+  \DeclareUnicodeCharacter{0143}{\'N}
+  \DeclareUnicodeCharacter{0144}{\'n}
+  \DeclareUnicodeCharacter{0147}{\v{N}}
+  \DeclareUnicodeCharacter{0148}{\v{n}}
+  \DeclareUnicodeCharacter{014C}{\=O}
+  \DeclareUnicodeCharacter{014D}{\=o}
+  \DeclareUnicodeCharacter{014E}{\u{O}}
+  \DeclareUnicodeCharacter{014F}{\u{o}}
+
+  \DeclareUnicodeCharacter{0150}{\H{O}}
+  \DeclareUnicodeCharacter{0151}{\H{o}}
+  \DeclareUnicodeCharacter{0152}{\OE}
+  \DeclareUnicodeCharacter{0153}{\oe}
+  \DeclareUnicodeCharacter{0154}{\'R}
+  \DeclareUnicodeCharacter{0155}{\'r}
+  \DeclareUnicodeCharacter{0158}{\v{R}}
+  \DeclareUnicodeCharacter{0159}{\v{r}}
+  \DeclareUnicodeCharacter{015A}{\'S}
+  \DeclareUnicodeCharacter{015B}{\'s}
+  \DeclareUnicodeCharacter{015C}{\^S}
+  \DeclareUnicodeCharacter{015D}{\^s}
+  \DeclareUnicodeCharacter{015E}{\cedilla{S}}
+  \DeclareUnicodeCharacter{015F}{\cedilla{s}}
+
+  \DeclareUnicodeCharacter{0160}{\v{S}}
+  \DeclareUnicodeCharacter{0161}{\v{s}}
+  \DeclareUnicodeCharacter{0162}{\cedilla{T}}
+  \DeclareUnicodeCharacter{0163}{\cedilla{t}}
+  \DeclareUnicodeCharacter{0164}{\v{T}}
+
+  \DeclareUnicodeCharacter{0168}{\~U}
+  \DeclareUnicodeCharacter{0169}{\~u}
+  \DeclareUnicodeCharacter{016A}{\=U}
+  \DeclareUnicodeCharacter{016B}{\=u}
+  \DeclareUnicodeCharacter{016C}{\u{U}}
+  \DeclareUnicodeCharacter{016D}{\u{u}}
+  \DeclareUnicodeCharacter{016E}{\ringaccent{U}}
+  \DeclareUnicodeCharacter{016F}{\ringaccent{u}}
+
+  \DeclareUnicodeCharacter{0170}{\H{U}}
+  \DeclareUnicodeCharacter{0171}{\H{u}}
+  \DeclareUnicodeCharacter{0174}{\^W}
+  \DeclareUnicodeCharacter{0175}{\^w}
+  \DeclareUnicodeCharacter{0176}{\^Y}
+  \DeclareUnicodeCharacter{0177}{\^y}
+  \DeclareUnicodeCharacter{0178}{\"Y}
+  \DeclareUnicodeCharacter{0179}{\'Z}
+  \DeclareUnicodeCharacter{017A}{\'z}
+  \DeclareUnicodeCharacter{017B}{\dotaccent{Z}}
+  \DeclareUnicodeCharacter{017C}{\dotaccent{z}}
+  \DeclareUnicodeCharacter{017D}{\v{Z}}
+  \DeclareUnicodeCharacter{017E}{\v{z}}
+
+  \DeclareUnicodeCharacter{01C4}{D\v{Z}}
+  \DeclareUnicodeCharacter{01C5}{D\v{z}}
+  \DeclareUnicodeCharacter{01C6}{d\v{z}}
+  \DeclareUnicodeCharacter{01C7}{LJ}
+  \DeclareUnicodeCharacter{01C8}{Lj}
+  \DeclareUnicodeCharacter{01C9}{lj}
+  \DeclareUnicodeCharacter{01CA}{NJ}
+  \DeclareUnicodeCharacter{01CB}{Nj}
+  \DeclareUnicodeCharacter{01CC}{nj}
+  \DeclareUnicodeCharacter{01CD}{\v{A}}
+  \DeclareUnicodeCharacter{01CE}{\v{a}}
+  \DeclareUnicodeCharacter{01CF}{\v{I}}
+
+  \DeclareUnicodeCharacter{01D0}{\v{\dotless{i}}}
+  \DeclareUnicodeCharacter{01D1}{\v{O}}
+  \DeclareUnicodeCharacter{01D2}{\v{o}}
+  \DeclareUnicodeCharacter{01D3}{\v{U}}
+  \DeclareUnicodeCharacter{01D4}{\v{u}}
+
+  \DeclareUnicodeCharacter{01E2}{\={\AE}}
+  \DeclareUnicodeCharacter{01E3}{\={\ae}}
+  \DeclareUnicodeCharacter{01E6}{\v{G}}
+  \DeclareUnicodeCharacter{01E7}{\v{g}}
+  \DeclareUnicodeCharacter{01E8}{\v{K}}
+  \DeclareUnicodeCharacter{01E9}{\v{k}}
+
+  \DeclareUnicodeCharacter{01F0}{\v{\dotless{j}}}
+  \DeclareUnicodeCharacter{01F1}{DZ}
+  \DeclareUnicodeCharacter{01F2}{Dz}
+  \DeclareUnicodeCharacter{01F3}{dz}
+  \DeclareUnicodeCharacter{01F4}{\'G}
+  \DeclareUnicodeCharacter{01F5}{\'g}
+  \DeclareUnicodeCharacter{01F8}{\`N}
+  \DeclareUnicodeCharacter{01F9}{\`n}
+  \DeclareUnicodeCharacter{01FC}{\'{\AE}}
+  \DeclareUnicodeCharacter{01FD}{\'{\ae}}
+  \DeclareUnicodeCharacter{01FE}{\'{\O}}
+  \DeclareUnicodeCharacter{01FF}{\'{\o}}
+
+  \DeclareUnicodeCharacter{021E}{\v{H}}
+  \DeclareUnicodeCharacter{021F}{\v{h}}
+
+  \DeclareUnicodeCharacter{0226}{\dotaccent{A}}
+  \DeclareUnicodeCharacter{0227}{\dotaccent{a}}
+  \DeclareUnicodeCharacter{0228}{\cedilla{E}}
+  \DeclareUnicodeCharacter{0229}{\cedilla{e}}
+  \DeclareUnicodeCharacter{022E}{\dotaccent{O}}
+  \DeclareUnicodeCharacter{022F}{\dotaccent{o}}
+
+  \DeclareUnicodeCharacter{0232}{\=Y}
+  \DeclareUnicodeCharacter{0233}{\=y}
+  \DeclareUnicodeCharacter{0237}{\dotless{j}}
+
+  \DeclareUnicodeCharacter{02DB}{\ogonek{ }}
+
+  \DeclareUnicodeCharacter{1E02}{\dotaccent{B}}
+  \DeclareUnicodeCharacter{1E03}{\dotaccent{b}}
+  \DeclareUnicodeCharacter{1E04}{\udotaccent{B}}
+  \DeclareUnicodeCharacter{1E05}{\udotaccent{b}}
+  \DeclareUnicodeCharacter{1E06}{\ubaraccent{B}}
+  \DeclareUnicodeCharacter{1E07}{\ubaraccent{b}}
+  \DeclareUnicodeCharacter{1E0A}{\dotaccent{D}}
+  \DeclareUnicodeCharacter{1E0B}{\dotaccent{d}}
+  \DeclareUnicodeCharacter{1E0C}{\udotaccent{D}}
+  \DeclareUnicodeCharacter{1E0D}{\udotaccent{d}}
+  \DeclareUnicodeCharacter{1E0E}{\ubaraccent{D}}
+  \DeclareUnicodeCharacter{1E0F}{\ubaraccent{d}}
+
+  \DeclareUnicodeCharacter{1E1E}{\dotaccent{F}}
+  \DeclareUnicodeCharacter{1E1F}{\dotaccent{f}}
+
+  \DeclareUnicodeCharacter{1E20}{\=G}
+  \DeclareUnicodeCharacter{1E21}{\=g}
+  \DeclareUnicodeCharacter{1E22}{\dotaccent{H}}
+  \DeclareUnicodeCharacter{1E23}{\dotaccent{h}}
+  \DeclareUnicodeCharacter{1E24}{\udotaccent{H}}
+  \DeclareUnicodeCharacter{1E25}{\udotaccent{h}}
+  \DeclareUnicodeCharacter{1E26}{\"H}
+  \DeclareUnicodeCharacter{1E27}{\"h}
+
+  \DeclareUnicodeCharacter{1E30}{\'K}
+  \DeclareUnicodeCharacter{1E31}{\'k}
+  \DeclareUnicodeCharacter{1E32}{\udotaccent{K}}
+  \DeclareUnicodeCharacter{1E33}{\udotaccent{k}}
+  \DeclareUnicodeCharacter{1E34}{\ubaraccent{K}}
+  \DeclareUnicodeCharacter{1E35}{\ubaraccent{k}}
+  \DeclareUnicodeCharacter{1E36}{\udotaccent{L}}
+  \DeclareUnicodeCharacter{1E37}{\udotaccent{l}}
+  \DeclareUnicodeCharacter{1E3A}{\ubaraccent{L}}
+  \DeclareUnicodeCharacter{1E3B}{\ubaraccent{l}}
+  \DeclareUnicodeCharacter{1E3E}{\'M}
+  \DeclareUnicodeCharacter{1E3F}{\'m}
+
+  \DeclareUnicodeCharacter{1E40}{\dotaccent{M}}
+  \DeclareUnicodeCharacter{1E41}{\dotaccent{m}}
+  \DeclareUnicodeCharacter{1E42}{\udotaccent{M}}
+  \DeclareUnicodeCharacter{1E43}{\udotaccent{m}}
+  \DeclareUnicodeCharacter{1E44}{\dotaccent{N}}
+  \DeclareUnicodeCharacter{1E45}{\dotaccent{n}}
+  \DeclareUnicodeCharacter{1E46}{\udotaccent{N}}
+  \DeclareUnicodeCharacter{1E47}{\udotaccent{n}}
+  \DeclareUnicodeCharacter{1E48}{\ubaraccent{N}}
+  \DeclareUnicodeCharacter{1E49}{\ubaraccent{n}}
+
+  \DeclareUnicodeCharacter{1E54}{\'P}
+  \DeclareUnicodeCharacter{1E55}{\'p}
+  \DeclareUnicodeCharacter{1E56}{\dotaccent{P}}
+  \DeclareUnicodeCharacter{1E57}{\dotaccent{p}}
+  \DeclareUnicodeCharacter{1E58}{\dotaccent{R}}
+  \DeclareUnicodeCharacter{1E59}{\dotaccent{r}}
+  \DeclareUnicodeCharacter{1E5A}{\udotaccent{R}}
+  \DeclareUnicodeCharacter{1E5B}{\udotaccent{r}}
+  \DeclareUnicodeCharacter{1E5E}{\ubaraccent{R}}
+  \DeclareUnicodeCharacter{1E5F}{\ubaraccent{r}}
+
+  \DeclareUnicodeCharacter{1E60}{\dotaccent{S}}
+  \DeclareUnicodeCharacter{1E61}{\dotaccent{s}}
+  \DeclareUnicodeCharacter{1E62}{\udotaccent{S}}
+  \DeclareUnicodeCharacter{1E63}{\udotaccent{s}}
+  \DeclareUnicodeCharacter{1E6A}{\dotaccent{T}}
+  \DeclareUnicodeCharacter{1E6B}{\dotaccent{t}}
+  \DeclareUnicodeCharacter{1E6C}{\udotaccent{T}}
+  \DeclareUnicodeCharacter{1E6D}{\udotaccent{t}}
+  \DeclareUnicodeCharacter{1E6E}{\ubaraccent{T}}
+  \DeclareUnicodeCharacter{1E6F}{\ubaraccent{t}}
+
+  \DeclareUnicodeCharacter{1E7C}{\~V}
+  \DeclareUnicodeCharacter{1E7D}{\~v}
+  \DeclareUnicodeCharacter{1E7E}{\udotaccent{V}}
+  \DeclareUnicodeCharacter{1E7F}{\udotaccent{v}}
+
+  \DeclareUnicodeCharacter{1E80}{\`W}
+  \DeclareUnicodeCharacter{1E81}{\`w}
+  \DeclareUnicodeCharacter{1E82}{\'W}
+  \DeclareUnicodeCharacter{1E83}{\'w}
+  \DeclareUnicodeCharacter{1E84}{\"W}
+  \DeclareUnicodeCharacter{1E85}{\"w}
+  \DeclareUnicodeCharacter{1E86}{\dotaccent{W}}
+  \DeclareUnicodeCharacter{1E87}{\dotaccent{w}}
+  \DeclareUnicodeCharacter{1E88}{\udotaccent{W}}
+  \DeclareUnicodeCharacter{1E89}{\udotaccent{w}}
+  \DeclareUnicodeCharacter{1E8A}{\dotaccent{X}}
+  \DeclareUnicodeCharacter{1E8B}{\dotaccent{x}}
+  \DeclareUnicodeCharacter{1E8C}{\"X}
+  \DeclareUnicodeCharacter{1E8D}{\"x}
+  \DeclareUnicodeCharacter{1E8E}{\dotaccent{Y}}
+  \DeclareUnicodeCharacter{1E8F}{\dotaccent{y}}
+
+  \DeclareUnicodeCharacter{1E90}{\^Z}
+  \DeclareUnicodeCharacter{1E91}{\^z}
+  \DeclareUnicodeCharacter{1E92}{\udotaccent{Z}}
+  \DeclareUnicodeCharacter{1E93}{\udotaccent{z}}
+  \DeclareUnicodeCharacter{1E94}{\ubaraccent{Z}}
+  \DeclareUnicodeCharacter{1E95}{\ubaraccent{z}}
+  \DeclareUnicodeCharacter{1E96}{\ubaraccent{h}}
+  \DeclareUnicodeCharacter{1E97}{\"t}
+  \DeclareUnicodeCharacter{1E98}{\ringaccent{w}}
+  \DeclareUnicodeCharacter{1E99}{\ringaccent{y}}
+
+  \DeclareUnicodeCharacter{1EA0}{\udotaccent{A}}
+  \DeclareUnicodeCharacter{1EA1}{\udotaccent{a}}
+
+  \DeclareUnicodeCharacter{1EB8}{\udotaccent{E}}
+  \DeclareUnicodeCharacter{1EB9}{\udotaccent{e}}
+  \DeclareUnicodeCharacter{1EBC}{\~E}
+  \DeclareUnicodeCharacter{1EBD}{\~e}
+
+  \DeclareUnicodeCharacter{1ECA}{\udotaccent{I}}
+  \DeclareUnicodeCharacter{1ECB}{\udotaccent{i}}
+  \DeclareUnicodeCharacter{1ECC}{\udotaccent{O}}
+  \DeclareUnicodeCharacter{1ECD}{\udotaccent{o}}
+
+  \DeclareUnicodeCharacter{1EE4}{\udotaccent{U}}
+  \DeclareUnicodeCharacter{1EE5}{\udotaccent{u}}
+
+  \DeclareUnicodeCharacter{1EF2}{\`Y}
+  \DeclareUnicodeCharacter{1EF3}{\`y}
+  \DeclareUnicodeCharacter{1EF4}{\udotaccent{Y}}
+
+  \DeclareUnicodeCharacter{1EF8}{\~Y}
+  \DeclareUnicodeCharacter{1EF9}{\~y}
+
+  \DeclareUnicodeCharacter{2013}{--}
+  \DeclareUnicodeCharacter{2014}{---}
+  \DeclareUnicodeCharacter{2018}{\quoteleft}
+  \DeclareUnicodeCharacter{2019}{\quoteright}
+  \DeclareUnicodeCharacter{201A}{\quotesinglbase}
+  \DeclareUnicodeCharacter{201C}{\quotedblleft}
+  \DeclareUnicodeCharacter{201D}{\quotedblright}
+  \DeclareUnicodeCharacter{201E}{\quotedblbase}
+  \DeclareUnicodeCharacter{2022}{\bullet}
+  \DeclareUnicodeCharacter{2026}{\dots}
+  \DeclareUnicodeCharacter{2039}{\guilsinglleft}
+  \DeclareUnicodeCharacter{203A}{\guilsinglright}
+  \DeclareUnicodeCharacter{20AC}{\euro}
+
+  \DeclareUnicodeCharacter{2192}{\expansion}
+  \DeclareUnicodeCharacter{21D2}{\result}
+
+  \DeclareUnicodeCharacter{2212}{\minus}
+  \DeclareUnicodeCharacter{2217}{\point}
+  \DeclareUnicodeCharacter{2261}{\equiv}
+}% end of \utfeightchardefs
+
+
+% US-ASCII character definitions.
+\def\asciichardefs{% nothing need be done
+   \relax
+}
+
+% Make non-ASCII characters printable again for compatibility with
+% existing Texinfo documents that may use them, even without declaring a
+% document encoding.
+%
+\setnonasciicharscatcode \other
+
+
+\message{formatting,}
+
+\newdimen\defaultparindent \defaultparindent = 15pt
+
+\chapheadingskip = 15pt plus 4pt minus 2pt
+\secheadingskip = 12pt plus 3pt minus 2pt
+\subsecheadingskip = 9pt plus 2pt minus 2pt
+
+% Prevent underfull vbox error messages.
+\vbadness = 10000
+
+% Don't be very finicky about underfull hboxes, either.
+\hbadness = 6666
+
+% Following George Bush, get rid of widows and orphans.
+\widowpenalty=10000
+\clubpenalty=10000
+
+% Use TeX 3.0's \emergencystretch to help line breaking, but if we're
+% using an old version of TeX, don't do anything.  We want the amount of
+% stretch added to depend on the line length, hence the dependence on
+% \hsize.  We call this whenever the paper size is set.
+%
+\def\setemergencystretch{%
+  \ifx\emergencystretch\thisisundefined
+    % Allow us to assign to \emergencystretch anyway.
+    \def\emergencystretch{\dimen0}%
+  \else
+    \emergencystretch = .15\hsize
+  \fi
+}
+
+% Parameters in order: 1) textheight; 2) textwidth;
+% 3) voffset; 4) hoffset; 5) binding offset; 6) topskip;
+% 7) physical page height; 8) physical page width.
+%
+% We also call \setleading{\textleading}, so the caller should define
+% \textleading.  The caller should also set \parskip.
+%
+\def\internalpagesizes#1#2#3#4#5#6#7#8{%
+  \voffset = #3\relax
+  \topskip = #6\relax
+  \splittopskip = \topskip
+  %
+  \vsize = #1\relax
+  \advance\vsize by \topskip
+  \outervsize = \vsize
+  \advance\outervsize by 2\topandbottommargin
+  \pageheight = \vsize
+  %
+  \hsize = #2\relax
+  \outerhsize = \hsize
+  \advance\outerhsize by 0.5in
+  \pagewidth = \hsize
+  %
+  \normaloffset = #4\relax
+  \bindingoffset = #5\relax
+  %
+  \ifpdf
+    \pdfpageheight #7\relax
+    \pdfpagewidth #8\relax
+    % if we don't reset these, they will remain at "1 true in" of
+    % whatever layout pdftex was dumped with.
+    \pdfhorigin = 1 true in
+    \pdfvorigin = 1 true in
+  \fi
+  %
+  \setleading{\textleading}
+  %
+  \parindent = \defaultparindent
+  \setemergencystretch
+}
+
+% @letterpaper (the default).
+\def\letterpaper{{\globaldefs = 1
+  \parskip = 3pt plus 2pt minus 1pt
+  \textleading = 13.2pt
+  %
+  % If page is nothing but text, make it come out even.
+  \internalpagesizes{607.2pt}{6in}% that's 46 lines
+                    {\voffset}{.25in}%
+                    {\bindingoffset}{36pt}%
+                    {11in}{8.5in}%
+}}
+
+% Use @smallbook to reset parameters for 7x9.25 trim size.
+\def\smallbook{{\globaldefs = 1
+  \parskip = 2pt plus 1pt
+  \textleading = 12pt
+  %
+  \internalpagesizes{7.5in}{5in}%
+                    {-.2in}{0in}%
+                    {\bindingoffset}{16pt}%
+                    {9.25in}{7in}%
+  %
+  \lispnarrowing = 0.3in
+  \tolerance = 700
+  \hfuzz = 1pt
+  \contentsrightmargin = 0pt
+  \defbodyindent = .5cm
+}}
+
+% Use @smallerbook to reset parameters for 6x9 trim size.
+% (Just testing, parameters still in flux.)
+\def\smallerbook{{\globaldefs = 1
+  \parskip = 1.5pt plus 1pt
+  \textleading = 12pt
+  %
+  \internalpagesizes{7.4in}{4.8in}%
+                    {-.2in}{-.4in}%
+                    {0pt}{14pt}%
+                    {9in}{6in}%
+  %
+  \lispnarrowing = 0.25in
+  \tolerance = 700
+  \hfuzz = 1pt
+  \contentsrightmargin = 0pt
+  \defbodyindent = .4cm
+}}
+
+% Use @afourpaper to print on European A4 paper.
+\def\afourpaper{{\globaldefs = 1
+  \parskip = 3pt plus 2pt minus 1pt
+  \textleading = 13.2pt
+  %
+  % Double-side printing via postscript on Laserjet 4050
+  % prints double-sided nicely when \bindingoffset=10mm and \hoffset=-6mm.
+  % To change the settings for a different printer or situation, adjust
+  % \normaloffset until the front-side and back-side texts align.  Then
+  % do the same for \bindingoffset.  You can set these for testing in
+  % your texinfo source file like this:
+  % @tex
+  % \global\normaloffset = -6mm
+  % \global\bindingoffset = 10mm
+  % @end tex
+  \internalpagesizes{673.2pt}{160mm}% that's 51 lines
+                    {\voffset}{\hoffset}%
+                    {\bindingoffset}{44pt}%
+                    {297mm}{210mm}%
+  %
+  \tolerance = 700
+  \hfuzz = 1pt
+  \contentsrightmargin = 0pt
+  \defbodyindent = 5mm
+}}
+
+% Use @afivepaper to print on European A5 paper.
+% From romildo@urano.iceb.ufop.br, 2 July 2000.
+% He also recommends making @example and @lisp be small.
+\def\afivepaper{{\globaldefs = 1
+  \parskip = 2pt plus 1pt minus 0.1pt
+  \textleading = 12.5pt
+  %
+  \internalpagesizes{160mm}{120mm}%
+                    {\voffset}{\hoffset}%
+                    {\bindingoffset}{8pt}%
+                    {210mm}{148mm}%
+  %
+  \lispnarrowing = 0.2in
+  \tolerance = 800
+  \hfuzz = 1.2pt
+  \contentsrightmargin = 0pt
+  \defbodyindent = 2mm
+  \tableindent = 12mm
+}}
+
+% A specific text layout, 24x15cm overall, intended for A4 paper.
+\def\afourlatex{{\globaldefs = 1
+  \afourpaper
+  \internalpagesizes{237mm}{150mm}%
+                    {\voffset}{4.6mm}%
+                    {\bindingoffset}{7mm}%
+                    {297mm}{210mm}%
+  %
+  % Must explicitly reset to 0 because we call \afourpaper.
+  \globaldefs = 0
+}}
+
+% Use @afourwide to print on A4 paper in landscape format.
+\def\afourwide{{\globaldefs = 1
+  \afourpaper
+  \internalpagesizes{241mm}{165mm}%
+                    {\voffset}{-2.95mm}%
+                    {\bindingoffset}{7mm}%
+                    {297mm}{210mm}%
+  \globaldefs = 0
+}}
+
+% @pagesizes TEXTHEIGHT[,TEXTWIDTH]
+% Perhaps we should allow setting the margins, \topskip, \parskip,
+% and/or leading, also. Or perhaps we should compute them somehow.
+%
+\parseargdef\pagesizes{\pagesizesyyy #1,,\finish}
+\def\pagesizesyyy#1,#2,#3\finish{{%
+  \setbox0 = \hbox{\ignorespaces #2}\ifdim\wd0 > 0pt \hsize=#2\relax \fi
+  \globaldefs = 1
+  %
+  \parskip = 3pt plus 2pt minus 1pt
+  \setleading{\textleading}%
+  %
+  \dimen0 = #1\relax
+  \advance\dimen0 by \voffset
+  %
+  \dimen2 = \hsize
+  \advance\dimen2 by \normaloffset
+  %
+  \internalpagesizes{#1}{\hsize}%
+                    {\voffset}{\normaloffset}%
+                    {\bindingoffset}{44pt}%
+                    {\dimen0}{\dimen2}%
+}}
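+
+% For example,
+%   @pagesizes 9.5in, 6.5in
+% requests a 9.5in text height and a 6.5in text width; the physical
+% page dimensions are then derived from those plus \voffset and
+% \normaloffset.  (The dimensions shown are only illustrative.)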
+
+% Set default to letter.
+%
+\letterpaper
+
+
+\message{and turning on texinfo input format.}
+
+\def^^L{\par} % remove \outer, so ^L can appear in an @comment
+
+% DEL is a comment character, in case @c does not suffice.
+\catcode`\^^? = 14
+
+% Define macros to output various characters with catcode for normal text.
+\catcode`\"=\other \def\normaldoublequote{"}
+\catcode`\$=\other \def\normaldollar{$}%$ font-lock fix
+\catcode`\+=\other \def\normalplus{+}
+\catcode`\<=\other \def\normalless{<}
+\catcode`\>=\other \def\normalgreater{>}
+\catcode`\^=\other \def\normalcaret{^}
+\catcode`\_=\other \def\normalunderscore{_}
+\catcode`\|=\other \def\normalverticalbar{|}
+\catcode`\~=\other \def\normaltilde{~}
+
+% This macro is used to make a character print one way in \tt
+% (where it can probably be output as-is), and another way in other fonts,
+% where something hairier probably needs to be done.
+%
+% #1 is what to print if we are indeed using \tt; #2 is what to print
+% otherwise.  Since all the Computer Modern typewriter fonts have zero
+% interword stretch (and shrink), and it is reasonable to expect all
+% typewriter fonts to have this, we can check that font parameter.
+%
+\def\ifusingtt#1#2{\ifdim \fontdimen3\font=0pt #1\else #2\fi}
+
+% Same as above, but check for italic font.  Actually this also catches
+% non-italic slanted fonts since it is impossible to distinguish them from
+% italic fonts.  But since this is only used by $ and it uses \sl anyway
+% this is not a problem.
+\def\ifusingit#1#2{\ifdim \fontdimen1\font>0pt #1\else #2\fi}
+
+% Turn off all special characters except @
+% (and those which the user can use as if they were ordinary).
+% Most of these we simply print from the \tt font, but for some, we can
+% use math or other variants that look better in normal text.
+
+\catcode`\"=\active
+\def\activedoublequote{{\tt\char34}}
+\let"=\activedoublequote
+\catcode`\~=\active
+\def~{{\tt\char126}}
+\chardef\hat=`\^
+\catcode`\^=\active
+\def^{{\tt \hat}}
+
+\catcode`\_=\active
+\def_{\ifusingtt\normalunderscore\_}
+\let\realunder=_
+% Subroutine for the previous macro.
+\def\_{\leavevmode \kern.07em \vbox{\hrule width.3em height.1ex}\kern .07em }
+
+\catcode`\|=\active
+\def|{{\tt\char124}}
+\chardef \less=`\<
+\catcode`\<=\active
+\def<{{\tt \less}}
+\chardef \gtr=`\>
+\catcode`\>=\active
+\def>{{\tt \gtr}}
+\catcode`\+=\active
+\def+{{\tt \char 43}}
+\catcode`\$=\active
+\def${\ifusingit{{\sl\$}}\normaldollar}%$ font-lock fix
+
+% If a .fmt file is being used, characters that might appear in a file
+% name cannot be active until we have parsed the command line.
+% So turn them off again, and have \everyjob (or @setfilename) turn them on.
+% \otherifyactive is called near the end of this file.
+\def\otherifyactive{\catcode`+=\other \catcode`\_=\other}
+
+% Used sometimes to turn off (effectively) the active characters even after
+% parsing them.
+\def\turnoffactive{%
+  \normalturnoffactive
+  \otherbackslash
+}
+
+\catcode`\@=0
+
+% \backslashcurfont outputs one backslash character in current font,
+% as in \char`\\.
+\global\chardef\backslashcurfont=`\\
+\global\let\rawbackslashxx=\backslashcurfont  % let existing .??s files work
+
+% \realbackslash is an actual character `\' with catcode other, and
+% \doublebackslash is two of them (for the pdf outlines).
+{\catcode`\\=\other @gdef@realbackslash{\} @gdef@doublebackslash{\\}}
+
+% In texinfo, backslash is an active character; it prints the backslash
+% in fixed width font.
+\catcode`\\=\active  % @ for escape char from now on.
+
+% The story here is that in math mode, the \char of \backslashcurfont
+% ends up printing the roman \ from the math symbol font (because \char
+% in math mode uses the \mathcode, and plain.tex sets
+% \mathcode`\\="026E).  It seems better for @backslashchar{} to always
+% print a typewriter backslash, hence we use an explicit \mathchar,
+% which is the decimal equivalent of "715c (class 7, e.g., use \fam;
+% ignored family value; char position "5C).  We can't use " for the
+% usual hex value because it has already been made active.
+@def@normalbackslash{{@tt @ifmmode @mathchar29020 @else @backslashcurfont @fi}}
+@let@backslashchar = @normalbackslash % @backslashchar{} is for user documents.
+
+% On startup, @fixbackslash assigns:
+%  @let \ = @normalbackslash
+% \rawbackslash defines an active \ to do \backslashcurfont.
+% \otherbackslash defines an active \ to be a literal `\' character with
+% catcode other.  We switch back and forth between these.
+@gdef@rawbackslash{@let\=@backslashcurfont}
+@gdef@otherbackslash{@let\=@realbackslash}
+
+% Same as @turnoffactive except outputs \ as {\tt\char`\\} instead of
+% the literal character `\'.  Also revert - to its normal character, in
+% case the active - from code has slipped in.
+%
+{@catcode`- = @active
+ @gdef@normalturnoffactive{%
+   @let-=@normaldash
+   @let"=@normaldoublequote
+   @let$=@normaldollar %$ font-lock fix
+   @let+=@normalplus
+   @let<=@normalless
+   @let>=@normalgreater
+   @let\=@normalbackslash
+   @let^=@normalcaret
+   @let_=@normalunderscore
+   @let|=@normalverticalbar
+   @let~=@normaltilde
+   @markupsetuplqdefault
+   @markupsetuprqdefault
+   @unsepspaces
+ }
+}
+
+% Make _ and + \other characters, temporarily.
+% This is canceled by @fixbackslash.
+@otherifyactive
+
+% If a .fmt file is being used, we don't want the `\input texinfo' to show up.
+% That is what \eatinput is for; after that, the `\' should revert to printing
+% a backslash.
+%
+@gdef@eatinput input texinfo{@fixbackslash}
+@global@let\ = @eatinput
+
+% On the other hand, perhaps the file did not have a `\input texinfo'. Then
+% the first `\' in the file would cause an error. This macro tries to fix
+% that, assuming it is called before the first `\' could plausibly occur.
+% Also turn back on active characters that might appear in the input
+% file name, in case not using a pre-dumped format.
+%
+@gdef@fixbackslash{%
+  @ifx\@eatinput @let\ = @normalbackslash @fi
+  @catcode`+=@active
+  @catcode`@_=@active
+}
+
+% Say @foo, not \foo, in error messages.
+@escapechar = `@@
+
+% These (along with & and #) are made active for url-breaking, so need
+% active definitions as the normal characters.
+@def@normaldot{.}
+@def@normalquest{?}
+@def@normalslash{/}
+
+% These look ok in all fonts, so just make them not special.
+% @hashchar{} gets its own user-level command, because of #line.
+@catcode`@& = @other @def@normalamp{&}
+@catcode`@# = @other @def@normalhash{#}
+@catcode`@% = @other @def@normalpercent{%}
+
+@let @hashchar = @normalhash
+
+@c Finally, make ` and ' active, so that txicodequoteundirected and
+@c txicodequotebacktick work right in, e.g., @w{@code{`foo'}}.  If we
+@c don't make ` and ' active, @code will not get them as active chars.
+@c Do this last of all since we use ` in the previous @catcode assignments.
+@catcode`@'=@active
+@catcode`@`=@active
+@markupsetuplqdefault
+@markupsetuprqdefault
+
+@c Local variables:
+@c eval: (add-hook 'write-file-hooks 'time-stamp)
+@c page-delimiter: "^\\\\message"
+@c time-stamp-start: "def\\\\texinfoversion{"
+@c time-stamp-format: "%:y-%02m-%02d.%02H"
+@c time-stamp-end: "}"
+@c End:
+
+@c vim:sw=2:
+
+@ignore
+   arch-tag: e1b36e32-c96e-4135-a41a-0b2efa2ea115
+@end ignore
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/ylwrap b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/ylwrap
new file mode 100755
index 0000000..8f072a8
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/automake-1.14/ylwrap
@@ -0,0 +1,247 @@
+#! /bin/sh
+# ylwrap - wrapper for lex/yacc invocations.
+
+scriptversion=2013-01-12.17; # UTC
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+#
+# Written by Tom Tromey <tromey@cygnus.com>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+get_dirname ()
+{
+  case $1 in
+    */*|*\\*) printf '%s\n' "$1" | sed -e 's|\([\\/]\)[^\\/]*$|\1|';;
+    # Otherwise,  we want the empty string (not ".").
+  esac
+}
+
+# guard FILE
+# ----------
+# The CPP macro used to guard inclusion of FILE.
+guard ()
+{
+  printf '%s\n' "$1"                                                    \
+    | sed                                                               \
+        -e 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'   \
+        -e 's/[^ABCDEFGHIJKLMNOPQRSTUVWXYZ]/_/g'                        \
+        -e 's/__*/_/g'
+}
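+
+# For example, 'guard y.tab.h' prints 'Y_TAB_H'.  The main loop below
+# uses these strings to rewrite include guards when renaming outputs
+# (e.g. to 'PARSE_H' for a desired name of parse.h).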
+
+# quote_for_sed [STRING]
+# ----------------------
+# Return STRING (or stdin) quoted to be used as a sed pattern.
+quote_for_sed ()
+{
+  case $# in
+    0) cat;;
+    1) printf '%s\n' "$1";;
+  esac \
+    | sed -e 's|[][\\.*]|\\&|g'
+}
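+
+# For example, 'quote_for_sed y.tab.c' prints 'y\.tab\.c', so the name
+# can safely appear on the left-hand side of a sed 's|FROM|TO|' command.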
+
+case "$1" in
+  '')
+    echo "$0: No files given.  Try '$0 --help' for more information." 1>&2
+    exit 1
+    ;;
+  --basedir)
+    basedir=$2
+    shift 2
+    ;;
+  -h|--h*)
+    cat <<\EOF
+Usage: ylwrap [--help|--version] INPUT [OUTPUT DESIRED]... -- PROGRAM [ARGS]...
+
+Wrapper for lex/yacc invocations, renaming files as desired.
+
+  INPUT is the input file
+  OUTPUT is one file PROG generates
+  DESIRED is the file we actually want instead of OUTPUT
+  PROGRAM is the program to run
+  ARGS are passed to PROG
+
+Any number of OUTPUT,DESIRED pairs may be used.
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v|--v*)
+    echo "ylwrap $scriptversion"
+    exit $?
+    ;;
+esac
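+
+# A typical invocation, roughly as generated by Automake's yacc rules
+# (file names here are illustrative), is:
+#
+#   ylwrap parse.y y.tab.c parse.c y.tab.h parse.h -- bison -y -d
+#
+# i.e. run 'bison -y -d' on parse.y, then rename y.tab.c to parse.c and
+# y.tab.h to parse.h.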
+
+
+# The input.
+input=$1
+shift
+# We'll need this later for a correct munging of "#line" directives.
+input_sub_rx=`get_dirname "$input" | quote_for_sed`
+case $input in
+  [\\/]* | ?:[\\/]*)
+    # Absolute path; do nothing.
+    ;;
+  *)
+    # Relative path.  Make it absolute.
+    input=`pwd`/$input
+    ;;
+esac
+input_rx=`get_dirname "$input" | quote_for_sed`
+
+# Since DOS filename conventions don't allow two dots,
+# the DOS version of Bison writes out y_tab.c instead of y.tab.c
+# and y_tab.h instead of y.tab.h. Test to see if this is the case.
+y_tab_nodot=false
+if test -f y_tab.c || test -f y_tab.h; then
+  y_tab_nodot=true
+fi
+
+# The parser itself, the first file, is the destination of the .y.c
+# rule in the Makefile.
+parser=$1
+
+# A sed program to s/FROM/TO/g for all the FROM/TO so that, for
+# instance, we rename #include "y.tab.h" into #include "parse.h"
+# during the conversion from y.tab.c to parse.c.
+sed_fix_filenames=
+
+# Also rename header guards, as Bison 2.7 for instance uses its header
+# guard in its implementation file.
+sed_fix_header_guards=
+
+while test $# -ne 0; do
+  if test x"$1" = x"--"; then
+    shift
+    break
+  fi
+  from=$1
+  # Handle y_tab.c and y_tab.h output by DOS
+  if $y_tab_nodot; then
+    case $from in
+      "y.tab.c") from=y_tab.c;;
+      "y.tab.h") from=y_tab.h;;
+    esac
+  fi
+  shift
+  to=$1
+  shift
+  sed_fix_filenames="${sed_fix_filenames}s|"`quote_for_sed "$from"`"|$to|g;"
+  sed_fix_header_guards="${sed_fix_header_guards}s|"`guard "$from"`"|"`guard "$to"`"|g;"
+done
+
+# The program to run.
+prog=$1
+shift
+# Make any relative path in $prog absolute.
+case $prog in
+  [\\/]* | ?:[\\/]*) ;;
+  *[\\/]*) prog=`pwd`/$prog ;;
+esac
+
+dirname=ylwrap$$
+do_exit="cd '`pwd`' && rm -rf $dirname > /dev/null 2>&1;"' (exit $ret); exit $ret'
+trap "ret=129; $do_exit" 1
+trap "ret=130; $do_exit" 2
+trap "ret=141; $do_exit" 13
+trap "ret=143; $do_exit" 15
+mkdir $dirname || exit 1
+
+cd $dirname
+
+case $# in
+  0) "$prog" "$input" ;;
+  *) "$prog" "$@" "$input" ;;
+esac
+ret=$?
+
+if test $ret -eq 0; then
+  for from in *
+  do
+    to=`printf '%s\n' "$from" | sed "$sed_fix_filenames"`
+    if test -f "$from"; then
+      # If the desired output name is an absolute path name, just use
+      # that; otherwise prepend '../', since we are in a subdirectory.
+      case $to in
+        [\\/]* | ?:[\\/]*) target=$to;;
+        *) target=../$to;;
+      esac
+
+      # Do not overwrite unchanged header files to avoid useless
+      # recompilations.  Always update the parser itself: it is the
+      # destination of the .y.c rule in the Makefile.  Divert the
+      # output of all other files to a temporary file so we can
+      # compare them to existing versions.
+      if test $from != $parser; then
+        realtarget=$target
+        target=tmp-`printf '%s\n' "$target" | sed 's|.*[\\/]||g'`
+      fi
+
+      # Munge "#line" or "#" directives.  Don't let the resulting
+      # debug information point at an absolute srcdir.  Use the real
+      # output file name, not yy.lex.c for instance.  Adjust the
+      # include guards too.
+      sed -e "/^#/!b"                           \
+          -e "s|$input_rx|$input_sub_rx|"       \
+          -e "$sed_fix_filenames"               \
+          -e "$sed_fix_header_guards"           \
+        "$from" >"$target" || ret=$?
+
+      # Check whether files must be updated.
+      if test "$from" != "$parser"; then
+        if test -f "$realtarget" && cmp -s "$realtarget" "$target"; then
+          echo "$to is unchanged"
+          rm -f "$target"
+        else
+          echo "updating $to"
+          mv -f "$target" "$realtarget"
+        fi
+      fi
+    else
+      # A missing file is only an error for the parser.  This is a
+      # blatant hack to let us support using "yacc -d".  If -d is not
+      # specified, don't fail when the header file is "missing".
+      if test "$from" = "$parser"; then
+        ret=1
+      fi
+    fi
+  done
+fi
+
+# Remove the directory.
+cd ..
+rm -rf $dirname
+
+exit $ret
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/doc/automake/amhello-1.0.tar.gz b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/doc/automake/amhello-1.0.tar.gz
new file mode 100644
index 0000000..7d81add
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/doc/automake/amhello-1.0.tar.gz
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autoconf-mode.el b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autoconf-mode.el
new file mode 100644
index 0000000..a5737f9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autoconf-mode.el
@@ -0,0 +1,100 @@
+;;; autoconf-mode.el --- autoconf code editing commands for Emacs
+
+;; Author: Martin Buchholz (martin@xemacs.org)
+;; Maintainer: Martin Buchholz
+;; Keywords: languages, faces, m4, configure
+
+;; This file is part of Autoconf
+
+;; Copyright (C) 2001, 2006, 2009, 2010 Free Software Foundation, Inc.
+;;
+;; This program is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+;;
+;; This program is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;; GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+;; A major mode for editing autoconf input (like configure.in).
+;; Derived from m4-mode.el by Andrew Csillag (drew@staff.prodigy.com)
+
+;;; You should add the following to your Emacs configuration file:
+
+;;  (autoload 'autoconf-mode "autoconf-mode"
+;;            "Major mode for editing autoconf files." t)
+;;  (setq auto-mode-alist
+;;        (cons '("\\.ac\\'\\|configure\\.in\\'" . autoconf-mode)
+;;              auto-mode-alist))
+
+;;; Code:
+
+;;thank god for make-regexp.el!
+(defvar autoconf-font-lock-keywords
+  `(("\\bdnl \\(.*\\)"  1 font-lock-comment-face t)
+    ("\\$[0-9*#@]" . font-lock-variable-name-face)
+    ("\\b\\(m4_\\)?\\(builtin\\|change\\(com\\|quote\\|word\\)\\|d\\(e\\(bug\\(file\\|mode\\)\\|cr\\|f\\(ine\\|n\\)\\)\\|iv\\(ert\\|num\\)\\|nl\\|umpdef\\)\\|e\\(rrprint\\|syscmd\\|val\\)\\|f\\(ile\\|ormat\\)\\|gnu\\|i\\(f\\(def\\|else\\)\\|n\\(c\\(lude\\|r\\)\\|d\\(ex\\|ir\\)\\)\\)\\|l\\(en\\|ine\\)\\|m\\(4\\(exit\\|wrap\\)\\|aketemp\\|kstemp\\)\\|p\\(atsubst\\|opdef\\|ushdef\\)\\|regexp\\|s\\(hift\\|include\\|ubstr\\|ys\\(cmd\\|val\\)\\)\\|tra\\(ceo\\(ff\\|n\\)\\|nslit\\)\\|un\\(d\\(efine\\|ivert\\)\\|ix\\)\\)\\b" . font-lock-keyword-face)
+    ("^\\(\\(m4_\\)?define\\(_default\\)?\\|A._DEFUN\\|m4_defun\\(_once\\|_init\\)?\\)(\\[?\\([A-Za-z0-9_]+\\)" 5 font-lock-function-name-face)
+    "default font-lock-keywords")
+)
+
+(defvar autoconf-mode-syntax-table nil
+  "syntax table used in autoconf mode")
+(setq autoconf-mode-syntax-table (make-syntax-table))
+(modify-syntax-entry ?\" "\""  autoconf-mode-syntax-table)
+;;(modify-syntax-entry ?\' "\""  autoconf-mode-syntax-table)
+(modify-syntax-entry ?#  "<\n" autoconf-mode-syntax-table)
+(modify-syntax-entry ?\n ">#"  autoconf-mode-syntax-table)
+(modify-syntax-entry ?\( "()"   autoconf-mode-syntax-table)
+(modify-syntax-entry ?\) ")("   autoconf-mode-syntax-table)
+(modify-syntax-entry ?\[ "(]"  autoconf-mode-syntax-table)
+(modify-syntax-entry ?\] ")["  autoconf-mode-syntax-table)
+(modify-syntax-entry ?*  "."   autoconf-mode-syntax-table)
+(modify-syntax-entry ?_  "_"   autoconf-mode-syntax-table)
+
+(defvar autoconf-mode-map
+  (let ((map (make-sparse-keymap)))
+    (define-key map '[(control c) (\;)] 'comment-region)
+    map))
+
+(defun autoconf-current-defun ()
+  "Autoconf value for `add-log-current-defun-function'.
+This tells add-log.el how to find the current macro."
+  (save-excursion
+    (if (re-search-backward "^\\(m4_define\\(_default\\)?\\|m4_defun\\(_once\\|_init\\)?\\|A._DEFUN\\)(\\[*\\([A-Za-z0-9_]+\\)" nil t)
+	(buffer-substring (match-beginning 4)
+			  (match-end 4))
+      nil)))
+
+;;;###autoload
+(defun autoconf-mode ()
+  "A major-mode to edit Autoconf files like configure.ac.
+\\{autoconf-mode-map}
+"
+  (interactive)
+  (kill-all-local-variables)
+  (use-local-map autoconf-mode-map)
+
+  (make-local-variable 'add-log-current-defun-function)
+  (setq add-log-current-defun-function 'autoconf-current-defun)
+
+  (make-local-variable 'comment-start)
+  (setq comment-start "# ")
+  (make-local-variable 'parse-sexp-ignore-comments)
+  (setq parse-sexp-ignore-comments t)
+
+  (make-local-variable	'font-lock-defaults)
+  (setq major-mode 'autoconf-mode)
+  (setq mode-name "Autoconf")
+  (setq font-lock-defaults `(autoconf-font-lock-keywords nil))
+  (set-syntax-table autoconf-mode-syntax-table)
+  (run-hooks 'autoconf-mode-hook))
+
+(provide 'autoconf-mode)
+
+;;; autoconf-mode.el ends here
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autoconf-mode.elc b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autoconf-mode.elc
new file mode 100644
index 0000000..f73ba50
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autoconf-mode.elc
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autotest-mode.el b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autotest-mode.el
new file mode 100644
index 0000000..269db85
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autotest-mode.el
@@ -0,0 +1,101 @@
+;;; autotest-mode.el --- autotest code editing commands for Emacs
+
+;; Author: Akim Demaille (akim@freefriends.org)
+;; Keywords: languages, faces, m4, Autotest
+
+;; This file is part of Autoconf
+
+;; Copyright (C) 2001, 2009, 2010 Free Software Foundation, Inc.
+;;
+;; This program is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+;;
+;; This program is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;; GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+;;; Commentary:
+
+;; A major mode for editing autotest input (like testsuite.at).
+;; Derived from autoconf-mode.el, by Martin Buchholz (martin@xemacs.org).
+
+;;; You should add the following to your Emacs configuration file:
+
+;;   (autoload 'autotest-mode "autotest-mode"
+;;             "Major mode for editing autotest files." t)
+;;   (setq auto-mode-alist
+;;         (cons '("\\.at\\'" . autotest-mode) auto-mode-alist))
+
+;;; Code:
+
+(defvar autotest-font-lock-keywords
+  `(("\\bdnl\\b\\(.*\\)"  1 font-lock-comment-face t)
+    ("\\$[0-9*#@]" . font-lock-variable-name-face)
+    ("^\\(m4_define\\|m4_defun\\)(\\[*\\([A-Za-z0-9_]+\\)" 2 font-lock-function-name-face)
+    ("^AT_SETUP(\\[+\\([^]]+\\)" 1 font-lock-function-name-face)
+    ("^AT_DATA(\\[+\\([^]]+\\)" 1 font-lock-variable-name-face)
+    ("\\b\\(_?m4_[_a-z0-9]*\\|_?A[ST]_[_A-Z0-9]+\\)\\b" . font-lock-keyword-face)
+    "default font-lock-keywords")
+)
+
+(defvar autotest-mode-syntax-table nil
+  "syntax table used in autotest mode")
+(setq autotest-mode-syntax-table (make-syntax-table))
+(modify-syntax-entry ?\" "\""  autotest-mode-syntax-table)
+;;(modify-syntax-entry ?\' "\""  autotest-mode-syntax-table)
+(modify-syntax-entry ?#  "<\n" autotest-mode-syntax-table)
+(modify-syntax-entry ?\n ">#"  autotest-mode-syntax-table)
+(modify-syntax-entry ?\( "()"   autotest-mode-syntax-table)
+(modify-syntax-entry ?\) ")("   autotest-mode-syntax-table)
+(modify-syntax-entry ?\[ "(]"  autotest-mode-syntax-table)
+(modify-syntax-entry ?\] ")["  autotest-mode-syntax-table)
+(modify-syntax-entry ?*  "."   autotest-mode-syntax-table)
+(modify-syntax-entry ?_  "_"   autotest-mode-syntax-table)
+
+(defvar autotest-mode-map
+  (let ((map (make-sparse-keymap)))
+    (define-key map '[(control c) (\;)] 'comment-region)
+    map))
+
+(defun autotest-current-defun ()
+  "Autotest value for `add-log-current-defun-function'.
+This tells add-log.el how to find the current test group/macro."
+  (save-excursion
+    (if (re-search-backward "^\\(m4_define\\|m4_defun\\|AT_SETUP\\)(\\[+\\([^]]+\\)" nil t)
+	(buffer-substring (match-beginning 2)
+			  (match-end 2))
+      nil)))
+
+;;;###autoload
+(defun autotest-mode ()
+  "A major-mode to edit Autotest files like testsuite.at.
+\\{autotest-mode-map}
+"
+  (interactive)
+  (kill-all-local-variables)
+  (use-local-map autotest-mode-map)
+
+  (make-local-variable 'add-log-current-defun-function)
+  (setq add-log-current-defun-function 'autotest-current-defun)
+
+  (make-local-variable 'comment-start)
+  (setq comment-start "# ")
+  (make-local-variable 'parse-sexp-ignore-comments)
+  (setq parse-sexp-ignore-comments t)
+
+  (make-local-variable	'font-lock-defaults)
+  (setq major-mode 'autotest-mode)
+  (setq mode-name "Autotest")
+  (setq font-lock-defaults `(autotest-font-lock-keywords nil))
+  (set-syntax-table autotest-mode-syntax-table)
+  (run-hooks 'autotest-mode-hook))
+
+(provide 'autotest-mode)
+
+;;; autotest-mode.el ends here
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autotest-mode.elc b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autotest-mode.elc
new file mode 100644
index 0000000..3a6b5c6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/emacs/site-lisp/autotest-mode.elc
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/autoconf.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/autoconf.info
new file mode 100644
index 0000000..b49338c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/autoconf.info
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake-history.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake-history.info
new file mode 100644
index 0000000..dba7763
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake-history.info
@@ -0,0 +1,1678 @@
+This is automake-history.info, produced by makeinfo version 5.2 from
+automake-history.texi.
+
+This manual describes (part of) the history of GNU Automake, a program
+that creates GNU standards-compliant Makefiles from template files.
+
+   Copyright (C) 1995-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with no Front-Cover texts,
+     and with no Back-Cover Texts.  A copy of the license is included in
+     the section entitled "GNU Free Documentation License."
+
+
+File: automake-history.info,  Node: Top,  Next: Timeline,  Up: (dir)
+
+Brief History of Automake
+*************************
+
+This manual describes (part of) the history of GNU Automake, a program
+that creates GNU standards-compliant Makefiles from template files.
+
+   Copyright (C) 1995-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with no Front-Cover texts,
+     and with no Back-Cover Texts.  A copy of the license is included in
+     the section entitled "GNU Free Documentation License."
+
+* Menu:
+
+* Timeline::                      The Automake story.
+* Dependency Tracking Evolution:: Evolution of Automatic Dependency Tracking
+* Releases::                      Release statistics
+* Copying This Manual::           How to make copies of this manual
+
+ -- The Detailed Node Listing --
+
+Evolution of Automatic Dependency Tracking
+
+* First Take on Dependencies::    Precomputed dependency tracking
+* Dependencies As Side Effects::  Update at developer compile time
+* Dependencies for the User::     Update at user compile time
+* Techniques for Dependencies::   Alternative approaches
+
+Techniques for Computing Dependencies
+
+* Recommendations for Tool Writers::
+* Future Directions for Dependencies::
+
+Copying This Manual
+
+* GNU Free Documentation License::  License for copying this manual
+
+
+
+File: automake-history.info,  Node: Timeline,  Next: Dependency Tracking Evolution,  Prev: Top,  Up: Top
+
+1 Timeline
+**********
+
+1994-09-19 First CVS commit.
+
+     If we can trust the CVS repository, David J. MacKenzie (djm)
+     started working on Automake (or AutoMake, as it was spelt then)
+     this Monday.
+
+     The first version of the 'automake' script looks as follows.
+
+          #!/bin/sh
+
+          status=0
+
+          for makefile
+          do
+            if test ! -f ${makefile}.am; then
+              echo "automake: ${makefile}.am: No such honkin' file"
+              status=1
+              continue
+            fi
+
+            exec 4> ${makefile}.in
+
+          done
+
+     From this you can already see that Automake will be about reading
+     '*.am' files and producing '*.in' files.  You cannot see anything
+     else, but if you also know that David is the one who created
+     Autoconf two years before you can guess the rest.
+
+     Several commits follow, and by the end of the day Automake is
+     reported to work for GNU fileutils and GNU m4.
+
+     The modus operandi is the one that is still used today: variable
+     assignments in 'Makefile.am' files trigger injections of precanned
+     'Makefile' fragments into the generated 'Makefile.in'.  The use of
+     'Makefile' fragments was inspired by the 4.4BSD 'make' and include
+     files, however Automake aims to be portable and to conform to the
+     GNU standards for 'Makefile' variables and targets.
+
+     At this point, the most recent release of Autoconf is version 1.11,
+     and David is preparing to release Autoconf 2.0 in late October.  As
+     a matter of fact, he will barely touch Automake after September.
+
+1994-11-05 David MacKenzie's last commit.
+
+     At this point Automake is a 200 line portable shell script, plus
+     332 lines of 'Makefile' fragments.  In the 'README', David states
+     his ambivalence between "portable shell" and "more appropriate
+     language":
+
+          I wrote it keeping in mind the possibility of it becoming an
+          Autoconf macro, so it would run at configure-time.  That would
+          slow configuration down a bit, but allow users to modify the
+          Makefile.am without needing to fetch the AutoMake package.
+          And, the Makefile.in files wouldn't need to be distributed.
+          But all of AutoMake would.  So I might reimplement AutoMake in
+          Perl, m4, or some other more appropriate language.
+
+     Automake is described as "an experimental Makefile generator".
+     There is no documentation.  Adventurous users are referred to the
+     examples and patches needed to use Automake with GNU m4 1.3,
+     fileutils 3.9, time 1.6, and development versions of find and
+     indent.
+
+     These examples seem to have been lost.  However at the time of
+     writing (10 years later in September, 2004) the FSF still
+     distributes a package that uses this version of Automake: check out
+     GNU termutils 2.0.
+
+1995-11-12 Tom Tromey's first commit.
+
+     After one year of inactivity, Tom Tromey takes over the package.
+     Tom was working on GNU cpio back then, and doing this just for fun,
+     having trouble finding a project to contribute to.  So while
+     hacking he wanted to bring the 'Makefile.in' up to GNU standards.
+     This was hard, and one day he saw Automake on
+     <ftp://alpha.gnu.org/>, grabbed it and tried it out.
+
+     Tom didn't talk to djm about it until later, just to make sure he
+     didn't mind if he made a release.  He did a bunch of early releases
+     to the Gnits folks.
+
+     Gnits was (and still is) totally informal, just a few GNU friends
+     who François Pinard knew, who were all interested in making a
+     common infrastructure for GNU projects, and shared a similar
+     outlook on how to do it.  So they were able to make some progress.
+     It came along with Autoconf and extensions thereof, and then
+     Automake from David and Tom (who were both gnitsians).  One of
+     their ideas was to write a document paralleling the GNU standards,
+     that was more strict in some ways and more detailed.  They never
+     finished the GNITS standards, but the ideas mostly made their way
+     into Automake.
+
+1995-11-23 Automake 0.20
+
+     Besides introducing automatic dependency tracking (*note Dependency
+     Tracking Evolution::), this version also supplies a 9-page manual.
+
+     At this time 'aclocal' and 'AM_INIT_AUTOMAKE' did not exist, so
+     many things had to be done by hand.  For instance, here is what a
+     configure.in (this is the former name of the 'configure.ac' we use
+     today) must contain in order to use Automake 0.20:
+
+          PACKAGE=cpio
+          VERSION=2.3.911
+          AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE")
+          AC_DEFINE_UNQUOTED(VERSION, "$VERSION")
+          AC_SUBST(PACKAGE)
+          AC_SUBST(VERSION)
+          AC_ARG_PROGRAM
+          AC_PROG_INSTALL
+
+     (Today all of the above is achieved by 'AC_INIT' and
+     'AM_INIT_AUTOMAKE'.)
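+
+     For comparison, a minimal modern equivalent (a sketch in today's
+     'configure.ac' syntax, not something Automake 0.20 understood)
+     would be:
+
+          AC_INIT([cpio], [2.3.911])
+          AM_INIT_AUTOMAKE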
+
+     Here is how programs are specified in 'Makefile.am':
+
+          PROGRAMS = hello
+          hello_SOURCES = hello.c
+
+     This looks pretty much like what we do today, except the 'PROGRAMS'
+     variable has no directory prefix specifying where 'hello' should be
+     installed: all programs are installed in '$(bindir)'.
+     'LIBPROGRAMS' can be used to specify programs that must be built
+     but not installed (it is called 'noinst_PROGRAMS' nowadays).
+
+     Programs can be built conditionally using 'AC_SUBST'itutions:
+
+          PROGRAMS = @progs@
+          AM_PROGRAMS = foo bar baz
+
+     ('AM_PROGRAMS' has since then been renamed to 'EXTRA_PROGRAMS'.)
+
+     Similarly scripts, static libraries, and data can be built and
+     installed using the 'LIBRARIES', 'SCRIPTS', and 'DATA' variables.
+     However 'LIBRARIES' were treated a bit specially in that Automake
+     did automatically supply the 'lib' and '.a' prefixes.  Therefore to
+     build 'libcpio.a', one had to write
+
+          LIBRARIES = cpio
+          cpio_SOURCES = ...
+
+     Extra files to distribute must be listed in 'DIST_OTHER' (the
+     ancestor of 'EXTRA_DIST').  Also extra directories that are to be
+     distributed should appear in 'DIST_SUBDIRS', but the manual
+     describes this as a temporary ugly hack (today extra directories
+     should also be listed in 'EXTRA_DIST', and 'DIST_SUBDIRS' is used
+     for another purpose, *note Conditional Subdirectories:
+     (automake)Conditional Subdirectories.).
+
+1995-11-26 Automake 0.21
+
+     In less time than it takes to cook a frozen pizza, Tom rewrites
+     Automake using Perl.  At this time Perl 5 is only one year old, and
+     Perl 4.036 is in use at many sites.  Supporting several Perl
+     versions has been a source of problems through the whole history of
+     Automake.
+
+     If you never used Perl 4, imagine Perl 5 without objects, without
+     'my' variables (only dynamically scoped 'local' variables), without
+     function prototypes, with function calls that need to be prefixed
+     with '&', etc.  Traces of this old style can still be found in
+     today's 'automake'.
+
+1995-11-28 Automake 0.22
+1995-11-29 Automake 0.23
+
+     Bug fixes.
+
+1995-12-08 Automake 0.24
+1995-12-10 Automake 0.25
+
+     Releases are raining.  0.24 introduces the uniform naming scheme we
+     use today, i.e., 'bin_PROGRAMS' instead of 'PROGRAMS',
+     'noinst_LIBRARIES' instead of 'LIBLIBRARIES', etc.  (However
+     'EXTRA_PROGRAMS' does not exist yet, 'AM_PROGRAMS' is still in use;
+     and 'TEXINFOS' and 'MANS' still have no directory prefixes.)
+     Adding support for prefixes like that was one of the major ideas in
+     'automake'; it has lasted pretty well.
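+
+     In the new scheme, the 0.20 'hello' example above reads as follows
+     (a sketch in the modern syntax; the installation directory is now
+     part of the variable name):
+
+          bin_PROGRAMS = hello
+          hello_SOURCES = hello.c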
+
+     AutoMake is renamed to Automake (Tom seems to recall it was
+     François Pinard's doing).
+
+     0.25 fixes a Perl 4 portability bug.
+
+1995-12-18 Jim Meyering starts using Automake in GNU Textutils.
+1995-12-31 François Pinard starts using Automake in GNU tar.
+
+1996-01-03 Automake 0.26
+1996-01-03 Automake 0.27
+
+     Of the many changes and suggestions sent by François Pinard and
+     included in 0.26, perhaps the most important is the advice that to
+     ease customization a user rule or variable definition should always
+     override an Automake rule or definition.
+
+     Gordon Matzigkeit and Jim Meyering are two other early contributors
+     who have been sending fixes.
+
+     0.27 fixes yet another Perl 4 portability bug.
+
+1996-01-13 Automake 0.28
+
+     Automake starts scanning 'configure.in' for 'LIBOBJS' support.
+     This is an important step because until this version Automake only
+     knew about the 'Makefile.am's it processed.  'configure.in' was
+     Autoconf's world and the link between Autoconf and Automake had to
+     be done by the 'Makefile.am' author.  For instance, if 'config.h'
+     was generated by 'configure', it was the package maintainer's
+     responsibility to define the 'CONFIG_HEADER' variable in each
+     'Makefile.am'.
+
+     Succeeding releases will rely more and more on scanning
+     'configure.in' to better automate the Autoconf integration.
+
+     0.28 also introduces the 'AUTOMAKE_OPTIONS' variable and the
+     '--gnu' and '--gnits' options, the latter being stricter.
+
+1996-02-07 Automake 0.29
+
+     Thanks to 'configure.in' scanning, 'CONFIG_HEADER' is gone, and
+     rebuild rules for 'configure'-generated files are automatically
+     output.
+
+     'TEXINFOS' and 'MANS' converted to the uniform naming scheme.
+
+1996-02-24 Automake 0.30
+
+     The test suite is born.  It contains 9 tests.  From now on test
+     cases will be added pretty regularly (*note Releases::), and this
+     proved to be really helpful later on.
+
+     'EXTRA_PROGRAMS' finally replaces 'AM_PROGRAMS'.
+
+     All the third-party Autoconf macros, written mostly by François
+     Pinard (and later Jim Meyering), are distributed in Automake's
+     hand-written 'aclocal.m4' file.  Package maintainers are expected
+     to extract the necessary macros from this file.  (In previous
+     versions you had to copy and paste them from the manual...)
+
+1996-03-11 Automake 0.31
+
+     The test suite in 0.30 was run via a long 'check-local' rule.  Upon
+     Ulrich Drepper's suggestion, 0.31 turns it into an Automake rule,
+     output whenever the 'TESTS' variable is defined.
+
+     'DIST_OTHER' is renamed to 'EXTRA_DIST', and the 'check_' prefix is
+     introduced.  The syntax is now the same as today.
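+
+     For instance, a hypothetical 'Makefile.am' fragment using that
+     syntax (the file names are made up) would be:
+
+          check_PROGRAMS = test-foo
+          TESTS = test-foo
+          EXTRA_DIST = HACKING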
+
+1996-03-15 Gordon Matzigkeit starts writing libtool.
+
+1996-04-27 Automake 0.32
+
+     '-hook' targets are introduced; an idea from Dieter Baron.
+
+     '*.info' files, which were output in the build directory, are now
+     built in the source directory, because they are distributed.  It
+     seems these files like to move back and forth as that will happen
+     again in future versions.
+
+1996-05-18 Automake 0.33
+
+     Gord Matzigkeit's two main contributions:
+
+        * very preliminary libtool support
+        * the distcheck rule
+
+     Although they were very basic at this point, these are probably
+     among the top features for Automake today.
+
+     Jim Meyering also provides the infamous 'jm_MAINTAINER_MODE', since
+     then renamed to 'AM_MAINTAINER_MODE' and abandoned by its author
+     (*note maintainer-mode: (automake)maintainer-mode.).
+
+1996-05-28 Automake 1.0
+
+     After only six months of heavy development, the 'automake' script
+     is 3134 lines long, plus 973 lines of 'Makefile' fragments.  The
+     package has 30 pages of documentation, and 38 test cases.
+     'aclocal.m4' contains 4 macros.
+
+     From now on and until version 1.4, new releases will occur at a
+     rate of about one a year.  1.1 did not exist; actually, 1.1b to 1.1p
+     were the names of the beta releases for 1.2.  This is the first
+     time Automake uses suffix letters to designate beta releases, a
+     habit that lasts.
+
+1996-10-10 Kevin Dalley packages Automake 1.0 for Debian GNU/Linux.
+
+1996-11-26 David J. MacKenzie releases Autoconf 2.12.
+
+     Between June and October, the Autoconf development is almost
+     stalled.  Roland McGrath had been working on it at the beginning of
+     the year.  David comes back in November to release 2.12, but he won't
+     touch Autoconf anymore after this year, and Autoconf then really
+     stagnates.  The desolate Autoconf 'ChangeLog' for 1997 lists only 7
+     commits.
+
+1997-02-28 <automake@gnu.ai.mit.edu> list alive
+
+     The mailing list is announced as follows:
+          I've created the "automake" mailing list.  It is
+          "automake@gnu.ai.mit.edu".  Administrivia, as always, to
+          automake-request@gnu.ai.mit.edu.
+
+          The charter of this list is discussion of automake, autoconf, and
+          other configuration/portability tools (e.g., libtool).  It is expected
+          that discussion will range from pleas for help all the way up to
+          patches.
+
+          This list is archived on the FSF machines.  Offhand I don't know if
+          you can get the archive without an account there.
+
+          This list is open to anybody who wants to join.  Tell all your
+          friends!
+          -- Tom Tromey
+
+     Before that people were discussing Automake privately, on the Gnits
+     mailing list (which is not public either), and less frequently on
+     'gnu.misc.discuss'.
+
+     'gnu.ai.mit.edu' is now 'gnu.org', in case you never noticed.  The
+     archives of the early years of the 'automake@gnu.org' list have
+     been lost, so today it is almost impossible to find traces of
+     discussions that occurred before 1999.  This has been annoying more
+     than once, as such discussions can be useful to understand the
+     rationale behind a piece of uncommented code that was introduced
+     back then.
+
+1997-06-22 Automake 1.2
+
+     Automake development continues, and more and more new Autoconf
+     macros are required.  Distributing them in 'aclocal.m4' and
+     requiring people to browse this file to extract the relevant macros
+     becomes uncomfortable.  Ideally, some of them should be contributed
+     to Autoconf so that they can be used directly; however, Autoconf is
+     currently inactive.  Automake 1.2 consequently introduces 'aclocal'
+     ('aclocal' was actually started on 1996-07-28), a tool that
+     automatically constructs an 'aclocal.m4' file from a repository of
+     third-party macros.  Because Autoconf has stalled, Automake also
+     becomes a kind of repository for such third-party macros, even
+     macros completely unrelated to Automake (for instance macros that
+     fix broken Autoconf macros).
+
+     The 1.2 release contains 20 macros, including the
+     'AM_INIT_AUTOMAKE' macro that simplifies the creation of
+     'configure.in'.
+
+     Libtool is fully supported using '*_LTLIBRARIES'.
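+
+     For instance, a hypothetical 'Makefile.am' fragment using this
+     support (the library name is made up):
+
+          lib_LTLIBRARIES = libfoo.la
+          libfoo_la_SOURCES = foo.c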
+
+     The 'missing' script is introduced by François Pinard; it is meant
+     to be a better solution than 'AM_MAINTAINER_MODE' (*note
+     maintainer-mode: (automake)maintainer-mode.).
+
+     Conditionals support was implemented by Ian Lance Taylor.  At the
+     time, Tom and Ian were working on an internal project at Cygnus.
+     They were using ILU, which is pretty similar to CORBA.  They wanted
+     to integrate ILU into their build, which was all 'configure'-based,
+     and Ian thought that adding conditionals to 'automake' was simpler
+     than doing all the work in 'configure' (which was the standard at
+     the time).  So this was actually funded by Cygnus.
+
+     This very useful but tricky feature will take a lot of time to
+     stabilize.  (At the time this text is written, there are still
+     primaries that have not been updated to support conditional
+     definitions in Automake 1.9.)
+
+     The 'automake' script has almost doubled: 6089 lines of Perl, plus
+     1294 lines of 'Makefile' fragments.
+
+1997-07-08 Gordon Matzigkeit releases Libtool 1.0.
+
+1998-04-05 Automake 1.3
+
+     This is a small advance compared to 1.2.  It adds support for
+     assembly, and preliminary support for Java.
+
+     Perl 5.004_04 is out, but fixes to support Perl 4 are still
+     regularly submitted whenever Automake breaks it.
+
+1998-09-06 'sourceware.cygnus.com' is on-line.
+
+     Sourceware was set up by Jason Molenda to host open source projects.
+
+1998-09-19 Automake CVS repository moved to 'sourceware.cygnus.com'
+1998-10-26 'sourceware.cygnus.com' announces it hosts Automake:
+     Automake is now hosted on 'sourceware.cygnus.com'.  It has a
+     publicly accessible CVS repository.  This CVS repository is a copy
+     of the one Tom was using on his machine, which in turn is based on
+     a copy of the CVS repository of David MacKenzie.  This is why we
+     still have the full source history.  (Automake was on Sourceware
+     until 2007-10-29, when it moved to a git repository on
+     'savannah.gnu.org'; by then the Sourceware host had been renamed to
+     'sources.redhat.com'.)
+
+     The oldest file in the administrative directory of the CVS
+     repository that was created on Sourceware is dated 1998-09-19,
+     while the announcement that 'automake' and 'autoconf' had joined
+     'sourceware' was made on 1998-10-26.  They were among the first
+     projects to be hosted there.
+
+     The heedful reader will have noticed Automake was exactly 4 years
+     old on 1998-09-19.
+
+1999-01-05 Ben Elliston releases Autoconf 2.13.
+
+1999-01-14 Automake 1.4
+
+     This release adds support for Fortran 77 and for the 'include'
+     statement.  Also, '+=' assignments are introduced, but it is still
+     quite easy to fool Automake when mixing this with conditionals.
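+
+     In today's syntax, the kind of mixing meant here looks as follows
+     (a sketch; 'WANT_BAR' is an invented 'AM_CONDITIONAL'):
+
+          bin_PROGRAMS = foo
+          if WANT_BAR
+          bin_PROGRAMS += bar
+          endif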
+
+     These two releases, Automake 1.4 and Autoconf 2.13, make a duo that
+     will be used together for years.
+
+     'automake' is 7228 lines, plus 1591 lines of Makefile fragments, 20
+     macros (some 1.3 macros were finally contributed back to Autoconf),
+     197 test cases, and 51 pages of documentation.
+
+1999-03-27 The 'user-dep-branch' is created on the CVS repository.
+
+     This implements a new dependency tracking scheme that should be
+     able to handle automatic dependency tracking using any compiler
+     (not just gcc) and any make (not just GNU 'make').  In addition,
+     the new scheme should be more reliable than the old one, as
+     dependencies are generated on the end user's machine.  Alexandre
+     Oliva creates depcomp for this purpose.
+
+     *Note Dependency Tracking Evolution::, for more details about the
+     evolution of automatic dependency tracking in Automake.
+
+1999-11-21 The 'user-dep-branch' is merged into the main trunk.
+
+     This was a huge problem since we also had patches going in on the
+     trunk.  The merge took a long time and was very painful.
+
+2000-05-10
+
+     Since September 1999 and until 2003, Akim Demaille will be
+     zealously revamping Autoconf.
+
+          I think the next release should be called "3.0".
+          Let's face it: you've basically rewritten autoconf.
+          Every weekend there are 30 new patches.
+          I don't see how we could call this "2.15" with a straight
+          face.
+          - Tom Tromey on <autoconf@gnu.org>
+
+     Actually Akim works like a submarine: he will pile up patches while
+     he works off-line during the weekend, and flush them in batch when
+     he resurfaces on Monday.
+
+2001-01-24
+
+     On this Wednesday, Autoconf 2.49c, the last beta before Autoconf
+     2.50, is out, and Akim has to find something to do during his
+     weekend :)
+
+2001-01-28
+
+     Akim sends a batch of 14 patches to <automake@gnu.org>.
+
+          Aiieeee!  I was dreading the day that the Demaillator turned
+          his sights on automake... and now it has arrived!  - Tom
+          Tromey
+
+     It's only the beginning: in two months he will send 192 patches.
+     Then he would slow down so Tom could catch up and review all this.
+     Initially Tom actually read all of these patches, then he probably
+     trustingly answered OK to most of them, and finally gave up and let
+     Akim apply whatever he wanted.  There was no way to keep up with
+     that patch rate.
+
+          Anyway the patch below won't apply since it predates Akim's
+          sourcequake; I have yet to figure where the relevant passage
+          has been moved :) - Alexandre Duret-Lutz
+
+     All of these patches were sent to and discussed on
+     <automake@gnu.org>, so subscribed users were literally drowning in
+     technical mails.  Eventually, the <automake-patches@gnu.org>
+     mailing list was created in May.
+
+     Year after year, Automake had drifted away from its initial design:
+     construct 'Makefile.in' by assembling various 'Makefile' fragments.
+     In 1.4, lots of 'Makefile' rules are being emitted at various
+     places in the 'automake' script itself; this does not help ensure
+     a consistent treatment of these rules (for instance making sure
+     that user-defined rules override Automake's own rules).  One of
+     Akim's goals was moving all of these hard-coded rules to separate
+     'Makefile' fragments, so the logic could be centralized in a
+     'Makefile' fragment processor.
+
+     Another significant contribution of Akim is the interface with the
+     "trace" feature of Autoconf.  The way to scan 'configure.in' at
+     this time was to read the file and grep for the various macros of
+     interest to Automake.  Doing so could break in many unexpected
+     ways; 'automake' could miss some definition (for instance
+     'AC_SUBST([$1], [$2])' where the arguments are known only when M4
+     is run), or conversely it could detect some macro that was not
+     expanded (because it is called conditionally).  In the CVS version
+     of Autoconf, Akim had implemented the '--trace' option, which
+     provides accurate information about where macros are actually
+     called and with what arguments.  Akim will equip Automake with a
+     second 'configure.in' scanner that uses this '--trace' interface.
+     Since it was not sensible to drop the Autoconf 2.13 compatibility
+     yet, this experimental scanner was only used when an environment
+     variable was set, the traditional grep-based scanner still being the
+     default.
+
+2001-04-25 Gary V. Vaughan releases Libtool 1.4
+
+     It has been more than two years since Automake 1.4; CVS Automake
+     has undergone lots of heavy changes and is still not ready for
+     release.  Libtool 1.4 had to be distributed with a patch against
+     Automake 1.4.
+
+2001-05-08 Automake 1.4-p1
+2001-05-24 Automake 1.4-p2
+
+     Gary V. Vaughan, the principal Libtool maintainer, makes a "patch
+     release" of Automake:
+
+          The main purpose of this release is to have a stable automake
+          which is compatible with the latest stable libtool.
+
+     The release also contains obvious fixes for bugs in Automake 1.4,
+     some of which were reported almost monthly.
+
+2001-05-21 Akim Demaille releases Autoconf 2.50
+
+2001-06-07 Automake 1.4-p3
+2001-06-10 Automake 1.4-p4
+2001-07-15 Automake 1.4-p5
+
+     Gary continues his patch-release series.  These also add support
+     for some new Autoconf 2.50 idioms.  Essentially, Autoconf now
+     advocates 'configure.ac' over 'configure.in', and it introduces a
+     new syntax for 'AC_OUTPUT'ing files.
+
+2001-08-23 Automake 1.5
+
+     A major and long-awaited release that comes more than two years
+     after 1.4.  It brings many changes, among which:
+        * The new dependency tracking scheme that uses 'depcomp'.  Aside
+          from the improvement on the dependency tracking itself (*note
+          Dependency Tracking Evolution::), this also streamlines the
+          use of 'automake'-generated 'Makefile.in's as the
+          'Makefile.in's used during development are now the same as
+          those used in distributions.  Before that the 'Makefile.in's
+          generated for maintainers required GNU 'make' and GCC; they
+          were different from the portable 'Makefile' generated for
+          distribution; this was causing some confusion.
+
+        * Support for per-target compilation flags.
+
+        * Support for reference to files in subdirectories in most
+          'Makefile.am' variables.
+
+        * Introduction of the 'dist_', 'nodist_', and 'nobase_'
+          prefixes.
+        * Perl 4 support is finally dropped.
+
+     1.5 did break several packages that worked with 1.4, enough so
+     that Linux distributions could not easily install the new Automake
+     version without breaking many of the packages for which they had to
+     run 'automake'.
+
+     Some of these breakages were effectively bugs that would eventually
+     be fixed in the next release.  However, a lot of damage was caused
+     by some changes made deliberately to render Automake stricter on
+     some setups we considered bogus.  For instance, 'make distcheck'
+     was improved to check that 'make uninstall' did remove all the
+     files 'make install' installed, that 'make distclean' did not omit
+     some file, and that a VPATH build would work even if the source
+     directory was read-only.  Similarly, Automake now rejects multiple
+     definitions of the same variable (because that would mix very badly
+     with conditionals), and '+=' assignments with no previous
+     definition.  Because these changes all occurred suddenly after 1.4
+     had been established for more than two years, they hurt users.
+
+     To make matters worse, meanwhile Autoconf (now at version 2.52) was
+     facing similar troubles, for similar reasons.
+
+2002-03-05 Automake 1.6
+
+     This release introduced versioned installation (*note API
+     Versioning: (automake)API Versioning.).  This was mainly pushed by
+     Havoc Pennington, taking the GNOME source tree as motivation: due to
+     incompatibilities between the autotools it's impossible for the
+     GNOME packages to switch to Autoconf 2.53 and Automake 1.5 all at
+     once, so they are currently stuck with Autoconf 2.13 and Automake
+     1.4.
+
+     The idea was to call this version 'automake-1.6', call all its
+     bug-fix versions identically, and switch to 'automake-1.7' for the
+     next release that adds new features or changes some rules.  This
+     scheme implies maintaining a bug-fix branch in addition to the
+     development trunk, which means more work from the maintainer, but
+     providing regular bug-fix releases proved to be really worthwhile.
+
+     Like 1.5, 1.6 also introduced a bunch of incompatibilities,
+     intentional or not.  Perhaps the most annoying was the dependence
+     on the newly released Autoconf 2.53.  Autoconf seemed to have
+     stabilized enough since its explosive 2.50 release and included
+     changes required to fix some bugs in Automake.  In order to upgrade
+     to Automake 1.6, people now had to upgrade Autoconf too; for some
+     packages it was no picnic.
+
+     While versioned installation helped people to upgrade, it also
+     unfortunately allowed people not to upgrade.  At the time of
+     writing, some Linux distributions are shipping packages for
+     Automake 1.4, 1.5, 1.6, 1.7, 1.8, and 1.9.  Most of these still
+     install 1.4 by default.  Some distributions also call 1.4 the
+     "stable" version, and present "1.9" as the development version;
+     this does not really make sense since 1.9 is way more solid than
+     1.4.  All this does not help the newcomer.
+
+2002-04-11 Automake 1.6.1
+
+     1.6 and the upcoming 1.4-p6 release were the last releases by Tom.
+     This one and those following will be handled by Alexandre
+     Duret-Lutz.  Tom is still around, and will be there until about
+     1.7, but his interest in Automake is drifting away towards
+     projects like 'gcj'.
+
+     Alexandre has been using Automake since 2000, and started to
+     contribute mostly at Akim's urging (Akim and Alexandre have
+     been working in the same room from 1999 to 2002).  In 2001 and 2002
+     he had a lot of free time to enjoy hacking Automake.
+
+2002-06-14 Automake 1.6.2
+
+2002-07-28 Automake 1.6.3
+2002-07-28 Automake 1.4-p6
+
+     Two releases on the same day.  1.6.3 is a bug-fix release.
+
+     Tom Tromey backported the versioned installation mechanism to the
+     1.4 branch, so that Automake 1.6.x and Automake 1.4-p6 could be
+     installed side by side.  Another request from the GNOME folks.
+
+2002-09-25 Automake 1.7
+
+     This release switches to the new 'configure.ac' scanner Akim was
+     experimenting with in 1.5.
+
+2002-10-16 Automake 1.7.1
+2002-12-06 Automake 1.7.2
+2003-02-20 Automake 1.7.3
+2003-04-23 Automake 1.7.4
+2003-05-18 Automake 1.7.5
+2003-07-10 Automake 1.7.6
+2003-09-07 Automake 1.7.7
+2003-10-07 Automake 1.7.8
+
+     Many bug-fix releases.  1.7 lasted because the development version
+     (upcoming 1.8) was undergoing some major internal revamping.
+
+2003-10-26 Automake on screen
+
+     Episode 49, 'Repercussions', in the third season of the 'Alias' TV
+     show is first aired.
+
+     Marshall, one of the characters, is working on a computer virus
+     that he has to modify before it gets into the wrong hands or
+     something like that.  The screenshots you see do not show any
+     program code; they show a 'Makefile.in' generated by automake...
+
+2003-11-09 Automake 1.7.9
+
+2003-12-10 Automake 1.8
+
+     The most striking update is probably that of 'aclocal'.
+
+     'aclocal' now uses 'm4_include' in the produced 'aclocal.m4' when
+     the included macros are already distributed with the package (an
+     idiom used in many packages), which reduces code duplication.  Many
+     people liked that, but in fact this change was really introduced to
+     fix a bug in rebuild rules: 'Makefile.in' must be rebuilt whenever
+     a dependency of 'configure' changes, but all the 'm4' files
+     included in 'aclocal.m4' were unknown to 'automake'.  Now
+     'automake' can just trace the 'm4_include's to discover the
+     dependencies.
+
+     'aclocal' also starts using the '--trace' Autoconf option in order
+     to discover used macros more accurately.  This will turn out to be
+     very tricky (later releases will improve this) as people had
+     devised many ways to cope with the limitations of previous 'aclocal'
+     versions, notably using handwritten 'm4_include's: 'aclocal' must
+     make sure not to redefine a rule that is already included by such a
+     statement.
+
+     Automake also has seen its guts rewritten.  Although this rewriting
+     took a lot of effort, it is only apparent to the users in that
+     some constructions previously disallowed by the implementation now
+     work nicely.  Conditionals, Locations, Variable and Rule
+     definitions, Options: these items on which Automake works have been
+     rewritten as separate Perl modules, and documented.
+
+2004-01-11 Automake 1.8.1
+2004-01-12 Automake 1.8.2
+2004-03-07 Automake 1.8.3
+2004-04-25 Automake 1.8.4
+2004-05-16 Automake 1.8.5
+
+2004-07-28 Automake 1.9
+
+     This release tries to simplify the compilation rules it outputs to
+     reduce the size of the Makefile.  The complaint initially came from
+     the libgcj developers.  Their 'Makefile.in' generated with Automake
+     1.4 and custom build rules (1.4 did not support compiled Java) is
+     250KB.  The one generated by 1.8 was over 9MB!  1.9 gets it down to
+     1.2MB.
+
+     Aside from this it contains mainly minor changes and bug-fixes.
+
+2004-08-11 Automake 1.9.1
+2004-09-19 Automake 1.9.2
+
+     Automake is ten years old.  This chapter of the manual was initially
+     written for this occasion.
+
+2007-10-29 Automake repository moves to 'savannah.gnu.org'
+     and uses git as primary repository.
+
+
+File: automake-history.info,  Node: Dependency Tracking Evolution,  Next: Releases,  Prev: Timeline,  Up: Top
+
+2 Evolution of Automatic Dependency Tracking
+********************************************
+
+Over the years Automake has deployed three different dependency tracking
+methods.  Each method, including the current one, has had flaws of
+various sorts.  Here we lay out the different dependency tracking
+methods, their flaws, and their fixes.  We conclude with recommendations
+for tool writers, and by indicating future directions for dependency
+tracking work in Automake.
+
+* Menu:
+
+* First Take on Dependencies::    Precomputed dependency tracking
+* Dependencies As Side Effects::  Update at developer compile time
+* Dependencies for the User::     Update at user compile time
+* Techniques for Dependencies::   Alternative approaches
+
+
+File: automake-history.info,  Node: First Take on Dependencies,  Next: Dependencies As Side Effects,  Up: Dependency Tracking Evolution
+
+2.1 First Take on Dependency Tracking
+=====================================
+
+Description
+-----------
+
+Our first attempt at automatic dependency tracking was based on the
+method recommended by GNU 'make'.  (*note Generating Prerequisites
+Automatically: (make)Automatic Prerequisites.)
+
+   This version worked by precomputing dependencies ahead of time.  For
+each source file, it had a special '.P' file that held the dependencies.
+There was a rule to generate a '.P' file by invoking the compiler
+appropriately.  All such '.P' files were included by the 'Makefile',
+thus implicitly becoming dependencies of 'Makefile'.
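+
+   As a rough sketch (assuming 'gcc' and GNU 'make'; the file name is
+invented, and these are not the exact rules Automake emitted), the
+scheme amounts to:
+
+     # Recipe lines must start with a TAB character.
+     maude.P: maude.c
+             $(CC) -MM $(CPPFLAGS) maude.c > maude.P
+
+     include maude.P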
+
+Bugs
+----
+
+This approach had several critical bugs.
+
+   * The code to generate the '.P' file relied on 'gcc'.  (A limitation,
+     not technically a bug.)
+   * The dependency tracking mechanism itself relied on GNU 'make'.  (A
+     limitation, not technically a bug.)
+   * Because each '.P' file was a dependency of 'Makefile', this meant
+     that dependency tracking was done eagerly by 'make'.  For instance,
+     'make clean' would cause all the dependency files to be updated,
+     and then immediately removed.  This eagerness also caused problems
+     with some configurations; if a certain source file could not be
+     compiled on a given architecture for some reason, dependency
+     tracking would fail, aborting the entire build.
+   * As dependency tracking was done as a pre-pass, compile times were
+     doubled: the compiler had to be run twice per source file.
+   * 'make dist' re-ran 'automake' to generate a 'Makefile' that did not
+     have automatic dependency tracking (and that was thus portable to
+     any version of 'make').  In order to do this portably, Automake had
+     to scan the dependency files and remove any reference that was to a
+     source file not in the distribution.  This process was error-prone.
+     Also, if 'make dist' was run in an environment where some object
+     file had a dependency on a source file that was only conditionally
+     created, Automake would generate a 'Makefile' that referred to a
+     file that might not appear in the end user's build.  A special,
+     hacky mechanism was required to work around this.
+
+Historical Note
+---------------
+
+The code generated by Automake is often inspired by the 'Makefile' style
+of a particular author.  In the case of the first implementation of
+dependency tracking, I believe the impetus and inspiration was Jim
+Meyering.  (I could be mistaken.  If you know otherwise feel free to
+correct me.)
+
+
+File: automake-history.info,  Node: Dependencies As Side Effects,  Next: Dependencies for the User,  Prev: First Take on Dependencies,  Up: Dependency Tracking Evolution
+
+2.2 Dependencies As Side Effects
+================================
+
+Description
+-----------
+
+The next refinement of Automake's automatic dependency tracking scheme
+was to implement dependencies as side effects of the compilation.  This
+was aimed at solving the most commonly reported problems with the first
+approach.  In particular we were most concerned with eliminating the
+     weird rebuilding effect associated with 'make clean'.
+
+   In this approach, the '.P' files were included using the '-include'
+command, which let us create these files lazily.  This avoided the 'make
+clean' problem.
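+
+   In today's GCC spelling (a sketch only; the flags and file names
+Automake actually used were different, and the '.deps' directory is
+assumed to exist), the idea looks like:
+
+     maude.o: maude.c
+             $(CC) -MD -MF .deps/maude.P -c -o maude.o maude.c
+
+     -include .deps/maude.P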
+
+   We only computed dependencies when a file was actually compiled.
+This avoided the performance penalty associated with scanning each file
+twice.  It also let us avoid the other problems associated with the
+first, eager, implementation.  For instance, dependencies would never be
+generated for a source file that was not compilable on a given
+architecture (because it in fact would never be compiled).
+
+Bugs
+----
+
+   * This approach also relied on the existence of 'gcc' and GNU 'make'.
+     (A limitation, not technically a bug.)
+   * Dependency tracking was still done by the developer, so the
+     problems from the first implementation relating to massaging of
+     dependencies by 'make dist' were still in effect.
+   * This implementation suffered from the "deleted header file"
+     problem.  Suppose a lazily-created '.P' file includes a dependency
+     on a given header file, like this:
+
+          maude.o: maude.c something.h
+
+     Now suppose that you remove 'something.h' and update 'maude.c' so
+     that this include is no longer needed.  If you run 'make', you will
+     get an error because there is no way to create 'something.h'.
+
+     We fixed this problem in a later release by further massaging the
+     output of 'gcc' to include a dummy dependency for each header file.
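+
+     Concretely, the massaged '.P' file would also contain an empty rule
+     for the header (a sketch; this is what GCC's '-MP' option automates
+     today):
+
+          maude.o: maude.c something.h
+
+          something.h: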
+
+
+File: automake-history.info,  Node: Dependencies for the User,  Next: Techniques for Dependencies,  Prev: Dependencies As Side Effects,  Up: Dependency Tracking Evolution
+
+2.3 Dependencies for the User
+=============================
+
+Description
+-----------
+
+The bugs associated with 'make dist', over time, became a real problem.
+Packages using Automake were being built on a large number of platforms,
+and were becoming increasingly complex.  Broken dependencies were
+distributed in "portable" 'Makefile.in's, leading to user complaints.
+Also, the requirement for 'gcc' and GNU 'make' was a constant source of
+bug reports.  The next implementation of dependency tracking aimed to
+remove these problems.
+
+   We realized that the only truly reliable way to automatically track
+dependencies was to do it when the package itself was built.  This meant
+discovering a method portable to any version of make and any compiler.
+Also, we wanted to preserve what we saw as the best point of the second
+implementation: dependency computation as a side effect of compilation.
+
+   In the end we found that most modern make implementations support
+some form of include directive.  Also, we wrote a wrapper script that
+let us abstract away differences between dependency tracking methods for
+compilers.  For instance, some compilers cannot generate dependencies as
+a side effect of compilation.  In this case we simply have the script
+run the compiler twice.  Currently our wrapper script ('depcomp') knows
+about twelve different compilers (including a "compiler" that simply
+invokes 'makedepend' and then the real compiler, which is assumed to be
+a standard Unix-like C compiler with no way to do dependency tracking).
+
+Bugs
+----
+
+   * Running a wrapper script for each compilation slows down the build.
+   * Many users don't really care about precise dependencies.
+   * This implementation, like every other automatic dependency tracking
+     scheme in common use today (indeed, every one we've ever heard of),
+     suffers from the "duplicated new header" bug.
+
+     This bug occurs because dependency tracking tools, such as the
+     compiler, only generate dependencies on the successful opening of a
+     file, and not on every probe.
+
+     Suppose for instance that the compiler searches three directories
+     for a given header, and that the header is found in the third
+     directory.  If the programmer erroneously adds a header file with
+     the same name to the first directory, then a clean rebuild from
+     scratch could fail (suppose the new header file is buggy), whereas
+     an incremental rebuild will succeed.
+
+     What has happened here is that people have a misunderstanding of
+     what a dependency is.  Tool writers think a dependency encodes
+     information about which files were read by the compiler.  However,
+     a dependency must actually encode information about what the
+     compiler tried to do.
+
+     This problem is not serious in practice.  Programmers typically do
+     not use the same name for a header file twice in a given project.
+     (At least, not in C or C++.  This problem may be more troublesome
+     in Java.)  This problem is easy to fix, by modifying dependency
+     generators to record every probe, instead of every successful open.
+
+   * Since Automake generates dependencies as a side effect of
+     compilation, there is a bootstrapping problem when header files are
+     generated by running a program.  The problem is that, the first
+     time the build is done, there is no way by default to know that the
+     headers are required, so make might try to run a compilation for
+     which the headers have not yet been built.
+
+     This was also a problem in the previous dependency tracking
+     implementation.
+
+     The current fix is to use 'BUILT_SOURCES' to list built headers
+     (*note Sources: (automake)Sources.).  This causes them to be built
+     before any other build rules are run.  This is unsatisfactory as a
+     general solution, however in practice it seems sufficient for most
+     actual programs.
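+
+     For example (a hypothetical 'Makefile.am'; 'gen.h' stands for a
+     header produced by some rule elsewhere in the same file):
+
+          bin_PROGRAMS = foo
+          foo_SOURCES = foo.c
+          nodist_foo_SOURCES = gen.h
+          BUILT_SOURCES = gen.h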
+
+   This code has been used since Automake 1.5.
+
+   In GCC 3.0, we managed to convince the maintainers to add special
+command-line options to help Automake more efficiently do its job.  We
+hoped this would let us avoid the use of a wrapper script when
+Automake's automatic dependency tracking was used with 'gcc'.
+
+   Unfortunately, this code doesn't quite do what we want.  In
+particular, it removes the dependency file if the compilation fails;
+we'd prefer that it instead touch the file only if the
+compilation succeeds.
+
+   Nevertheless, since Automake 1.7, when a recent 'gcc' is detected at
+'configure' time, we inline the dependency-generation code and do not
+use the 'depcomp' wrapper script.  This makes compilations faster for
+those using this compiler (probably our primary user base).  The
+downside is that, because we have to encode two compilation rules in
+'Makefile' (with or without 'depcomp'), the produced 'Makefile's are
+larger.
+
+
+File: automake-history.info,  Node: Techniques for Dependencies,  Prev: Dependencies for the User,  Up: Dependency Tracking Evolution
+
+2.4 Techniques for Computing Dependencies
+=========================================
+
+There are actually several ways for a build tool like Automake to cause
+tools to generate dependencies.
+
+'makedepend'
+     This was a commonly-used method in the past.  The idea is to run a
+     special program over the source and have it generate dependency
+     information.  Traditional implementations of 'makedepend' are not
+     completely precise; ordinarily they were conservative and
+     discovered too many dependencies.
+The tool
+     An obvious way to generate dependencies is to simply write the tool
+     so that it can generate the information needed by the build tool.
+     This is also the most portable method.  Many compilers have an
+     option to generate dependencies.  Unfortunately, not all tools
+     provide such an option.
+The file system
+     It is possible to write a special file system that tracks opens,
+     reads, writes, etc, and then feed this information back to the
+     build tool.  'clearmake' does this.  This is a very powerful
+     technique, as it doesn't require cooperation from the tool.
+     Unfortunately it is also very difficult to implement and also not
+     practical in the general case.
+'LD_PRELOAD'
+     Rather than use the file system, one could write a special library
+     to intercept 'open' and other syscalls.  This technique is also
+     quite powerful, but unfortunately it is not portable enough for use
+     in 'automake'.
+
+* Menu:
+
+* Recommendations for Tool Writers::
+* Future Directions for Dependencies::
+
+
+File: automake-history.info,  Node: Recommendations for Tool Writers,  Next: Future Directions for Dependencies,  Up: Techniques for Dependencies
+
+2.4.1 Recommendations for Tool Writers
+--------------------------------------
+
+We think that every compilation tool ought to be able to generate
+dependencies as a side effect of compilation.  Furthermore, at least
+while 'make'-based tools are nearly universally in use (in the free
+software community, at least), the tool itself should generate dummy
+dependencies for header files, to avoid the deleted header file bug.
+Finally, the tool should generate a dependency for each probe, instead
+of each successful file open, in order to avoid the duplicated new
+header bug.
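+
+   With GCC, for instance, the first two recommendations map onto
+existing options (an illustrative rule, not a quote from the GCC
+manual); as far as we know there is no option corresponding to the
+third:
+
+     maude.o: maude.c
+             $(CC) -MMD -MP -c -o maude.o maude.c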
+
+
+File: automake-history.info,  Node: Future Directions for Dependencies,  Prev: Recommendations for Tool Writers,  Up: Techniques for Dependencies
+
+2.4.2 Future Directions for Dependencies
+----------------------------------------
+
+Currently, only languages and compilers understood by Automake can have
+dependency tracking enabled.  We would like to see if it is practical
+(and worthwhile) to let this support be extended by the user to
+languages unknown to Automake.
+
+
+File: automake-history.info,  Node: Releases,  Next: Copying This Manual,  Prev: Dependency Tracking Evolution,  Up: Top
+
+3 Release Statistics
+********************
+
+The following table (inspired by 'perlhist(1)') quantifies the evolution
+of Automake using these metrics:
+
+Date, Rel
+     The date and version of the release.
+am
+     The number of lines of the 'automake' script.
+acl
+     The number of lines of the 'aclocal' script.
+pm
+     The number of lines of the 'Perl' supporting modules.
+'*.am'
+     The number of lines of the 'Makefile' fragments.  The number in
+     parentheses is the number of files.
+m4
+     The number of lines (and files) of Autoconf macros.
+doc
+     The number of pages of the documentation (the Postscript version).
+t
+     The number of test cases in the test suite.  Of those, the number
+     in parentheses is the number of generated test cases.
+
+Date         Rel      am     acl    pm     '*.am'      m4          doc   t
+------------------------------------------------------------------------------------
+1994-09-19   CVS      141                  299 (24)
+1994-11-05   CVS      208                  332 (28)
+1995-11-23   0.20     533                  458 (35)                9
+1995-11-26   0.21     613                  480 (36)                11
+1995-11-28   0.22     1116                 539 (38)                12
+1995-11-29   0.23     1240                 541 (38)                12
+1995-12-08   0.24     1462                 504 (33)                14
+1995-12-10   0.25     1513                 511 (37)                15
+1996-01-03   0.26     1706                 438 (36)                16
+1996-01-03   0.27     1706                 438 (36)                16
+1996-01-13   0.28     1964                 934 (33)                16
+1996-02-07   0.29     2299                 936 (33)                17
+1996-02-24   0.30     2544                 919 (32)    85 (1)      20    9
+1996-03-11   0.31     2877                 919 (32)    85 (1)      29    17
+1996-04-27   0.32     3058                 921 (31)    85 (1)      30    26
+1996-05-18   0.33     3110                 926 (31)    105 (1)     30    35
+1996-05-28   1.0      3134                 973 (32)    105 (1)     30    38
+1997-06-22   1.2      6089   385           1294 (36)   592 (20)    37    126
+1998-04-05   1.3      6415   422           1470 (39)   741 (23)    39    156
+1999-01-14   1.4      7240   426           1591 (40)   734 (20)    51    197
+2001-05-08   1.4-p1   7251   426           1591 (40)   734 (20)    51    197
+2001-05-24   1.4-p2   7268   439           1591 (40)   734 (20)    49    197
+2001-06-07   1.4-p3   7312   439           1591 (40)   734 (20)    49    197
+2001-06-10   1.4-p4   7321   439           1591 (40)   734 (20)    49    198
+2001-07-15   1.4-p5   7228   426           1596 (40)   734 (20)    51    198
+2001-08-23   1.5      8016   475    600    2654 (39)   1166 (29)   63    327
+2002-03-05   1.6      8465   475    1136   2732 (39)   1603 (27)   66    365
+2002-04-11   1.6.1    8544   475    1136   2741 (39)   1603 (27)   66    372
+2002-06-14   1.6.2    8575   475    1136   2800 (39)   1609 (27)   67    386
+2002-07-28   1.6.3    8600   475    1153   2809 (39)   1609 (27)   67    391
+2002-07-28   1.4-p6   7332   455           1596 (40)   735 (20)    49    197
+2002-09-25   1.7      9189   471    1790   2965 (39)   1606 (28)   73    430
+2002-10-16   1.7.1    9229   475    1790   2977 (39)   1606 (28)   73    437
+2002-12-06   1.7.2    9334   475    1790   2988 (39)   1606 (28)   77    445
+2003-02-20   1.7.3    9389   475    1790   3023 (39)   1651 (29)   84    448
+2003-04-23   1.7.4    9429   475    1790   3031 (39)   1644 (29)   85    458
+2003-05-18   1.7.5    9429   475    1790   3033 (39)   1645 (29)   85    459
+2003-07-10   1.7.6    9442   475    1790   3033 (39)   1660 (29)   85    461
+2003-09-07   1.7.7    9443   475    1790   3041 (39)   1660 (29)   90    467
+2003-10-07   1.7.8    9444   475    1790   3041 (39)   1660 (29)   90    468
+2003-11-09   1.7.9    9444   475    1790   3048 (39)   1660 (29)   90    468
+2003-12-10   1.8      7171   585    7730   3236 (39)   1666 (31)   104   521
+2004-01-11   1.8.1    7217   663    7726   3287 (39)   1686 (31)   104   525
+2004-01-12   1.8.2    7217   663    7726   3288 (39)   1686 (31)   104   526
+2004-03-07   1.8.3    7214   686    7735   3303 (39)   1695 (31)   111   530
+2004-04-25   1.8.4    7214   686    7736   3310 (39)   1701 (31)   112   531
+2004-05-16   1.8.5    7240   686    7736   3299 (39)   1701 (31)   112   533
+2004-07-28   1.9      7508   715    7794   3352 (40)   1812 (32)   115   551
+2004-08-11   1.9.1    7512   715    7794   3354 (40)   1812 (32)   115   552
+2004-09-19   1.9.2    7512   715    7794   3354 (40)   1812 (32)   132   554
+2004-11-01   1.9.3    7507   718    7804   3354 (40)   1812 (32)   134   556
+2004-12-18   1.9.4    7508   718    7856   3361 (40)   1811 (32)   140   560
+2005-02-13   1.9.5    7523   719    7859   3373 (40)   1453 (32)   142   562
+2005-07-10   1.9.6    7539   699    7867   3400 (40)   1453 (32)   144   570
+2006-10-15   1.10     7859   1072   8024   3512 (40)   1496 (34)   172   604
+2008-01-19   1.10.1   7870   1089   8025   3520 (40)   1499 (34)   173   617
+2008-11-23   1.10.2   7882   1089   8027   3540 (40)   1509 (34)   176   628
+2009-05-17   1.11     8721   1092   8289   4164 (42)   1714 (37)   181   732 (20)
+
+
+File: automake-history.info,  Node: Copying This Manual,  Prev: Releases,  Up: Top
+
+Appendix A Copying This Manual
+******************************
+
+* Menu:
+
+* GNU Free Documentation License::  License for copying this manual
+
+
+File: automake-history.info,  Node: GNU Free Documentation License,  Up: Copying This Manual
+
+A.1 GNU Free Documentation License
+==================================
+
+                     Version 1.3, 3 November 2008
+
+     Copyright (C) 2000-2013 Free Software Foundation, Inc.
+     <http://fsf.org/>
+
+     Everyone is permitted to copy and distribute verbatim copies
+     of this license document, but changing it is not allowed.
+
+  0. PREAMBLE
+
+     The purpose of this License is to make a manual, textbook, or other
+     functional and useful document "free" in the sense of freedom: to
+     assure everyone the effective freedom to copy and redistribute it,
+     with or without modifying it, either commercially or
+     noncommercially.  Secondarily, this License preserves for the
+     author and publisher a way to get credit for their work, while not
+     being considered responsible for modifications made by others.
+
+     This License is a kind of "copyleft", which means that derivative
+     works of the document must themselves be free in the same sense.
+     It complements the GNU General Public License, which is a copyleft
+     license designed for free software.
+
+     We have designed this License in order to use it for manuals for
+     free software, because free software needs free documentation: a
+     free program should come with manuals providing the same freedoms
+     that the software does.  But this License is not limited to
+     software manuals; it can be used for any textual work, regardless
+     of subject matter or whether it is published as a printed book.  We
+     recommend this License principally for works whose purpose is
+     instruction or reference.
+
+  1. APPLICABILITY AND DEFINITIONS
+
+     This License applies to any manual or other work, in any medium,
+     that contains a notice placed by the copyright holder saying it can
+     be distributed under the terms of this License.  Such a notice
+     grants a world-wide, royalty-free license, unlimited in duration,
+     to use that work under the conditions stated herein.  The
+     "Document", below, refers to any such manual or work.  Any member
+     of the public is a licensee, and is addressed as "you".  You accept
+     the license if you copy, modify or distribute the work in a way
+     requiring permission under copyright law.
+
+     A "Modified Version" of the Document means any work containing the
+     Document or a portion of it, either copied verbatim, or with
+     modifications and/or translated into another language.
+
+     A "Secondary Section" is a named appendix or a front-matter section
+     of the Document that deals exclusively with the relationship of the
+     publishers or authors of the Document to the Document's overall
+     subject (or to related matters) and contains nothing that could
+     fall directly within that overall subject.  (Thus, if the Document
+     is in part a textbook of mathematics, a Secondary Section may not
+     explain any mathematics.)  The relationship could be a matter of
+     historical connection with the subject or with related matters, or
+     of legal, commercial, philosophical, ethical or political position
+     regarding them.
+
+     The "Invariant Sections" are certain Secondary Sections whose
+     titles are designated, as being those of Invariant Sections, in the
+     notice that says that the Document is released under this License.
+     If a section does not fit the above definition of Secondary then it
+     is not allowed to be designated as Invariant.  The Document may
+     contain zero Invariant Sections.  If the Document does not identify
+     any Invariant Sections then there are none.
+
+     The "Cover Texts" are certain short passages of text that are
+     listed, as Front-Cover Texts or Back-Cover Texts, in the notice
+     that says that the Document is released under this License.  A
+     Front-Cover Text may be at most 5 words, and a Back-Cover Text may
+     be at most 25 words.
+
+     A "Transparent" copy of the Document means a machine-readable copy,
+     represented in a format whose specification is available to the
+     general public, that is suitable for revising the document
+     straightforwardly with generic text editors or (for images composed
+     of pixels) generic paint programs or (for drawings) some widely
+     available drawing editor, and that is suitable for input to text
+     formatters or for automatic translation to a variety of formats
+     suitable for input to text formatters.  A copy made in an otherwise
+     Transparent file format whose markup, or absence of markup, has
+     been arranged to thwart or discourage subsequent modification by
+     readers is not Transparent.  An image format is not Transparent if
+     used for any substantial amount of text.  A copy that is not
+     "Transparent" is called "Opaque".
+
+     Examples of suitable formats for Transparent copies include plain
+     ASCII without markup, Texinfo input format, LaTeX input format,
+     SGML or XML using a publicly available DTD, and standard-conforming
+     simple HTML, PostScript or PDF designed for human modification.
+     Examples of transparent image formats include PNG, XCF and JPG.
+     Opaque formats include proprietary formats that can be read and
+     edited only by proprietary word processors, SGML or XML for which
+     the DTD and/or processing tools are not generally available, and
+     the machine-generated HTML, PostScript or PDF produced by some word
+     processors for output purposes only.
+
+     The "Title Page" means, for a printed book, the title page itself,
+     plus such following pages as are needed to hold, legibly, the
+     material this License requires to appear in the title page.  For
+     works in formats which do not have any title page as such, "Title
+     Page" means the text near the most prominent appearance of the
+     work's title, preceding the beginning of the body of the text.
+
+     The "publisher" means any person or entity that distributes copies
+     of the Document to the public.
+
+     A section "Entitled XYZ" means a named subunit of the Document
+     whose title either is precisely XYZ or contains XYZ in parentheses
+     following text that translates XYZ in another language.  (Here XYZ
+     stands for a specific section name mentioned below, such as
+     "Acknowledgements", "Dedications", "Endorsements", or "History".)
+     To "Preserve the Title" of such a section when you modify the
+     Document means that it remains a section "Entitled XYZ" according
+     to this definition.
+
+     The Document may include Warranty Disclaimers next to the notice
+     which states that this License applies to the Document.  These
+     Warranty Disclaimers are considered to be included by reference in
+     this License, but only as regards disclaiming warranties: any other
+     implication that these Warranty Disclaimers may have is void and
+     has no effect on the meaning of this License.
+
+  2. VERBATIM COPYING
+
+     You may copy and distribute the Document in any medium, either
+     commercially or noncommercially, provided that this License, the
+     copyright notices, and the license notice saying this License
+     applies to the Document are reproduced in all copies, and that you
+     add no other conditions whatsoever to those of this License.  You
+     may not use technical measures to obstruct or control the reading
+     or further copying of the copies you make or distribute.  However,
+     you may accept compensation in exchange for copies.  If you
+     distribute a large enough number of copies you must also follow the
+     conditions in section 3.
+
+     You may also lend copies, under the same conditions stated above,
+     and you may publicly display copies.
+
+  3. COPYING IN QUANTITY
+
+     If you publish printed copies (or copies in media that commonly
+     have printed covers) of the Document, numbering more than 100, and
+     the Document's license notice requires Cover Texts, you must
+     enclose the copies in covers that carry, clearly and legibly, all
+     of these Cover Texts: Front-Cover Texts on the front cover, and
+     Back-Cover Texts on the back cover.  Both covers must also clearly
+     and legibly identify you as the publisher of these copies.  The
+     front cover must present the full title with all words of the title
+     equally prominent and visible.  You may add other material on the
+     covers in addition.  Copying with changes limited to the covers, as
+     long as they preserve the title of the Document and satisfy these
+     conditions, can be treated as verbatim copying in other respects.
+
+     If the required texts for either cover are too voluminous to fit
+     legibly, you should put the first ones listed (as many as fit
+     reasonably) on the actual cover, and continue the rest onto
+     adjacent pages.
+
+     If you publish or distribute Opaque copies of the Document
+     numbering more than 100, you must either include a machine-readable
+     Transparent copy along with each Opaque copy, or state in or with
+     each Opaque copy a computer-network location from which the general
+     network-using public has access to download using public-standard
+     network protocols a complete Transparent copy of the Document, free
+     of added material.  If you use the latter option, you must take
+     reasonably prudent steps, when you begin distribution of Opaque
+     copies in quantity, to ensure that this Transparent copy will
+     remain thus accessible at the stated location until at least one
+     year after the last time you distribute an Opaque copy (directly or
+     through your agents or retailers) of that edition to the public.
+
+     It is requested, but not required, that you contact the authors of
+     the Document well before redistributing any large number of copies,
+     to give them a chance to provide you with an updated version of the
+     Document.
+
+  4. MODIFICATIONS
+
+     You may copy and distribute a Modified Version of the Document
+     under the conditions of sections 2 and 3 above, provided that you
+     release the Modified Version under precisely this License, with the
+     Modified Version filling the role of the Document, thus licensing
+     distribution and modification of the Modified Version to whoever
+     possesses a copy of it.  In addition, you must do these things in
+     the Modified Version:
+
+       A. Use in the Title Page (and on the covers, if any) a title
+          distinct from that of the Document, and from those of previous
+          versions (which should, if there were any, be listed in the
+          History section of the Document).  You may use the same title
+          as a previous version if the original publisher of that
+          version gives permission.
+
+       B. List on the Title Page, as authors, one or more persons or
+          entities responsible for authorship of the modifications in
+          the Modified Version, together with at least five of the
+          principal authors of the Document (all of its principal
+          authors, if it has fewer than five), unless they release you
+          from this requirement.
+
+       C. State on the Title page the name of the publisher of the
+          Modified Version, as the publisher.
+
+       D. Preserve all the copyright notices of the Document.
+
+       E. Add an appropriate copyright notice for your modifications
+          adjacent to the other copyright notices.
+
+       F. Include, immediately after the copyright notices, a license
+          notice giving the public permission to use the Modified
+          Version under the terms of this License, in the form shown in
+          the Addendum below.
+
+       G. Preserve in that license notice the full lists of Invariant
+          Sections and required Cover Texts given in the Document's
+          license notice.
+
+       H. Include an unaltered copy of this License.
+
+       I. Preserve the section Entitled "History", Preserve its Title,
+          and add to it an item stating at least the title, year, new
+          authors, and publisher of the Modified Version as given on the
+          Title Page.  If there is no section Entitled "History" in the
+          Document, create one stating the title, year, authors, and
+          publisher of the Document as given on its Title Page, then add
+          an item describing the Modified Version as stated in the
+          previous sentence.
+
+       J. Preserve the network location, if any, given in the Document
+          for public access to a Transparent copy of the Document, and
+          likewise the network locations given in the Document for
+          previous versions it was based on.  These may be placed in the
+          "History" section.  You may omit a network location for a work
+          that was published at least four years before the Document
+          itself, or if the original publisher of the version it refers
+          to gives permission.
+
+       K. For any section Entitled "Acknowledgements" or "Dedications",
+          Preserve the Title of the section, and preserve in the section
+          all the substance and tone of each of the contributor
+          acknowledgements and/or dedications given therein.
+
+       L. Preserve all the Invariant Sections of the Document, unaltered
+          in their text and in their titles.  Section numbers or the
+          equivalent are not considered part of the section titles.
+
+       M. Delete any section Entitled "Endorsements".  Such a section
+          may not be included in the Modified Version.
+
+       N. Do not retitle any existing section to be Entitled
+          "Endorsements" or to conflict in title with any Invariant
+          Section.
+
+       O. Preserve any Warranty Disclaimers.
+
+     If the Modified Version includes new front-matter sections or
+     appendices that qualify as Secondary Sections and contain no
+     material copied from the Document, you may at your option designate
+     some or all of these sections as invariant.  To do this, add their
+     titles to the list of Invariant Sections in the Modified Version's
+     license notice.  These titles must be distinct from any other
+     section titles.
+
+     You may add a section Entitled "Endorsements", provided it contains
+     nothing but endorsements of your Modified Version by various
+     parties--for example, statements of peer review or that the text
+     has been approved by an organization as the authoritative
+     definition of a standard.
+
+     You may add a passage of up to five words as a Front-Cover Text,
+     and a passage of up to 25 words as a Back-Cover Text, to the end of
+     the list of Cover Texts in the Modified Version.  Only one passage
+     of Front-Cover Text and one of Back-Cover Text may be added by (or
+     through arrangements made by) any one entity.  If the Document
+     already includes a cover text for the same cover, previously added
+     by you or by arrangement made by the same entity you are acting on
+     behalf of, you may not add another; but you may replace the old
+     one, on explicit permission from the previous publisher that added
+     the old one.
+
+     The author(s) and publisher(s) of the Document do not by this
+     License give permission to use their names for publicity for or to
+     assert or imply endorsement of any Modified Version.
+
+  5. COMBINING DOCUMENTS
+
+     You may combine the Document with other documents released under
+     this License, under the terms defined in section 4 above for
+     modified versions, provided that you include in the combination all
+     of the Invariant Sections of all of the original documents,
+     unmodified, and list them all as Invariant Sections of your
+     combined work in its license notice, and that you preserve all
+     their Warranty Disclaimers.
+
+     The combined work need only contain one copy of this License, and
+     multiple identical Invariant Sections may be replaced with a single
+     copy.  If there are multiple Invariant Sections with the same name
+     but different contents, make the title of each such section unique
+     by adding at the end of it, in parentheses, the name of the
+     original author or publisher of that section if known, or else a
+     unique number.  Make the same adjustment to the section titles in
+     the list of Invariant Sections in the license notice of the
+     combined work.
+
+     In the combination, you must combine any sections Entitled
+     "History" in the various original documents, forming one section
+     Entitled "History"; likewise combine any sections Entitled
+     "Acknowledgements", and any sections Entitled "Dedications".  You
+     must delete all sections Entitled "Endorsements."
+
+  6. COLLECTIONS OF DOCUMENTS
+
+     You may make a collection consisting of the Document and other
+     documents released under this License, and replace the individual
+     copies of this License in the various documents with a single copy
+     that is included in the collection, provided that you follow the
+     rules of this License for verbatim copying of each of the documents
+     in all other respects.
+
+     You may extract a single document from such a collection, and
+     distribute it individually under this License, provided you insert
+     a copy of this License into the extracted document, and follow this
+     License in all other respects regarding verbatim copying of that
+     document.
+
+  7. AGGREGATION WITH INDEPENDENT WORKS
+
+     A compilation of the Document or its derivatives with other
+     separate and independent documents or works, in or on a volume of a
+     storage or distribution medium, is called an "aggregate" if the
+     copyright resulting from the compilation is not used to limit the
+     legal rights of the compilation's users beyond what the individual
+     works permit.  When the Document is included in an aggregate, this
+     License does not apply to the other works in the aggregate which
+     are not themselves derivative works of the Document.
+
+     If the Cover Text requirement of section 3 is applicable to these
+     copies of the Document, then if the Document is less than one half
+     of the entire aggregate, the Document's Cover Texts may be placed
+     on covers that bracket the Document within the aggregate, or the
+     electronic equivalent of covers if the Document is in electronic
+     form.  Otherwise they must appear on printed covers that bracket
+     the whole aggregate.
+
+  8. TRANSLATION
+
+     Translation is considered a kind of modification, so you may
+     distribute translations of the Document under the terms of section
+     4.  Replacing Invariant Sections with translations requires special
+     permission from their copyright holders, but you may include
+     translations of some or all Invariant Sections in addition to the
+     original versions of these Invariant Sections.  You may include a
+     translation of this License, and all the license notices in the
+     Document, and any Warranty Disclaimers, provided that you also
+     include the original English version of this License and the
+     original versions of those notices and disclaimers.  In case of a
+     disagreement between the translation and the original version of
+     this License or a notice or disclaimer, the original version will
+     prevail.
+
+     If a section in the Document is Entitled "Acknowledgements",
+     "Dedications", or "History", the requirement (section 4) to
+     Preserve its Title (section 1) will typically require changing the
+     actual title.
+
+  9. TERMINATION
+
+     You may not copy, modify, sublicense, or distribute the Document
+     except as expressly provided under this License.  Any attempt
+     otherwise to copy, modify, sublicense, or distribute it is void,
+     and will automatically terminate your rights under this License.
+
+     However, if you cease all violation of this License, then your
+     license from a particular copyright holder is reinstated (a)
+     provisionally, unless and until the copyright holder explicitly and
+     finally terminates your license, and (b) permanently, if the
+     copyright holder fails to notify you of the violation by some
+     reasonable means prior to 60 days after the cessation.
+
+     Moreover, your license from a particular copyright holder is
+     reinstated permanently if the copyright holder notifies you of the
+     violation by some reasonable means, this is the first time you have
+     received notice of violation of this License (for any work) from
+     that copyright holder, and you cure the violation prior to 30 days
+     after your receipt of the notice.
+
+     Termination of your rights under this section does not terminate
+     the licenses of parties who have received copies or rights from you
+     under this License.  If your rights have been terminated and not
+     permanently reinstated, receipt of a copy of some or all of the
+     same material does not give you any rights to use it.
+
+  10. FUTURE REVISIONS OF THIS LICENSE
+
+     The Free Software Foundation may publish new, revised versions of
+     the GNU Free Documentation License from time to time.  Such new
+     versions will be similar in spirit to the present version, but may
+     differ in detail to address new problems or concerns.  See
+     <http://www.gnu.org/copyleft/>.
+
+     Each version of the License is given a distinguishing version
+     number.  If the Document specifies that a particular numbered
+     version of this License "or any later version" applies to it, you
+     have the option of following the terms and conditions either of
+     that specified version or of any later version that has been
+     published (not as a draft) by the Free Software Foundation.  If the
+     Document does not specify a version number of this License, you may
+     choose any version ever published (not as a draft) by the Free
+     Software Foundation.  If the Document specifies that a proxy can
+     decide which future versions of this License can be used, that
+     proxy's public statement of acceptance of a version permanently
+     authorizes you to choose that version for the Document.
+
+  11. RELICENSING
+
+     "Massive Multiauthor Collaboration Site" (or "MMC Site") means any
+     World Wide Web server that publishes copyrightable works and also
+     provides prominent facilities for anybody to edit those works.  A
+     public wiki that anybody can edit is an example of such a server.
+     A "Massive Multiauthor Collaboration" (or "MMC") contained in the
+     site means any set of copyrightable works thus published on the MMC
+     site.
+
+     "CC-BY-SA" means the Creative Commons Attribution-Share Alike 3.0
+     license published by Creative Commons Corporation, a not-for-profit
+     corporation with a principal place of business in San Francisco,
+     California, as well as future copyleft versions of that license
+     published by that same organization.
+
+     "Incorporate" means to publish or republish a Document, in whole or
+     in part, as part of another Document.
+
+     An MMC is "eligible for relicensing" if it is licensed under this
+     License, and if all works that were first published under this
+     License somewhere other than this MMC, and subsequently
+     incorporated in whole or in part into the MMC, (1) had no cover
+     texts or invariant sections, and (2) were thus incorporated prior
+     to November 1, 2008.
+
+     The operator of an MMC Site may republish an MMC contained in the
+     site under CC-BY-SA on the same site at any time before August 1,
+     2009, provided the MMC is eligible for relicensing.
+
+ADDENDUM: How to use this License for your documents
+====================================================
+
+To use this License in a document you have written, include a copy of
+the License in the document and put the following copyright and license
+notices just after the title page:
+
+       Copyright (C)  YEAR  YOUR NAME.
+       Permission is granted to copy, distribute and/or modify this document
+       under the terms of the GNU Free Documentation License, Version 1.3
+       or any later version published by the Free Software Foundation;
+       with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
+       Texts.  A copy of the license is included in the section entitled ``GNU
+       Free Documentation License''.
+
+   If you have Invariant Sections, Front-Cover Texts and Back-Cover
+Texts, replace the "with...Texts."  line with this:
+
+         with the Invariant Sections being LIST THEIR TITLES, with
+         the Front-Cover Texts being LIST, and with the Back-Cover Texts
+         being LIST.
+
+   If you have Invariant Sections without Cover Texts, or some other
+combination of the three, merge those two alternatives to suit the
+situation.
+
+   If your document contains nontrivial examples of program code, we
+recommend releasing these examples in parallel under your choice of free
+software license, such as the GNU General Public License, to permit
+their use in free software.
+
+
+
+Tag Table:
+Node: Top702
+Node: Timeline2236
+Node: Dependency Tracking Evolution33697
+Node: First Take on Dependencies34539
+Node: Dependencies As Side Effects37197
+Node: Dependencies for the User39256
+Node: Techniques for Dependencies44286
+Node: Recommendations for Tool Writers45984
+Node: Future Directions for Dependencies46703
+Node: Releases47173
+Node: Copying This Manual52560
+Node: GNU Free Documentation License52787
+
+End Tag Table
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info
new file mode 100644
index 0000000..042fe6e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info
@@ -0,0 +1,241 @@
+This is automake.info, produced by makeinfo version 5.2 from
+automake.texi.
+
+This manual is for GNU Automake (version 1.14.1, 6 November 2013), a
+program that creates GNU standards-compliant Makefiles from template
+files.
+
+   Copyright (C) 1995-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with no Front-Cover texts,
+     and with no Back-Cover Texts.  A copy of the license is included in
+     the section entitled "GNU Free Documentation License."
+
+INFO-DIR-SECTION Software development
+START-INFO-DIR-ENTRY
+* Automake: (automake).         Making GNU standards-compliant Makefiles.
+END-INFO-DIR-ENTRY
+
+INFO-DIR-SECTION Individual utilities
+START-INFO-DIR-ENTRY
+* aclocal-invocation: (automake)aclocal Invocation.   Generating aclocal.m4.
+* automake-invocation: (automake)automake Invocation. Generating Makefile.in.
+END-INFO-DIR-ENTRY
+
+
+Indirect:
+automake.info-1: 1079
+automake.info-2: 304798
+
+Tag Table:
+(Indirect)
+Node: Top1079
+Node: Introduction14332
+Ref: Introduction-Footnote-115701
+Node: Autotools Introduction15850
+Node: GNU Build System17201
+Node: Use Cases19865
+Node: Basic Installation21973
+Node: Standard Targets25439
+Node: Standard Directory Variables26924
+Node: Standard Configuration Variables28665
+Node: config.site29976
+Node: VPATH Builds31358
+Node: Two-Part Install35242
+Node: Cross-Compilation37646
+Node: Renaming40537
+Node: DESTDIR41653
+Node: Preparing Distributions43765
+Node: Dependency Tracking45715
+Node: Nested Packages47791
+Node: Why Autotools49281
+Node: Hello World50901
+Ref: amhello Explained51311
+Node: Creating amhello51479
+Node: amhello's configure.ac Setup Explained56660
+Node: amhello's Makefile.am Setup Explained61316
+Node: Generalities64810
+Node: General Operation65504
+Node: Strictness68812
+Node: Uniform70403
+Node: Length Limitations75136
+Node: Canonicalization77412
+Node: User Variables78456
+Node: Auxiliary Programs79901
+Node: Examples83487
+Node: Complete84353
+Node: true86340
+Node: automake Invocation88720
+Ref: Invoking automake88871
+Node: configure96002
+Node: Requirements96897
+Node: Optional101871
+Node: aclocal Invocation110723
+Ref: Invoking aclocal110884
+Node: aclocal Options113795
+Node: Macro Search Path117144
+Ref: ACLOCAL_PATH121259
+Node: Extending aclocal122762
+Node: Local Macros126364
+Node: Serials130202
+Node: Future of aclocal135103
+Node: Macros137376
+Node: Public Macros137905
+Ref: Modernize AM_INIT_AUTOMAKE invocation139284
+Node: Obsolete Macros143527
+Node: Private Macros144809
+Node: Directories146211
+Node: Subdirectories147785
+Node: Conditional Subdirectories151048
+Node: SUBDIRS vs DIST_SUBDIRS152644
+Node: Subdirectories with AM_CONDITIONAL154190
+Node: Subdirectories with AC_SUBST155304
+Node: Unconfigured Subdirectories156103
+Node: Alternative159400
+Ref: Alternative-Footnote-1161540
+Node: Subpackages161665
+Node: Programs164862
+Node: A Program166402
+Node: Program Sources167125
+Node: Linking168936
+Node: Conditional Sources172391
+Node: Conditional Programs175207
+Node: A Library177023
+Node: A Shared Library179572
+Node: Libtool Concept180574
+Node: Libtool Libraries182630
+Node: Conditional Libtool Libraries184304
+Node: Conditional Libtool Sources186667
+Node: Libtool Convenience Libraries188006
+Node: Libtool Modules191331
+Node: Libtool Flags192599
+Node: LTLIBOBJS194395
+Node: Libtool Issues194986
+Node: Error required file ltmain.sh not found195325
+Node: Objects created both with libtool and without196489
+Node: Program and Library Variables198325
+Ref: Program and Library Variables-Footnote-1209143
+Node: Default _SOURCES209218
+Node: LIBOBJS211574
+Node: Program Variables216545
+Node: Yacc and Lex219935
+Ref: Yacc and Lex-Footnote-1225269
+Node: C++ Support225498
+Node: Objective C Support226340
+Node: Objective C++ Support227275
+Node: Unified Parallel C Support228259
+Node: Assembly Support229217
+Node: Fortran 77 Support230297
+Ref: Fortran 77 Support-Footnote-1231930
+Node: Preprocessing Fortran 77232133
+Node: Compiling Fortran 77 Files232709
+Node: Mixing Fortran 77 With C and C++233281
+Ref: Mixing Fortran 77 With C and C++-Footnote-1235518
+Node: How the Linker is Chosen235821
+Node: Fortran 9x Support237297
+Node: Compiling Fortran 9x Files238305
+Node: Java Support with gcj238889
+Node: Vala Support240274
+Node: Support for Other Languages242305
+Node: Dependencies243013
+Node: EXEEXT244842
+Node: Other Objects247002
+Node: Scripts247594
+Node: Headers250333
+Node: Data252056
+Node: Sources252713
+Node: Built Sources Example255484
+Node: Other GNU Tools262394
+Node: Emacs Lisp262919
+Node: gettext264931
+Node: Libtool265583
+Node: Java265838
+Node: Python268255
+Node: Documentation273108
+Node: Texinfo273412
+Node: Man Pages280102
+Node: Install283107
+Node: Basics of Installation283807
+Node: The Two Parts of Install285297
+Node: Extending Installation286716
+Node: Staged Installs287460
+Node: Install Rules for the User288809
+Node: Clean289335
+Node: Dist291411
+Node: Basics of Distribution291903
+Node: Fine-grained Distribution Control294732
+Node: The dist Hook295639
+Node: Checking the Distribution298024
+Node: The Types of Distributions304798
+Node: Tests306916
+Node: Generalities about Testing308092
+Node: Simple Tests311096
+Node: Scripts-based Testsuites311477
+Ref: Testsuite progress on console313818
+Ref: Simple tests and color-tests314893
+Node: Serial Test Harness318773
+Node: Parallel Test Harness320837
+Ref: Basics of test metadata321335
+Node: Custom Test Drivers329741
+Node: Overview of Custom Test Drivers Support330032
+Node: Declaring Custom Test Drivers332968
+Node: API for Custom Test Drivers334334
+Node: Command-line arguments for test drivers335111
+Node: Log files generation and test results recording337751
+Node: Testsuite progress output341805
+Node: Using the TAP test protocol343217
+Node: Introduction to TAP343579
+Node: Use TAP with the Automake test harness345368
+Node: Incompatibilities with other TAP parsers and drivers350768
+Node: Links and external resources on TAP352115
+Node: DejaGnu Tests353693
+Node: Install Tests355718
+Node: Rebuilding356020
+Node: Options359505
+Node: Options generalities359804
+Node: List of Automake options361522
+Ref: tar-formats367547
+Node: Miscellaneous370794
+Node: Tags371137
+Node: Suffixes374073
+Node: Include375645
+Node: Conditionals377284
+Node: Usage of Conditionals378115
+Node: Limits of Conditionals381343
+Node: Silencing Make382524
+Node: Make verbosity382871
+Ref: Make verbosity-Footnote-1384171
+Node: Tricks For Silencing Make384245
+Node: Automake Silent Rules386676
+Node: Gnits393489
+Node: Not Enough395798
+Node: Extending396235
+Node: Third-Party Makefiles400966
+Node: Distributing407556
+Node: API Versioning408193
+Node: Upgrading410832
+Node: FAQ412787
+Node: CVS413907
+Node: maintainer-mode422062
+Node: Wildcards426054
+Node: Limitations on File Names429329
+Node: Errors with distclean431905
+Node: Flag Variables Ordering436653
+Node: Renamed Objects444024
+Node: Per-Object Flags445567
+Node: Multiple Outputs448464
+Node: Hard-Coded Install Paths460057
+Node: Debugging Make Rules465071
+Ref: Debugging Make Rules-Footnote-1467142
+Node: Reporting Bugs467306
+Node: Copying This Manual469239
+Node: GNU Free Documentation License469469
+Node: Indices494569
+Node: Macro Index494858
+Node: Variable Index500497
+Node: General Index531373
+
+End Tag Table
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info-1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info-1
new file mode 100644
index 0000000..8f3733a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info-1
@@ -0,0 +1,7267 @@
+This is automake.info, produced by makeinfo version 5.2 from
+automake.texi.
+
+This manual is for GNU Automake (version 1.14.1, 6 November 2013), a
+program that creates GNU standards-compliant Makefiles from template
+files.
+
+   Copyright (C) 1995-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with no Front-Cover texts,
+     and with no Back-Cover Texts.  A copy of the license is included in
+     the section entitled "GNU Free Documentation License."
+
+INFO-DIR-SECTION Software development
+START-INFO-DIR-ENTRY
+* Automake: (automake).         Making GNU standards-compliant Makefiles.
+END-INFO-DIR-ENTRY
+
+INFO-DIR-SECTION Individual utilities
+START-INFO-DIR-ENTRY
+* aclocal-invocation: (automake)aclocal Invocation.   Generating aclocal.m4.
+* automake-invocation: (automake)automake Invocation. Generating Makefile.in.
+END-INFO-DIR-ENTRY
+
+
+File: automake.info,  Node: Top,  Next: Introduction,  Up: (dir)
+
+GNU Automake
+************
+
+This manual is for GNU Automake (version 1.14.1, 6 November 2013), a
+program that creates GNU standards-compliant Makefiles from template
+files.
+
+   Copyright (C) 1995-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with no Front-Cover texts,
+     and with no Back-Cover Texts.  A copy of the license is included in
+     the section entitled "GNU Free Documentation License."
+
+* Menu:
+
+* Introduction::                Automake's purpose
+* Autotools Introduction::      An Introduction to the Autotools
+* Generalities::                General ideas
+* Examples::                    Some example packages
+* automake Invocation::         Creating a Makefile.in
+* configure::                   Scanning configure.ac, using aclocal
+* Directories::                 Declaring subdirectories
+* Programs::                    Building programs and libraries
+* Other Objects::               Other derived objects
+* Other GNU Tools::             Other GNU Tools
+* Documentation::               Building documentation
+* Install::                     What gets installed
+* Clean::                       What gets cleaned
+* Dist::                        What goes in a distribution
+* Tests::                       Support for test suites
+* Rebuilding::                  Automatic rebuilding of Makefile
+* Options::                     Changing Automake's behavior
+* Miscellaneous::               Miscellaneous rules
+* Include::                     Including extra files in an Automake template
+* Conditionals::                Conditionals
+* Silencing Make::              Obtain less verbose output from 'make'
+* Gnits::                       The effect of '--gnu' and '--gnits'
+* Not Enough::                  When Automake is not Enough
+* Distributing::                Distributing the Makefile.in
+* API Versioning::              About compatibility between Automake versions
+* Upgrading::                   Upgrading to a Newer Automake Version
+* FAQ::                         Frequently Asked Questions
+* Copying This Manual::         How to make copies of this manual
+* Indices::                     Indices of variables, macros, and concepts
+
+ -- The Detailed Node Listing --
+
+An Introduction to the Autotools
+
+* GNU Build System::            Introducing the GNU Build System
+* Use Cases::                   Use Cases for the GNU Build System
+* Why Autotools::               How Autotools Help
+* Hello World::                 A Small Hello World Package
+
+Use Cases for the GNU Build System
+
+* Basic Installation::          Common installation procedure
+* Standard Targets::            A list of standard Makefile targets
+* Standard Directory Variables::  A list of standard directory variables
+* Standard Configuration Variables::  Using configuration variables
+* config.site::                 Using a config.site file
+* VPATH Builds::                Parallel build trees
+* Two-Part Install::            Installing data and programs separately
+* Cross-Compilation::           Building for other architectures
+* Renaming::                    Renaming programs at install time
+* DESTDIR::                     Building binary packages with DESTDIR
+* Preparing Distributions::     Rolling out tarballs
+* Dependency Tracking::         Automatic dependency tracking
+* Nested Packages::             The GNU Build Systems can be nested
+
+A Small Hello World
+
+* Creating amhello::            Create 'amhello-1.0.tar.gz' from scratch
+* amhello's configure.ac Setup Explained::
+* amhello's Makefile.am Setup Explained::
+
+General ideas
+
+* General Operation::           General operation of Automake
+* Strictness::                  Standards conformance checking
+* Uniform::                     The Uniform Naming Scheme
+* Length Limitations::          Staying below the command line length limit
+* Canonicalization::            How derived variables are named
+* User Variables::              Variables reserved for the user
+* Auxiliary Programs::          Programs automake might require
+
+Some example packages
+
+* Complete::                    A simple example, start to finish
+* true::                        Building true and false
+
+Scanning 'configure.ac', using 'aclocal'
+
+* Requirements::                Configuration requirements
+* Optional::                    Other things Automake recognizes
+* aclocal Invocation::          Auto-generating aclocal.m4
+* Macros::                      Autoconf macros supplied with Automake
+
+Auto-generating aclocal.m4
+
+* aclocal Options::             Options supported by aclocal
+* Macro Search Path::           How aclocal finds .m4 files
+* Extending aclocal::           Writing your own aclocal macros
+* Local Macros::                Organizing local macros
+* Serials::                     Serial lines in Autoconf macros
+* Future of aclocal::           aclocal's scheduled death
+
+Autoconf macros supplied with Automake
+
+* Public Macros::               Macros that you can use.
+* Private Macros::              Macros that you should not use.
+
+Directories
+
+* Subdirectories::              Building subdirectories recursively
+* Conditional Subdirectories::  Conditionally not building directories
+* Alternative::                 Subdirectories without recursion
+* Subpackages::                 Nesting packages
+
+Conditional Subdirectories
+
+* SUBDIRS vs DIST_SUBDIRS::     Two sets of directories
+* Subdirectories with AM_CONDITIONAL::  Specifying conditional subdirectories
+* Subdirectories with AC_SUBST::  Another way for conditional recursion
+* Unconfigured Subdirectories::  Not even creating a 'Makefile'
+
+Building Programs and Libraries
+
+* A Program::                   Building a program
+* A Library::                   Building a library
+* A Shared Library::            Building a Libtool library
+* Program and Library Variables::  Variables controlling program and
+                                library builds
+* Default _SOURCES::            Default source files
+* LIBOBJS::                     Special handling for LIBOBJS and ALLOCA
+* Program Variables::           Variables used when building a program
+* Yacc and Lex::                Yacc and Lex support
+* C++ Support::                 Compiling C++ sources
+* Objective C Support::         Compiling Objective C sources
+* Objective C++ Support::       Compiling Objective C++ sources
+* Unified Parallel C Support::  Compiling Unified Parallel C sources
+* Assembly Support::            Compiling assembly sources
+* Fortran 77 Support::          Compiling Fortran 77 sources
+* Fortran 9x Support::          Compiling Fortran 9x sources
+* Java Support with gcj::       Compiling Java sources using gcj
+* Vala Support::                Compiling Vala sources
+* Support for Other Languages::  Compiling other languages
+* Dependencies::                Automatic dependency tracking
+* EXEEXT::                      Support for executable extensions
+
+Building a program
+
+* Program Sources::             Defining program sources
+* Linking::                     Linking with libraries or extra objects
+* Conditional Sources::         Handling conditional sources
+* Conditional Programs::        Building a program conditionally
+
+Building a Shared Library
+
+* Libtool Concept::             Introducing Libtool
+* Libtool Libraries::           Declaring Libtool Libraries
+* Conditional Libtool Libraries::  Building Libtool Libraries Conditionally
+* Conditional Libtool Sources::  Choosing Library Sources Conditionally
+* Libtool Convenience Libraries::  Building Convenience Libtool Libraries
+* Libtool Modules::             Building Libtool Modules
+* Libtool Flags::               Using _LIBADD, _LDFLAGS, and _LIBTOOLFLAGS
+* LTLIBOBJS::                   Using $(LTLIBOBJS) and $(LTALLOCA)
+* Libtool Issues::              Common Issues Related to Libtool's Use
+
+Common Issues Related to Libtool's Use
+
+* Error required file ltmain.sh not found::  The need to run libtoolize
+* Objects created both with libtool and without::  Avoid a specific build race
+
+Fortran 77 Support
+
+* Preprocessing Fortran 77::    Preprocessing Fortran 77 sources
+* Compiling Fortran 77 Files::  Compiling Fortran 77 sources
+* Mixing Fortran 77 With C and C++::  Mixing Fortran 77 With C and C++
+
+Mixing Fortran 77 With C and C++
+
+* How the Linker is Chosen::    Automatic linker selection
+
+Fortran 9x Support
+
+* Compiling Fortran 9x Files::  Compiling Fortran 9x sources
+
+Other Derived Objects
+
+* Scripts::                     Executable scripts
+* Headers::                     Header files
+* Data::                        Architecture-independent data files
+* Sources::                     Derived sources
+
+Built Sources
+
+* Built Sources Example::       Several ways to handle built sources.
+
+Other GNU Tools
+
+* Emacs Lisp::                  Emacs Lisp
+* gettext::                     Gettext
+* Libtool::                     Libtool
+* Java::                        Java bytecode compilation (deprecated)
+* Python::                      Python
+
+Building documentation
+
+* Texinfo::                     Texinfo
+* Man Pages::                   Man pages
+
+What Gets Installed
+
+* Basics of Installation::      What gets installed where
+* The Two Parts of Install::    Installing data and programs separately
+* Extending Installation::      Adding your own rules for installation
+* Staged Installs::             Installation in a temporary location
+* Install Rules for the User::  Useful additional rules
+
+What Goes in a Distribution
+
+* Basics of Distribution::      Files distributed by default
+* Fine-grained Distribution Control::  'dist_' and 'nodist_' prefixes
+* The dist Hook::               A target for last-minute distribution changes
+* Checking the Distribution::   'make distcheck' explained
+* The Types of Distributions::  A variety of formats and compression methods
+
+Support for test suites
+
+* Generalities about Testing::  Generic concepts and terminology about testing
+* Simple Tests::                Listing test scripts in 'TESTS'
+* Custom Test Drivers::         Writing and using custom test drivers
+* Using the TAP test protocol:: Integrating test scripts that use the TAP protocol
+* DejaGnu Tests::               Interfacing with the 'dejagnu' testing framework
+* Install Tests::               Running tests on installed packages
+
+Simple Tests
+
+* Scripts-based Testsuites::    Automake-specific concepts and terminology
+* Serial Test Harness::         Older (and discouraged) serial test harness
+* Parallel Test Harness::       Generic concurrent test harness
+
+Using the TAP test protocol
+
+* Introduction to TAP::
+* Use TAP with the Automake test harness::
+* Incompatibilities with other TAP parsers and drivers::
+* Links and external resources on TAP::
+
+Custom Test Drivers
+
+* Overview of Custom Test Drivers Support::
+* Declaring Custom Test Drivers::
+* API for Custom Test Drivers::
+
+API for Custom Test Drivers
+
+* Command-line arguments for test drivers::
+* Log files generation and test results recording::
+* Testsuite progress output::
+
+Changing Automake's Behavior
+
+* Options generalities::        Semantics of Automake option
+* List of Automake options::    A comprehensive list of Automake options
+
+Miscellaneous Rules
+
+* Tags::                        Interfacing to cscope, etags and mkid
+* Suffixes::                    Handling new file extensions
+
+Conditionals
+
+* Usage of Conditionals::       Declaring conditional content
+* Limits of Conditionals::      Enclosing complete statements
+
+Silencing Make
+
+* Make verbosity::              Make is verbose by default
+* Tricks For Silencing Make::   Standard and generic ways to silence make
+* Automake Silent Rules::       How Automake can help in silencing make
+
+When Automake Isn't Enough
+
+* Extending::                   Adding new rules or overriding existing ones.
+* Third-Party Makefiles::       Integrating Non-Automake 'Makefile's.
+
+Frequently Asked Questions about Automake
+
+* CVS::                         CVS and generated files
+* maintainer-mode::             missing and AM_MAINTAINER_MODE
+* Wildcards::                   Why doesn't Automake support wildcards?
+* Limitations on File Names::   Limitations on source and installed file names
+* Errors with distclean::       Files left in build directory after distclean
+* Flag Variables Ordering::     CFLAGS vs. AM_CFLAGS vs. mumble_CFLAGS
+* Renamed Objects::             Why are object files sometimes renamed?
+* Per-Object Flags::            How to simulate per-object flags?
+* Multiple Outputs::            Writing rules for tools with many output files
+* Hard-Coded Install Paths::    Installing to hard-coded locations
+* Debugging Make Rules::        Strategies when things don't work as expected
+* Reporting Bugs::              Feedback on bugs and feature requests
+
+Copying This Manual
+
+* GNU Free Documentation License::  License for copying this manual
+
+Indices
+
+* Macro Index::                 Index of Autoconf macros
+* Variable Index::              Index of Makefile variables
+* General Index::               General index
+
+
+
+File: automake.info,  Node: Introduction,  Next: Autotools Introduction,  Prev: Top,  Up: Top
+
+1 Introduction
+**************
+
+Automake is a tool for automatically generating 'Makefile.in's from
+files called 'Makefile.am'.  Each 'Makefile.am' is basically a series of
+'make' variable definitions(1), with rules being thrown in occasionally.
+The generated 'Makefile.in's are compliant with the GNU Makefile
+standards.
+
+   The GNU Makefile Standards Document (*note (standards)Makefile
+Conventions::) is long, complicated, and subject to change.  The goal of
+Automake is to remove the burden of Makefile maintenance from the back
+of the individual GNU maintainer (and put it on the back of the Automake
+maintainers).
+
+   The typical Automake input file is simply a series of variable
+definitions.  Each such file is processed to create a 'Makefile.in'.
+
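+   As a minimal sketch of such an input file (the program and source
+file names below are purely illustrative, not taken from any particular
+package), a 'Makefile.am' that builds one program could contain nothing
+more than two variable definitions:
+
+     # Build and install a single program called 'hello' ...
+     bin_PROGRAMS = hello
+     # ... compiled and linked from these two source files.
+     hello_SOURCES = main.c greet.c
+
+   From these definitions Automake derives the compile, link, install,
+and clean rules of the generated 'Makefile.in'.
+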
+   Automake does constrain a project in certain ways; for instance, it
+assumes that the project uses Autoconf (*note Introduction:
+(autoconf)Top.), and enforces certain restrictions on the 'configure.ac'
+contents.
+
+   Automake requires 'perl' in order to generate the 'Makefile.in's.
+However, the distributions created by Automake are fully GNU
+standards-compliant, and do not require 'perl' in order to be built.
+
+   For more information on bug reports, *Note Reporting Bugs::.
+
+   ---------- Footnotes ----------
+
+   (1) These variables are also called "make macros" in Make
+terminology, however in this manual we reserve the term "macro" for
+Autoconf's macros.
+
+
+File: automake.info,  Node: Autotools Introduction,  Next: Generalities,  Prev: Introduction,  Up: Top
+
+2 An Introduction to the Autotools
+**********************************
+
+If you are new to Automake, maybe you know that it is part of a set of
+tools called _The Autotools_.  Maybe you've already delved into a
+package full of files named 'configure', 'configure.ac', 'Makefile.in',
+'Makefile.am', 'aclocal.m4', ..., some of them claiming to be _generated
+by_ Autoconf or Automake.  But the exact purpose of these files and
+their relations is probably fuzzy.  The goal of this chapter is to
+introduce you to this machinery, to show you how it works and how
+powerful it is.  If you've never installed or seen such a package, do
+not worry: this chapter will walk you through it.
+
+   If you need some teaching material, more illustrations, or a less
+'automake'-centered continuation, some slides for this introduction are
+available in Alexandre Duret-Lutz's Autotools Tutorial
+(http://www.lrde.epita.fr/~adl/autotools.html).  This chapter is the
+written version of the first part of his tutorial.
+
+* Menu:
+
+* GNU Build System::            Introducing the GNU Build System
+* Use Cases::                   Use Cases for the GNU Build System
+* Why Autotools::               How Autotools Help
+* Hello World::                 A Small Hello World Package
+
+
+File: automake.info,  Node: GNU Build System,  Next: Use Cases,  Up: Autotools Introduction
+
+2.1 Introducing the GNU Build System
+====================================
+
+It is a truth universally acknowledged, that as a developer in
+possession of a new package, you must be in want of a build system.
+
+   In the Unix world, such a build system is traditionally achieved
+using the command 'make' (*note Overview: (make)Top.).  You express the
+recipe to build your package in a 'Makefile'.  This file is a set of
+rules to build the files in the package.  For instance the program
+'prog' may be built by running the linker on the files 'main.o',
+'foo.o', and 'bar.o'; the file 'main.o' may be built by running the
+compiler on 'main.c'; etc.  Each time 'make' is run, it reads
+'Makefile', checks the existence and modification time of the files
+mentioned, decides what files need to be built (or rebuilt), and runs
+the associated commands.
+
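+   As a sketch of what such hand-written rules could look like (reusing
+the hypothetical 'prog', 'main.o', 'foo.o', and 'bar.o' of the previous
+paragraph; in a real 'Makefile' each command line must begin with a tab
+character):
+
+     # Link the program from its object files.
+     prog: main.o foo.o bar.o
+             cc -o prog main.o foo.o bar.o
+
+     # Recompile an object file whenever its source file changes.
+     main.o: main.c
+             cc -c main.c
+
+   Only the rule for 'main.o' is shown; 'foo.o' and 'bar.o' would be
+built by rules of the same shape.
+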
+   When a package needs to be built on a different platform than the one
+it was developed on, its 'Makefile' usually needs to be adjusted.  For
+instance the compiler may have another name or require more options.  In
+1991, David J. MacKenzie got tired of customizing 'Makefile' for the 20
+platforms he had to deal with.  Instead, he handcrafted a little shell
+script called 'configure' to automatically adjust the 'Makefile' (*note
+Genesis: (autoconf)Genesis.).  Compiling his package was now as simple
+as running './configure && make'.
+
+   Today this process has been standardized in the GNU project.  The GNU
+Coding Standards (*note The Release Process: (standards)Managing
+Releases.) explains how each package of the GNU project should have a
+'configure' script, and the minimal interface it should have.  The
+'Makefile' too should follow some established conventions.  The result?
+A unified build system that makes all packages almost indistinguishable
+by the installer.  In its simplest scenario, all the installer has to do
+is to unpack the package, run './configure && make && make install', and
+repeat with the next package to install.
+
+   We call this build system the "GNU Build System", since it was grown
+out of the GNU project.  However it is used by a vast number of other
+packages: following any existing convention has its advantages.
+
+   The Autotools are tools that will create a GNU Build System for your
+package.  Autoconf mostly focuses on 'configure' and Automake on
+'Makefile's.  It is entirely possible to create a GNU Build System
+without the help of these tools.  However it is rather burdensome and
+error-prone.  We will discuss this again after some illustration of the
+GNU Build System in action.
+
+
+File: automake.info,  Node: Use Cases,  Next: Why Autotools,  Prev: GNU Build System,  Up: Autotools Introduction
+
+2.2 Use Cases for the GNU Build System
+======================================
+
+In this section we explore several use cases for the GNU Build System.
+You can replay all of these examples on the 'amhello-1.0.tar.gz' package
+distributed with Automake.  If Automake is installed on your system, you
+should find a copy of this file in
+'PREFIX/share/doc/automake/amhello-1.0.tar.gz', where PREFIX is the
+installation prefix specified during configuration (PREFIX defaults to
+'/usr/local', however if Automake was installed by some GNU/Linux
+distribution it most likely has been set to '/usr').  If you do not have
+a copy of Automake installed, you can find a copy of this file inside
+the 'doc/' directory of the Automake package.
+
+   Some of the following use cases present features that are in fact
+extensions to the GNU Build System.  Read: they are not specified by the
+GNU Coding Standards, but they are nonetheless part of the build system
+created by the Autotools.  To keep things simple, we do not point out
+the difference.  Our objective is to show you many of the features that
+the build system created by the Autotools will offer to you.
+
+* Menu:
+
+* Basic Installation::          Common installation procedure
+* Standard Targets::            A list of standard Makefile targets
+* Standard Directory Variables::  A list of standard directory variables
+* Standard Configuration Variables::  Using configuration variables
+* config.site::                 Using a config.site file
+* VPATH Builds::                Parallel build trees
+* Two-Part Install::            Installing data and programs separately
+* Cross-Compilation::           Building for other architectures
+* Renaming::                    Renaming programs at install time
+* DESTDIR::                     Building binary packages with DESTDIR
+* Preparing Distributions::     Rolling out tarballs
+* Dependency Tracking::         Automatic dependency tracking
+* Nested Packages::             The GNU Build Systems can be nested
+
+
+File: automake.info,  Node: Basic Installation,  Next: Standard Targets,  Up: Use Cases
+
+2.2.1 Basic Installation
+------------------------
+
+The most common installation procedure looks as follows.
+
+     ~ % tar zxf amhello-1.0.tar.gz
+     ~ % cd amhello-1.0
+     ~/amhello-1.0 % ./configure
+     ...
+     config.status: creating Makefile
+     config.status: creating src/Makefile
+     ...
+     ~/amhello-1.0 % make
+     ...
+     ~/amhello-1.0 % make check
+     ...
+     ~/amhello-1.0 % su
+     Password:
+     /home/adl/amhello-1.0 # make install
+     ...
+     /home/adl/amhello-1.0 # exit
+     ~/amhello-1.0 % make installcheck
+     ...
+
+   The user first unpacks the package.  Here, and in the following
+examples, we will use the non-portable 'tar zxf' command for simplicity.
+On a system without GNU 'tar' installed, this command should read
+'gunzip -c amhello-1.0.tar.gz | tar xf -'.
+
+   The user then enters the newly created directory to run the
+'configure' script.  This script probes the system for various features,
+and finally creates the 'Makefile's.  In this toy example there are only
+two 'Makefile's, but in real-world projects, there may be many more,
+usually one 'Makefile' per directory.
+
+   It is now possible to run 'make'.  This will construct all the
+programs, libraries, and scripts that need to be constructed for the
+package.  In our example, this compiles the 'hello' program.  All files
+are constructed in place, in the source tree; we will see later how this
+can be changed.
+
+   'make check' causes the package's tests to be run.  This step is not
+mandatory, but it is often good to make sure the programs that have been
+built behave as they should, before you decide to install them.  Our
+example does not contain any tests, so running 'make check' is a no-op.
+
+   After everything has been built, and maybe tested, it is time to
+install it on the system.  That means copying the programs, libraries,
+header files, scripts, and other data files from the source directory to
+their final destination on the system.  The command 'make install' will
+do that.  However, by default everything will be installed in
+subdirectories of '/usr/local': binaries will go into '/usr/local/bin',
+libraries will end up in '/usr/local/lib', etc.  This destination is
+usually not writable by any user, so we assume that we have to become
+root before we can run 'make install'.  In our example, running 'make
+install' will copy the program 'hello' into '/usr/local/bin' and
+'README' into '/usr/local/share/doc/amhello'.
+
+   A last and optional step is to run 'make installcheck'.  This command
+may run tests on the installed files.  'make check' tests the files in
+the source tree, while 'make installcheck' tests their installed copies.
+The tests run by the latter can be different from those run by the
+former.  For instance, there are tests that cannot be run in the source
+tree.  Conversely, some packages are set up so that 'make installcheck'
+will run the very same tests as 'make check', only on different files
+(non-installed vs. installed).  It can make a difference, for instance
+when the source tree's layout is different from that of the
+installation.  Furthermore it may help to diagnose an incomplete
+installation.
+
+   Presently most packages do not have any 'installcheck' tests because
+the existence of 'installcheck' is little known, and its usefulness is
+neglected.  Our little toy package is no better: 'make installcheck'
+does nothing.
+
+
+File: automake.info,  Node: Standard Targets,  Next: Standard Directory Variables,  Prev: Basic Installation,  Up: Use Cases
+
+2.2.2 Standard 'Makefile' Targets
+---------------------------------
+
+So far we have come across four ways to run 'make' in the GNU Build
+System: 'make', 'make check', 'make install', and 'make installcheck'.
+The words 'check', 'install', and 'installcheck', passed as arguments to
+'make', are called "targets".  'make' is a shorthand for 'make all',
+'all' being the default target in the GNU Build System.
+
+   Here is a list of the most useful targets that the GNU Coding
+Standards specify.
+
+'make all'
+     Build programs, libraries, documentation, etc. (same as 'make').
+'make install'
+     Install what needs to be installed, copying the files from the
+     package's tree to system-wide directories.
+'make install-strip'
+     Same as 'make install', then strip debugging symbols.  Some users
+     like to trade space for useful bug reports...
+'make uninstall'
+     The opposite of 'make install': erase the installed files.  (This
+     needs to be run from the same build tree that was installed.)
+'make clean'
+     Erase from the build tree the files built by 'make all'.
+'make distclean'
+     Additionally erase anything './configure' created.
+'make check'
+     Run the test suite, if any.
+'make installcheck'
+     Check the installed programs or libraries, if supported.
+'make dist'
+     Recreate 'PACKAGE-VERSION.tar.gz' from all the source files.
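+
+   For instance, a hypothetical continuation of the 'amhello-1.0'
+session from the previous section could undo the installation and
+return the tree to its freshly unpacked state as follows:
+
+     ~/amhello-1.0 % su
+     Password:
+     /home/adl/amhello-1.0 # make uninstall
+     ...
+     /home/adl/amhello-1.0 # exit
+     ~/amhello-1.0 % make distclean
+     ...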
+
+
+File: automake.info,  Node: Standard Directory Variables,  Next: Standard Configuration Variables,  Prev: Standard Targets,  Up: Use Cases
+
+2.2.3 Standard Directory Variables
+----------------------------------
+
+The GNU Coding Standards also specify a hierarchy of variables to denote
+installation directories.  Some of these are:
+
+Directory variable   Default value
+-------------------------------------------------------
+'prefix'             '/usr/local'
+  'exec_prefix'      '${prefix}'
+    'bindir'         '${exec_prefix}/bin'
+    'libdir'         '${exec_prefix}/lib'
+    ...
+  'includedir'       '${prefix}/include'
+  'datarootdir'      '${prefix}/share'
+    'datadir'        '${datarootdir}'
+    'mandir'         '${datarootdir}/man'
+    'infodir'        '${datarootdir}/info'
+    'docdir'         '${datarootdir}/doc/${PACKAGE}'
+  ...
+
+   Each of these directories has a role which is often obvious from its
+name.  In a package, any installable file will be installed in one of
+these directories.  For instance in 'amhello-1.0', the program 'hello'
+is to be installed in BINDIR, the directory for binaries.  The default
+value for this directory is '/usr/local/bin', but the user can supply a
+different value when calling 'configure'.  Also the file 'README' will
+be installed into DOCDIR, which defaults to
+'/usr/local/share/doc/amhello'.
+
+   As a user, if you wish to install a package on your own account, you
+could proceed as follows:
+
+     ~/amhello-1.0 % ./configure --prefix ~/usr
+     ...
+     ~/amhello-1.0 % make
+     ...
+     ~/amhello-1.0 % make install
+     ...
+
+   This would install '~/usr/bin/hello' and
+'~/usr/share/doc/amhello/README'.
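+
+   Individual directory variables can be overridden as well.  As an
+illustrative variation on the command above, pointing 'docdir'
+elsewhere relocates only the documentation:
+
+     ~/amhello-1.0 % ./configure --prefix ~/usr --docdir ~/usr/doc
+     ...
+
+   'hello' would still be installed as '~/usr/bin/hello', but 'README'
+would now be installed as '~/usr/doc/README'.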
+
+   The list of all such directory options is shown by './configure
+--help'.
+
+
+File: automake.info,  Node: Standard Configuration Variables,  Next: config.site,  Prev: Standard Directory Variables,  Up: Use Cases
+
+2.2.4 Standard Configuration Variables
+--------------------------------------
+
+The GNU Coding Standards also define a set of standard configuration
+variables used during the build.  Here are some:
+
+'CC'
+     C compiler command
+'CFLAGS'
+     C compiler flags
+'CXX'
+     C++ compiler command
+'CXXFLAGS'
+     C++ compiler flags
+'LDFLAGS'
+     linker flags
+'CPPFLAGS'
+     C/C++ preprocessor flags
+...
+
+   'configure' usually does a good job at setting appropriate values for
+these variables, but there are cases where you may want to override
+them.  For instance you may have several versions of a compiler
+installed and would like to use another one, you may have header files
+installed outside the default search path of the compiler, or even
+libraries out of the way of the linker.
+
+   Here is how one would call 'configure' to force it to use 'gcc-3' as
+C compiler, use header files from '~/usr/include' when compiling, and
+libraries from '~/usr/lib' when linking.
+
+     ~/amhello-1.0 % ./configure --prefix ~/usr CC=gcc-3 \
+     CPPFLAGS=-I$HOME/usr/include LDFLAGS=-L$HOME/usr/lib
+
+   Again, a full list of these variables appears in the output of
+'./configure --help'.
+
+
+File: automake.info,  Node: config.site,  Next: VPATH Builds,  Prev: Standard Configuration Variables,  Up: Use Cases
+
+2.2.5 Overriding Default Configuration Setting with 'config.site'
+-----------------------------------------------------------------
+
+When installing several packages using the same setup, it can be
+convenient to create a file to capture common settings.  If a file named
+'PREFIX/share/config.site' exists, 'configure' will source it at the
+beginning of its execution.
+
+   Recall the command from the previous section:
+
+     ~/amhello-1.0 % ./configure --prefix ~/usr CC=gcc-3 \
+     CPPFLAGS=-I$HOME/usr/include LDFLAGS=-L$HOME/usr/lib
+
+   Assuming we are installing many packages in '~/usr', and will always
+want to use these definitions of 'CC', 'CPPFLAGS', and 'LDFLAGS', we can
+automate this by creating the following '~/usr/share/config.site' file:
+
+     test -z "$CC" && CC=gcc-3
+     test -z "$CPPFLAGS" && CPPFLAGS=-I$HOME/usr/include
+     test -z "$LDFLAGS" && LDFLAGS=-L$HOME/usr/lib
+
+   Now, any time a 'configure' script is using the '~/usr' prefix, it
+will execute the above 'config.site' and define these three variables.
+
+     ~/amhello-1.0 % ./configure --prefix ~/usr
+     configure: loading site script /home/adl/usr/share/config.site
+     ...
+
+   *Note Setting Site Defaults: (autoconf)Site Defaults, for more
+information about this feature.
+
+
+File: automake.info,  Node: VPATH Builds,  Next: Two-Part Install,  Prev: config.site,  Up: Use Cases
+
+2.2.6 Parallel Build Trees (a.k.a. VPATH Builds)
+------------------------------------------------
+
+The GNU Build System distinguishes two trees: the source tree, and the
+build tree.
+
+   The source tree is rooted in the directory containing 'configure'.
+It contains all the source files (those that are distributed), and may
+be arranged using several subdirectories.
+
+   The build tree is rooted in the directory in which 'configure' was
+run, and is populated with all object files, programs, libraries, and
+other derived files built from the sources (and hence not distributed).
+The build tree usually has the same subdirectory layout as the source
+tree; its subdirectories are created automatically by the build system.
+
+   If 'configure' is executed in its own directory, the source and build
+trees are combined: derived files are constructed in the same
+directories as their sources.  This was the case in our first
+installation example (*note Basic Installation::).
+
+   A common request from users is that they want to confine all derived
+files to a single directory, to keep their source directories
+uncluttered.  Here is how we could run 'configure' to build everything
+in a subdirectory called 'build/'.
+
+     ~ % tar zxf ~/amhello-1.0.tar.gz
+     ~ % cd amhello-1.0
+     ~/amhello-1.0 % mkdir build && cd build
+     ~/amhello-1.0/build % ../configure
+     ...
+     ~/amhello-1.0/build % make
+     ...
+
+   These setups, where source and build trees are different, are often
+called "parallel builds" or "VPATH builds".  The expression _parallel
+build_ is misleading: the word _parallel_ is a reference to the way the
+build tree shadows the source tree, it is not about some concurrency in
+the way build commands are run.  For this reason we refer to such setups
+using the name _VPATH builds_ in the following.  _VPATH_ is the name of
+the 'make' feature used by the 'Makefile's to allow these builds (*note
+'VPATH' Search Path for All Prerequisites: (make)General Search.).
+
+   VPATH builds have other interesting uses.  One is to build the same
+sources with multiple configurations.  For instance:
+
+     ~ % tar zxf ~/amhello-1.0.tar.gz
+     ~ % cd amhello-1.0
+     ~/amhello-1.0 % mkdir debug optim && cd debug
+     ~/amhello-1.0/debug % ../configure CFLAGS='-g -O0'
+     ...
+     ~/amhello-1.0/debug % make
+     ...
+     ~/amhello-1.0/debug % cd ../optim
+     ~/amhello-1.0/optim % ../configure CFLAGS='-O3 -fomit-frame-pointer'
+     ...
+     ~/amhello-1.0/optim % make
+     ...
+
+   With network file systems, a similar approach can be used to build
+the same sources on different machines.  For instance, suppose that the
+sources are installed on a directory shared by two hosts: 'HOST1' and
+'HOST2', which may be different platforms.
+
+     ~ % cd /nfs/src
+     /nfs/src % tar zxf ~/amhello-1.0.tar.gz
+
+   On the first host, you could create a local build directory:
+     [HOST1] ~ % mkdir /tmp/amh && cd /tmp/amh
+     [HOST1] /tmp/amh % /nfs/src/amhello-1.0/configure
+     ...
+     [HOST1] /tmp/amh % make && sudo make install
+     ...
+
+(Here we assume that the installer has configured 'sudo' so it can
+execute 'make install' with root privileges; it is more convenient than
+using 'su' as in *note Basic Installation::).
+
+   On the second host, you would do exactly the same, possibly at the
+same time:
+     [HOST2] ~ % mkdir /tmp/amh && cd /tmp/amh
+     [HOST2] /tmp/amh % /nfs/src/amhello-1.0/configure
+     ...
+     [HOST2] /tmp/amh % make && sudo make install
+     ...
+
+   In this scenario, nothing forbids the '/nfs/src/amhello-1.0'
+directory from being read-only.  In fact VPATH builds are also a means
+of building packages from a read-only medium such as a CD-ROM. (The FSF
+used to sell CD-ROMs with unpacked source code, before the GNU project
+grew so big.)
+
+
+File: automake.info,  Node: Two-Part Install,  Next: Cross-Compilation,  Prev: VPATH Builds,  Up: Use Cases
+
+2.2.7 Two-Part Installation
+---------------------------
+
+In our last example (*note VPATH Builds::), a source tree was shared by
+two hosts, but compilation and installation were done separately on each
+host.
+
+   The GNU Build System also supports networked setups where part of the
+installed files should be shared amongst multiple hosts.  It does so by
+distinguishing architecture-dependent files from
+architecture-independent files, and providing two 'Makefile' targets to
+install each of these classes of files.
+
+   These targets are 'install-exec' for architecture-dependent files and
+'install-data' for architecture-independent files.  The command we used
+up to now, 'make install', can be thought of as a shorthand for 'make
+install-exec install-data'.
+
+   From the GNU Build System point of view, the distinction between
+architecture-dependent files and architecture-independent files is based
+exclusively on the directory variable used to specify their installation
+destination.  In the list of directory variables we provided earlier
+(*note Standard Directory Variables::), all the variables based on
+EXEC-PREFIX designate architecture-dependent directories whose files
+will be installed by 'make install-exec'.  The others designate
+architecture-independent directories and will serve files installed by
+'make install-data'.  *Note The Two Parts of Install::, for more
+details.
+
+   Here is how we could revisit our two-host installation example,
+assuming that (1) we want to install the package directly in '/usr', and
+(2) the directory '/usr/share' is shared by the two hosts.
+
+   On the first host we would run
+     [HOST1] ~ % mkdir /tmp/amh && cd /tmp/amh
+     [HOST1] /tmp/amh % /nfs/src/amhello-1.0/configure --prefix /usr
+     ...
+     [HOST1] /tmp/amh % make && sudo make install
+     ...
+
+   On the second host, however, we need only install the
+architecture-specific files.
+     [HOST2] ~ % mkdir /tmp/amh && cd /tmp/amh
+     [HOST2] /tmp/amh % /nfs/src/amhello-1.0/configure --prefix /usr
+     ...
+     [HOST2] /tmp/amh % make && sudo make install-exec
+     ...
+
+   In packages that have installation checks, it would make sense to run
+'make installcheck' (*note Basic Installation::) to verify that the
+package works correctly despite the apparent partial installation.
+
+
+File: automake.info,  Node: Cross-Compilation,  Next: Renaming,  Prev: Two-Part Install,  Up: Use Cases
+
+2.2.8 Cross-Compilation
+-----------------------
+
+To "cross-compile" is to build on one platform a binary that will run on
+another platform.  When speaking of cross-compilation, it is important
+to distinguish between the "build platform" on which the compilation is
+performed, and the "host platform" on which the resulting executable is
+expected to run.  The following 'configure' options are used to specify
+each of them:
+
+'--build=BUILD'
+     The system on which the package is built.
+'--host=HOST'
+     The system where built programs and libraries will run.
+
+   When the '--host' is used, 'configure' will search for the
+cross-compiling suite for this platform.  Cross-compilation tools
+commonly have their target architecture as prefix of their name.  For
+instance my cross-compiler for MinGW32 has its binaries called
+'i586-mingw32msvc-gcc', 'i586-mingw32msvc-ld', 'i586-mingw32msvc-as',
+etc.
+
+   Here is how we could build 'amhello-1.0' for 'i586-mingw32msvc' on a
+GNU/Linux PC.
+
+     ~/amhello-1.0 % ./configure --build i686-pc-linux-gnu --host i586-mingw32msvc
+     checking for a BSD-compatible install... /usr/bin/install -c
+     checking whether build environment is sane... yes
+     checking for gawk... gawk
+     checking whether make sets $(MAKE)... yes
+     checking for i586-mingw32msvc-strip... i586-mingw32msvc-strip
+     checking for i586-mingw32msvc-gcc... i586-mingw32msvc-gcc
+     checking for C compiler default output file name... a.exe
+     checking whether the C compiler works... yes
+     checking whether we are cross compiling... yes
+     checking for suffix of executables... .exe
+     checking for suffix of object files... o
+     checking whether we are using the GNU C compiler... yes
+     checking whether i586-mingw32msvc-gcc accepts -g... yes
+     checking for i586-mingw32msvc-gcc option to accept ANSI C...
+     ...
+     ~/amhello-1.0 % make
+     ...
+     ~/amhello-1.0 % cd src; file hello.exe
+     hello.exe: MS Windows PE 32-bit Intel 80386 console executable not relocatable
+
+   The '--host' and '--build' options are usually all we need for
+cross-compiling.  The only exception is if the package being built is
+itself a cross-compiler: we need a third option to specify its target
+architecture.
+
+'--target=TARGET'
+     When building compiler tools: the system for which the tools will
+     create output.
+
+   For instance when installing GCC, the GNU Compiler Collection, we can
+use '--target=TARGET' to specify that we want to build GCC as a
+cross-compiler for TARGET.  Mixing '--build' and '--target', we can
+actually cross-compile a cross-compiler; such a three-way
+cross-compilation is known as a "Canadian cross".
+
+   *Note Specifying the System Type: (autoconf)Specifying Names, for
+more information about these 'configure' options.
+
+
+File: automake.info,  Node: Renaming,  Next: DESTDIR,  Prev: Cross-Compilation,  Up: Use Cases
+
+2.2.9 Renaming Programs at Install Time
+---------------------------------------
+
+The GNU Build System provides means to automatically rename executables
+and manpages before they are installed (*note Man Pages::).  This is
+especially convenient when installing a GNU package on a system that
+already has a proprietary implementation you do not want to overwrite.
+For instance, you may want to install GNU 'tar' as 'gtar' so you can
+distinguish it from your vendor's 'tar'.
+
+   This can be done using one of these three 'configure' options.
+
+'--program-prefix=PREFIX'
+     Prepend PREFIX to installed program names.
+'--program-suffix=SUFFIX'
+     Append SUFFIX to installed program names.
+'--program-transform-name=PROGRAM'
+     Run 'sed PROGRAM' on installed program names.
+
+   The following commands would install 'hello' as
+'/usr/local/bin/test-hello', for instance.
+
+     ~/amhello-1.0 % ./configure --program-prefix test-
+     ...
+     ~/amhello-1.0 % make
+     ...
+     ~/amhello-1.0 % sudo make install
+     ...
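+
+   Similarly, '--program-transform-name' accepts an arbitrary 'sed'
+expression.  A hypothetical invocation that would install 'hello'
+under the name 'hi' instead:
+
+     ~/amhello-1.0 % ./configure --program-transform-name='s/hello/hi/'
+     ...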
+
+
+File: automake.info,  Node: DESTDIR,  Next: Preparing Distributions,  Prev: Renaming,  Up: Use Cases
+
+2.2.10 Building Binary Packages Using DESTDIR
+---------------------------------------------
+
+The GNU Build System's 'make install' and 'make uninstall' interface
+does not exactly fit the needs of a system administrator who has to
+deploy and upgrade packages on lots of hosts.  In other words, the GNU
+Build System does not replace a package manager.
+
+   Such package managers usually need to know which files have been
+installed by a package, so a mere 'make install' is inappropriate.
+
+   The 'DESTDIR' variable can be used to perform a staged installation.
+The package should be configured as if it was going to be installed in
+its final location (e.g., '--prefix /usr'), but when running 'make
+install', the 'DESTDIR' should be set to the absolute name of a
+directory into which the installation will be diverted.  From this
+directory it is easy to review which files are being installed where,
+and finally copy them to their final location by some means.
+
+   For instance here is how we could create a binary package containing
+a snapshot of all the files to be installed.
+
+     ~/amhello-1.0 % ./configure --prefix /usr
+     ...
+     ~/amhello-1.0 % make
+     ...
+     ~/amhello-1.0 % make DESTDIR=$HOME/inst install
+     ...
+     ~/amhello-1.0 % cd ~/inst
+     ~/inst % find . -type f -print > ../files.lst
+     ~/inst % tar zcvf ~/amhello-1.0-i686.tar.gz `cat ../files.lst`
+     ./usr/bin/hello
+     ./usr/share/doc/amhello/README
+
+   After this example, 'amhello-1.0-i686.tar.gz' is ready to be
+uncompressed in '/' on many hosts.  (Using '`cat ../files.lst`' instead
+of '.' as argument for 'tar' avoids entries for each subdirectory in the
+archive: we would not like 'tar' to restore the modification time of
+'/', '/usr/', etc.)
+
+   Note that when building packages for several architectures, it might
+be convenient to use 'make install-data' and 'make install-exec' (*note
+Two-Part Install::) to gather architecture-independent files in a single
+package.
+
+   *Note Install::, for more information.
+
+
+File: automake.info,  Node: Preparing Distributions,  Next: Dependency Tracking,  Prev: DESTDIR,  Up: Use Cases
+
+2.2.11 Preparing Distributions
+------------------------------
+
+We have already mentioned 'make dist'.  This target collects all your
+source files and the necessary parts of the build system to create a
+tarball named 'PACKAGE-VERSION.tar.gz'.
+
+   Another, more useful command is 'make distcheck'.  The 'distcheck'
+target constructs 'PACKAGE-VERSION.tar.gz' just as well as 'dist', but
+it additionally ensures most of the use cases presented so far work:
+
+   * It attempts a full compilation of the package (*note Basic
+     Installation::), unpacking the newly constructed tarball, running
+     'make', 'make check', 'make install', as well as 'make
+     installcheck', and even 'make dist',
+   * it tests VPATH builds with read-only source tree (*note VPATH
+     Builds::),
+   * it makes sure 'make clean', 'make distclean', and 'make uninstall'
+     do not omit any file (*note Standard Targets::),
+   * and it checks that 'DESTDIR' installations work (*note DESTDIR::).
+
+   All of these actions are performed in a temporary subdirectory, so
+that no root privileges are required.
+
+   Releasing a package that fails 'make distcheck' means that one of the
+scenarios we presented will not work and some users will be
+disappointed.  Therefore it is a good practice to release a package only
+after a successful 'make distcheck'.  This of course does not imply that
+the package will be flawless, but at least it will prevent some of the
+embarrassing errors you may find in packages released by people who have
+never heard about 'distcheck' (like 'DESTDIR' not working because of a
+typo, or a distributed file being erased by 'make clean', or even
+'VPATH' builds not working).
+
+   *Note Creating amhello::, to recreate 'amhello-1.0.tar.gz' using
+'make distcheck'.  *Note Checking the Distribution::, for more
+information about 'distcheck'.
+
+
+File: automake.info,  Node: Dependency Tracking,  Next: Nested Packages,  Prev: Preparing Distributions,  Up: Use Cases
+
+2.2.12 Automatic Dependency Tracking
+------------------------------------
+
+Dependency tracking is performed as a side-effect of compilation.  Each
+time the build system compiles a source file, it computes its list of
+dependencies (in C these are the header files included by the source
+being compiled).  Later, any time 'make' is run and a dependency appears
+to have changed, the dependent files will be rebuilt.
+
+   Automake generates code for automatic dependency tracking by default,
+unless the developer chooses to override it; for more information, *note
+Dependencies::.
+
+   When 'configure' is executed, you can see it probing each compiler
+for the dependency mechanism it supports (several mechanisms can be
+used):
+
+     ~/amhello-1.0 % ./configure --prefix /usr
+     ...
+     checking dependency style of gcc... gcc3
+     ...
+
+   Because dependencies are only computed as a side-effect of the
+compilation, no dependency information exists the first time a package
+is built.  This is OK because all the files need to be built anyway:
+'make' does not have to decide which files need to be rebuilt.  In fact,
+dependency tracking is completely useless for one-time builds and there
+is a 'configure' option to disable this:
+
+'--disable-dependency-tracking'
+     Speed up one-time builds.
+
+   Some compilers do not offer any practical way to derive the list of
+dependencies as a side-effect of the compilation, requiring a separate
+run (maybe of another tool) to compute these dependencies.  The
+performance penalty implied by these methods is important enough to
+disable them by default.  The option '--enable-dependency-tracking' must
+be passed to 'configure' to activate them.
+
+'--enable-dependency-tracking'
+     Do not reject slow dependency extractors.
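+
+   For example, a throwaway build on a machine that will compile the
+package exactly once (an automated builder, say) could be configured
+as sketched below:
+
+     ~/amhello-1.0 % ./configure --disable-dependency-tracking
+     ...
+     ~/amhello-1.0 % make
+     ...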
+
+   *Note Dependency Tracking Evolution: (automake-history)Dependency
+Tracking Evolution, for some discussion about the different dependency
+tracking schemes used by Automake over the years.
+
+
+File: automake.info,  Node: Nested Packages,  Prev: Dependency Tracking,  Up: Use Cases
+
+2.2.13 Nested Packages
+----------------------
+
+Although nesting packages isn't something we would recommend to someone
+who is discovering the Autotools, it is a nice feature worthy of mention
+in this small advertising tour.
+
+   Autoconfiscated packages (that means packages whose build system has
+been created by Autoconf and friends) can be nested to arbitrary depth.
+
+   A typical setup is that package A will distribute one of the
+libraries it needs in a subdirectory.  This library B is a complete
+package with its own GNU Build System.  The 'configure' script of A will
+run the 'configure' script of B as part of its execution, so building
+and installing A will also build and install B.  Generating a
+distribution for A will also include B.
+
+   It is possible to gather several packages like this.  GCC is a heavy
+user of this feature.  This gives installers a single package to
+configure, build and install, while it allows developers to work on
+subpackages independently.
+
+   When configuring nested packages, the 'configure' options given to
+the top-level 'configure' are passed recursively to nested 'configure's.
+A package that does not understand an option will ignore it, assuming it
+is meaningful to some other package.
+
+   The command 'configure --help=recursive' can be used to display the
+options supported by all the included packages.
+
+   *Note Subpackages::, for an example setup.
+
+
+File: automake.info,  Node: Why Autotools,  Next: Hello World,  Prev: Use Cases,  Up: Autotools Introduction
+
+2.3 How Autotools Help
+======================
+
+There are several reasons why you may not want to implement the GNU
+Build System yourself (read: write a 'configure' script and 'Makefile's
+yourself).
+
+   * As we have seen, the GNU Build System has a lot of features (*note
+     Use Cases::).  Some users may expect features you have not
+     implemented because you did not need them.
+   * Implementing these features portably is difficult and exhausting.
+     Think of writing portable shell scripts, and portable 'Makefile's,
+     for systems you may not have handy.  *Note Portable Shell
+     Programming: (autoconf)Portable Shell, to convince yourself.
+   * You will have to upgrade your setup to follow changes to the GNU
+     Coding Standards.
+
+   The GNU Autotools take all this burden off your back and provide:
+
+   * Tools to create a portable, complete, and self-contained GNU Build
+     System, from simple instructions.  _Self-contained_ meaning the
+     resulting build system does not require the GNU Autotools.
+   * A central place where fixes and improvements are made: a bug-fix
+     for a portability issue will benefit every package.
+
+   Yet there also exist reasons why you may want NOT to use the
+Autotools...  For instance you may be already using (or used to) another
+incompatible build system.  Autotools will only be useful if you do
+accept the concepts of the GNU Build System.  People who have their own
+idea of how a build system should work will feel frustrated by the
+Autotools.
+
+
+File: automake.info,  Node: Hello World,  Prev: Why Autotools,  Up: Autotools Introduction
+
+2.4 A Small Hello World
+=======================
+
+In this section we recreate the 'amhello-1.0' package from scratch.  The
+first subsection shows how to call the Autotools to instantiate the GNU
+Build System, while the second explains the meaning of the
+'configure.ac' and 'Makefile.am' files read by the Autotools.
+
+* Menu:
+
+* Creating amhello::            Create 'amhello-1.0.tar.gz' from scratch
+* amhello's configure.ac Setup Explained::
+* amhello's Makefile.am Setup Explained::
+
+
+File: automake.info,  Node: Creating amhello,  Next: amhello's configure.ac Setup Explained,  Up: Hello World
+
+2.4.1 Creating 'amhello-1.0.tar.gz'
+-----------------------------------
+
+Here is how we can recreate 'amhello-1.0.tar.gz' from scratch.  The
+package is simple enough so that we will only need to write 5 files.
+(You may copy them from the final 'amhello-1.0.tar.gz' that is
+distributed with Automake if you do not want to write them.)
+
+   Create the following files in an empty directory.
+
+   * 'src/main.c' is the source file for the 'hello' program.  We store
+     it in the 'src/' subdirectory, because later, when the package
+     evolves, it will ease the addition of a 'man/' directory for man
+     pages, a 'data/' directory for data files, etc.
+          ~/amhello % cat src/main.c
+          #include <config.h>
+          #include <stdio.h>
+
+          int
+          main (void)
+          {
+            puts ("Hello World!");
+            puts ("This is " PACKAGE_STRING ".");
+            return 0;
+          }
+
+   * 'README' contains some very limited documentation for our little
+     package.
+          ~/amhello % cat README
+          This is a demonstration package for GNU Automake.
+          Type 'info Automake' to read the Automake manual.
+
+   * 'Makefile.am' and 'src/Makefile.am' contain Automake instructions
+     for these two directories.
+
+          ~/amhello % cat src/Makefile.am
+          bin_PROGRAMS = hello
+          hello_SOURCES = main.c
+          ~/amhello % cat Makefile.am
+          SUBDIRS = src
+          dist_doc_DATA = README
+
+   * Finally, 'configure.ac' contains Autoconf instructions to create
+     the 'configure' script.
+
+          ~/amhello % cat configure.ac
+          AC_INIT([amhello], [1.0], [bug-automake@gnu.org])
+          AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+          AC_PROG_CC
+          AC_CONFIG_HEADERS([config.h])
+          AC_CONFIG_FILES([
+           Makefile
+           src/Makefile
+          ])
+          AC_OUTPUT
+
+   Once you have these five files, it is time to run the Autotools to
+instantiate the build system.  Do this using the 'autoreconf' command as
+follows:
+
+     ~/amhello % autoreconf --install
+     configure.ac: installing './install-sh'
+     configure.ac: installing './missing'
+     configure.ac: installing './compile'
+     src/Makefile.am: installing './depcomp'
+
+   At this point the build system is complete.
+
+   In addition to the three scripts mentioned in its output, you can see
+that 'autoreconf' created four other files: 'configure', 'config.h.in',
+'Makefile.in', and 'src/Makefile.in'.  The latter three files are
+templates that will be adapted to the system by 'configure' under the
+names 'config.h', 'Makefile', and 'src/Makefile'.  Let's do this:
+
+     ~/amhello % ./configure
+     checking for a BSD-compatible install... /usr/bin/install -c
+     checking whether build environment is sane... yes
+     checking for gawk... no
+     checking for mawk... mawk
+     checking whether make sets $(MAKE)... yes
+     checking for gcc... gcc
+     checking for C compiler default output file name... a.out
+     checking whether the C compiler works... yes
+     checking whether we are cross compiling... no
+     checking for suffix of executables...
+     checking for suffix of object files... o
+     checking whether we are using the GNU C compiler... yes
+     checking whether gcc accepts -g... yes
+     checking for gcc option to accept ISO C89... none needed
+     checking for style of include used by make... GNU
+     checking dependency style of gcc... gcc3
+     configure: creating ./config.status
+     config.status: creating Makefile
+     config.status: creating src/Makefile
+     config.status: creating config.h
+     config.status: executing depfiles commands
+
+   You can see 'Makefile', 'src/Makefile', and 'config.h' being created
+at the end after 'configure' has probed the system.  It is now possible
+to run all the targets we wish (*note Standard Targets::).  For
+instance:
+
+     ~/amhello % make
+     ...
+     ~/amhello % src/hello
+     Hello World!
+     This is amhello 1.0.
+     ~/amhello % make distcheck
+     ...
+     =============================================
+     amhello-1.0 archives ready for distribution:
+     amhello-1.0.tar.gz
+     =============================================
+
+   Note that running 'autoreconf' is only needed initially when the GNU
+Build System does not exist.  When you later change some instructions in
+a 'Makefile.am' or 'configure.ac', the relevant part of the build system
+will be regenerated automatically when you execute 'make'.
+
+   'autoreconf' is a script that calls 'autoconf', 'automake', and a
+bunch of other commands in the right order.  If you are beginning with
+these tools, it is not important to figure out in which order all of
+these tools should be invoked and why.  However, because Autoconf and
+Automake have separate manuals, the important point to understand is
+that 'autoconf' is in charge of creating 'configure' from
+'configure.ac', while 'automake' is in charge of creating 'Makefile.in's
+from 'Makefile.am's and 'configure.ac'.  This should at least direct you
+to the right manual when seeking answers.
+
+
+File: automake.info,  Node: amhello's configure.ac Setup Explained,  Next: amhello's Makefile.am Setup Explained,  Prev: Creating amhello,  Up: Hello World
+
+2.4.2 'amhello''s 'configure.ac' Setup Explained
+------------------------------------------------
+
+Let us begin with the contents of 'configure.ac'.
+
+     AC_INIT([amhello], [1.0], [bug-automake@gnu.org])
+     AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+     AC_PROG_CC
+     AC_CONFIG_HEADERS([config.h])
+     AC_CONFIG_FILES([
+      Makefile
+      src/Makefile
+     ])
+     AC_OUTPUT
+
+   This file is read by both 'autoconf' (to create 'configure') and
+'automake' (to create the various 'Makefile.in's).  It contains a series
+of M4 macros that will be expanded as shell code to finally form the
+'configure' script.  We will not elaborate on the syntax of this file,
+because the Autoconf manual has a whole section about it (*note Writing
+'configure.ac': (autoconf)Writing Autoconf Input.).
+
+   The macros prefixed with 'AC_' are Autoconf macros, documented in the
+Autoconf manual (*note Autoconf Macro Index: (autoconf)Autoconf Macro
+Index.).  The macros that start with 'AM_' are Automake macros,
+documented later in this manual (*note Macro Index::).
+
+   The first two lines of 'configure.ac' initialize Autoconf and
+Automake.  'AC_INIT' takes in as parameters the name of the package, its
+version number, and a contact address for bug-reports about the package
+(this address is output at the end of './configure --help', for
+instance).  When adapting this setup to your own package, by all means
+please do not blindly copy Automake's address: use the mailing list of
+your package, or your own mail address.
+
+   The argument to 'AM_INIT_AUTOMAKE' is a list of options for
+'automake' (*note Options::).  '-Wall' and '-Werror' ask 'automake' to
+turn on all warnings and report them as errors.  We are speaking of
+*Automake* warnings here, such as dubious instructions in 'Makefile.am'.
+This has absolutely nothing to do with how the compiler will be called,
+even though it may support options with similar names.  Using '-Wall
+-Werror' is a safe setting when starting to work on a package: you do
+not want to miss any issues.  Later you may decide to relax things a
+bit.  The 'foreign' option tells Automake that this package will not
+follow the GNU Standards.  GNU packages should always distribute
+additional files such as 'ChangeLog', 'AUTHORS', etc.  We do not want
+'automake' to complain about these missing files in our small example.
+
+   The 'AC_PROG_CC' line causes the 'configure' script to search for a C
+compiler and define the variable 'CC' with its name.  The
+'src/Makefile.in' file generated by Automake uses the variable 'CC' to
+build 'hello', so when 'configure' creates 'src/Makefile' from
+'src/Makefile.in', it will define 'CC' with the value it has found.  If
+Automake is asked to create a 'Makefile.in' that uses 'CC' but
+'configure.ac' does not define it, it will suggest you add a call to
+'AC_PROG_CC'.
+
+   The 'AC_CONFIG_HEADERS([config.h])' invocation causes the 'configure'
+script to create a 'config.h' file gathering '#define's defined by other
+macros in 'configure.ac'.  In our case, the 'AC_INIT' macro already
+defined a few of them.  Here is an excerpt of 'config.h' after
+'configure' has run:
+
+     ...
+     /* Define to the address where bug reports for this package should be sent. */
+     #define PACKAGE_BUGREPORT "bug-automake@gnu.org"
+
+     /* Define to the full name and version of this package. */
+     #define PACKAGE_STRING "amhello 1.0"
+     ...
+
+   As you probably noticed, 'src/main.c' includes 'config.h' so it can
+use 'PACKAGE_STRING'.  In a real-world project, 'config.h' can grow
+really big, with one '#define' per feature probed on the system.
+
+   The 'AC_CONFIG_FILES' macro declares the list of files that
+'configure' should create from their '*.in' templates.  Automake also
+scans this list to find the 'Makefile.am' files it must process.  (This
+is important to remember: when adding a new directory to your project,
+you should add its 'Makefile' to this list, otherwise Automake will
+never process the new 'Makefile.am' you wrote in that directory.)
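+
+   For instance, if the package later gained the hypothetical 'man/'
+directory mentioned earlier, its 'Makefile' would have to be added to
+this list (and 'man' to the 'SUBDIRS' variable of the top-level
+'Makefile.am'):
+
+     AC_CONFIG_FILES([
+      Makefile
+      src/Makefile
+      man/Makefile
+     ])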
+
+   Finally, the 'AC_OUTPUT' line is a closing command that actually
+produces the part of the script in charge of creating the files
+registered with 'AC_CONFIG_HEADERS' and 'AC_CONFIG_FILES'.
+
+   When starting a new project, we suggest you start with such a simple
+'configure.ac', and gradually add the other tests it requires.  The
+command 'autoscan' can also suggest a few of the tests your package may
+need (*note Using 'autoscan' to Create 'configure.ac':
+(autoconf)autoscan Invocation.).
+
+
+File: automake.info,  Node: amhello's Makefile.am Setup Explained,  Prev: amhello's configure.ac Setup Explained,  Up: Hello World
+
+2.4.3 'amhello''s 'Makefile.am' Setup Explained
+-----------------------------------------------
+
+We now turn to 'src/Makefile.am'.  This file contains Automake
+instructions to build and install 'hello'.
+
+     bin_PROGRAMS = hello
+     hello_SOURCES = main.c
+
+   A 'Makefile.am' has the same syntax as an ordinary 'Makefile'.  When
+'automake' processes a 'Makefile.am' it copies the entire file into the
+output 'Makefile.in' (that will be later turned into 'Makefile' by
+'configure') but will react to certain variable definitions by
+generating some build rules and other variables.  Often 'Makefile.am's
+contain only a list of variable definitions as above, but they can also
+contain other variable and rule definitions that 'automake' will pass
+along without interpretation.
+
+   Variables that end with '_PROGRAMS' are special variables that list
+programs that the resulting 'Makefile' should build.  In Automake speak,
+this '_PROGRAMS' suffix is called a "primary"; Automake recognizes other
+primaries such as '_SCRIPTS', '_DATA', '_LIBRARIES', etc. corresponding
+to different types of files.
+
+   The 'bin' part of the 'bin_PROGRAMS' tells 'automake' that the
+resulting programs should be installed in BINDIR.  Recall that the GNU
+Build System uses a set of variables to denote destination directories
+and allow users to customize these locations (*note Standard Directory
+Variables::).  Any such directory variable can be put in front of a
+primary (omitting the 'dir' suffix) to tell 'automake' where to install
+the listed files.
+
+   Programs need to be built from source files, so for each program
+'PROG' listed in a '_PROGRAMS' variable, 'automake' will look for
+another variable named 'PROG_SOURCES' listing its source files.  There
+may be more than one source file: they will all be compiled and linked
+together.
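+
+   For example, if 'hello' were split across several source files (the
+extra file names below are invented for illustration), the list would
+simply grow:
+
+     hello_SOURCES = main.c greet.c greet.h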
+
+   Automake also knows that source files need to be distributed when
+creating a tarball (unlike built programs).  So a side-effect of this
+'hello_SOURCES' declaration is that 'main.c' will be part of the tarball
+created by 'make dist'.
+
+   Finally here are some explanations regarding the top-level
+'Makefile.am'.
+
+     SUBDIRS = src
+     dist_doc_DATA = README
+
+   'SUBDIRS' is a special variable listing all directories that 'make'
+should recurse into before processing the current directory.  So this
+line is responsible for 'make' building 'src/hello' even though we run
+it from the top-level.  This line also causes 'make install' to install
+'src/hello' before installing 'README' (not that this order matters).
+
+   The line 'dist_doc_DATA = README' causes 'README' to be distributed
+and installed in DOCDIR.  Files listed with the '_DATA' primary are not
+automatically part of the tarball built with 'make dist', so we add the
+'dist_' prefix so they get distributed.  However, for 'README' it would
+not have been necessary: 'automake' automatically distributes any
+'README' file it encounters (the list of other files automatically
+distributed is presented by 'automake --help').  The only important
+effect of this second line is therefore to install 'README' during 'make
+install'.
+
+   One thing not covered in this example is accessing the installation
+directory values (*note Standard Directory Variables::) from your
+program code, that is, converting them into defined macros.  For this,
+*note (autoconf)Defining Directories::.
+
+
+File: automake.info,  Node: Generalities,  Next: Examples,  Prev: Autotools Introduction,  Up: Top
+
+3 General ideas
+***************
+
+The following sections cover a few basic ideas that will help you
+understand how Automake works.
+
+* Menu:
+
+* General Operation::           General operation of Automake
+* Strictness::                  Standards conformance checking
+* Uniform::                     The Uniform Naming Scheme
+* Length Limitations::          Staying below the command line length limit
+* Canonicalization::            How derived variables are named
+* User Variables::              Variables reserved for the user
+* Auxiliary Programs::          Programs automake might require
+
+
+File: automake.info,  Node: General Operation,  Next: Strictness,  Up: Generalities
+
+3.1 General Operation
+=====================
+
+Automake works by reading a 'Makefile.am' and generating a
+'Makefile.in'.  Certain variables and rules defined in the 'Makefile.am'
+instruct Automake to generate more specialized code; for instance, a
+'bin_PROGRAMS' variable definition will cause rules for compiling and
+linking programs to be generated.
+
+   The variable definitions and rules in the 'Makefile.am' are copied
+mostly verbatim into the generated file, with all variable definitions
+preceding all rules.  This allows you to add almost arbitrary code into
+the generated 'Makefile.in'.  For instance, the Automake distribution
+includes a non-standard rule for the 'git-dist' target, which the
+Automake maintainer uses to make distributions from the source control
+system.
+
+   Note that most GNU make extensions are not recognized by Automake.
+Using such extensions in a 'Makefile.am' will lead to errors or
+confusing behavior.
+
+   A special exception is that the GNU make append operator, '+=', is
+supported.  This operator appends its right hand argument to the
+variable specified on the left.  Automake will translate the operator
+into an ordinary '=' operator; '+=' will thus work with any make
+program.
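+
+   For instance, the following 'Makefile.am' fragment (the variable is
+chosen only for illustration) is accepted and translated into plain
+'=' assignments in the generated 'Makefile.in':
+
+     AM_CFLAGS = -Wall
+     AM_CFLAGS += -Wextra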
+
+   Automake tries to keep comments grouped with any adjoining rules or
+variable definitions.
+
+   Generally, Automake is not particularly smart in the parsing of
+unusual Makefile constructs, so you're advised to avoid fancy constructs
+or "creative" use of whitespaces.  For example, <TAB> characters cannot
+be used between a target name and the following "':'" character, and
+variable assignments shouldn't be indented with <TAB> characters.  Also,
+using more complex macros in target names can cause trouble:
+
+     % cat Makefile.am
+     $(FOO:=x): bar
+     % automake
+     Makefile.am:1: bad characters in variable name '$(FOO'
+     Makefile.am:1: ':='-style assignments are not portable
+
+   A rule defined in 'Makefile.am' generally overrides any such rule of
+a similar name that would be automatically generated by 'automake'.
+Although this is a supported feature, it is generally best to avoid
+making use of it, as sometimes the generated rules are very particular.
+
+   Similarly, a variable defined in 'Makefile.am' or 'AC_SUBST'ed from
+'configure.ac' will override any definition of the variable that
+'automake' would ordinarily create.  This feature is more often useful
+than the ability to override a rule.  Be warned that many of the
+variables generated by 'automake' are considered to be for internal use
+only, and their names might change in future releases.
+
+   When examining a variable definition, Automake will recursively
+examine variables referenced in the definition.  For example, if
+Automake is looking at the content of 'foo_SOURCES' in this snippet
+
+     xs = a.c b.c
+     foo_SOURCES = c.c $(xs)
+
+   it would use the files 'a.c', 'b.c', and 'c.c' as the contents of
+'foo_SOURCES'.
+
+   Automake also allows a form of comment that is _not_ copied into the
+output; all lines beginning with '##' (leading spaces allowed) are
+completely ignored by Automake.
+
+   It is customary to make the first line of 'Makefile.am' read:
+
+     ## Process this file with automake to produce Makefile.in
+
+
+File: automake.info,  Node: Strictness,  Next: Uniform,  Prev: General Operation,  Up: Generalities
+
+3.2 Strictness
+==============
+
+While Automake is intended to be used by maintainers of GNU packages, it
+does make some effort to accommodate those who wish to use it, but do
+not want to use all the GNU conventions.
+
+   To this end, Automake supports three levels of "strictness"--the
+strictness indicating how stringently Automake should check standards
+conformance.
+
+   The valid strictness levels are:
+
+'foreign'
+     Automake will check for only those things that are absolutely
+     required for proper operations.  For instance, whereas GNU
+     standards dictate the existence of a 'NEWS' file, it will not be
+     required in this mode.  This strictness will also turn off some
+     warnings by default (among them, portability warnings).  The name
+     comes from the fact that Automake is intended to be used for GNU
+     programs; these relaxed rules are not the standard mode of
+     operation.
+
+'gnu'
+     Automake will check--as much as possible--for compliance to the GNU
+     standards for packages.  This is the default.
+
+'gnits'
+     Automake will check for compliance to the as-yet-unwritten "Gnits
+     standards".  These are based on the GNU standards, but are even
+     more detailed.  Unless you are a Gnits standards contributor, it is
+     recommended that you avoid this option until such time as the Gnits
+     standard is actually published (which may never happen).
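+
+   The strictness is commonly selected by naming it among the
+'AM_INIT_AUTOMAKE' options in 'configure.ac', as the 'amhello' example
+did with 'foreign' (*note Hello World::).  For instance:
+
+     AM_INIT_AUTOMAKE([-Wall -Werror foreign])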
+
+   *Note Gnits::, for more information on the precise implications of
+the strictness level.
+
+
+File: automake.info,  Node: Uniform,  Next: Length Limitations,  Prev: Strictness,  Up: Generalities
+
+3.3 The Uniform Naming Scheme
+=============================
+
+Automake variables generally follow a "uniform naming scheme" that makes
+it easy to decide how programs (and other derived objects) are built,
+and how they are installed.  This scheme also supports 'configure' time
+determination of what should be built.
+
+   At 'make' time, certain variables are used to determine which objects
+are to be built.  The variable names are made of several pieces that are
+concatenated together.
+
+   The piece that tells 'automake' what is being built is commonly
+called the "primary".  For instance, the primary 'PROGRAMS' holds a list
+of programs that are to be compiled and linked.
+
+   A different set of names is used to decide where the built objects
+should be installed.  These names are prefixes to the primary, and they
+indicate which standard directory should be used as the installation
+directory.  The standard directory names are given in the GNU standards
+(*note (standards)Directory Variables::).  Automake extends this list
+with 'pkgdatadir', 'pkgincludedir', 'pkglibdir', and 'pkglibexecdir';
+these are the same as the non-'pkg' versions, but with '$(PACKAGE)'
+appended.  For instance, 'pkglibdir' is defined as
+'$(libdir)/$(PACKAGE)'.
+
+   For each primary, there is one additional variable named by
+prepending 'EXTRA_' to the primary name.  This variable is used to list
+objects that may or may not be built, depending on what 'configure'
+decides.  This variable is required because Automake must statically
+know the entire list of objects that may be built in order to generate a
+'Makefile.in' that will work in all cases.
+
+   For instance, 'cpio' decides at configure time which programs should
+be built.  Some of the programs are installed in 'bindir', and some are
+installed in 'sbindir':
+
+     EXTRA_PROGRAMS = mt rmt
+     bin_PROGRAMS = cpio pax
+     sbin_PROGRAMS = $(MORE_PROGRAMS)
+
+   Defining a primary without a prefix as a variable, e.g., 'PROGRAMS',
+is an error.
+
+   Note that the common 'dir' suffix is left off when constructing the
+variable names; thus one writes 'bin_PROGRAMS' and not
+'bindir_PROGRAMS'.
+
+   Not every sort of object can be installed in every directory.
+Automake will flag those attempts it finds in error (but see below how
+to override the check if you really need to).  Automake will also
+diagnose obvious misspellings in directory names.
+
+   Sometimes the standard directories--even as augmented by
+Automake--are not enough.  In particular it is sometimes useful, for
+clarity, to install objects in a subdirectory of some predefined
+directory.  To this end, Automake allows you to extend the list of
+possible installation directories.  A given prefix (e.g., 'zar') is
+valid if a variable of the same name with 'dir' appended is defined
+(e.g., 'zardir').
+
+   For instance, the following snippet will install 'file.xml' into
+'$(datadir)/xml'.
+
+     xmldir = $(datadir)/xml
+     xml_DATA = file.xml
+
+   This feature can also be used to override the sanity checks Automake
+performs to diagnose suspicious directory/primary couples (in the
+unlikely case these checks are undesirable, and you really know what
+you're doing).  For example, Automake would error out on this input:
+
+     # Forbidden directory combinations, automake will error out on this.
+     pkglib_PROGRAMS = foo
+     doc_LIBRARIES = libquux.a
+
+but it will succeed with this:
+
+     # Work around forbidden directory combinations.  Do not use this
+     # without a very good reason!
+     my_execbindir = $(pkglibdir)
+     my_doclibdir = $(docdir)
+     my_execbin_PROGRAMS = foo
+     my_doclib_LIBRARIES = libquux.a
+
+   The 'exec' substring of the 'my_execbindir' variable lets the files
+be installed at the right time (*note The Two Parts of Install::).
+
+   The special prefix 'noinst_' indicates that the objects in question
+should be built but not installed at all.  This is usually used for
+objects required to build the rest of your package, for instance static
+libraries (*note A Library::), or helper scripts.
+
+   The special prefix 'check_' indicates that the objects in question
+should not be built until the 'make check' command is run.  Those
+objects are not installed either.
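+
+   A small sketch combining the two prefixes (the names are invented
+for illustration): 'libcompat.a' would be built but never installed,
+while 'testsuite' would only be built by 'make check':
+
+     noinst_LIBRARIES = libcompat.a
+     check_PROGRAMS = testsuite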
+
+   The current primary names are 'PROGRAMS', 'LIBRARIES', 'LTLIBRARIES',
+'LISP', 'PYTHON', 'JAVA', 'SCRIPTS', 'DATA', 'HEADERS', 'MANS', and
+'TEXINFOS'.
+
+   Some primaries also allow additional prefixes that control other
+aspects of 'automake''s behavior.  The currently defined prefixes are
+'dist_', 'nodist_', 'nobase_', and 'notrans_'.  These prefixes are
+explained later (*note Program and Library Variables::) (*note Man
+Pages::).
+
+
+File: automake.info,  Node: Length Limitations,  Next: Canonicalization,  Prev: Uniform,  Up: Generalities
+
+3.4 Staying below the command line length limit
+===============================================
+
+Traditionally, most unix-like systems have a length limitation for the
+command line arguments and environment contents when creating new
+processes (see for example
+<http://www.in-ulm.de/~mascheck/various/argmax/> for an overview on this
+issue), which of course also applies to commands spawned by 'make'.
+POSIX requires this limit to be at least 4096 bytes, and most modern
+systems have quite high limits (or are unlimited).
+
+   In order to create portable Makefiles that do not trip over these
+limits, it is necessary to keep the length of file lists bounded.
+Unfortunately, it is not possible to do so fully transparently within
+Automake, so your help may be needed.  Typically, you can split long
+file lists manually and use different installation directory names for
+each list.  For example,
+
+     data_DATA = file1 ... fileN fileN+1 ... file2N
+
+may also be written as
+
+     data_DATA = file1 ... fileN
+     data2dir = $(datadir)
+     data2_DATA = fileN+1 ... file2N
+
+and will cause Automake to treat the two lists separately during 'make
+install'.  See *note The Two Parts of Install:: for choosing directory
+names that will keep the ordering of the two parts of installation.
+Note that 'make dist' may still only work on a host with a higher
+length limit in this example.
+
+   Automake itself employs a couple of strategies to avoid long command
+lines.  For example, when '${srcdir}/' is prepended to file names, as
+can happen with above '$(data_DATA)' lists, it limits the amount of
+arguments passed to external commands.
+
+   Unfortunately, some systems' 'make' commands may prepend 'VPATH'
+prefixes like '${srcdir}/' to file names from the source tree
+automatically (*note Automatic Rule Rewriting: (autoconf)Automatic Rule
+Rewriting.).  In this case, the user may have to switch to use GNU Make,
+or refrain from using VPATH builds, in order to stay below the length
+limit.
+
+   For libraries and programs built from many sources, convenience
+archives may be used as intermediates in order to limit the object list
+length (*note Libtool Convenience Libraries::).
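+
+   For example (a hypothetical sketch that assumes Libtool has been
+enabled with 'LT_INIT'; all names are invented), a large program can be
+split into convenience libraries that are then linked in through
+'LDADD':
+
+     noinst_LTLIBRARIES = libpart1.la libpart2.la
+     libpart1_la_SOURCES = a1.c ... aN.c
+     libpart2_la_SOURCES = b1.c ... bN.c
+
+     bin_PROGRAMS = big
+     big_SOURCES = main.c
+     big_LDADD = libpart1.la libpart2.la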
+
+
+File: automake.info,  Node: Canonicalization,  Next: User Variables,  Prev: Length Limitations,  Up: Generalities
+
+3.5 How derived variables are named
+===================================
+
+Sometimes a Makefile variable name is derived from some text the
+maintainer supplies.  For instance, a program name listed in '_PROGRAMS'
+is rewritten into the name of a '_SOURCES' variable.  In cases like
+this, Automake canonicalizes the text, so that program names and the
+like do not have to follow Makefile variable naming rules.  All
+characters in the name except for letters, numbers, the strudel (@), and
+the underscore are turned into underscores when making variable
+references.
+
+   For example, if your program is named 'sniff-glue', the derived
+variable name would be 'sniff_glue_SOURCES', not 'sniff-glue_SOURCES'.
+Similarly the sources for a library named 'libmumble++.a' should be
+listed in the 'libmumble___a_SOURCES' variable.
+
+   The strudel is an addition, to make the use of Autoconf substitutions
+in variable names less obfuscating.
+
+
+File: automake.info,  Node: User Variables,  Next: Auxiliary Programs,  Prev: Canonicalization,  Up: Generalities
+
+3.6 Variables reserved for the user
+===================================
+
+Some 'Makefile' variables are reserved by the GNU Coding Standards for
+the use of the "user"--the person building the package.  For instance,
+'CFLAGS' is one such variable.
+
+   Sometimes package developers are tempted to set user variables such
+as 'CFLAGS' because it appears to make their job easier.  However, the
+package itself should never set a user variable, particularly not to
+include switches that are required for proper compilation of the
+package.  Since these variables are documented as being for the package
+builder, that person rightfully expects to be able to override any of
+these variables at build time.
+
+   To get around this problem, Automake introduces an automake-specific
+shadow variable for each user flag variable.  (Shadow variables are not
+introduced for variables like 'CC', where they would make no sense.)
+The shadow variable is named by prepending 'AM_' to the user variable's
+name.  For instance, the shadow variable for 'YFLAGS' is 'AM_YFLAGS'.
+The package maintainer--that is, the author(s) of the 'Makefile.am' and
+'configure.ac' files--may adjust these shadow variables however
+necessary.
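+
+   For instance (a hypothetical 'Makefile.am' fragment), the maintainer
+can put required or recommended flags in the 'AM_' variables and leave
+'CPPFLAGS' and 'CFLAGS' untouched for the builder; the generated
+compile rules pass both sets of flags to the compiler:
+
+     # Maintainer-chosen flags; the builder may still override CFLAGS
+     # and CPPFLAGS on the 'configure' or 'make' command line.
+     AM_CPPFLAGS = -I$(top_srcdir)/lib
+     AM_CFLAGS = -Wall
+
+     bin_PROGRAMS = foo
+     foo_SOURCES = foo.c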
+
+   *Note Flag Variables Ordering::, for more discussion about these
+variables and how they interact with per-target variables.
+
+
+File: automake.info,  Node: Auxiliary Programs,  Prev: User Variables,  Up: Generalities
+
+3.7 Programs automake might require
+===================================
+
+Automake sometimes requires helper programs so that the generated
+'Makefile' can do its work properly.  There are a fairly large number of
+them, and we list them here.
+
+   Although all of these files are distributed and installed with
+Automake, a couple of them are maintained separately.  The Automake
+copies are updated before each release, but we mention the original
+source in case you need more recent versions.
+
+'ar-lib'
+     This is a wrapper primarily for the Microsoft lib archiver, to make
+     it more POSIX-like.
+
+'compile'
+     This is a wrapper for compilers that do not accept options '-c' and
+     '-o' at the same time.  It is only used when absolutely required.
+     Such compilers are rare, with the Microsoft C/C++ Compiler as the
+     most notable exception.  This wrapper also makes the following
+     common options available for that compiler, while performing file
+     name translation where needed: '-I', '-L', '-l', '-Wl,' and
+     '-Xlinker'.
+
+'config.guess'
+'config.sub'
+     These two programs compute the canonical triplets for the given
+     build, host, or target architecture.  These programs are updated
+     regularly to support new architectures and fix probes broken by
+     changes in new kernel versions.  Each new release of Automake comes
+     with up-to-date copies of these programs.  If your copy of Automake
+     is getting old, you are encouraged to fetch the latest versions of
+     these files from <http://savannah.gnu.org/git/?group=config> before
+     making a release.
+
+'depcomp'
+     This program understands how to run a compiler so that it will
+     generate not only the desired output but also dependency
+     information that is then used by the automatic dependency tracking
+     feature (*note Dependencies::).
+
+'install-sh'
+     This is a replacement for the 'install' program that works on
+     platforms where 'install' is unavailable or unusable.
+
+'mdate-sh'
+     This script is used to generate a 'version.texi' file.  It examines
+     a file and prints some date information about it.
+
+'missing'
+     This wraps a number of programs that are typically only required by
+     maintainers.  If the program in question doesn't exist, or seems
+     too old, 'missing' will print an informative warning before failing
+     out, to provide the user with more context and information.
+
+'mkinstalldirs'
+     This script used to be a wrapper around 'mkdir -p', which is not
+     portable.  Now we prefer to use 'install-sh -d' when 'configure'
+     finds that 'mkdir -p' does not work; this makes one less script to
+     distribute.
+
+     For backward compatibility 'mkinstalldirs' is still used and
+     distributed when 'automake' finds it in a package.  But it is no
+     longer installed automatically, and it should be safe to remove it.
+
+'py-compile'
+     This is used to byte-compile Python scripts.
+
+'test-driver'
+     This implements the default test driver offered by the parallel
+     testsuite harness.
+
+'texinfo.tex'
+     Not a program, this file is required for 'make dvi', 'make ps' and
+     'make pdf' to work when Texinfo sources are in the package.  The
+     latest version can be downloaded from
+     <http://www.gnu.org/software/texinfo/>.
+
+'ylwrap'
+     This program wraps 'lex' and 'yacc' to rename their output files.
+     It also ensures that, for instance, multiple 'yacc' instances can
+     be invoked in a single directory in parallel.
+
+
+File: automake.info,  Node: Examples,  Next: automake Invocation,  Prev: Generalities,  Up: Top
+
+4 Some example packages
+***********************
+
+This section contains two small examples.
+
+   The first example (*note Complete::) assumes you have an existing
+project already using Autoconf, with handcrafted 'Makefile's, and that
+you want to convert it to using Automake.  If you are discovering both
+tools, it is probably better that you look at the Hello World example
+presented earlier (*note Hello World::).
+
+   The second example (*note true::) shows how two programs can be built
+from the same file, using different compilation parameters.  It contains
+some technical digressions that are probably best skipped on first read.
+
+* Menu:
+
+* Complete::                    A simple example, start to finish
+* true::                        Building true and false
+
+
+File: automake.info,  Node: Complete,  Next: true,  Up: Examples
+
+4.1 A simple example, start to finish
+=====================================
+
+Let's suppose you just finished writing 'zardoz', a program to make your
+head float from vortex to vortex.  You've been using Autoconf to provide
+a portability framework, but your 'Makefile.in's have been ad-hoc.  You
+want to make them bulletproof, so you turn to Automake.
+
+   The first step is to update your 'configure.ac' to include the
+commands that 'automake' needs.  The way to do this is to add an
+'AM_INIT_AUTOMAKE' call just after 'AC_INIT':
+
+     AC_INIT([zardoz], [1.0])
+     AM_INIT_AUTOMAKE
+     ...
+
+   Since your program doesn't have any complicating factors (e.g., it
+doesn't use 'gettext', it doesn't want to build a shared library),
+you're done with this part.  That was easy!
+
+   Now you must regenerate 'configure'.  But to do that, you'll need to
+tell 'autoconf' how to find the new macro you've used.  The easiest way
+to do this is to use the 'aclocal' program to generate your 'aclocal.m4'
+for you.  But wait... maybe you already have an 'aclocal.m4', because
+you had to write some hairy macros for your program.  The 'aclocal'
+program lets you put your own macros into 'acinclude.m4', so simply
+rename and then run:
+
+     mv aclocal.m4 acinclude.m4
+     aclocal
+     autoconf
+
+   Now it is time to write your 'Makefile.am' for 'zardoz'.  Since
+'zardoz' is a user program, you want to install it where the rest of the
+user programs go: 'bindir'.  Additionally, 'zardoz' has some Texinfo
+documentation.  Your 'configure.ac' script uses 'AC_REPLACE_FUNCS', so
+you need to link against '$(LIBOBJS)'.  So here's what you'd write:
+
+     bin_PROGRAMS = zardoz
+     zardoz_SOURCES = main.c head.c float.c vortex9.c gun.c
+     zardoz_LDADD = $(LIBOBJS)
+
+     info_TEXINFOS = zardoz.texi
+
+   Now you can run 'automake --add-missing' to generate your
+'Makefile.in' and grab any auxiliary files you might need, and you're
+done!
+
+
+File: automake.info,  Node: true,  Prev: Complete,  Up: Examples
+
+4.2 Building true and false
+===========================
+
+Here is another, trickier example.  It shows how to generate two
+programs ('true' and 'false') from the same source file ('true.c').  The
+difficult part is that each compilation of 'true.c' requires different
+'cpp' flags.
+
+     bin_PROGRAMS = true false
+     false_SOURCES =
+     false_LDADD = false.o
+
+     true.o: true.c
+             $(COMPILE) -DEXIT_CODE=0 -c true.c
+
+     false.o: true.c
+             $(COMPILE) -DEXIT_CODE=1 -o false.o -c true.c
+
+   Note that there is no 'true_SOURCES' definition.  Automake will
+implicitly assume that there is a source file named 'true.c' (*note
+Default _SOURCES::), and define rules to compile 'true.o' and link
+'true'.  The 'true.o: true.c' rule supplied by the above 'Makefile.am',
+will override the Automake generated rule to build 'true.o'.
+
+   'false_SOURCES' is defined to be empty--that way no implicit value is
+substituted.  Because we have not listed the source of 'false', we have
+to tell Automake how to link the program.  This is the purpose of the
+'false_LDADD' line.  A 'false_DEPENDENCIES' variable, holding the
+dependencies of the 'false' target will be automatically generated by
+Automake from the content of 'false_LDADD'.
+
+   The above rules won't work if your compiler doesn't accept both '-c'
+and '-o'.  The simplest fix for this is to introduce a bogus dependency
+(to avoid problems with a parallel 'make'):
+
+     true.o: true.c false.o
+             $(COMPILE) -DEXIT_CODE=0 -c true.c
+
+     false.o: true.c
+             $(COMPILE) -DEXIT_CODE=1 -c true.c && mv true.o false.o
+
+   As it turns out, there is also a much easier way to do this same
+task.  Some of the above technique is useful enough that we've kept the
+example in the manual.  However if you were to build 'true' and 'false'
+in real life, you would probably use per-program compilation flags, like
+so:
+
+     bin_PROGRAMS = false true
+
+     false_SOURCES = true.c
+     false_CPPFLAGS = -DEXIT_CODE=1
+
+     true_SOURCES = true.c
+     true_CPPFLAGS = -DEXIT_CODE=0
+
+   In this case Automake will cause 'true.c' to be compiled twice, with
+different flags.  In this instance, the names of the object files would
+be chosen by automake; they would be 'false-true.o' and 'true-true.o'.
+(The name of the object files rarely matters.)
+
+
+File: automake.info,  Node: automake Invocation,  Next: configure,  Prev: Examples,  Up: Top
+
+5 Creating a 'Makefile.in'
+**************************
+
+To create all the 'Makefile.in's for a package, run the 'automake'
+program in the top level directory, with no arguments.  'automake' will
+automatically find each appropriate 'Makefile.am' (by scanning
+'configure.ac'; *note configure::) and generate the corresponding
+'Makefile.in'.  Note that 'automake' has a rather simplistic view of
+what constitutes a package; it assumes that a package has only one
+'configure.ac', at the top.  If your package has multiple
+'configure.ac's, then you must run 'automake' in each directory holding
+a 'configure.ac'.  (Alternatively, you may rely on Autoconf's
+'autoreconf', which is able to recurse your package tree and run
+'automake' where appropriate.)
+
+   You can optionally give 'automake' an argument; '.am' is appended to
+the argument and the result is used as the name of the input file.  This
+feature is generally only used to automatically rebuild an out-of-date
+'Makefile.in'.  Note that 'automake' must always be run from the topmost
+directory of a project, even if being used to regenerate the
+'Makefile.in' in some subdirectory.  This is necessary because
+'automake' must scan 'configure.ac', and because 'automake' uses the
+knowledge that a 'Makefile.in' is in a subdirectory to change its
+behavior in some cases.
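+
+   For example (assuming the package contains a 'src/Makefile.am'), the
+following command, run from the top-level directory, regenerates only
+'src/Makefile.in':
+
+     automake src/Makefile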
+
+   Automake will run 'autoconf' to scan 'configure.ac' and its
+dependencies (i.e., 'aclocal.m4' and any included file), therefore
+'autoconf' must be in your 'PATH'.  If there is an 'AUTOCONF' variable
+in your environment it will be used instead of 'autoconf', this allows
+you to select a particular version of Autoconf.  By the way, don't
+misunderstand this paragraph: 'automake' runs 'autoconf' to *scan* your
+'configure.ac', this won't build 'configure' and you still have to run
+'autoconf' yourself for this purpose.
+
+   'automake' accepts the following options:
+
+'-a'
+'--add-missing'
+     Automake requires certain common files to exist in certain
+     situations; for instance, 'config.guess' is required if
+     'configure.ac' invokes 'AC_CANONICAL_HOST'.  Automake is
+     distributed with several of these files (*note Auxiliary
+     Programs::); this option will cause the missing ones to be
+     automatically added to the package, whenever possible.  In general
+     if Automake tells you a file is missing, try using this option.  By
+     default Automake tries to make a symbolic link pointing to its own
+     copy of the missing file; this can be changed with '--copy'.
+
+     Many of the potentially-missing files are common scripts whose
+     location may be specified via the 'AC_CONFIG_AUX_DIR' macro.
+     Therefore, 'AC_CONFIG_AUX_DIR''s setting affects whether a file is
+     considered missing, and where the missing file is added (*note
+     Optional::).
+
+     In some strictness modes, additional files are installed, see *note
+     Gnits:: for more information.
+
+'--libdir=DIR'
+     Look for Automake data files in directory DIR instead of in the
+     installation directory.  This is typically used for debugging.
+
+'--print-libdir'
+     Print the path of the installation directory containing
+     Automake-provided scripts and data files (like e.g., 'texinfo.texi'
+     and 'install-sh').
+
+'-c'
+'--copy'
+     When used with '--add-missing', causes installed files to be
+     copied.  The default is to make a symbolic link.
+
+'-f'
+'--force-missing'
+     When used with '--add-missing', causes standard files to be
+     reinstalled even if they already exist in the source tree.  This
+     involves removing the file from the source tree before creating the
+     new symlink (or, with '--copy', copying the new file).
+
+'--foreign'
+     Set the global strictness to 'foreign'.  For more information, see
+     *note Strictness::.
+
+'--gnits'
+     Set the global strictness to 'gnits'.  For more information, see
+     *note Gnits::.
+
+'--gnu'
+     Set the global strictness to 'gnu'.  For more information, see
+     *note Gnits::.  This is the default strictness.
+
+'--help'
+     Print a summary of the command line options and exit.
+
+'-i'
+'--ignore-deps'
+     This disables the dependency tracking feature in generated
+     'Makefile's; see *note Dependencies::.
+
+'--include-deps'
+     This enables the dependency tracking feature.  This feature is
+     enabled by default.  This option is provided for historical reasons
+     only and probably should not be used.
+
+'--no-force'
+     Ordinarily 'automake' creates all 'Makefile.in's mentioned in
+     'configure.ac'.  This option causes it to only update those
+     'Makefile.in's that are out of date with respect to one of their
+     dependents.
+
+'-o DIR'
+'--output-dir=DIR'
+     Put the generated 'Makefile.in' in the directory DIR.  Ordinarily
+     each 'Makefile.in' is created in the directory of the corresponding
+     'Makefile.am'.  This option is deprecated and will be removed in a
+     future release.
+
+'-v'
+'--verbose'
+     Cause Automake to print information about which files are being
+     read or created.
+
+'--version'
+     Print the version number of Automake and exit.
+
+'-W CATEGORY'
+'--warnings=CATEGORY'
+     Output warnings falling in CATEGORY.  CATEGORY can be one of:
+     'gnu'
+          warnings related to the GNU Coding Standards (*note
+          (standards)Top::).
+     'obsolete'
+          obsolete features or constructions
+     'override'
+          user redefinitions of Automake rules or variables
+     'portability'
+          portability issues (e.g., use of 'make' features that are
+          known to be not portable)
+     'extra-portability'
+          extra portability issues related to obscure tools.  One
+          example of such a tool is the Microsoft 'lib' archiver.
+     'syntax'
+          weird syntax, unused variables, typos
+     'unsupported'
+          unsupported or incomplete features
+     'all'
+          all the warnings
+     'none'
+          turn off all the warnings
+     'error'
+          treat warnings as errors
+
+     A category can be turned off by prefixing its name with 'no-'.  For
+     instance, '-Wno-syntax' will hide the warnings about unused
+     variables.
+
+     The categories output by default are 'obsolete', 'syntax' and
+     'unsupported'.  Additionally, 'gnu' and 'portability' are enabled
+     in '--gnu' and '--gnits' strictness.
+
+     Turning off 'portability' will also turn off 'extra-portability',
+     and similarly turning on 'extra-portability' will also turn on
+     'portability'.  However, turning on 'portability' or turning off
+     'extra-portability' will not affect the other category.
+
+     The environment variable 'WARNINGS' can contain a comma-separated
+     list of categories to enable.  It will be taken into account before
+     the command-line switches; this way '-Wnone' will also ignore any
+     warning category enabled by 'WARNINGS'.  This variable is also used
+     by other tools like 'autoconf'; unknown categories are ignored for
+     this reason.
+
+   If the environment variable 'AUTOMAKE_JOBS' contains a positive
+number, it is taken as the maximum number of Perl threads to use in
+'automake' for generating multiple 'Makefile.in' files concurrently.
+This is an experimental feature.
+
+
+File: automake.info,  Node: configure,  Next: Directories,  Prev: automake Invocation,  Up: Top
+
+6 Scanning 'configure.ac', using 'aclocal'
+******************************************
+
+Automake scans the package's 'configure.ac' to determine certain
+information about the package.  Some 'autoconf' macros are required and
+some variables must be defined in 'configure.ac'.  Automake will also
+use information from 'configure.ac' to further tailor its output.
+
+   Automake also supplies some Autoconf macros to make the maintenance
+easier.  These macros can automatically be put into your 'aclocal.m4'
+using the 'aclocal' program.
+
+* Menu:
+
+* Requirements::                Configuration requirements
+* Optional::                    Other things Automake recognizes
+* aclocal Invocation::          Auto-generating aclocal.m4
+* Macros::                      Autoconf macros supplied with Automake
+
+
+File: automake.info,  Node: Requirements,  Next: Optional,  Up: configure
+
+6.1 Configuration requirements
+==============================
+
+The one real requirement of Automake is that your 'configure.ac' call
+'AM_INIT_AUTOMAKE'.  This macro does several things that are required
+for proper Automake operation (*note Macros::).
+
+   Here are the other macros that Automake requires but which are not
+run by 'AM_INIT_AUTOMAKE':
+
+'AC_CONFIG_FILES'
+'AC_OUTPUT'
+     These two macros are usually invoked as follows near the end of
+     'configure.ac'.
+
+          ...
+          AC_CONFIG_FILES([
+            Makefile
+            doc/Makefile
+            src/Makefile
+            src/lib/Makefile
+            ...
+          ])
+          AC_OUTPUT
+
+     Automake uses these to determine which files to create (*note
+     Creating Output Files: (autoconf)Output.).  A listed file is
+     considered to be an Automake generated 'Makefile' if there exists a
+     file with the same name and the '.am' extension appended.
+     Typically, 'AC_CONFIG_FILES([foo/Makefile])' will cause Automake to
+     generate 'foo/Makefile.in' if 'foo/Makefile.am' exists.
+
+     When using 'AC_CONFIG_FILES' with multiple input files, as in
+
+          AC_CONFIG_FILES([Makefile:top.in:Makefile.in:bot.in])
+
+     'automake' will generate the first '.in' input file for which a
+     '.am' file exists.  If no such file exists, the output file is not
+     considered to be generated by Automake.
+
+     Files created by 'AC_CONFIG_FILES', be they Automake 'Makefile's or
+     not, are all removed by 'make distclean'.  Their inputs are
+     automatically distributed, unless they are the output of prior
+     'AC_CONFIG_FILES' commands.  Finally, rebuild rules are generated
+     in the Automake 'Makefile' existing in the subdirectory of the
+     output file, if there is one, or in the top-level 'Makefile'
+     otherwise.
+
+     The above machinery (cleaning, distributing, and rebuilding) works
+     fine if the 'AC_CONFIG_FILES' specifications contain only literals.
+     If part of the specification uses shell variables, 'automake' will
+     not be able to fulfill this setup, and you will have to complete
+     the missing bits by hand.  For instance, on
+
+          file=input
+          ...
+          AC_CONFIG_FILES([output:$file],, [file=$file])
+
+     'automake' will output rules to clean 'output', and rebuild it.
+     However the rebuild rule will not depend on 'input', and this file
+     will not be distributed either.  (You must add 'EXTRA_DIST = input'
+     to your 'Makefile.am' if 'input' is a source file.)
+
+     Similarly
+
+          file=output
+          file2=out:in
+          ...
+          AC_CONFIG_FILES([$file:input],, [file=$file])
+          AC_CONFIG_FILES([$file2],, [file2=$file2])
+
+     will only cause 'input' to be distributed.  No file will be cleaned
+     automatically (add 'DISTCLEANFILES = output out' yourself), and no
+     rebuild rule will be output.
+
+     Obviously 'automake' cannot guess what value '$file' is going to
+     hold later when 'configure' is run, and it cannot use the shell
+     variable '$file' in a 'Makefile'.  However, if you make reference
+     to '$file' as '${file}' (i.e., in a way that is compatible with
+     'make''s syntax) and furthermore use 'AC_SUBST' to ensure that
+     '${file}' is meaningful in a 'Makefile', then 'automake' will be
+     able to use '${file}' to generate all of these rules.  For
+     instance, here is how the Automake package itself generates
+     versioned scripts for its test suite:
+
+          AC_SUBST([APIVERSION], ...)
+          ...
+          AC_CONFIG_FILES(
+            [tests/aclocal-${APIVERSION}:tests/aclocal.in],
+            [chmod +x tests/aclocal-${APIVERSION}],
+            [APIVERSION=$APIVERSION])
+          AC_CONFIG_FILES(
+            [tests/automake-${APIVERSION}:tests/automake.in],
+            [chmod +x tests/automake-${APIVERSION}])
+
+     Here cleaning, distributing, and rebuilding are done automatically,
+     because '${APIVERSION}' is known at 'make'-time.
+
+     Note that you should not use shell variables to declare 'Makefile'
+     files for which 'automake' must create 'Makefile.in'.  Even
+     'AC_SUBST' does not help here, because 'automake' needs to know the
+     file name when it runs in order to check whether 'Makefile.am'
+     exists.  (In the very hairy case that your setup requires such use
+     of variables, you will have to tell Automake which 'Makefile.in's
+     to generate on the command-line.)
+
+     It is possible to let 'automake' emit conditional rules for
+     'AC_CONFIG_FILES' with the help of 'AM_COND_IF' (*note Optional::).
+
+     To summarize:
+        * Use literals for 'Makefile's, and for other files whenever
+          possible.
+        * Use '$file' (or '${file}' without 'AC_SUBST([file])') for
+          files that 'automake' should ignore.
+        * Use '${file}' and 'AC_SUBST([file])' for files that 'automake'
+          should not ignore.
+
+
+File: automake.info,  Node: Optional,  Next: aclocal Invocation,  Prev: Requirements,  Up: configure
+
+6.2 Other things Automake recognizes
+====================================
+
+Every time Automake is run it calls Autoconf to trace 'configure.ac'.
+This way it can recognize the use of certain macros and tailor the
+generated 'Makefile.in' appropriately.  Currently recognized macros and
+their effects are:
+
+'AC_CANONICAL_BUILD'
+'AC_CANONICAL_HOST'
+'AC_CANONICAL_TARGET'
+     Automake will ensure that 'config.guess' and 'config.sub' exist.
+     Also, the 'Makefile' variables 'build_triplet', 'host_triplet' and
+     'target_triplet' are introduced.  See *note Getting the Canonical
+     System Type: (autoconf)Canonicalizing.
+
+'AC_CONFIG_AUX_DIR'
+     Automake will look for various helper scripts, such as
+     'install-sh', in the directory named in this macro invocation.
+     (The full list of scripts is: 'ar-lib', 'config.guess',
+     'config.sub', 'depcomp', 'compile', 'install-sh', 'ltmain.sh',
+     'mdate-sh', 'missing', 'mkinstalldirs', 'py-compile',
+     'test-driver', 'texinfo.tex', 'ylwrap'.)  Not all scripts are
+     always searched for; some scripts will only be sought if the
+     generated 'Makefile.in' requires them.
+
+     If 'AC_CONFIG_AUX_DIR' is not given, the scripts are looked for in
+     their standard locations.  For 'mdate-sh', 'texinfo.tex', and
+     'ylwrap', the standard location is the source directory
+     corresponding to the current 'Makefile.am'.  For the rest, the
+     standard location is the first one of '.', '..', or '../..'
+     (relative to the top source directory) that provides any one of the
+     helper scripts.  *Note Finding 'configure' Input: (autoconf)Input.
+
+     Required files from 'AC_CONFIG_AUX_DIR' are automatically
+     distributed, even if there is no 'Makefile.am' in this directory.
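+
+     For instance (a hypothetical 'configure.ac' fragment; 'build-aux'
+     is merely a conventional directory name), the helper scripts can
+     be gathered outside the top-level directory:
+
+          AC_INIT([zardoz], [1.0])
+          AC_CONFIG_AUX_DIR([build-aux])
+          AM_INIT_AUTOMAKE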
+
+'AC_CONFIG_LIBOBJ_DIR'
+     Automake will require the source files declared with 'AC_LIBSOURCE'
+     (see below) in the directory specified by this macro.
+
+'AC_CONFIG_HEADERS'
+     Automake will generate rules to rebuild these headers from the
+     corresponding templates (usually, the template for a 'foo.h' header
+     being 'foo.h.in').  Older versions of Automake required the use of
+     'AM_CONFIG_HEADER'; this is no longer the case, and that macro has
+     indeed been removed.
+
+     As with 'AC_CONFIG_FILES' (*note Requirements::), parts of the
+     specification using shell variables will be ignored as far as
+     cleaning, distributing, and rebuilding is concerned.
+
+'AC_CONFIG_LINKS'
+     Automake will generate rules to remove 'configure' generated links
+     on 'make distclean' and to distribute named source files as part of
+     'make dist'.
+
+     As for 'AC_CONFIG_FILES' (*note Requirements::), parts of the
+     specification using shell variables will be ignored as far as
+     cleaning and distributing is concerned.  (There are no rebuild
+     rules for links.)
+
+'AC_LIBOBJ'
+'AC_LIBSOURCE'
+'AC_LIBSOURCES'
+     Automake will automatically distribute any file listed in
+     'AC_LIBSOURCE' or 'AC_LIBSOURCES'.
+
+     Note that the 'AC_LIBOBJ' macro calls 'AC_LIBSOURCE'.  So if an
+     Autoconf macro is documented to call 'AC_LIBOBJ([file])', then
+     'file.c' will be distributed automatically by Automake.  This
+     encompasses many macros like 'AC_FUNC_ALLOCA', 'AC_FUNC_MEMCMP',
+     'AC_REPLACE_FUNCS', and others.
+
+     By the way, direct assignments to 'LIBOBJS' are no longer
+     supported.  You should always use 'AC_LIBOBJ' for this purpose.
+     *Note 'AC_LIBOBJ' vs. 'LIBOBJS': (autoconf)AC_LIBOBJ vs LIBOBJS.
+
+'AC_PROG_RANLIB'
+     This is required if any libraries are built in the package.  *Note
+     Particular Program Checks: (autoconf)Particular Programs.
+
+'AC_PROG_CXX'
+     This is required if any C++ source is included.  *Note Particular
+     Program Checks: (autoconf)Particular Programs.
+
+'AC_PROG_OBJC'
+     This is required if any Objective C source is included.  *Note
+     Particular Program Checks: (autoconf)Particular Programs.
+
+'AC_PROG_OBJCXX'
+     This is required if any Objective C++ source is included.  *Note
+     Particular Program Checks: (autoconf)Particular Programs.
+
+'AC_PROG_F77'
+     This is required if any Fortran 77 source is included.  *Note
+     Particular Program Checks: (autoconf)Particular Programs.
+
+'AC_F77_LIBRARY_LDFLAGS'
+     This is required for programs and shared libraries that are a
+     mixture of languages that include Fortran 77 (*note Mixing Fortran
+     77 With C and C++::).  *Note Autoconf macros supplied with
+     Automake: Macros.
+
+'AC_FC_SRCEXT'
+     Automake will add the flags computed by 'AC_FC_SRCEXT' to
+     compilation of files with the respective source extension (*note
+     Fortran Compiler Characteristics: (autoconf)Fortran Compiler.).
+
+'AC_PROG_FC'
+     This is required if any Fortran 90/95 source is included.  This
+     macro is distributed with Autoconf version 2.58 and later.  *Note
+     Particular Program Checks: (autoconf)Particular Programs.
+
+'AC_PROG_LIBTOOL'
+     Automake will turn on processing for 'libtool' (*note Introduction:
+     (libtool)Top.).
+
+'AC_PROG_YACC'
+     If a Yacc source file is seen, then you must either use this macro
+     or define the variable 'YACC' in 'configure.ac'.  The former is
+     preferred (*note Particular Program Checks: (autoconf)Particular
+     Programs.).
+
+'AC_PROG_LEX'
+     If a Lex source file is seen, then this macro must be used.  *Note
+     Particular Program Checks: (autoconf)Particular Programs.
+
+'AC_REQUIRE_AUX_FILE'
+     For each 'AC_REQUIRE_AUX_FILE([FILE])', 'automake' will ensure that
+     'FILE' exists in the aux directory, and will complain otherwise.
+     It will also automatically distribute the file.  This macro should
+     be used by third-party Autoconf macros that require some supporting
+     files in the aux directory specified with 'AC_CONFIG_AUX_DIR'
+     above.  *Note Finding 'configure' Input: (autoconf)Input.
+
+'AC_SUBST'
+     The first argument is automatically defined as a variable in each
+     generated 'Makefile.in', unless 'AM_SUBST_NOTMAKE' is also used for
+     this variable.  *Note Setting Output Variables: (autoconf)Setting
+     Output Variables.
+
+     For every substituted variable VAR, 'automake' will add a line 'VAR
+     = VALUE' to each 'Makefile.in' file.  Many Autoconf macros invoke
+     'AC_SUBST' to set output variables this way, e.g., 'AC_PATH_XTRA'
+     defines 'X_CFLAGS' and 'X_LIBS'.  Thus, you can access these
+     variables as '$(X_CFLAGS)' and '$(X_LIBS)' in any 'Makefile.am' if
+     'AC_PATH_XTRA' is called.
+
+'AM_CONDITIONAL'
+     This introduces an Automake conditional (*note Conditionals::).
+
+'AM_COND_IF'
+     This macro allows 'automake' to detect subsequent access within
+     'configure.ac' to a conditional previously introduced with
+     'AM_CONDITIONAL', thus enabling conditional 'AC_CONFIG_FILES'
+     (*note Usage of Conditionals::).
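+
+     A minimal sketch (the conditional name and the 'enable_docs' shell
+     variable, presumably set by an earlier 'AC_ARG_ENABLE', are
+     hypothetical):
+
+          AM_CONDITIONAL([BUILD_DOCS], [test "x$enable_docs" = xyes])
+          AM_COND_IF([BUILD_DOCS],
+                     [AC_CONFIG_FILES([doc/Makefile])])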
+
+'AM_GNU_GETTEXT'
+     This macro is required for packages that use GNU gettext (*note
+     gettext::).  It is distributed with gettext.  If Automake sees this
+     macro it ensures that the package meets some of gettext's
+     requirements.
+
+'AM_GNU_GETTEXT_INTL_SUBDIR'
+     This macro specifies that the 'intl/' subdirectory is to be built,
+     even if the 'AM_GNU_GETTEXT' macro was invoked with a first
+     argument of 'external'.
+
+'AM_MAINTAINER_MODE([DEFAULT-MODE])'
+     This macro adds an '--enable-maintainer-mode' option to
+     'configure'.  If this is used, 'automake' will cause
+     "maintainer-only" rules to be turned off by default in the
+     generated 'Makefile.in's, unless DEFAULT-MODE is 'enable'.  This
+     macro defines the 'MAINTAINER_MODE' conditional, which you can use
+     in your own 'Makefile.am'.  *Note maintainer-mode::.
+
+'AM_SUBST_NOTMAKE(VAR)'
+     Prevent Automake from defining a variable VAR, even if it is
+     substituted by 'config.status'.  Normally, Automake defines a
+     'make' variable for each 'configure' substitution, i.e., for each
+     'AC_SUBST([VAR])'.  This macro prevents that definition from
+     Automake.  If 'AC_SUBST' has not been called for this variable,
+     then 'AM_SUBST_NOTMAKE' has no effects.  Preventing variable
+     definitions may be useful for substitution of multi-line values,
+     where 'VAR = @VALUE@' might yield unintended results.
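+
+     For instance (a hypothetical fragment; 'MULTILINE_TEXT' is an
+     invented variable holding a multi-line value meant for a template
+     processed by 'config.status'):
+
+          # Substituted into templates, but not defined in Makefiles.
+          AC_SUBST([MULTILINE_TEXT])
+          AM_SUBST_NOTMAKE([MULTILINE_TEXT])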
+
+'m4_include'
+     Files included by 'configure.ac' using this macro will be detected
+     by Automake and automatically distributed.  They will also appear
+     as dependencies in 'Makefile' rules.
+
+     'm4_include' is seldom used by 'configure.ac' authors, but can
+     appear in 'aclocal.m4' when 'aclocal' detects that some required
+     macros come from files local to your package (as opposed to macros
+     installed in a system-wide directory, *note aclocal Invocation::).
+
+
+File: automake.info,  Node: aclocal Invocation,  Next: Macros,  Prev: Optional,  Up: configure
+
+6.3 Auto-generating aclocal.m4
+==============================
+
+Automake includes a number of Autoconf macros that can be used in your
+package (*note Macros::); some of them are actually required by Automake
+in certain situations.  These macros must be defined in your
+'aclocal.m4'; otherwise they will not be seen by 'autoconf'.
+
+   The 'aclocal' program will automatically generate 'aclocal.m4' files
+based on the contents of 'configure.ac'.  This provides a convenient way
+to get Automake-provided macros, without having to search around.  The
+'aclocal' mechanism allows other packages to supply their own macros
+(*note Extending aclocal::).  You can also use it to maintain your own
+set of custom macros (*note Local Macros::).
+
+   At startup, 'aclocal' scans all the '.m4' files it can find, looking
+for macro definitions (*note Macro Search Path::).  Then it scans
+'configure.ac'.  Any mention of one of the macros found in the first
+step causes that macro, and any macros it in turn requires, to be put
+into 'aclocal.m4'.
+
+   _Putting_ the file that contains the macro definition into
+'aclocal.m4' is usually done by copying the entire text of this file,
+including unused macro definitions as well as both '#' and 'dnl'
+comments.  If you want to make a comment that will be completely ignored
+by 'aclocal', use '##' as the comment leader.
+
+   When a file selected by 'aclocal' is located in a subdirectory
+specified as a relative search path with 'aclocal''s '-I' argument,
+'aclocal' assumes the file belongs to the package and uses 'm4_include'
+instead of copying it into 'aclocal.m4'.  This makes the package
+smaller, eases dependency tracking, and causes the file to be distributed
+automatically.  (*Note Local Macros::, for an example.)  Any macro that
+is found in a system-wide directory, or via an absolute search path will
+be copied.  So use '-I `pwd`/reldir' instead of '-I reldir' whenever
+some relative directory should be considered outside the package.
+
+   The contents of 'acinclude.m4', if this file exists, are also
+automatically included in 'aclocal.m4'.  We recommend against using
+'acinclude.m4' in new packages (*note Local Macros::).
+
+   While computing 'aclocal.m4', 'aclocal' runs 'autom4te' (*note Using
+'Autom4te': (autoconf)Using autom4te.) in order to trace the macros that
+are really used, and omit from 'aclocal.m4' all macros that are
+mentioned but otherwise unexpanded (this can happen when a macro is
+called conditionally).  'autom4te' is expected to be in the 'PATH', just
+as 'autoconf'.  Its location can be overridden using the 'AUTOM4TE'
+environment variable.
+
+* Menu:
+
+* aclocal Options::             Options supported by aclocal
+* Macro Search Path::           How aclocal finds .m4 files
+* Extending aclocal::           Writing your own aclocal macros
+* Local Macros::                Organizing local macros
+* Serials::                     Serial lines in Autoconf macros
+* Future of aclocal::           aclocal's scheduled death
+
+
+File: automake.info,  Node: aclocal Options,  Next: Macro Search Path,  Up: aclocal Invocation
+
+6.3.1 aclocal Options
+---------------------
+
+'aclocal' accepts the following options:
+
+'--automake-acdir=DIR'
+     Look for the automake-provided macro files in DIR instead of in the
+     installation directory.  This is typically used for debugging.
+
+'--system-acdir=DIR'
+     Look for the system-wide third-party macro files (and the special
+     'dirlist' file) in DIR instead of in the installation directory.
+     This is typically used for debugging.
+
+'--diff[=COMMAND]'
+     Run COMMAND on each M4 file that would be installed or overwritten
+     by '--install'.  The default COMMAND is 'diff -u'.  This option
+     implies '--install' and '--dry-run'.
+
+'--dry-run'
+     Do not actually overwrite (or create) 'aclocal.m4' and M4 files
+     installed by '--install'.
+
+'--help'
+     Print a summary of the command line options and exit.
+
+'-I DIR'
+     Add the directory DIR to the list of directories searched for '.m4'
+     files.
+
+'--install'
+     Install system-wide third-party macros into the first directory
+     specified with '-I DIR' instead of copying them in the output file.
+     Note that this will happen also if DIR is an absolute path.
+
+     When this option is used, and only when this option is used,
+     'aclocal' will also honor '#serial NUMBER' lines that appear in
+     macros: an M4 file is ignored if there exists another M4 file with
+     the same basename and a greater serial number in the search path
+     (*note Serials::).
+
+'--force'
+     Always overwrite the output file.  The default is to overwrite the
+     output file only when really needed, i.e., when its contents
+     change or if one of its dependencies is younger.
+
+     This option forces the update of 'aclocal.m4' (or the file
+     specified with '--output' below) and only this file; it has
+     absolutely no influence on files that may need to be installed by
+     '--install'.
+
+'--output=FILE'
+     Cause the output to be put into FILE instead of 'aclocal.m4'.
+
+'--print-ac-dir'
+     Prints the name of the directory that 'aclocal' will search to find
+     third-party '.m4' files.  When this option is given, normal
+     processing is suppressed.  This option was used _in the past_ by
+     third-party packages to determine where to install '.m4' macro
+     files, but _this usage is today discouraged_, since it causes
+     '$(prefix)' not to be thoroughly honoured (which violates the GNU
+     Coding Standards), and a similar semantics can be better obtained
+     with the 'ACLOCAL_PATH' environment variable; *note Extending
+     aclocal::.
+
+'--verbose'
+     Print the names of the files it examines.
+
+'--version'
+     Print the version number of Automake and exit.
+
+'-W CATEGORY'
+'--warnings=CATEGORY'
+     Output warnings falling in CATEGORY.  CATEGORY can be one of:
+     'syntax'
+          dubious syntactic constructs, underquoted macros, unused
+          macros, etc.
+     'unsupported'
+          unknown macros
+     'all'
+          all the warnings, this is the default
+     'none'
+          turn off all the warnings
+     'error'
+          treat warnings as errors
+
+     All warnings are output by default.
+
+     The environment variable 'WARNINGS' is honored in the same way as
+     it is for 'automake' (*note automake Invocation::).
+
+
+File: automake.info,  Node: Macro Search Path,  Next: Extending aclocal,  Prev: aclocal Options,  Up: aclocal Invocation
+
+6.3.2 Macro Search Path
+-----------------------
+
+By default, 'aclocal' searches for '.m4' files in the following
+directories, in this order:
+
+'ACDIR-APIVERSION'
+     This is where the '.m4' macros distributed with Automake itself are
+     stored.  APIVERSION depends on the Automake release used; for
+     example, for Automake 1.11.x, APIVERSION = '1.11'.
+
+'ACDIR'
+     This directory is intended for third party '.m4' files, and is
+     configured when 'automake' itself is built.  This is
+     '@datadir@/aclocal/', which typically expands to
+     '${prefix}/share/aclocal/'.  To find the compiled-in value of
+     ACDIR, use the '--print-ac-dir' option (*note aclocal Options::).
+
+   As an example, suppose that 'automake-1.11.2' was configured with
+'--prefix=/usr/local'.  Then, the search path would be:
+
+  1. '/usr/local/share/aclocal-1.11/'
+  2. '/usr/local/share/aclocal/'
+
+   The paths for the ACDIR and ACDIR-APIVERSION directories can be
+changed respectively through aclocal options '--system-acdir' and
+'--automake-acdir' (*note aclocal Options::).  Note however that these
+options are only intended for use by the internal Automake test suite,
+or for debugging under highly unusual situations; they are not
+ordinarily needed by end-users.
+
+   As explained in (*note aclocal Options::), there are several options
+that can be used to change or extend this search path.
+
+Modifying the Macro Search Path: '-I DIR'
+.........................................
+
+Any extra directories specified using '-I' options (*note aclocal
+Options::) are _prepended_ to this search list.  Thus, 'aclocal -I /foo
+-I /bar' results in the following search path:
+
+  1. '/foo'
+  2. '/bar'
+  3. ACDIR-APIVERSION
+  4. ACDIR
+
+Modifying the Macro Search Path: 'dirlist'
+..........................................
+
+There is a third mechanism for customizing the search path.  If a
+'dirlist' file exists in ACDIR, then that file is assumed to contain a
+list of directory patterns, one per line.  'aclocal' expands these
+patterns to directory names, and adds them to the search list _after_
+all other directories.  'dirlist' entries may use shell wildcards such
+as '*', '?', or '[...]'.
+
+   For example, suppose 'ACDIR/dirlist' contains the following:
+
+     /test1
+     /test2
+     /test3*
+
+and that 'aclocal' was called with the '-I /foo -I /bar' options.  Then,
+the search path would be
+
+  1. '/foo'
+  2. '/bar'
+  3. ACDIR-APIVERSION
+  4. ACDIR
+  5. '/test1'
+  6. '/test2'
+
+and all directories with path names starting with '/test3'.
+
+   If the '--system-acdir=DIR' option is used, then 'aclocal' will
+search for the 'dirlist' file in DIR; but remember the warnings above
+against the use of '--system-acdir'.
+
+   'dirlist' is useful in the following situation: suppose that
+'automake' version '1.11.2' is installed with '--prefix=/usr' by the
+system vendor.  Thus, the default search directories are
+
+  1. '/usr/share/aclocal-1.11/'
+  2. '/usr/share/aclocal/'
+
+   However, suppose further that many packages have been manually
+installed on the system, with $prefix=/usr/local, as is typical.  In
+that case, many of these "extra" '.m4' files are in
+'/usr/local/share/aclocal'.  The only way to force '/usr/bin/aclocal' to
+find these "extra" '.m4' files is to always call 'aclocal -I
+/usr/local/share/aclocal'.  This is inconvenient.  With 'dirlist', one
+may create a file '/usr/share/aclocal/dirlist' containing only the
+single line
+
+     /usr/local/share/aclocal
+
+   Now, the "default" search path on the affected system is
+
+  1. '/usr/share/aclocal-1.11/'
+  2. '/usr/share/aclocal/'
+  3. '/usr/local/share/aclocal/'
+
+   without the need for '-I' options; '-I' options can be reserved for
+project-specific needs ('my-source-dir/m4/'), rather than using them to
+work around local system-dependent tool installation directories.
+
+   Similarly, 'dirlist' can be handy if you have installed a local copy
+of Automake in your account and want 'aclocal' to look for macros
+installed at other places on the system.
+
+Modifying the Macro Search Path: 'ACLOCAL_PATH'
+...............................................
+
+The fourth and last mechanism to customize the macro search path is also
+the simplest.  Any directory included in the colon-separated environment
+variable 'ACLOCAL_PATH' is added to the search path and takes precedence
+over system directories (including those found via 'dirlist'), with the
+exception of the versioned directory ACDIR-APIVERSION (*note Macro
+Search Path::).  However, directories passed via '-I' will take
+precedence over directories in 'ACLOCAL_PATH'.
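+
+   For example (with a hypothetical directory name), an extra macro
+directory can be taken into account for a single 'aclocal' run:
+
+     ACLOCAL_PATH=/opt/mytools/share/aclocal aclocal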
+
+   Also note that, if the '--install' option is used, any '.m4' file
+containing a required macro that is found in a directory listed in
+'ACLOCAL_PATH' will be installed locally.  In this case, serial numbers
+in '.m4' are honoured too, *note Serials::.
+
+   Conversely to 'dirlist', 'ACLOCAL_PATH' is useful if you are using a
+global copy of Automake and want 'aclocal' to look for macros somewhere
+under your home directory.
+
+Planned future incompatibilities
+................................
+
+The order in which the directories in the macro search path are
+currently looked up is confusing and/or suboptimal in various aspects,
+and is probably going to be changed in the future Automake release.  In
+particular, directories in 'ACLOCAL_PATH' and 'ACDIR' might end up
+taking precedence over 'ACDIR-APIVERSION', and directories in
+'ACDIR/dirlist' might end up taking precedence over 'ACDIR'.  _This is a
+possible future incompatibility!_
+
+
+File: automake.info,  Node: Extending aclocal,  Next: Local Macros,  Prev: Macro Search Path,  Up: aclocal Invocation
+
+6.3.3 Writing your own aclocal macros
+-------------------------------------
+
+The 'aclocal' program doesn't have any built-in knowledge of any macros,
+so it is easy to extend it with your own macros.
+
+   This can be used by libraries that want to supply their own Autoconf
+macros for use by other programs.  For instance, the 'gettext' library
+supplies a macro 'AM_GNU_GETTEXT' that should be used by any package
+using 'gettext'.  When the library is installed, it installs this macro
+so that 'aclocal' will find it.
+
+   A macro file's name should end in '.m4'.  Such files should be
+installed in '$(datadir)/aclocal'.  This is as simple as writing:
+
+     aclocaldir = $(datadir)/aclocal
+     aclocal_DATA = mymacro.m4 myothermacro.m4
+
+Please do use '$(datadir)/aclocal', and not something based on the
+result of 'aclocal --print-ac-dir' (*note Hard-Coded Install Paths::,
+for arguments).  It might also be helpful to suggest to the user to add
+the '$(datadir)/aclocal' directory to his 'ACLOCAL_PATH' variable (*note
+ACLOCAL_PATH::) so that 'aclocal' will find the '.m4' files installed by
+your package automatically.
+
+   A file of macros should be a series of properly quoted 'AC_DEFUN''s
+(*note (autoconf)Macro Definitions::).  The 'aclocal' program also
+understands 'AC_REQUIRE' (*note (autoconf)Prerequisite Macros::), so it
+is safe to put each macro in a separate file.  Each file should have no
+side effects but macro definitions.  In particular, any call to 'AC_PREREQ'
+should be done inside the defined macro, not at the beginning of the
+file.
+
+   Starting with Automake 1.8, 'aclocal' will warn about all underquoted
+calls to 'AC_DEFUN'.  We realize this will annoy a lot of people,
+because 'aclocal' was not so strict in the past and many third party
+macros are underquoted; and we have to apologize for this temporary
+inconvenience.  The reason we have to be stricter is that a future
+implementation of 'aclocal' (*note Future of aclocal::) will have to
+temporarily include all of these third party '.m4' files, maybe several
+times, including even files that are not actually needed.  Doing so
+should alleviate many problems of the current implementation; however, it
+requires a stricter style from the macro authors.  Hopefully it is easy
+to revise the existing macros.  For instance,
+
+     # bad style
+     AC_PREREQ(2.68)
+     AC_DEFUN(AX_FOOBAR,
+     [AC_REQUIRE([AX_SOMETHING])dnl
+     AX_FOO
+     AX_BAR
+     ])
+
+should be rewritten as
+
+     AC_DEFUN([AX_FOOBAR],
+     [AC_PREREQ([2.68])dnl
+     AC_REQUIRE([AX_SOMETHING])dnl
+     AX_FOO
+     AX_BAR
+     ])
+
+   Wrapping the 'AC_PREREQ' call inside the macro ensures that Autoconf
+2.68 will not be required if 'AX_FOOBAR' is not actually used.  Most
+importantly, quoting the first argument of 'AC_DEFUN' allows the macro
+to be redefined or included twice (otherwise this first argument would
+be expanded during the second definition).  For consistency we like to
+quote even arguments such as '2.68' that do not require it.
+
+   If you have been directed here by the 'aclocal' diagnostic but are
+not the maintainer of the implicated macro, you will want to contact the
+maintainer of that macro.  Please make sure you have the latest version
+of the macro and that the problem hasn't already been reported before
+doing so: people tend to work faster when they aren't flooded by mails.
+
+   Another situation where 'aclocal' is commonly used is to manage
+macros that are used locally by the package, *note Local Macros::.
+
+
+File: automake.info,  Node: Local Macros,  Next: Serials,  Prev: Extending aclocal,  Up: aclocal Invocation
+
+6.3.4 Handling Local Macros
+---------------------------
+
+Feature tests offered by Autoconf do not cover all needs.  People often
+have to supplement existing tests with their own macros, or with
+third-party macros.
+
+   There are two ways to organize custom macros in a package.
+
+   The first possibility (the historical practice) is to list all your
+macros in 'acinclude.m4'.  This file will be included in 'aclocal.m4'
+when you run 'aclocal', and its macro(s) will henceforth be visible to
+'autoconf'.  However if it contains numerous macros, it will rapidly
+become difficult to maintain, and it will be almost impossible to share
+macros between packages.
+
+   The second possibility, which we do recommend, is to write each macro
+in its own file and gather all these files in a directory.  This
+directory is usually called 'm4/'.  Then it's enough to update
+'configure.ac' by adding a proper call to 'AC_CONFIG_MACRO_DIRS':
+
+     AC_CONFIG_MACRO_DIRS([m4])
+
+   'aclocal' will then take care of automatically adding 'm4/' to its
+search path for m4 files.
+
+   When 'aclocal' is run, it will build an 'aclocal.m4' that
+'m4_include's any file from 'm4/' that defines a required macro.  Macros
+not found locally will still be searched in system-wide directories, as
+explained in *note Macro Search Path::.
+
+   Custom macros should be distributed for the same reason that
+'configure.ac' is: so that other people have all the sources of your
+package if they want to work on it.  Actually, this distribution happens
+automatically because all 'm4_include'd files are distributed.
+
+   However there is no consensus on the distribution of third-party
+macros that your package may use.  Many libraries install their own
+macro in the system-wide 'aclocal' directory (*note Extending
+aclocal::).  For instance, Guile ships with a file called 'guile.m4'
+that contains the macro 'GUILE_FLAGS' that can be used to set up
+compiler and linker flags appropriate for using Guile.  Using
+'GUILE_FLAGS' in 'configure.ac' will cause 'aclocal' to copy 'guile.m4'
+into 'aclocal.m4', but as 'guile.m4' is not part of the project, it will
+not be distributed.  Technically, that means a user who needs to rebuild
+'aclocal.m4' will have to install Guile first.  This is probably OK, if
+Guile already is a requirement to build the package.  However, if Guile
+is only an optional feature, or if your package might run on
+architectures where Guile cannot be installed, this requirement will
+hinder development.  An easy solution is to copy such third-party macros
+in your local 'm4/' directory so they get distributed.
+
+   Since Automake 1.10, 'aclocal' offers the option '--install' to copy
+these system-wide third-party macros in your local macro directory,
+helping to solve the above problem.
+
+   With this setup, system-wide macros will be copied to 'm4/' the first
+time you run 'aclocal'.  Then the locally installed macros will have
+precedence over the system-wide installed macros each time 'aclocal' is
+run again.
+
+   One reason why you should keep '--install' in the flags even after
+the first run is that when you later edit 'configure.ac' and depend on a
+new macro, this macro will be installed in your 'm4/' automatically.
+Another one is that serial numbers (*note Serials::) can be used to
+update the macros in your source tree automatically when new system-wide
+versions are installed.  A serial number should be a single line of the
+form
+
+     #serial NNN
+
+where NNN contains only digits and dots.  It should appear in the M4
+file before any macro definition.  It is a good practice to maintain a
+serial number for each macro you distribute, even if you do not use the
+'--install' option of 'aclocal': this allows other people to use it.
+
+
+File: automake.info,  Node: Serials,  Next: Future of aclocal,  Prev: Local Macros,  Up: aclocal Invocation
+
+6.3.5 Serial Numbers
+--------------------
+
+Because third-party macros defined in '*.m4' files are naturally shared
+between multiple projects, some people like to version them.  This makes
+it easier to tell which of two M4 files is newer.  Since at least 1996,
+the tradition is to use a '#serial' line for this.
+
+   A serial number should be a single line of the form
+
+     # serial VERSION
+
+where VERSION is a version number containing only digits and dots.
+Usually people use a single integer, and they increment it each time
+they change the macro (hence the name of "serial").  Such a line should
+appear in the M4 file before any macro definition.
+
+   The '#' must be the first character on the line, and it is OK to have
+extra words after the version, as in
+
+     #serial VERSION GARBAGE
+
+   Normally these serial numbers are completely ignored by 'aclocal' and
+'autoconf', like any genuine comment.  However when using 'aclocal''s
+'--install' feature, these serial numbers will modify the way 'aclocal'
+selects the macros to install in the package: if two files with the same
+basename exist in your search path, and if at least one of them uses a
+'#serial' line, 'aclocal' will ignore the file that has the older
+'#serial' line (or the file that has none).
+
+   Note that a serial number applies to a whole M4 file, not to any
+macro it contains.  A file can contain multiple macros, but only one
+serial.
+
+   Here is a use case that illustrates the use of '--install' and its
+interaction with serial numbers.  Let's assume we maintain a package
+called MyPackage, the 'configure.ac' of which requires a third-party
+macro 'AX_THIRD_PARTY' defined in '/usr/share/aclocal/thirdparty.m4' as
+follows:
+
+     # serial 1
+     AC_DEFUN([AX_THIRD_PARTY], [...])
+
+   MyPackage uses an 'm4/' directory to store local macros as explained
+in *note Local Macros::, and has
+
+     AC_CONFIG_MACRO_DIRS([m4])
+
+in its 'configure.ac'.
+
+   Initially the 'm4/' directory is empty.  The first time we run
+'aclocal --install', it will notice that
+
+   * 'configure.ac' uses 'AX_THIRD_PARTY'
+   * No local macros define 'AX_THIRD_PARTY'
+   * '/usr/share/aclocal/thirdparty.m4' defines 'AX_THIRD_PARTY' with
+     serial 1.
+
+Because '/usr/share/aclocal/thirdparty.m4' is a system-wide macro and
+'aclocal' was given the '--install' option, it will copy this file in
+'m4/thirdparty.m4', and output an 'aclocal.m4' that contains
+'m4_include([m4/thirdparty.m4])'.
+
+   The next time 'aclocal --install' is run, something different
+happens.  'aclocal' notices that
+
+   * 'configure.ac' uses 'AX_THIRD_PARTY'
+   * 'm4/thirdparty.m4' defines 'AX_THIRD_PARTY' with serial 1.
+   * '/usr/share/aclocal/thirdparty.m4' defines 'AX_THIRD_PARTY' with
+     serial 1.
+
+Because both files have the same serial number, 'aclocal' uses the
+first one it finds in its search path order (*note Macro Search Path::).
+'aclocal' therefore ignores '/usr/share/aclocal/thirdparty.m4' and
+outputs an 'aclocal.m4' that contains 'm4_include([m4/thirdparty.m4])'.
+
+   Local directories specified with '-I' are always searched before
+system-wide directories, so a local file will always be preferred to the
+system-wide file in case of equal serial numbers.
+
+   Now suppose the system-wide third-party macro is changed.  This can
+happen if the package installing this macro is updated.  Let's suppose
+the new macro has serial number 2.  The next time 'aclocal --install'
+is run, the situation is the following:
+
+   * 'configure.ac' uses 'AX_THIRD_PARTY'
+   * 'm4/thirdparty.m4' defines 'AX_THIRD_PARTY' with serial 1.
+   * '/usr/share/aclocal/thirdparty.m4' defines 'AX_THIRD_PARTY' with
+     serial 2.
+
+When 'aclocal' sees a greater serial number, it immediately forgets
+anything it knows from files that have the same basename and a smaller
+serial number.  So after it has found '/usr/share/aclocal/thirdparty.m4'
+with serial 2, 'aclocal' will proceed as if it had never seen
+'m4/thirdparty.m4'.  This brings us back to a situation similar to that
+at the beginning of our example, where no local file defined the macro.
+'aclocal' will install the new version of the macro in
+'m4/thirdparty.m4', in this case overriding the old version.  MyPackage
+just had its macro updated as a side effect of running 'aclocal'.
+
+   If you are leery of letting 'aclocal' update your local macro, you
+can run 'aclocal --diff' to review the changes 'aclocal --install' would
+perform on these macros.
+
+   Finally, note that the '--force' option of 'aclocal' has absolutely
+no effect on the files installed by '--install'.  For instance, if you
+have modified your local macros, do not expect '--install --force' to
+replace the local macros by their system-wide versions.  If you want to
+do so, simply erase the local macros you want to revert, and run
+'aclocal --install'.
+
+
+File: automake.info,  Node: Future of aclocal,  Prev: Serials,  Up: aclocal Invocation
+
+6.3.6 The Future of 'aclocal'
+-----------------------------
+
+'aclocal' is expected to disappear.  This feature really should not be
+offered by Automake.  Automake should focus on generating 'Makefile's;
+dealing with M4 macros really is Autoconf's job.  The fact that some
+people install Automake just to use 'aclocal', but do not otherwise use
+'automake', is an indication of how misplaced that feature is.
+
+   The new implementation will probably be done slightly differently.
+For instance, it could enforce the 'm4/'-style layout discussed in *note
+Local Macros::.
+
+   We have no idea when and how this will happen.  This has been
+discussed several times in the past, but someone still has to commit to
+that non-trivial task.
+
+   From the user's point of view, 'aclocal''s removal might turn out to
+be painful.  There is a simple precaution that you may take to make
+that switch more seamless: never call 'aclocal' yourself.  Keep this
+tool under the exclusive control of 'autoreconf' and Automake's rebuild
+rules.  Hopefully you won't need to worry about things breaking when
+'aclocal' disappears, because everything will have been taken care of.
+If, on the other hand, you used to call 'aclocal' directly yourself or
+from some script, you will quickly notice the change.
+
+   Many packages come with a script called 'bootstrap.sh' or
+'autogen.sh' that just calls 'aclocal', 'libtoolize', 'gettextize' or
+'autopoint', 'autoconf', 'autoheader', and 'automake' in the right
+order.  This is precisely what 'autoreconf' can do for you.  If your
+package has such a 'bootstrap.sh' or 'autogen.sh' script, consider
+using 'autoreconf'.  That should simplify its logic a lot (fewer things
+to maintain, yum!); it is even likely you will not need the script
+anymore, and, more to the point, you will not call 'aclocal' directly
+anymore.
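+
+   A minimal replacement for such a script, assuming a standard
+Autoconf/Automake package, could be as small as this:
+
+     #!/bin/sh
+     # Regenerate the build system ('configure', 'Makefile.in', ...).
+     exec autoreconf --verbose --install --force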
+
+   For the time being, third-party packages should continue to install
+public macros into '/usr/share/aclocal/'.  If 'aclocal' is replaced by
+another tool it might make sense to rename the directory, but supporting
+'/usr/share/aclocal/' for backward compatibility should be really easy
+provided all macros are properly written (*note Extending aclocal::).
+
+
+File: automake.info,  Node: Macros,  Prev: aclocal Invocation,  Up: configure
+
+6.4 Autoconf macros supplied with Automake
+==========================================
+
+Automake ships with several Autoconf macros that you can use from your
+'configure.ac'.  When you use one of them, it will be included by
+'aclocal' in 'aclocal.m4'.
+
+* Menu:
+
+* Public Macros::               Macros that you can use.
+* Obsolete Macros::             Macros that will soon be removed.
+* Private Macros::              Macros that you should not use.
+
+
+File: automake.info,  Node: Public Macros,  Next: Obsolete Macros,  Up: Macros
+
+6.4.1 Public Macros
+-------------------
+
+'AM_INIT_AUTOMAKE([OPTIONS])'
+     Runs many macros required for proper operation of the generated
+     Makefiles.
+
+     Today, 'AM_INIT_AUTOMAKE' is called with a single argument: a
+     space-separated list of Automake options that should be applied to
+     every 'Makefile.am' in the tree.  The effect is as if each option
+     were listed in 'AUTOMAKE_OPTIONS' (*note Options::).
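+
+     For instance, a 'configure.ac' might contain (the exact option
+     list is only illustrative):
+
+          dnl The option list below is only an example.
+          AM_INIT_AUTOMAKE([1.11 -Wall foreign subdir-objects])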
+
+     This macro can also be called in another, _deprecated_ form:
+     'AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])'.  In this form,
+     there are two required arguments: the package and the version
+     number.  This usage is mostly obsolete because the PACKAGE and
+     VERSION can be obtained from Autoconf's 'AC_INIT' macro.  However,
+     unlike what happens for 'AC_INIT' invocations, this
+     'AM_INIT_AUTOMAKE' invocation supports shell variable expansions
+     in the 'PACKAGE' and 'VERSION' arguments, and this can still be
+     useful in some selected situations.  Our hope is that future
+     Autoconf versions will improve their support for package versions
+     defined dynamically at configure runtime; when (and if) this
+     happens, support for the two-argument 'AM_INIT_AUTOMAKE' invocation
+     will likely be removed from Automake.
+
+     If your 'configure.ac' has:
+
+          AC_INIT([src/foo.c])
+          AM_INIT_AUTOMAKE([mumble], [1.5])
+
+     you should modernize it as follows:
+
+          AC_INIT([mumble], [1.5])
+          AC_CONFIG_SRCDIR([src/foo.c])
+          AM_INIT_AUTOMAKE
+
+     Note that if you're upgrading your 'configure.ac' from an earlier
+     version of Automake, it is not always correct to simply move the
+     package and version arguments from 'AM_INIT_AUTOMAKE' directly to
+     'AC_INIT', as in the example above.  The first argument to
+     'AC_INIT' should be the name of your package (e.g., 'GNU
+     Automake'), not the tarball name (e.g., 'automake') that you used
+     to pass to 'AM_INIT_AUTOMAKE'.  Autoconf tries to derive a tarball
+     name from the package name, which should work for most but not all
+     package names.  (If it doesn't work for yours, you can use the
+     four-argument form of 'AC_INIT' to provide the tarball name
+     explicitly).
+
+     By default this macro 'AC_DEFINE''s 'PACKAGE' and 'VERSION'.  This
+     can be avoided by passing the 'no-define' option (*note List of
+     Automake options::):
+          AM_INIT_AUTOMAKE([no-define ...])
+
+'AM_PATH_LISPDIR'
+     Searches for the program 'emacs', and, if found, sets the output
+     variable 'lispdir' to the full path to Emacs' site-lisp directory.
+
+     Note that this test assumes the 'emacs' found is a version that
+     supports Emacs Lisp (such as GNU Emacs or XEmacs).  Other emacsen
+     can cause this test to hang (some, like old versions of MicroEmacs,
+     start up in interactive mode, requiring 'C-x C-c' to exit, which is
+     hardly obvious for a non-emacs user).  In most cases, however, you
+     should be able to use 'C-c' to kill the test.  In order to avoid
+     problems, you can set 'EMACS' to "no" in the environment, or use
+     the '--with-lispdir' option to 'configure' to explicitly set the
+     correct path (if you're sure you have an 'emacs' that supports
+     Emacs Lisp).
+
+'AM_PROG_AR([ACT-IF-FAIL])'
+     You must use this macro when you use the archiver in your project,
+     if you want support for unusual archivers such as Microsoft 'lib'.
+     The content of the optional argument is executed if the archiver
+     interface is not recognized; the default action is to abort
+     configure with an error message.
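+
+     For instance, to merely warn instead of aborting (the message text
+     is only illustrative), one could write:
+
+          dnl The warning text is only illustrative.
+          AM_PROG_AR([AC_MSG_WARN([archiver interface not recognized])])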
+
+'AM_PROG_AS'
+     Use this macro when you have assembly code in your project.  This
+     will choose the assembler for you (by default the C compiler) and
+     set 'CCAS', and will also set 'CCASFLAGS' if required.
+
+'AM_PROG_CC_C_O'
+     This is an obsolescent macro that checks that the C compiler
+     supports the '-c' and '-o' options together.  Note that, since
+     Automake 1.14, the 'AC_PROG_CC' macro is rewritten to implement such
+     checks itself, and thus the explicit use of 'AM_PROG_CC_C_O' should
+     no longer be required.
+
+'AM_PROG_LEX'
+     Like 'AC_PROG_LEX' (*note Particular Program Checks:
+     (autoconf)Particular Programs.), but uses the 'missing' script on
+     systems that do not have 'lex'.  HP-UX 10 is one such system.
+
+'AM_PROG_GCJ'
+     This macro finds the 'gcj' program or causes an error.  It sets
+     'GCJ' and 'GCJFLAGS'.  'gcj' is the Java front-end to the GNU
+     Compiler Collection.
+
+'AM_PROG_UPC([COMPILER-SEARCH-LIST])'
+     Find a compiler for Unified Parallel C and define the 'UPC'
+     variable.  The default COMPILER-SEARCH-LIST is 'upcc upc'.  This
+     macro will abort 'configure' if no Unified Parallel C compiler is
+     found.
+
+'AM_MISSING_PROG(NAME, PROGRAM)'
+     Find a maintainer tool PROGRAM and define the NAME output variable
+     with its location.  If PROGRAM is not detected, then NAME
+     will instead invoke the 'missing' script, in order to give useful
+     advice to the user about the missing maintainer tool.  *Note
+     maintainer-mode::, for more information on when the 'missing'
+     script is appropriate.
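+
+     For instance, a package that needs 'help2man' only when rebuilding
+     its manual pages might use (this particular pairing is merely an
+     illustration):
+
+          dnl 'help2man' is just an example of a maintainer tool.
+          AM_MISSING_PROG([HELP2MAN], [help2man])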
+
+'AM_SILENT_RULES'
+     Control the machinery for less verbose build output (*note Automake
+     Silent Rules::).
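+
+     For instance, to make the less verbose output the default, one can
+     write:
+
+          AM_SILENT_RULES([yes])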
+
+'AM_WITH_DMALLOC'
+     Add support for the Dmalloc package (http://dmalloc.com/).  If the
+     user runs 'configure' with '--with-dmalloc', then define
+     'WITH_DMALLOC' and add '-ldmalloc' to 'LIBS'.
+
+
+File: automake.info,  Node: Obsolete Macros,  Next: Private Macros,  Prev: Public Macros,  Up: Macros
+
+6.4.2 Obsolete Macros
+---------------------
+
+Although using some of the following macros was required in past
+releases, you should not use any of them in new code.  _All these macros
+will be removed in the next major Automake version_; if you are still
+using them, running 'autoupdate' should adjust your 'configure.ac'
+automatically (*note Using 'autoupdate' to Modernize 'configure.ac':
+(autoconf)autoupdate Invocation.).  _Do it NOW!_
+
+'AM_PROG_MKDIR_P'
+
+     From Automake 1.8 to 1.9.6 this macro used to define the output
+     variable 'mkdir_p' to one of 'mkdir -p', 'install-sh -d', or
+     'mkinstalldirs'.
+
+     Nowadays Autoconf provides similar functionality with
+     'AC_PROG_MKDIR_P' (*note Particular Program Checks:
+     (autoconf)Particular Programs.); however, this defines the output
+     variable 'MKDIR_P' instead.  In case you are still using the
+     'AM_PROG_MKDIR_P' macro in your 'configure.ac', or its provided
+     variable '$(mkdir_p)' in your 'Makefile.am', you are advised to
+     switch ASAP to the more modern Autoconf-provided interface instead;
+     both the macro and the variable might be removed in a future major
+     Automake release.
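+
+     For instance, a rule fragment that used to invoke (the destination
+     directory here is only an example)
+
+          $(mkdir_p) $(DESTDIR)$(pkgdatadir)
+
+     can, once 'AC_PROG_MKDIR_P' is used in 'configure.ac', be written
+     as
+
+          $(MKDIR_P) $(DESTDIR)$(pkgdatadir)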
+
+
+File: automake.info,  Node: Private Macros,  Prev: Obsolete Macros,  Up: Macros
+
+6.4.3 Private Macros
+--------------------
+
+The following macros are private macros you should not call directly.
+They are called by the other public macros when appropriate.  Do not
+rely on them, as they might be changed in a future version.  Consider
+them as implementation details; or better, do not consider them at all:
+skip this section!
+
+'_AM_DEPENDENCIES'
+'AM_SET_DEPDIR'
+'AM_DEP_TRACK'
+'AM_OUTPUT_DEPENDENCY_COMMANDS'
+     These macros are used to implement Automake's automatic dependency
+     tracking scheme.  They are called automatically by Automake when
+     required, and there should be no need to invoke them manually.
+
+'AM_MAKE_INCLUDE'
+     This macro is used to discover how the user's 'make' handles
+     'include' statements.  This macro is automatically invoked when
+     needed; there should be no need to invoke it manually.
+
+'AM_PROG_INSTALL_STRIP'
+     This is used to find a version of 'install' that can be used to
+     strip a program at installation time.  This macro is automatically
+     included when required.
+
+'AM_SANITY_CHECK'
+     This checks to make sure that a file created in the build directory
+     is newer than a file in the source directory.  This can fail on
+     systems where the clock is set incorrectly.  This macro is
+     automatically run from 'AM_INIT_AUTOMAKE'.
+
+
+File: automake.info,  Node: Directories,  Next: Programs,  Prev: configure,  Up: Top
+
+7 Directories
+*************
+
+For simple projects that distribute all files in the same directory it
+is enough to have a single 'Makefile.am' that builds everything in
+place.
+
+   In larger projects, it is common to organize files in different
+directories, in a tree.  For example, there could be a directory for the
+program's source, one for the testsuite, and one for the documentation;
+or, for very large projects, there could be one directory per program,
+per library or per module.
+
+   The traditional approach is to build these subdirectories
+recursively, employing _make recursion_: each directory contains its own
+'Makefile', and when 'make' is run from the top-level directory, it
+enters each subdirectory in turn, and invokes there a new 'make'
+instance to build the directory's contents.
+
+   Because this approach is very widespread, Automake offers built-in
+support for it.  However, it is worth noting that the use of make
+recursion has its own serious issues and drawbacks, and that it is
+quite possible to have packages with a multi-directory layout that make
+little or no use of such recursion (examples of such packages are GNU
+Bison and GNU Automake itself); see also the *note Alternative::
+section below.
+
+* Menu:
+
+* Subdirectories::              Building subdirectories recursively
+* Conditional Subdirectories::  Conditionally not building directories
+* Alternative::                 Subdirectories without recursion
+* Subpackages::                 Nesting packages
+
+
+File: automake.info,  Node: Subdirectories,  Next: Conditional Subdirectories,  Up: Directories
+
+7.1 Recursing subdirectories
+============================
+
+In packages using make recursion, the top level 'Makefile.am' must tell
+Automake which subdirectories are to be built.  This is done via the
+'SUBDIRS' variable.
+
+   The 'SUBDIRS' variable holds a list of subdirectories in which
+building of various sorts can occur.  The rules for many targets (e.g.,
+'all') in the generated 'Makefile' will run commands both locally and in
+all specified subdirectories.  Note that the directories listed in
+'SUBDIRS' are not required to contain 'Makefile.am's; only 'Makefile's
+(after configuration).  This allows inclusion of libraries from packages
+that do not use Automake (such as 'gettext'; see also *note Third-Party
+Makefiles::).
+
+   In packages that use subdirectories, the top-level 'Makefile.am' is
+often very short.  For instance, here is the 'Makefile.am' from the GNU
+Hello distribution:
+
+     EXTRA_DIST = BUGS ChangeLog.O README-alpha
+     SUBDIRS = doc intl po src tests
+
+   When Automake invokes 'make' in a subdirectory, it uses the value of
+the 'MAKE' variable.  It passes the value of the variable 'AM_MAKEFLAGS'
+to the 'make' invocation; this can be set in 'Makefile.am' if there are
+flags you must always pass to 'make'.
+
+   The directories mentioned in 'SUBDIRS' are usually direct children of
+the current directory, each subdirectory containing its own
+'Makefile.am' with a 'SUBDIRS' pointing to deeper subdirectories.
+Automake can be used to construct packages of arbitrary depth this way.
+
+   By default, Automake generates 'Makefiles' that work depth-first in
+postfix order: the subdirectories are built before the current
+directory.  However, it is possible to change this ordering.  You can do
+this by putting '.' into 'SUBDIRS'.  For instance, putting '.' first
+will cause a prefix ordering of directories.
+
+   Using
+
+     SUBDIRS = lib src . test
+
+will cause 'lib/' to be built before 'src/', then the current directory
+will be built, and finally the 'test/' directory will be built.  It is
+customary to arrange test directories to be built after everything else
+since they are meant to test what has been constructed.
+
+   In addition to the built-in recursive targets defined by Automake
+('all', 'check', etc.), the developer can also define their own recursive
+targets.  That is done by passing the names of such targets as arguments
+to the m4 macro 'AM_EXTRA_RECURSIVE_TARGETS' in 'configure.ac'.
+Automake generates rules to handle the recursion for such targets; and
+the developer can define real actions for them by defining corresponding
+'-local' targets.
+
+     % cat configure.ac
+     AC_INIT([pkg-name], [1.0])
+     AM_INIT_AUTOMAKE
+     AM_EXTRA_RECURSIVE_TARGETS([foo])
+     AC_CONFIG_FILES([Makefile sub/Makefile sub/src/Makefile])
+     AC_OUTPUT
+     % cat Makefile.am
+     SUBDIRS = sub
+     foo-local:
+             @echo This will be run by "make foo".
+     % cat sub/Makefile.am
+     SUBDIRS = src
+     % cat sub/src/Makefile.am
+     foo-local:
+             @echo This too will be run by a "make foo" issued either in
+             @echo the 'sub/src/' directory, the 'sub/' directory, or the
+             @echo top-level directory.
+
+
+File: automake.info,  Node: Conditional Subdirectories,  Next: Alternative,  Prev: Subdirectories,  Up: Directories
+
+7.2 Conditional Subdirectories
+==============================
+
+It is possible to define the 'SUBDIRS' variable conditionally if, as in
+the case of GNU Inetutils, you want to build only a subset of the
+entire package.
+
+   To illustrate how this works, let's assume we have two directories
+'src/' and 'opt/'.  'src/' should always be built, but we want to decide
+in 'configure' whether 'opt/' will be built or not.  (For this example
+we will assume that 'opt/' should be built when the variable '$want_opt'
+was set to 'yes'.)
+
+   Running 'make' should thus always recurse into 'src/', and then
+maybe into 'opt/'.
+
+   However, 'make dist' should always recurse into both 'src/' and
+'opt/', because 'opt/' should be distributed even if it is not needed
+in the current configuration.  This means 'opt/Makefile' should be
+created _unconditionally_.
+
+   There are two ways to set up a project like this.  You can use
+Automake conditionals (*note Conditionals::) or use Autoconf 'AC_SUBST'
+variables (*note Setting Output Variables: (autoconf)Setting Output
+Variables.).  Using Automake conditionals is the preferred solution.
+Before we illustrate these two possibilities, let's introduce
+'DIST_SUBDIRS'.
+
+* Menu:
+
+* SUBDIRS vs DIST_SUBDIRS::     Two sets of directories
+* Subdirectories with AM_CONDITIONAL::  Specifying conditional subdirectories
+* Subdirectories with AC_SUBST::  Another way for conditional recursion
+* Unconfigured Subdirectories::  Not even creating a 'Makefile'
+
+
+File: automake.info,  Node: SUBDIRS vs DIST_SUBDIRS,  Next: Subdirectories with AM_CONDITIONAL,  Up: Conditional Subdirectories
+
+7.2.1 'SUBDIRS' vs. 'DIST_SUBDIRS'
+----------------------------------
+
+Automake considers two sets of directories, defined by the variables
+'SUBDIRS' and 'DIST_SUBDIRS'.
+
+   'SUBDIRS' contains the subdirectories of the current directory that
+must be built (*note Subdirectories::).  It must be defined manually;
+Automake will never guess a directory is to be built.  As we will see in
+the next two sections, it is possible to define it conditionally so that
+some directory will be omitted from the build.
+
+   'DIST_SUBDIRS' is used in rules that need to recurse in all
+directories, even those that have been conditionally left out of the
+build.  Recall our example where we may not want to build subdirectory
+'opt/', yet we want to distribute it.  This is where 'DIST_SUBDIRS'
+comes into play: 'opt' may not appear in 'SUBDIRS', but it must appear
+in 'DIST_SUBDIRS'.
+
+   Precisely, 'DIST_SUBDIRS' is used by 'make maintainer-clean', 'make
+distclean' and 'make dist'.  All other recursive rules use 'SUBDIRS'.
+
+   If 'SUBDIRS' is defined conditionally using Automake conditionals,
+Automake will define 'DIST_SUBDIRS' automatically from the possible
+values of 'SUBDIRS' in all conditions.
+
+   If 'SUBDIRS' contains 'AC_SUBST' variables, 'DIST_SUBDIRS' will not
+be defined correctly because Automake does not know the possible values
+of these variables.  In this case 'DIST_SUBDIRS' needs to be defined
+manually.
+
+
+File: automake.info,  Node: Subdirectories with AM_CONDITIONAL,  Next: Subdirectories with AC_SUBST,  Prev: SUBDIRS vs DIST_SUBDIRS,  Up: Conditional Subdirectories
+
+7.2.2 Subdirectories with 'AM_CONDITIONAL'
+------------------------------------------
+
+'configure' should output the 'Makefile' for each directory and define
+a condition that controls whether 'opt/' should be built.
+
+     ...
+     AM_CONDITIONAL([COND_OPT], [test "$want_opt" = yes])
+     AC_CONFIG_FILES([Makefile src/Makefile opt/Makefile])
+     ...
+
+   Then 'SUBDIRS' can be defined in the top-level 'Makefile.am' as
+follows.
+
+     if COND_OPT
+       MAYBE_OPT = opt
+     endif
+     SUBDIRS = src $(MAYBE_OPT)
+
+   As you can see, running 'make' will rightly recurse into 'src/' and
+maybe 'opt/'.
+
+   As you can't see, running 'make dist' will recurse into both 'src/'
+and 'opt/' directories because 'make dist', unlike 'make all', doesn't
+use the 'SUBDIRS' variable.  It uses the 'DIST_SUBDIRS' variable.
+
+   In this case Automake will define 'DIST_SUBDIRS = src opt'
+automatically because it knows that 'MAYBE_OPT' can contain 'opt' in
+some condition.
+
+
+File: automake.info,  Node: Subdirectories with AC_SUBST,  Next: Unconfigured Subdirectories,  Prev: Subdirectories with AM_CONDITIONAL,  Up: Conditional Subdirectories
+
+7.2.3 Subdirectories with 'AC_SUBST'
+------------------------------------
+
+Another possibility is to define 'MAYBE_OPT' from './configure' using
+'AC_SUBST':
+
+     ...
+     if test "$want_opt" = yes; then
+       MAYBE_OPT=opt
+     else
+       MAYBE_OPT=
+     fi
+     AC_SUBST([MAYBE_OPT])
+     AC_CONFIG_FILES([Makefile src/Makefile opt/Makefile])
+     ...
+
+   In this case the top-level 'Makefile.am' should look as follows.
+
+     SUBDIRS = src $(MAYBE_OPT)
+     DIST_SUBDIRS = src opt
+
+   The drawback is that since Automake cannot guess what the possible
+values of 'MAYBE_OPT' are, it is necessary to define 'DIST_SUBDIRS'.
+
+
+File: automake.info,  Node: Unconfigured Subdirectories,  Prev: Subdirectories with AC_SUBST,  Up: Conditional Subdirectories
+
+7.2.4 Unconfigured Subdirectories
+---------------------------------
+
+The semantics of 'DIST_SUBDIRS' are often misunderstood by users who
+try to _configure and build_ subdirectories conditionally.  Here by
+configuring we mean creating the 'Makefile' (it might also involve
+running a nested 'configure' script: this is a costly operation that
+explains why people want to do it conditionally, but only the 'Makefile'
+is relevant to the discussion).
+
+   The above examples all assume that every 'Makefile' is created, even
+in directories that are not going to be built.  The simple reason is
+that we want 'make dist' to distribute even the directories that are not
+being built (e.g., platform-dependent code), hence 'make dist' must
+recurse into the subdirectory, hence this directory must be configured
+and appear in 'DIST_SUBDIRS'.
+
+   Building packages that do not configure every subdirectory is a
+tricky business, and we do not recommend it to the novice as it is easy
+to produce an incomplete tarball by mistake.  We will not discuss this
+topic in depth here; however, for the adventurous, here are a few rules
+to remember.
+
+   * 'SUBDIRS' should always be a subset of 'DIST_SUBDIRS'.
+
+     It makes little sense to have a directory in 'SUBDIRS' that is not
+     in 'DIST_SUBDIRS'.  Think of the former as a way to tell which
+     directories listed in the latter should be built.
+   * Any directory listed in 'DIST_SUBDIRS' and 'SUBDIRS' must be
+     configured.
+
+     I.e., the 'Makefile' must exist or the recursive 'make' rules will
+     not be able to process the directory.
+   * Any configured directory must be listed in 'DIST_SUBDIRS'.
+
+     This ensures that the cleaning rules remove the generated
+     'Makefile's.  It
+     would be correct to see 'DIST_SUBDIRS' as a variable that lists all
+     the directories that have been configured.
+
+   In order to prevent recursion in some unconfigured directory you must
+therefore ensure that this directory does not appear in 'DIST_SUBDIRS'
+(and 'SUBDIRS').  For instance, if you define 'SUBDIRS' conditionally
+using 'AC_SUBST' and do not define 'DIST_SUBDIRS' explicitly, it will
+default to '$(SUBDIRS)'; another possibility is to force 'DIST_SUBDIRS =
+$(SUBDIRS)'.
+
+   Of course, directories that are omitted from 'DIST_SUBDIRS' will not
+be distributed unless you make other arrangements for this to happen
+(for instance, always running 'make dist' in a configuration where all
+directories are known to appear in 'DIST_SUBDIRS'; or writing a
+'dist-hook' target to distribute these directories).
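+
+   For illustration, a hypothetical 'dist-hook' that ships an otherwise
+unconfigured 'win32/' directory could look like this:
+
+     # Hypothetical: copy the unconfigured win32/ sources into the
+     # distribution tree.
+     dist-hook:
+             cp -pR $(srcdir)/win32 $(distdir)/win32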
+
+   In a few packages, unconfigured directories are not even expected to
+be distributed.  Although these packages do not require the
+aforementioned extra arrangements, there is another pitfall.  If the
+name of a
+directory appears in 'SUBDIRS' or 'DIST_SUBDIRS', 'automake' will make
+sure the directory exists.  Consequently 'automake' cannot be run on
+such a distribution when one directory has been omitted.  One way to
+avoid this check is to use the 'AC_SUBST' method to declare conditional
+directories; since 'automake' does not know the values of 'AC_SUBST'
+variables it cannot ensure the corresponding directory exists.
+
+
+File: automake.info,  Node: Alternative,  Next: Subpackages,  Prev: Conditional Subdirectories,  Up: Directories
+
+7.3 An Alternative Approach to Subdirectories
+=============================================
+
+If you've ever read Peter Miller's excellent paper, Recursive Make
+Considered Harmful (http://miller.emu.id.au/pmiller/books/rmch/), the
+preceding sections on the use of make recursion will probably come as
+unwelcome advice.  For those who haven't read the paper, Miller's main
+thesis is that recursive 'make' invocations are both slow and
+error-prone.
+
+   Automake provides sufficient cross-directory support (1) to enable
+you to write a single 'Makefile.am' for a complex multi-directory
+package.
+
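+   For instance (the file names are purely illustrative), a single
+top-level 'Makefile.am' can list sources that live in subdirectories,
+typically together with the 'subdir-objects' option so that object
+files are placed next to their sources:
+
+     # Illustrative layout; 'subdir-objects' keeps objects in subdirectories.
+     AUTOMAKE_OPTIONS = subdir-objects
+     bin_PROGRAMS = prog
+     prog_SOURCES = src/main.c src/util.c lib/helper.c
+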
+   By default an installable file specified in a subdirectory will have
+its directory name stripped before installation.  For instance, in this
+example, the header file will be installed as '$(includedir)/stdio.h':
+
+     include_HEADERS = inc/stdio.h
+
+   However, the 'nobase_' prefix can be used to circumvent this path
+stripping.  In this example, the header file will be installed as
+'$(includedir)/sys/types.h':
+
+     nobase_include_HEADERS = sys/types.h
+
+   'nobase_' should be specified first when used in conjunction with
+either 'dist_' or 'nodist_' (*note Fine-grained Distribution Control::).
+For instance:
+
+     nobase_dist_pkgdata_DATA = images/vortex.pgm sounds/whirl.ogg
+
+   Finally, note that a variable using the 'nobase_' prefix can often be
+replaced by several variables, one for each destination directory (*note
+Uniform::).  For instance, the last example could be rewritten as
+follows:
+
+     imagesdir = $(pkgdatadir)/images
+     soundsdir = $(pkgdatadir)/sounds
+     dist_images_DATA = images/vortex.pgm
+     dist_sounds_DATA = sounds/whirl.ogg
+
+This latter syntax makes it possible to change one destination directory
+without changing the layout of the source tree.
+
+   Currently, 'nobase_*_LTLIBRARIES' are the only exception to this
+rule, in that there is no particular installation order guarantee for an
+otherwise equivalent set of variables without 'nobase_' prefix.
+
+   ---------- Footnotes ----------
+
+   (1) We believe.  This work is new and there are probably warts.
+*Note Introduction::, for information on reporting bugs.
+
+
+File: automake.info,  Node: Subpackages,  Prev: Alternative,  Up: Directories
+
+7.4 Nesting Packages
+====================
+
+In the GNU Build System, packages can be nested to arbitrary depth.
+This means that a package can embed other packages with their own
+'configure', 'Makefile's, etc.
+
+   These other packages should just appear as subdirectories of their
+parent package.  They must be listed in 'SUBDIRS' like other ordinary
+directories.  However, the subpackage's 'Makefile's should be output by
+its own 'configure' script, not by the parent's 'configure'.  This is
+achieved using the 'AC_CONFIG_SUBDIRS' Autoconf macro (*note
+AC_CONFIG_SUBDIRS: (autoconf)Subdirectories.).
+
+   Here is an example package for an 'arm' program that links with a
+'hand' library that is a nested package in subdirectory 'hand/'.
+
+   'arm''s 'configure.ac':
+
+     AC_INIT([arm], [1.0])
+     AC_CONFIG_AUX_DIR([.])
+     AM_INIT_AUTOMAKE
+     AC_PROG_CC
+     AC_CONFIG_FILES([Makefile])
+     # Call hand's ./configure script recursively.
+     AC_CONFIG_SUBDIRS([hand])
+     AC_OUTPUT
+
+   'arm''s 'Makefile.am':
+
+     # Build the library in the hand subdirectory first.
+     SUBDIRS = hand
+
+     # Include hand's header when compiling this directory.
+     AM_CPPFLAGS = -I$(srcdir)/hand
+
+     bin_PROGRAMS = arm
+     arm_SOURCES = arm.c
+     # link with the hand library.
+     arm_LDADD = hand/libhand.a
+
+   Now here is 'hand''s 'hand/configure.ac':
+
+     AC_INIT([hand], [1.2])
+     AC_CONFIG_AUX_DIR([.])
+     AM_INIT_AUTOMAKE
+     AC_PROG_CC
+     AM_PROG_AR
+     AC_PROG_RANLIB
+     AC_CONFIG_FILES([Makefile])
+     AC_OUTPUT
+
+and its 'hand/Makefile.am':
+
+     lib_LIBRARIES = libhand.a
+     libhand_a_SOURCES = hand.c
+
+   When 'make dist' is run from the top-level directory it will create
+an archive 'arm-1.0.tar.gz' that contains the 'arm' code as well as the
+'hand' subdirectory.  This package can be built and installed like any
+ordinary package, with the usual './configure && make && make install'
+sequence (the 'hand' subpackage will be built and installed by the
+process).
+
+   When 'make dist' is run from the hand directory, it will create a
+self-contained 'hand-1.2.tar.gz' archive.  So although it appears to be
+embedded in another package, it can still be used separately.
+
+   The purpose of the 'AC_CONFIG_AUX_DIR([.])' instruction is to force
+Automake and Autoconf to search for auxiliary scripts in the current
+directory.  For instance, this means that there will be two copies of
+'install-sh': one in the top-level of the 'arm' package, and another one
+in the 'hand/' subdirectory for the 'hand' package.
+
+   The historical default is to search for these auxiliary scripts in
+the parent directory and the grandparent directory.  So if the
+'AC_CONFIG_AUX_DIR([.])' line was removed from 'hand/configure.ac', that
+subpackage would share the auxiliary scripts of the 'arm' package.  This
+may look like a gain in size (a few kilobytes), but it is actually a
+loss of modularity as the 'hand' subpackage is no longer self-contained
+('make dist' in the subdirectory will not work anymore).
+
+   Packages that do not use Automake need more work to be integrated
+this way.  *Note Third-Party Makefiles::.
+
+
+File: automake.info,  Node: Programs,  Next: Other Objects,  Prev: Directories,  Up: Top
+
+8 Building Programs and Libraries
+*********************************
+
+A large part of Automake's functionality is dedicated to making it easy
+to build programs and libraries.
+
+* Menu:
+
+* A Program::                   Building a program
+* A Library::                   Building a library
+* A Shared Library::            Building a Libtool library
+* Program and Library Variables::  Variables controlling program and
+                                library builds
+* Default _SOURCES::            Default source files
+* LIBOBJS::                     Special handling for LIBOBJS and ALLOCA
+* Program Variables::           Variables used when building a program
+* Yacc and Lex::                Yacc and Lex support
+* C++ Support::                 Compiling C++ sources
+* Objective C Support::         Compiling Objective C sources
+* Objective C++ Support::       Compiling Objective C++ sources
+* Unified Parallel C Support::  Compiling Unified Parallel C sources
+* Assembly Support::            Compiling assembly sources
+* Fortran 77 Support::          Compiling Fortran 77 sources
+* Fortran 9x Support::          Compiling Fortran 9x sources
+* Java Support with gcj::       Compiling Java sources using gcj
+* Vala Support::                Compiling Vala sources
+* Support for Other Languages::  Compiling other languages
+* Dependencies::                Automatic dependency tracking
+* EXEEXT::                      Support for executable extensions
+
+
+File: automake.info,  Node: A Program,  Next: A Library,  Up: Programs
+
+8.1 Building a program
+======================
+
+In order to build a program, you need to tell Automake which sources are
+part of it, and which libraries it should be linked with.
+
+   This section also covers conditional compilation of sources or
+programs.  Most of the comments about these also apply to libraries
+(*note A Library::) and libtool libraries (*note A Shared Library::).
+
+* Menu:
+
+* Program Sources::             Defining program sources
+* Linking::                     Linking with libraries or extra objects
+* Conditional Sources::         Handling conditional sources
+* Conditional Programs::        Building a program conditionally
+
+
+File: automake.info,  Node: Program Sources,  Next: Linking,  Up: A Program
+
+8.1.1 Defining program sources
+------------------------------
+
+In a directory containing source that gets built into a program (as
+opposed to a library or a script), the 'PROGRAMS' primary is used.
+Programs can be installed in 'bindir', 'sbindir', 'libexecdir',
+'pkglibexecdir', or not at all ('noinst_').  They can also be built only
+for 'make check', in which case the prefix is 'check_'.
+
+   For instance:
+
+     bin_PROGRAMS = hello
+
+   In this simple case, the resulting 'Makefile.in' will contain code to
+generate a program named 'hello'.
+
+   Associated with each program are several assisting variables that are
+named after the program.  These variables are all optional, and have
+reasonable defaults.  Each variable, its use, and default is spelled out
+below; we use the "hello" example throughout.
+
+   The variable 'hello_SOURCES' is used to specify which source files
+get built into an executable:
+
+     hello_SOURCES = hello.c version.c getopt.c getopt1.c getopt.h system.h
+
+   This causes each mentioned '.c' file to be compiled into the
+corresponding '.o'.  Then all are linked to produce 'hello'.
+
+   If 'hello_SOURCES' is not specified, then it defaults to the single
+file 'hello.c' (*note Default _SOURCES::).
+
+   Multiple programs can be built in a single directory.  Multiple
+programs can share a single source file, which must be listed in each
+'_SOURCES' definition.
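+
+   For instance (the file names here are only illustrative):
+
+     # Illustrative: 'protocol.c' is shared by both programs.
+     bin_PROGRAMS = client server
+     client_SOURCES = client.c protocol.c
+     server_SOURCES = server.c protocol.c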
+
+   Header files listed in a '_SOURCES' definition will be included in
+the distribution but otherwise ignored.  In case it isn't obvious, you
+should not include the header file generated by 'configure' in a
+'_SOURCES' variable; this file should not be distributed.  Lex ('.l')
+and Yacc ('.y') files can also be listed; see *note Yacc and Lex::.
+
+
+File: automake.info,  Node: Linking,  Next: Conditional Sources,  Prev: Program Sources,  Up: A Program
+
+8.1.2 Linking the program
+-------------------------
+
+If you need to link against libraries that are not found by 'configure',
+you can use 'LDADD' to do so.  This variable is used to specify
+additional objects or libraries to link with; it is inappropriate for
+specifying specific linker flags; you should use 'AM_LDFLAGS' for this
+purpose.
+
+   Sometimes, multiple programs are built in one directory but do not
+share the same link-time requirements.  In this case, you can use the
+'PROG_LDADD' variable (where PROG is the name of the program as it
+appears in some '_PROGRAMS' variable, and usually written in lowercase)
+to override 'LDADD'.  If this variable exists for a given program, then
+that program is not linked using 'LDADD'.
+
+   For instance, in GNU cpio, 'pax', 'cpio' and 'mt' are linked against
+the library 'libcpio.a'.  However, 'rmt' is built in the same directory,
+and has no such link requirement.  Also, 'mt' and 'rmt' are only built
+on certain architectures.  Here is what cpio's 'src/Makefile.am' looks
+like (abridged):
+
+     bin_PROGRAMS = cpio pax $(MT)
+     libexec_PROGRAMS = $(RMT)
+     EXTRA_PROGRAMS = mt rmt
+
+     LDADD = ../lib/libcpio.a $(INTLLIBS)
+     rmt_LDADD =
+
+     cpio_SOURCES = ...
+     pax_SOURCES = ...
+     mt_SOURCES = ...
+     rmt_SOURCES = ...
+
+   'PROG_LDADD' is inappropriate for passing program-specific linker
+flags (except for '-l', '-L', '-dlopen' and '-dlpreopen').  So, use the
+'PROG_LDFLAGS' variable for this purpose.
+
+   It is also occasionally useful to have a program depend on some other
+target that is not actually part of that program.  This can be done
+using either the 'PROG_DEPENDENCIES' or the 'EXTRA_PROG_DEPENDENCIES'
+variable.  Each program depends on the contents of both variables, but no
+further interpretation is done.
+
+   Since these dependencies are associated with the link rule used to
+create the programs, they should normally list files used by the link
+command.  That is '*.$(OBJEXT)', '*.a', or '*.la' files.  In rare cases
+you may need to add other kinds of files such as linker scripts, but
+_listing a source file in '_DEPENDENCIES' is wrong_.  If some source
+file needs to be built before all the components of a program are built,
+consider using the 'BUILT_SOURCES' variable instead (*note Sources::).
+
+   If 'PROG_DEPENDENCIES' is not supplied, it is computed by Automake.
+The automatically-assigned value is the contents of 'PROG_LDADD', with
+most configure substitutions, '-l', '-L', '-dlopen' and '-dlpreopen'
+options removed.  The configure substitutions that are left in are only
+'$(LIBOBJS)' and '$(ALLOCA)'; these are left because it is known that
+they will not cause an invalid value for 'PROG_DEPENDENCIES' to be
+generated.
+
+   *note Conditional Sources:: shows a situation where '_DEPENDENCIES'
+may be used.
+
+   The 'EXTRA_PROG_DEPENDENCIES' variable may be useful for cases where
+you merely want to augment the 'automake'-generated 'PROG_DEPENDENCIES'
+rather than replace it.
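+
+   For instance, to make 'hello' additionally depend on a linker script
+(the file name is purely illustrative):
+
+     # 'hello.map' is a hypothetical linker script.
+     EXTRA_hello_DEPENDENCIES = hello.map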
+
+   We recommend that you avoid using '-l' options in 'LDADD' or
+'PROG_LDADD' when referring to libraries built by your package.
+Instead, write the file name of the library explicitly as in the above
+'cpio' example.  Use '-l' only to list third-party libraries.  If you
+follow this rule, the default value of 'PROG_DEPENDENCIES' will list all
+your local libraries and omit the other ones.
+
+
+File: automake.info,  Node: Conditional Sources,  Next: Conditional Programs,  Prev: Linking,  Up: A Program
+
+8.1.3 Conditional compilation of sources
+----------------------------------------
+
+You can't put a configure substitution (e.g., '@FOO@' or '$(FOO)' where
+'FOO' is defined via 'AC_SUBST') into a '_SOURCES' variable.  The reason
+for this is a bit hard to explain, but suffice to say that it simply
+won't work.  Automake will give an error if you try to do this.
+
+   Fortunately there are two other ways to achieve the same result.  One
+is to use configure substitutions in '_LDADD' variables, the other is to
+use an Automake conditional.
+
+Conditional Compilation using '_LDADD' Substitutions
+....................................................
+
+Automake must know all the source files that could possibly go into a
+program, even if not all the files are built in every circumstance.  Any
+files that are only conditionally built should be listed in the
+appropriate 'EXTRA_' variable.  For instance, if 'hello-linux.c' or
+'hello-generic.c' were conditionally included in 'hello', the
+'Makefile.am' would contain:
+
+     bin_PROGRAMS = hello
+     hello_SOURCES = hello-common.c
+     EXTRA_hello_SOURCES = hello-linux.c hello-generic.c
+     hello_LDADD = $(HELLO_SYSTEM)
+     hello_DEPENDENCIES = $(HELLO_SYSTEM)
+
+You can then set up the '$(HELLO_SYSTEM)' substitution from
+'configure.ac':
+
+     ...
+     case $host in
+       *linux*) HELLO_SYSTEM='hello-linux.$(OBJEXT)' ;;
+       *)       HELLO_SYSTEM='hello-generic.$(OBJEXT)' ;;
+     esac
+     AC_SUBST([HELLO_SYSTEM])
+     ...
+
+   In this case, the variable 'HELLO_SYSTEM' should be replaced by
+either 'hello-linux.o' or 'hello-generic.o', and added to both
+'hello_DEPENDENCIES' and 'hello_LDADD' in order to be built and linked
+in.
+
+Conditional Compilation using Automake Conditionals
+...................................................
+
+An often simpler way to compile source files conditionally is to use
+Automake conditionals.  For instance, you could use this 'Makefile.am'
+construct to build the same 'hello' example:
+
+     bin_PROGRAMS = hello
+     if LINUX
+     hello_SOURCES = hello-linux.c hello-common.c
+     else
+     hello_SOURCES = hello-generic.c hello-common.c
+     endif
+
+   In this case, 'configure.ac' should set up the 'LINUX' conditional
+using 'AM_CONDITIONAL' (*note Conditionals::).
+
+   When using conditionals like this you don't need to use the 'EXTRA_'
+variable, because Automake will examine the contents of each variable to
+construct the complete list of source files.
+
+   If your program uses a lot of files, you will probably prefer a
+conditional '+='.
+
+     bin_PROGRAMS = hello
+     hello_SOURCES = hello-common.c
+     if LINUX
+     hello_SOURCES += hello-linux.c
+     else
+     hello_SOURCES += hello-generic.c
+     endif
+
+
+File: automake.info,  Node: Conditional Programs,  Prev: Conditional Sources,  Up: A Program
+
+8.1.4 Conditional compilation of programs
+-----------------------------------------
+
+Sometimes it is useful to determine the programs that are to be built at
+configure time.  For instance, GNU 'cpio' only builds 'mt' and 'rmt'
+under special circumstances.  The means to achieve conditional
+compilation of programs are the same as those you can use to compile
+source files conditionally: substitutions or conditionals.
+
+Conditional Programs using 'configure' Substitutions
+....................................................
+
+In this case, you must notify Automake of all the programs that can
+possibly be built, but at the same time cause the generated
+'Makefile.in' to use the programs specified by 'configure'.  This is
+done by having 'configure' substitute values into each '_PROGRAMS'
+definition, while listing all optionally built programs in
+'EXTRA_PROGRAMS'.
+
+     bin_PROGRAMS = cpio pax $(MT)
+     libexec_PROGRAMS = $(RMT)
+     EXTRA_PROGRAMS = mt rmt
+
+   As explained in *note EXEEXT::, Automake will rewrite 'bin_PROGRAMS',
+'libexec_PROGRAMS', and 'EXTRA_PROGRAMS', appending '$(EXEEXT)' to each
+binary.  Obviously it cannot rewrite values obtained at run-time through
+'configure' substitutions; therefore, you should take care of appending
+'$(EXEEXT)' yourself, as in 'AC_SUBST([MT], ['mt${EXEEXT}'])'.
+
+Conditional Programs using Automake Conditionals
+................................................
+
+You can also use Automake conditionals (*note Conditionals::) to select
+programs to be built.  In this case you don't have to worry about
+'$(EXEEXT)' or 'EXTRA_PROGRAMS'.
+
+     bin_PROGRAMS = cpio pax
+     if WANT_MT
+       bin_PROGRAMS += mt
+     endif
+     if WANT_RMT
+       libexec_PROGRAMS = rmt
+     endif
+
+
+File: automake.info,  Node: A Library,  Next: A Shared Library,  Prev: A Program,  Up: Programs
+
+8.2 Building a library
+======================
+
+Building a library is much like building a program.  In this case, the
+name of the primary is 'LIBRARIES'.  Libraries can be installed in
+'libdir' or 'pkglibdir'.
+
+   *Note A Shared Library::, for information on how to build shared
+libraries using libtool and the 'LTLIBRARIES' primary.
+
+   Each '_LIBRARIES' variable is a list of the libraries to be built.
+For instance, to create a library named 'libcpio.a', but not install it,
+you would write:
+
+     noinst_LIBRARIES = libcpio.a
+     libcpio_a_SOURCES = ...
+
+   The sources that go into a library are determined exactly as they are
+for programs, via the '_SOURCES' variables.  Note that the library name
+is canonicalized (*note Canonicalization::), so the '_SOURCES' variable
+corresponding to 'libcpio.a' is 'libcpio_a_SOURCES', not
+'libcpio.a_SOURCES'.
+
+   Extra objects can be added to a library using the 'LIBRARY_LIBADD'
+variable.  This should be used for objects determined by 'configure'.
+Again from 'cpio':
+
+     libcpio_a_LIBADD = $(LIBOBJS) $(ALLOCA)
+
+   In addition, sources for extra objects that will not exist until
+configure-time must be added to the 'BUILT_SOURCES' variable (*note
+Sources::).
+
+   Building a static library is done by compiling all object files, then
+by invoking '$(AR) $(ARFLAGS)' followed by the name of the library and
+the list of objects, and finally by calling '$(RANLIB)' on that library.
+You should call 'AC_PROG_RANLIB' from your 'configure.ac' to define
+'RANLIB' (Automake will complain otherwise).  You should also call
+'AM_PROG_AR' to define 'AR', in order to support unusual archivers such
+as Microsoft lib.  'ARFLAGS' will default to 'cru'; you can override
+this variable by setting it in your 'Makefile.am' or by 'AC_SUBST'ing it
+from your 'configure.ac'.  You can override the 'AR' variable by
+defining a per-library 'maude_AR' variable (*note Program and Library
+Variables::).
+
+   Be careful when selecting library components conditionally.  Because
+building an empty library is not portable, you should ensure that any
+library always contains at least one object.
+
+   To use a static library when building a program, add it to 'LDADD'
+for this program.  In the following example, the program 'cpio' is
+statically linked with the library 'libcpio.a'.
+
+     noinst_LIBRARIES = libcpio.a
+     libcpio_a_SOURCES = ...
+
+     bin_PROGRAMS = cpio
+     cpio_SOURCES = cpio.c ...
+     cpio_LDADD = libcpio.a
+
+
+File: automake.info,  Node: A Shared Library,  Next: Program and Library Variables,  Prev: A Library,  Up: Programs
+
+8.3 Building a Shared Library
+=============================
+
+Building shared libraries portably is a relatively complex matter.  For
+this reason, GNU Libtool (*note Introduction: (libtool)Top.) was created
+to help build shared libraries in a platform-independent way.
+
+* Menu:
+
+* Libtool Concept::             Introducing Libtool
+* Libtool Libraries::           Declaring Libtool Libraries
+* Conditional Libtool Libraries::  Building Libtool Libraries Conditionally
+* Conditional Libtool Sources::  Choosing Library Sources Conditionally
+* Libtool Convenience Libraries::  Building Convenience Libtool Libraries
+* Libtool Modules::             Building Libtool Modules
+* Libtool Flags::               Using _LIBADD, _LDFLAGS, and _LIBTOOLFLAGS
+* LTLIBOBJS::                   Using $(LTLIBOBJS) and $(LTALLOCA)
+* Libtool Issues::              Common Issues Related to Libtool's Use
+
+
+File: automake.info,  Node: Libtool Concept,  Next: Libtool Libraries,  Up: A Shared Library
+
+8.3.1 The Libtool Concept
+-------------------------
+
+Libtool abstracts shared and static libraries into a unified concept
+henceforth called "libtool libraries".  Libtool libraries are files
+using the '.la' suffix, and can designate a static library, a shared
+library, or maybe both.  Their exact nature cannot be determined until
+'./configure' is run: not all platforms support all kinds of libraries,
+and users can explicitly select which libraries should be built.
+(However the package's maintainers can tune the default, *note The
+'AC_PROG_LIBTOOL' macro: (libtool)AC_PROG_LIBTOOL.)
+
+   Because object files for shared and static libraries must be compiled
+differently, libtool is also used during compilation.  Object files
+built by libtool are called "libtool objects": these are files using the
+'.lo' suffix.  Libtool libraries are built from these libtool objects.
+
+   You should not assume anything about the structure of '.la' or '.lo'
+files and how libtool constructs them: this is libtool's concern, and
+the last thing one wants is to learn about libtool's guts.  However the
+existence of these files matters, because they are used as targets and
+dependencies in 'Makefile' rules when building libtool libraries.
+There are situations where you may have to refer to these, for instance
+when expressing dependencies for building source files conditionally
+(*note Conditional Libtool Sources::).
+
+   People considering writing a plug-in system, with dynamically loaded
+modules, should look into 'libltdl': libtool's dlopening library (*note
+Using libltdl: (libtool)Using libltdl.).  This offers a portable
+dlopening facility to load libtool libraries dynamically, and can also
+achieve static linking where unavoidable.
+
+   Before we discuss in detail how to use libtool with Automake, it
+should be noted that the libtool manual also has a section about how to
+use Automake with libtool (*note Using Automake with Libtool:
+(libtool)Using Automake.).
+
+
+File: automake.info,  Node: Libtool Libraries,  Next: Conditional Libtool Libraries,  Prev: Libtool Concept,  Up: A Shared Library
+
+8.3.2 Building Libtool Libraries
+--------------------------------
+
+Automake uses libtool to build libraries declared with the 'LTLIBRARIES'
+primary.  Each '_LTLIBRARIES' variable is a list of libtool libraries to
+build.  For instance, to create a libtool library named 'libgettext.la',
+and install it in 'libdir', write:
+
+     lib_LTLIBRARIES = libgettext.la
+     libgettext_la_SOURCES = gettext.c gettext.h ...
+
+   Automake predefines the variable 'pkglibdir', so you can use
+'pkglib_LTLIBRARIES' to install libraries in '$(libdir)/@PACKAGE@/'.
+
+   If 'gettext.h' is a public header file that needs to be installed in
+order for people to use the library, it should be declared using a
+'_HEADERS' variable, not in 'libgettext_la_SOURCES'.  Headers listed in
+the latter should be internal headers that are not part of the public
+interface.
+
+     lib_LTLIBRARIES = libgettext.la
+     libgettext_la_SOURCES = gettext.c ...
+     include_HEADERS = gettext.h ...
+
+   A package can build and install such a library along with other
+programs that use it.  This dependency should be specified using
+'LDADD'.  The following example builds a program named 'hello' that is
+linked with 'libgettext.la'.
+
+     lib_LTLIBRARIES = libgettext.la
+     libgettext_la_SOURCES = gettext.c ...
+
+     bin_PROGRAMS = hello
+     hello_SOURCES = hello.c ...
+     hello_LDADD = libgettext.la
+
+Whether 'hello' is statically or dynamically linked with 'libgettext.la'
+is not yet known: this will depend on the configuration of libtool and
+the capabilities of the host.
+
+
+File: automake.info,  Node: Conditional Libtool Libraries,  Next: Conditional Libtool Sources,  Prev: Libtool Libraries,  Up: A Shared Library
+
+8.3.3 Building Libtool Libraries Conditionally
+----------------------------------------------
+
+Like conditional programs (*note Conditional Programs::), there are two
+main ways to build conditional libraries: using Automake conditionals or
+using Autoconf 'AC_SUBST'itutions.
+
+   The important implementation detail you have to be aware of is that
+the place where a library will be installed matters to libtool: it needs
+to be indicated _at link-time_ using the '-rpath' option.
+
+   For libraries whose destination directory is known when Automake
+runs, Automake will automatically supply the appropriate '-rpath' option
+to libtool.  This is the case for libraries listed explicitly in some
+installable '_LTLIBRARIES' variables such as 'lib_LTLIBRARIES'.
+
+   However, for libraries determined at configure time (and thus
+mentioned in 'EXTRA_LTLIBRARIES'), Automake does not know the final
+installation directory.  For such libraries you must add the '-rpath'
+option to the appropriate '_LDFLAGS' variable by hand.
+
+   The examples below illustrate the differences between these two
+methods.
+
+   Here is an example where 'WANTEDLIBS' is an 'AC_SUBST'ed variable set
+at './configure'-time to either 'libfoo.la', 'libbar.la', both, or none.
+Although '$(WANTEDLIBS)' appears in the 'lib_LTLIBRARIES', Automake
+cannot guess it relates to 'libfoo.la' or 'libbar.la' at the time it
+creates the link rule for these two libraries.  Therefore the '-rpath'
+argument must be explicitly supplied.
+
+     EXTRA_LTLIBRARIES = libfoo.la libbar.la
+     lib_LTLIBRARIES = $(WANTEDLIBS)
+     libfoo_la_SOURCES = foo.c ...
+     libfoo_la_LDFLAGS = -rpath '$(libdir)'
+     libbar_la_SOURCES = bar.c ...
+     libbar_la_LDFLAGS = -rpath '$(libdir)'
+
+   Here is how the same 'Makefile.am' would look using Automake
+conditionals named 'WANT_LIBFOO' and 'WANT_LIBBAR'.  Now Automake is
+able to compute the '-rpath' setting itself, because it's clear that
+both libraries will end up in '$(libdir)' if they are installed.
+
+     lib_LTLIBRARIES =
+     if WANT_LIBFOO
+     lib_LTLIBRARIES += libfoo.la
+     endif
+     if WANT_LIBBAR
+     lib_LTLIBRARIES += libbar.la
+     endif
+     libfoo_la_SOURCES = foo.c ...
+     libbar_la_SOURCES = bar.c ...
+
+
+File: automake.info,  Node: Conditional Libtool Sources,  Next: Libtool Convenience Libraries,  Prev: Conditional Libtool Libraries,  Up: A Shared Library
+
+8.3.4 Libtool Libraries with Conditional Sources
+------------------------------------------------
+
+Conditional compilation of sources in a library can be achieved in the
+same way as conditional compilation of sources in a program (*note
+Conditional Sources::).  The only difference is that '_LIBADD' should be
+used instead of '_LDADD' and that it should mention libtool objects
+('.lo' files).
+
+   So, to mimic the 'hello' example from *note Conditional Sources::, we
+could build a 'libhello.la' library using either 'hello-linux.c' or
+'hello-generic.c' with the following 'Makefile.am'.
+
+     lib_LTLIBRARIES = libhello.la
+     libhello_la_SOURCES = hello-common.c
+     EXTRA_libhello_la_SOURCES = hello-linux.c hello-generic.c
+     libhello_la_LIBADD = $(HELLO_SYSTEM)
+     libhello_la_DEPENDENCIES = $(HELLO_SYSTEM)
+
+And make sure 'configure' defines 'HELLO_SYSTEM' as either
+'hello-linux.lo' or 'hello-generic.lo'.
+
+   Or we could simply use an Automake conditional as follows.
+
+     lib_LTLIBRARIES = libhello.la
+     libhello_la_SOURCES = hello-common.c
+     if LINUX
+     libhello_la_SOURCES += hello-linux.c
+     else
+     libhello_la_SOURCES += hello-generic.c
+     endif
+
+
+File: automake.info,  Node: Libtool Convenience Libraries,  Next: Libtool Modules,  Prev: Conditional Libtool Sources,  Up: A Shared Library
+
+8.3.5 Libtool Convenience Libraries
+-----------------------------------
+
+Sometimes you want to build libtool libraries that should not be
+installed.  These are called "libtool convenience libraries" and are
+typically used to encapsulate many sublibraries, later gathered into one
+big installed library.
+
+   Libtool convenience libraries are declared by directory-less
+variables such as 'noinst_LTLIBRARIES', 'check_LTLIBRARIES', or even
+'EXTRA_LTLIBRARIES'.  Unlike installed libtool libraries they do not
+need an '-rpath' flag at link time (actually this is the only
+difference).
+
+   Convenience libraries listed in 'noinst_LTLIBRARIES' are always
+built.  Those listed in 'check_LTLIBRARIES' are built only upon 'make
+check'.  Finally, libraries listed in 'EXTRA_LTLIBRARIES' are never
+built explicitly: Automake outputs rules to build them, but if the
+library does not appear as a Makefile dependency anywhere it won't be
+built (this is why 'EXTRA_LTLIBRARIES' is used for conditional
+compilation).
+
+   Here is a sample setup merging libtool convenience libraries from
+subdirectories into one main 'libtop.la' library.
+
+     # -- Top-level Makefile.am --
+     SUBDIRS = sub1 sub2 ...
+     lib_LTLIBRARIES = libtop.la
+     libtop_la_SOURCES =
+     libtop_la_LIBADD = \
+       sub1/libsub1.la \
+       sub2/libsub2.la \
+       ...
+
+     # -- sub1/Makefile.am --
+     noinst_LTLIBRARIES = libsub1.la
+     libsub1_la_SOURCES = ...
+
+     # -- sub2/Makefile.am --
+     # showing nested convenience libraries
+     SUBDIRS = sub2.1 sub2.2 ...
+     noinst_LTLIBRARIES = libsub2.la
+     libsub2_la_SOURCES =
+     libsub2_la_LIBADD = \
+       sub21/libsub21.la \
+       sub22/libsub22.la \
+       ...
+
+   When using such a setup, beware that 'automake' will assume 'libtop.la'
+is to be linked with the C linker.  This is because 'libtop_la_SOURCES'
+is empty, so 'automake' picks C as the default language.  If
+'libtop_la_SOURCES' were not empty, 'automake' would select the linker as
+explained in *note How the Linker is Chosen::.
+
+   If one of the sublibraries contains non-C source, it is important
+that the appropriate linker be chosen.  One way to achieve this is to
+pretend that there is such a non-C file among the sources of the
+library, thus forcing 'automake' to select the appropriate linker.  Here
+is the top-level 'Makefile' of our example updated to force C++ linking.
+
+     SUBDIRS = sub1 sub2 ...
+     lib_LTLIBRARIES = libtop.la
+     libtop_la_SOURCES =
+     # Dummy C++ source to cause C++ linking.
+     nodist_EXTRA_libtop_la_SOURCES = dummy.cxx
+     libtop_la_LIBADD = \
+       sub1/libsub1.la \
+       sub2/libsub2.la \
+       ...
+
+   'EXTRA_*_SOURCES' variables are used to keep track of source files
+that might be compiled (this is mostly useful when doing conditional
+compilation using 'AC_SUBST', *note Conditional Libtool Sources::), and
+the 'nodist_' prefix means the listed sources are not to be distributed
+(*note Program and Library Variables::).  In effect the file 'dummy.cxx'
+does not need to exist in the source tree.  Of course if you have some
+real source file to list in 'libtop_la_SOURCES' there is no point in
+cheating with 'nodist_EXTRA_libtop_la_SOURCES'.
+
+
+File: automake.info,  Node: Libtool Modules,  Next: Libtool Flags,  Prev: Libtool Convenience Libraries,  Up: A Shared Library
+
+8.3.6 Libtool Modules
+---------------------
+
+These are libtool libraries meant to be dlopened.  They are indicated to
+libtool by passing '-module' at link-time.
+
+     pkglib_LTLIBRARIES = mymodule.la
+     mymodule_la_SOURCES = doit.c
+     mymodule_la_LDFLAGS = -module
+
+   Ordinarily, Automake requires that a library's name start with 'lib'.
+However, when building a dynamically loadable module you might wish to
+use a "nonstandard" name.  Automake will not complain about such
+nonstandard names if it knows the library being built is a libtool
+module, i.e., if '-module' explicitly appears in the library's
+'_LDFLAGS' variable (or in the common 'AM_LDFLAGS' variable when no
+per-library '_LDFLAGS' variable is defined).
+
+   As always, 'AC_SUBST' variables are black boxes to Automake since
+their values are not yet known when 'automake' is run.  Therefore if
+'-module' is set via such a variable, Automake cannot notice it and will
+proceed as if the library was an ordinary libtool library, with strict
+naming.
+
+   If 'mymodule_la_SOURCES' is not specified, then it defaults to the
+single file 'mymodule.c' (*note Default _SOURCES::).
+
+
+File: automake.info,  Node: Libtool Flags,  Next: LTLIBOBJS,  Prev: Libtool Modules,  Up: A Shared Library
+
+8.3.7 '_LIBADD', '_LDFLAGS', and '_LIBTOOLFLAGS'
+------------------------------------------------
+
+As shown in previous sections, the 'LIBRARY_LIBADD' variable should be
+used to list extra libtool objects ('.lo' files) or libtool libraries
+('.la') to add to LIBRARY.
+
+   The 'LIBRARY_LDFLAGS' variable is the place to list additional
+libtool linking flags, such as '-version-info', '-static', and a lot
+more.  *Note Link mode: (libtool)Link mode.
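+
+   For instance, a minimal sketch (the library name and version triplet
+here are purely illustrative) setting the libtool interface version of a
+shared library could be:
+
+     lib_LTLIBRARIES = libfoo.la
+     libfoo_la_SOURCES = foo.c
+     libfoo_la_LDFLAGS = -version-info 1:0:0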
+
+   The 'libtool' command has two kinds of options: mode-specific options
+and generic options.  Mode-specific options such as the aforementioned
+linking flags should be lumped with the other flags passed to the tool
+invoked by 'libtool' (hence the use of 'LIBRARY_LDFLAGS' for libtool
+linking flags).  Generic options, such as '--tag=TAG' and '--silent'
+(*note Invoking 'libtool': (libtool)Invoking libtool. for more options),
+should appear before the mode selection on the command line; in
+'Makefile.am's they should be listed in the 'LIBRARY_LIBTOOLFLAGS'
+variable.
+
+   If 'LIBRARY_LIBTOOLFLAGS' is not defined, then the variable
+'AM_LIBTOOLFLAGS' is used instead.
+
+   These flags are passed to libtool after the '--tag=TAG' option
+computed by Automake (if any), so 'LIBRARY_LIBTOOLFLAGS' (or
+'AM_LIBTOOLFLAGS') is a good place to override or supplement the
+'--tag=TAG' setting.
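+
+   As an illustration only (the library name and chosen tag are
+hypothetical), overriding the tag and silencing libtool for a single
+library could look like:
+
+     libquux_la_LIBTOOLFLAGS = --tag=CC --silent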
+
+   The libtool rules also use a 'LIBTOOLFLAGS' variable that should not
+be set in 'Makefile.am': this is a user variable (*note Flag Variables
+Ordering::).  It allows users to run 'make LIBTOOLFLAGS=--silent', for
+instance.  Note that the verbosity of 'libtool' can also be influenced
+by the Automake support for silent rules (*note Automake Silent
+Rules::).
+
+
+File: automake.info,  Node: LTLIBOBJS,  Next: Libtool Issues,  Prev: Libtool Flags,  Up: A Shared Library
+
+8.3.8 'LTLIBOBJS' and 'LTALLOCA'
+--------------------------------
+
+Where an ordinary library might include '$(LIBOBJS)' or '$(ALLOCA)'
+(*note LIBOBJS::), a libtool library must use '$(LTLIBOBJS)' or
+'$(LTALLOCA)'.  This is required because the object files that libtool
+operates on do not necessarily end in '.o'.
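+
+   For example, a minimal sketch (the library name is hypothetical) of a
+libtool convenience library collecting the replacement objects could be:
+
+     noinst_LTLIBRARIES = libcompat.la
+     libcompat_la_SOURCES =
+     libcompat_la_LIBADD = $(LTLIBOBJS) $(LTALLOCA)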
+
+   Nowadays, the computation of 'LTLIBOBJS' from 'LIBOBJS' is performed
+automatically by Autoconf (*note 'AC_LIBOBJ' vs. 'LIBOBJS':
+(autoconf)AC_LIBOBJ vs LIBOBJS.).
+
+
+File: automake.info,  Node: Libtool Issues,  Prev: LTLIBOBJS,  Up: A Shared Library
+
+8.3.9 Common Issues Related to Libtool's Use
+--------------------------------------------
+
+* Menu:
+
+* Error required file ltmain.sh not found::  The need to run libtoolize
+* Objects created both with libtool and without::  Avoid a specific build race
+
+
+File: automake.info,  Node: Error required file ltmain.sh not found,  Next: Objects created both with libtool and without,  Up: Libtool Issues
+
+8.3.9.1 Error: 'required file `./ltmain.sh' not found'
+......................................................
+
+Libtool comes with a tool called 'libtoolize' that will install
+libtool's supporting files into a package.  Running this command will
+install 'ltmain.sh'.  You should execute it before 'aclocal' and
+'automake'.
+
+   People upgrading old packages to newer autotools are likely to face
+this issue, because older Automake versions used to run 'libtoolize'
+automatically, so old build scripts never needed to call it themselves.
+
+   Since Automake 1.6, it has been decided that running 'libtoolize' was
+none of Automake's business.  Instead, that functionality has been moved
+into the 'autoreconf' command (*note Using 'autoreconf':
+(autoconf)autoreconf Invocation.).  If you do not want to remember what
+to run and when, just learn the 'autoreconf' command.  Hopefully,
+replacing existing 'bootstrap.sh' or 'autogen.sh' scripts by a call to
+'autoreconf' should also free you from any similar incompatible change
+in the future.
+
+
+File: automake.info,  Node: Objects created both with libtool and without,  Prev: Error required file ltmain.sh not found,  Up: Libtool Issues
+
+8.3.9.2 Objects 'created with both libtool and without'
+.......................................................
+
+Sometimes, the same source file is used both to build a libtool library
+and to build another non-libtool target (be it a program or another
+library).
+
+   Let's consider the following 'Makefile.am'.
+
+     bin_PROGRAMS = prog
+     prog_SOURCES = prog.c foo.c ...
+
+     lib_LTLIBRARIES = libfoo.la
+     libfoo_la_SOURCES = foo.c ...
+
+(In this trivial case the issue could be avoided by linking 'libfoo.la'
+with 'prog' instead of listing 'foo.c' in 'prog_SOURCES'.  But let's
+assume we really want to keep 'prog' and 'libfoo.la' separate.)
+
+   Technically, it means that we should build 'foo.$(OBJEXT)' for
+'prog', and 'foo.lo' for 'libfoo.la'.  The problem is that in the course
+of creating 'foo.lo', libtool may erase (or replace) 'foo.$(OBJEXT)',
+and this cannot be avoided.
+
+   Therefore, when Automake detects this situation it will complain with
+a message such as
+     object 'foo.$(OBJEXT)' created both with libtool and without
+
+   A workaround for this issue is to ensure that these two objects get
+different basenames.  As explained in *note Renamed Objects::, this
+happens automatically when per-target flags are used.
+
+     bin_PROGRAMS = prog
+     prog_SOURCES = prog.c foo.c ...
+     prog_CFLAGS = $(AM_CFLAGS)
+
+     lib_LTLIBRARIES = libfoo.la
+     libfoo_la_SOURCES = foo.c ...
+
+Adding 'prog_CFLAGS = $(AM_CFLAGS)' is almost a no-op, because when
+'prog_CFLAGS' is defined, it is used instead of 'AM_CFLAGS'.  However, as
+a side effect it will cause 'prog.c' and 'foo.c' to be compiled as
+'prog-prog.$(OBJEXT)' and 'prog-foo.$(OBJEXT)', which solves the issue.
+
+
+File: automake.info,  Node: Program and Library Variables,  Next: Default _SOURCES,  Prev: A Shared Library,  Up: Programs
+
+8.4 Program and Library Variables
+=================================
+
+Associated with each program is a collection of variables that can be
+used to modify how that program is built.  There is a similar list of
+such variables for each library.  The canonical name of the program (or
+library) is used as a base for naming these variables.
+
+   In the list below, we use the name "maude" to refer to the program or
+library.  In your 'Makefile.am' you would replace this with the
+canonical name of your program.  This list also refers to "maude" as a
+program, but in general the same rules apply for both static and dynamic
+libraries; the documentation below notes situations where programs and
+libraries differ.
+
+'maude_SOURCES'
+     This variable, if it exists, lists all the source files that are
+     compiled to build the program.  These files are added to the
+     distribution by default.  When building the program, Automake will
+     cause each source file to be compiled to a single '.o' file (or
+     '.lo' when using libtool).  Normally these object files are named
+     after the source file, but other factors can change this.  If a
+     file in the '_SOURCES' variable has an unrecognized extension,
+     Automake will do one of two things with it.  If a suffix rule
+     exists for turning files with the unrecognized extension into '.o'
+     files, then 'automake' will treat this file as it will any other
+     source file (*note Support for Other Languages::).  Otherwise, the
+     file will be ignored as though it were a header file.
+
+     The prefixes 'dist_' and 'nodist_' can be used to control whether
+     files listed in a '_SOURCES' variable are distributed.  'dist_' is
+     redundant, as sources are distributed by default, but it can be
+     specified for clarity if desired.
+
+     It is possible to have both 'dist_' and 'nodist_' variants of a
+     given '_SOURCES' variable at once; this lets you easily distribute
+     some files and not others, for instance:
+
+          nodist_maude_SOURCES = nodist.c
+          dist_maude_SOURCES = dist-me.c
+
+     By default the output file (on Unix systems, the '.o' file) will be
+     put into the current build directory.  However, if the option
+     'subdir-objects' is in effect in the current directory then the
+     '.o' file will be put into the subdirectory named after the source
+     file.  For instance, with 'subdir-objects' enabled,
+     'sub/dir/file.c' will be compiled to 'sub/dir/file.o'.  Some people
+     prefer this mode of operation.  You can specify 'subdir-objects' in
+     'AUTOMAKE_OPTIONS' (*note Options::).
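+
+     For instance, a minimal sketch (the file names are only
+     illustrative) enabling this layout could be:
+
+          AUTOMAKE_OPTIONS = subdir-objects
+          bin_PROGRAMS = maude
+          maude_SOURCES = main.c sub/dir/file.c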
+
+'EXTRA_maude_SOURCES'
+     Automake needs to know the list of files you intend to compile
+     _statically_.  For one thing, this is the only way Automake has of
+     knowing what sort of language support a given 'Makefile.in'
+     requires.  (1) This means that, for example, you can't put a
+     configure substitution like '@my_sources@' into a '_SOURCES'
+     variable.  If you intend to conditionally compile source files and
+     use 'configure' to substitute the appropriate object names into,
+     e.g., '_LDADD' (see below), then you should list the corresponding
+     source files in the 'EXTRA_' variable.
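+
+     As a sketch (the substituted variable name is hypothetical), assume
+     'configure' defines 'MAUDE_EXTRA_OBJS' as either 'fast.$(OBJEXT)'
+     or 'slow.$(OBJEXT)':
+
+          bin_PROGRAMS = maude
+          maude_SOURCES = main.c
+          EXTRA_maude_SOURCES = fast.c slow.c
+          maude_LDADD = $(MAUDE_EXTRA_OBJS)
+          maude_DEPENDENCIES = $(MAUDE_EXTRA_OBJS)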
+
+     This variable also supports 'dist_' and 'nodist_' prefixes.  For
+     instance, 'nodist_EXTRA_maude_SOURCES' would list extra sources
+     that may need to be built, but should not be distributed.
+
+'maude_AR'
+     A static library is created by default by invoking '$(AR)
+     $(ARFLAGS)' followed by the name of the library and then the
+     objects being put into the library.  You can override this by
+     setting the '_AR' variable.  This is usually used with C++; some
+     C++ compilers require a special invocation in order to instantiate
+     all the templates that should go into a library.  For instance, the
+     SGI C++ compiler likes this variable set like so:
+          libmaude_a_AR = $(CXX) -ar -o
+
+'maude_LIBADD'
+     Extra objects can be added to a _library_ using the '_LIBADD'
+     variable.  For instance, this should be used for objects determined
+     by 'configure' (*note A Library::).
+
+     In the case of libtool libraries, 'maude_LIBADD' can also refer to
+     other libtool libraries.
+
+'maude_LDADD'
+     Extra objects ('*.$(OBJEXT)') and libraries ('*.a', '*.la') can be
+     added to a _program_ by listing them in the '_LDADD' variable.  For
+     instance, this should be used for objects determined by 'configure'
+     (*note Linking::).
+
+     '_LDADD' and '_LIBADD' are inappropriate for passing
+     program-specific linker flags (except for '-l', '-L', '-dlopen' and
+     '-dlpreopen').  Use the '_LDFLAGS' variable for this purpose.
+
+     For instance, if your 'configure.ac' uses 'AC_PATH_XTRA', you could
+     link your program against the X libraries like so:
+
+          maude_LDADD = $(X_PRE_LIBS) $(X_LIBS) $(X_EXTRA_LIBS)
+
+     We recommend that you use '-l' and '-L' only when referring to
+     third-party libraries, and give the explicit file names of any
+     library built by your package.  Doing so will ensure that
+     'maude_DEPENDENCIES' (see below) is correctly defined by default.
+
+'maude_LDFLAGS'
+     This variable is used to pass extra flags to the link step of a
+     program or a shared library.  It overrides the 'AM_LDFLAGS'
+     variable.
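+
+     For instance, a minimal sketch (the extra flag is only
+     illustrative) that keeps the 'AM_LDFLAGS' flags while adding a
+     program-specific one:
+
+          maude_LDFLAGS = $(AM_LDFLAGS) -static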
+
+'maude_LIBTOOLFLAGS'
+     This variable is used to pass extra options to 'libtool'.  It
+     overrides the 'AM_LIBTOOLFLAGS' variable.  These options are output
+     before 'libtool''s '--mode=MODE' option, so they should not be
+     mode-specific options (those belong to the compiler or linker
+     flags).  *Note Libtool Flags::.
+
+'maude_DEPENDENCIES'
+'EXTRA_maude_DEPENDENCIES'
+     It is also occasionally useful to have a target (program or
+     library) depend on some other file that is not actually part of
+     that target.  This can be done using the '_DEPENDENCIES' variable.
+     Each target depends on the contents of such a variable, but no
+     further interpretation is done.
+
+     Since these dependencies are associated to the link rule used to
+     create the programs they should normally list files used by the
+     link command.  That is '*.$(OBJEXT)', '*.a', or '*.la' files for
+     programs; '*.lo' and '*.la' files for Libtool libraries; and
+     '*.$(OBJEXT)' files for static libraries.  In rare cases you may
+     need to add other kinds of files such as linker scripts, but
+     _listing a source file in '_DEPENDENCIES' is wrong_.  If some
+     source file needs to be built before all the components of a
+     program are built, consider using the 'BUILT_SOURCES' variable
+     (*note Sources::).
+
+     If '_DEPENDENCIES' is not supplied, it is computed by Automake.
+     The automatically-assigned value is the contents of '_LDADD' or
+     '_LIBADD', with most configure substitutions, '-l', '-L', '-dlopen'
+     and '-dlpreopen' options removed.  The configure substitutions that
+     are left in are only '$(LIBOBJS)' and '$(ALLOCA)'; these are left
+     because it is known that they will not cause an invalid value for
+     '_DEPENDENCIES' to be generated.
+
+     '_DEPENDENCIES' is more likely used to perform conditional
+     compilation using an 'AC_SUBST' variable that contains a list of
+     objects.  *Note Conditional Sources::, and *note Conditional
+     Libtool Sources::.
+
+     The 'EXTRA_*_DEPENDENCIES' variable may be useful for cases where
+     you merely want to augment the 'automake'-generated '_DEPENDENCIES'
+     variable rather than replacing it.
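+
+     For example, a minimal sketch (the linker script name is
+     hypothetical) adding an extra dependency without replacing the
+     automatically computed ones:
+
+          maude_LDFLAGS = -Wl,--version-script=$(srcdir)/maude.map
+          EXTRA_maude_DEPENDENCIES = $(srcdir)/maude.map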
+
+'maude_LINK'
+     You can override the linker on a per-program basis.  By default the
+     linker is chosen according to the languages used by the program.
+     For instance, a program that includes C++ source code would use the
+     C++ compiler to link.  The '_LINK' variable must hold the name of a
+     command that can be passed all the '.o' file names and libraries to
+     link against as arguments.  Note that the name of the underlying
+     program is _not_ passed to '_LINK'; typically one uses '$@':
+
+          maude_LINK = $(CCLD) -magic -o $@
+
+     If a '_LINK' variable is not supplied, it may still be generated
+     and used by Automake due to the use of per-target link flags such
+     as '_CFLAGS', '_LDFLAGS' or '_LIBTOOLFLAGS', in cases where they
+     apply.
+
+'maude_CCASFLAGS'
+'maude_CFLAGS'
+'maude_CPPFLAGS'
+'maude_CXXFLAGS'
+'maude_FFLAGS'
+'maude_GCJFLAGS'
+'maude_LFLAGS'
+'maude_OBJCFLAGS'
+'maude_OBJCXXFLAGS'
+'maude_RFLAGS'
+'maude_UPCFLAGS'
+'maude_YFLAGS'
+     Automake allows you to set compilation flags on a per-program (or
+     per-library) basis.  A single source file can be included in
+     several programs, and it will potentially be compiled with
+     different flags for each program.  This works for any language
+     directly supported by Automake.  These "per-target compilation
+     flags" are '_CCASFLAGS', '_CFLAGS', '_CPPFLAGS', '_CXXFLAGS',
+     '_FFLAGS', '_GCJFLAGS', '_LFLAGS', '_OBJCFLAGS', '_OBJCXXFLAGS',
+     '_RFLAGS', '_UPCFLAGS', and '_YFLAGS'.
+
+     When using a per-target compilation flag, Automake will choose a
+     different name for the intermediate object files.  Ordinarily a
+     file like 'sample.c' will be compiled to produce 'sample.o'.
+     However, if the program's '_CFLAGS' variable is set, then the
+     object file will be named, for instance, 'maude-sample.o'.  (See
+     also *note Renamed Objects::).
+
+     In compilations with per-target flags, the ordinary 'AM_' form of
+     the flags variable is _not_ automatically included in the
+     compilation (however, the user form of the variable _is_ included).
+     So for instance, if you want the hypothetical 'maude' compilations
+     to also use the value of 'AM_CFLAGS', you would need to write:
+
+          maude_CFLAGS = ... your flags ... $(AM_CFLAGS)
+
+     *Note Flag Variables Ordering::, for more discussion about the
+     interaction between user variables, 'AM_' shadow variables, and
+     per-target variables.
+
+'maude_SHORTNAME'
+     On some platforms the allowable file names are very short.  In
+     order to support these systems and per-target compilation flags at
+     the same time, Automake allows you to set a "short name" that will
+     influence how intermediate object files are named.  For instance,
+     in the following example,
+
+          bin_PROGRAMS = maude
+          maude_CPPFLAGS = -DSOMEFLAG
+          maude_SHORTNAME = m
+          maude_SOURCES = sample.c ...
+
+     the object file would be named 'm-sample.o' rather than
+     'maude-sample.o'.
+
+     This facility is rarely needed in practice, and we recommend
+     avoiding it until you find it is required.
+
+   ---------- Footnotes ----------
+
+   (1) There are other, more obscure reasons for this limitation as
+well.
+
+
+File: automake.info,  Node: Default _SOURCES,  Next: LIBOBJS,  Prev: Program and Library Variables,  Up: Programs
+
+8.5 Default '_SOURCES'
+======================
+
+'_SOURCES' variables are used to specify source files of programs (*note
+A Program::), libraries (*note A Library::), and Libtool libraries
+(*note A Shared Library::).
+
+   When no such variable is specified for a target, Automake will define
+one itself.  The default is to compile a single C file whose base name
+is the name of the target itself, with any extension replaced by
+'AM_DEFAULT_SOURCE_EXT', which defaults to '.c'.
+
+   For example if you have the following somewhere in your 'Makefile.am'
+with no corresponding 'libfoo_a_SOURCES':
+
+     lib_LIBRARIES = libfoo.a sub/libc++.a
+
+'libfoo.a' will be built using a default source file named 'libfoo.c',
+and 'sub/libc++.a' will be built from 'sub/libc++.c'.  (In older
+versions 'sub/libc++.a' would be built from 'sub_libc___a.c', i.e., the
+default source was the canonized name of the target, with '.c' appended.
+We believe the new behavior is more sensible, but for backward
+compatibility 'automake' will use the old name if a file or a rule with
+that name exists and 'AM_DEFAULT_SOURCE_EXT' is not used.)
+
+   Default sources are mainly useful in test suites, when building many
+test programs each from a single source.  For instance, in
+
+     check_PROGRAMS = test1 test2 test3
+     AM_DEFAULT_SOURCE_EXT = .cpp
+
+'test1', 'test2', and 'test3' will be built from 'test1.cpp',
+'test2.cpp', and 'test3.cpp'.  Without the last line, they will be built
+from 'test1.c', 'test2.c', and 'test3.c'.
+
+   Another case where this is convenient is building many Libtool
+modules ('moduleN.la'), each defined in its own file ('moduleN.c').
+
+     AM_LDFLAGS = -module
+     lib_LTLIBRARIES = module1.la module2.la module3.la
+
+   Finally, there is one situation where this default source computation
+needs to be avoided: when a target should not be built from sources.  We
+already saw such an example in *note true::; this happens when all the
+constituents of a target have already been compiled and just need to be
+combined using a '_LDADD' variable.  Then it is necessary to define an
+empty '_SOURCES' variable, so that 'automake' does not compute a
+default.
+
+     bin_PROGRAMS = target
+     target_SOURCES =
+     target_LDADD = libmain.a libmisc.a
+
+
+File: automake.info,  Node: LIBOBJS,  Next: Program Variables,  Prev: Default _SOURCES,  Up: Programs
+
+8.6 Special handling for 'LIBOBJS' and 'ALLOCA'
+===============================================
+
+The '$(LIBOBJS)' and '$(ALLOCA)' variables list object files that should
+be compiled into the project to provide an implementation for functions
+that are missing or broken on the host system.  They are substituted by
+'configure'.
+
+   These variables are defined by Autoconf macros such as 'AC_LIBOBJ',
+'AC_REPLACE_FUNCS' (*note Generic Function Checks: (autoconf)Generic
+Functions.), or 'AC_FUNC_ALLOCA' (*note Particular Function Checks:
+(autoconf)Particular Functions.).  Many other Autoconf macros call
+'AC_LIBOBJ' or 'AC_REPLACE_FUNCS' to populate '$(LIBOBJS)'.
+
+   Using these variables is very similar to doing conditional
+compilation using 'AC_SUBST' variables, as described in *note
+Conditional Sources::.  That is, when building a program, '$(LIBOBJS)'
+and '$(ALLOCA)' should be added to the associated '*_LDADD' variable, or
+to the '*_LIBADD' variable when building a library.  However there is no
+need to list the corresponding sources in 'EXTRA_*_SOURCES' nor to
+define '*_DEPENDENCIES'.  Automake automatically adds '$(LIBOBJS)' and
+'$(ALLOCA)' to the dependencies, and it will discover the list of
+corresponding source files automatically (by tracing the invocations of
+the 'AC_LIBSOURCE' Autoconf macros).  If you have already defined
+'*_DEPENDENCIES' explicitly for an unrelated reason, then you either
+need to add these variables manually, or use 'EXTRA_*_DEPENDENCIES'
+instead of '*_DEPENDENCIES'.
+
+   These variables are usually used to build a portability library that
+is linked with all the programs of the project.  We now review a sample
+setup.  First, 'configure.ac' contains some checks that affect either
+'LIBOBJS' or 'ALLOCA'.
+
+     # configure.ac
+     ...
+     AC_CONFIG_LIBOBJ_DIR([lib])
+     ...
+     AC_FUNC_MALLOC             dnl May add malloc.$(OBJEXT) to LIBOBJS
+     AC_FUNC_MEMCMP             dnl May add memcmp.$(OBJEXT) to LIBOBJS
+     AC_REPLACE_FUNCS([strdup]) dnl May add strdup.$(OBJEXT) to LIBOBJS
+     AC_FUNC_ALLOCA             dnl May add alloca.$(OBJEXT) to ALLOCA
+     ...
+     AC_CONFIG_FILES([
+       lib/Makefile
+       src/Makefile
+     ])
+     AC_OUTPUT
+
+   The 'AC_CONFIG_LIBOBJ_DIR' macro tells Autoconf that the source files
+of these object files are to be found in the 'lib/' directory.  Automake
+also uses this information; otherwise it expects the source files to be
+in the directory where the '$(LIBOBJS)' and '$(ALLOCA)' variables are
+used.
+
+   The 'lib/' directory should therefore contain 'malloc.c', 'memcmp.c',
+'strdup.c', and 'alloca.c'.  Here is its 'Makefile.am':
+
+     # lib/Makefile.am
+
+     noinst_LIBRARIES = libcompat.a
+     libcompat_a_SOURCES =
+     libcompat_a_LIBADD = $(LIBOBJS) $(ALLOCA)
+
+   The library can have any name, of course, and anyway it is not going
+to be installed: it just holds the replacement versions of the missing
+or broken functions so we can later link them in.  Many projects also
+include extra functions, specific to the project, in that library: they
+are simply added on the '_SOURCES' line.
+
+   There is a small trap here, though: '$(LIBOBJS)' and '$(ALLOCA)'
+might be empty, and building an empty library is not portable.  You
+should ensure that there is always something to put in 'libcompat.a'.
+Most projects will also add some utility functions in that directory,
+and list them in 'libcompat_a_SOURCES', so in practice 'libcompat.a'
+cannot be empty.
+
+   Finally here is how this library could be used from the 'src/'
+directory.
+
+     # src/Makefile.am
+
+     # Link all programs in this directory with libcompat.a
+     LDADD = ../lib/libcompat.a
+
+     bin_PROGRAMS = tool1 tool2 ...
+     tool1_SOURCES = ...
+     tool2_SOURCES = ...
+
+   When option 'subdir-objects' is not used, as in the above example,
+the variables '$(LIBOBJS)' or '$(ALLOCA)' can only be used in the
+directory where their sources lie.  E.g., here it would be wrong to use
+'$(LIBOBJS)' or '$(ALLOCA)' in 'src/Makefile.am'.  However if both
+'subdir-objects' and 'AC_CONFIG_LIBOBJ_DIR' are used, it is OK to use
+these variables in other directories.  For instance 'src/Makefile.am'
+could be changed as follows.
+
+     # src/Makefile.am
+
+     AUTOMAKE_OPTIONS = subdir-objects
+     LDADD = $(LIBOBJS) $(ALLOCA)
+
+     bin_PROGRAMS = tool1 tool2 ...
+     tool1_SOURCES = ...
+     tool2_SOURCES = ...
+
+   Because '$(LIBOBJS)' and '$(ALLOCA)' contain object file names that
+end with '.$(OBJEXT)', they are not suitable for Libtool libraries
+(where the expected object extension is '.lo'): 'LTLIBOBJS' and
+'LTALLOCA' should be used instead.
+
+   'LTLIBOBJS' is defined automatically by Autoconf and should not be
+defined by hand (as in the past), however at the time of writing
+'LTALLOCA' still needs to be defined from 'ALLOCA' manually.  *Note
+'AC_LIBOBJ' vs. 'LIBOBJS': (autoconf)AC_LIBOBJ vs LIBOBJS.
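+
+   A minimal 'configure.ac' sketch for that manual definition (assuming
+'ALLOCA' has already been set by 'AC_FUNC_ALLOCA' and that objects use
+the '$ac_objext' extension) could be:
+
+     AC_FUNC_ALLOCA
+     # Derive the libtool object list from ALLOCA by hand.
+     LTALLOCA=`echo "$ALLOCA" | sed "s/\.$ac_objext/.lo/g"`
+     AC_SUBST([LTALLOCA])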
+
+
+File: automake.info,  Node: Program Variables,  Next: Yacc and Lex,  Prev: LIBOBJS,  Up: Programs
+
+8.7 Variables used when building a program
+==========================================
+
+Occasionally it is useful to know which 'Makefile' variables Automake
+uses for compilations, and in which order (*note Flag Variables
+Ordering::); for instance, you might need to do your own compilation in
+some special cases.
+
+   Some variables are inherited from Autoconf; these are 'CC', 'CFLAGS',
+'CPPFLAGS', 'DEFS', 'LDFLAGS', and 'LIBS'.
+
+   There are some additional variables that Automake defines on its own:
+
+'AM_CPPFLAGS'
+     The contents of this variable are passed to every compilation that
+     invokes the C preprocessor; it is a list of arguments to the
+     preprocessor.  For instance, '-I' and '-D' options should be listed
+     here.
+
+     Automake already provides some '-I' options automatically, in a
+     separate variable that is also passed to every compilation that
+     invokes the C preprocessor.  In particular it generates '-I.',
+     '-I$(srcdir)', and a '-I' pointing to the directory holding
+     'config.h' (if you've used 'AC_CONFIG_HEADERS').  You can disable
+     the default '-I' options using the 'nostdinc' option.
+
+     When a file to be included is generated during the build and not
+     part of a distribution tarball, its location is under
+     '$(builddir)', not under '$(srcdir)'.  This matters especially for
+     packages that use header files placed in sub-directories and want
+     to allow builds outside the source tree (*note VPATH Builds::).  In
+     that case we recommend using a pair of '-I' options, such as,
+     e.g., '-Isome/subdir -I$(srcdir)/some/subdir' or
+     '-I$(top_builddir)/some/subdir -I$(top_srcdir)/some/subdir'.  Note
+     that the reference to the build tree should come before the
+     reference to the source tree, so that accidentally leftover
+     generated files in the source directory are ignored.
+
+     'AM_CPPFLAGS' is ignored in preference to a per-executable (or
+     per-library) '_CPPFLAGS' variable if it is defined.
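+
+     For instance, a minimal sketch (the directory and macro names are
+     only illustrative) following the build-tree-before-source-tree
+     advice above:
+
+          AM_CPPFLAGS = -I$(top_builddir)/some/subdir \
+                        -I$(top_srcdir)/some/subdir \
+                        -DCONFDIR=\"$(sysconfdir)\"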
+
+'INCLUDES'
+     This does the same job as 'AM_CPPFLAGS' (or any per-target
+     '_CPPFLAGS' variable if it is used).  It is an older name for the
+     same functionality.  This variable is deprecated; we suggest using
+     'AM_CPPFLAGS' and per-target '_CPPFLAGS' instead.
+
+'AM_CFLAGS'
+     This is the variable the 'Makefile.am' author can use to pass in
+     additional C compiler flags.  In some situations, this is not used,
+     in preference to the per-executable (or per-library) '_CFLAGS'.
+
+'COMPILE'
+     This is the command used to actually compile a C source file.  The
+     file name is appended to form the complete command line.
+
+'AM_LDFLAGS'
+     This is the variable the 'Makefile.am' author can use to pass in
+     additional linker flags.  In some situations, this is not used, in
+     preference to the per-executable (or per-library) '_LDFLAGS'.
+
+'LINK'
+     This is the command used to actually link a C program.  It already
+     includes '-o $@' and the usual variable references (for instance,
+     'CFLAGS'); it takes as "arguments" the names of the object files
+     and libraries to link in.  This variable is not used when the
+     linker is overridden with a per-target '_LINK' variable or
+     per-target flags cause Automake to define such a '_LINK' variable.
+
+
+File: automake.info,  Node: Yacc and Lex,  Next: C++ Support,  Prev: Program Variables,  Up: Programs
+
+8.8 Yacc and Lex support
+========================
+
+Automake has somewhat idiosyncratic support for Yacc and Lex.
+
+   Automake assumes that the '.c' file generated by 'yacc' (or 'lex')
+should be named using the basename of the input file.  That is, for a
+yacc source file 'foo.y', Automake will cause the intermediate file to
+be named 'foo.c' (as opposed to 'y.tab.c', which is more traditional).
+
+   The extension of a yacc source file is used to determine the
+extension of the resulting C or C++ source and header files.  Note that
+header files are generated only when the '-d' Yacc option is used; see
+below for more information about this flag, and how to specify it.
+Files with the extension '.y' will thus be turned into '.c' sources and
+'.h' headers; likewise, '.yy' will become '.cc' and '.hh', '.y++' will
+become '.c++' and '.h++', '.yxx' will become '.cxx' and '.hxx', and '.ypp'
+will become '.cpp' and '.hpp'.
+
+   Similarly, lex source files can be used to generate C or C++; the
+extensions '.l', '.ll', '.l++', '.lxx', and '.lpp' are recognized.
+
+   You should never explicitly mention the intermediate (C or C++) file
+in any 'SOURCES' variable; only list the source file.
+
+   The intermediate files generated by 'yacc' (or 'lex') will be
+included in any distribution that is made.  That way the user doesn't
+need to have 'yacc' or 'lex'.
+
+   If a 'yacc' source file is seen, then your 'configure.ac' must define
+the variable 'YACC'.  This is most easily done by invoking the macro
+'AC_PROG_YACC' (*note Particular Program Checks: (autoconf)Particular
+Programs.).
+
+   When 'yacc' is invoked, it is passed 'AM_YFLAGS' and 'YFLAGS'.  The
+latter is a user variable and the former is intended for the
+'Makefile.am' author.
+
+   'AM_YFLAGS' is usually used to pass the '-d' option to 'yacc'.
+Automake knows what this means and will automatically adjust its rules
+to update and distribute the header file built by 'yacc -d'(1).  What
+Automake cannot guess, though, is where this header will be used: it is
+up to you to ensure the header gets built before it is first used.
+Typically this is necessary in order for dependency tracking to work
+when the header is included by another file.  The common solution is
+listing the header file in 'BUILT_SOURCES' (*note Sources::) as follows.
+
+     BUILT_SOURCES = parser.h
+     AM_YFLAGS = -d
+     bin_PROGRAMS = foo
+     foo_SOURCES = ... parser.y ...
+
+   If a 'lex' source file is seen, then your 'configure.ac' must define
+the variable 'LEX'.  You can use 'AC_PROG_LEX' to do this (*note
+Particular Program Checks: (autoconf)Particular Programs.), but using
+'AM_PROG_LEX' macro (*note Macros::) is recommended.
+
+   When 'lex' is invoked, it is passed 'AM_LFLAGS' and 'LFLAGS'.  The
+latter is a user variable and the former is intended for the
+'Makefile.am' author.
+
+   When 'AM_MAINTAINER_MODE' (*note maintainer-mode::) is used, the
+rebuild rules for distributed Yacc and Lex sources are only used when
+'maintainer-mode' is enabled, or when the files have been erased.
+
+   When 'lex' or 'yacc' sources are used, 'automake -a' automatically
+installs an auxiliary program called 'ylwrap' in your package (*note
+Auxiliary Programs::).  This program is used by the build rules to
+rename the output of these tools, and makes it possible to include
+multiple 'yacc' (or 'lex') source files in a single directory.  (This is
+necessary because yacc's output file name is fixed, and a parallel make
+could conceivably invoke more than one instance of 'yacc'
+simultaneously.)
+
+   For 'yacc', simply managing locking is insufficient.  The output of
+'yacc' always uses the same symbol names internally, so it isn't
+possible to link two 'yacc' parsers into the same executable.
+
+   We recommend using the following renaming hack used in 'gdb':
+     #define yymaxdepth c_maxdepth
+     #define yyparse c_parse
+     #define yylex   c_lex
+     #define yyerror c_error
+     #define yylval  c_lval
+     #define yychar  c_char
+     #define yydebug c_debug
+     #define yypact  c_pact
+     #define yyr1    c_r1
+     #define yyr2    c_r2
+     #define yydef   c_def
+     #define yychk   c_chk
+     #define yypgo   c_pgo
+     #define yyact   c_act
+     #define yyexca  c_exca
+     #define yyerrflag c_errflag
+     #define yynerrs c_nerrs
+     #define yyps    c_ps
+     #define yypv    c_pv
+     #define yys     c_s
+     #define yy_yys  c_yys
+     #define yystate c_state
+     #define yytmp   c_tmp
+     #define yyv     c_v
+     #define yy_yyv  c_yyv
+     #define yyval   c_val
+     #define yylloc  c_lloc
+     #define yyreds  c_reds
+     #define yytoks  c_toks
+     #define yylhs   c_yylhs
+     #define yylen   c_yylen
+     #define yydefred c_yydefred
+     #define yydgoto  c_yydgoto
+     #define yysindex c_yysindex
+     #define yyrindex c_yyrindex
+     #define yygindex c_yygindex
+     #define yytable  c_yytable
+     #define yycheck  c_yycheck
+     #define yyname   c_yyname
+     #define yyrule   c_yyrule
+
+   For each define, replace the 'c_' prefix with whatever you like.
+These defines work for 'bison', 'byacc', and traditional 'yacc's.  If
+you find a parser generator that uses a symbol not covered here, please
+report the new name so it can be added to the list.
+
+   ---------- Footnotes ----------
+
+   (1) Please note that 'automake' recognizes '-d' in 'AM_YFLAGS' only
+if it is not clustered with other options; for example, it won't be
+recognized if 'AM_YFLAGS' is '-dt', but it will be if 'AM_YFLAGS' is '-d
+-t' or '-t -d'.
+
+
+File: automake.info,  Node: C++ Support,  Next: Objective C Support,  Prev: Yacc and Lex,  Up: Programs
+
+8.9 C++ Support
+===============
+
+Automake includes full support for C++.
+
+   Any package including C++ code must define the output variable 'CXX'
+in 'configure.ac'; the simplest way to do this is to use the
+'AC_PROG_CXX' macro (*note Particular Program Checks:
+(autoconf)Particular Programs.).
+
+   A few additional variables are defined when a C++ source file is
+seen:
+
+'CXX'
+     The name of the C++ compiler.
+
+'CXXFLAGS'
+     Any flags to pass to the C++ compiler.
+
+'AM_CXXFLAGS'
+     The maintainer's variant of 'CXXFLAGS'.
+
+'CXXCOMPILE'
+     The command used to actually compile a C++ source file.  The file
+     name is appended to form the complete command line.
+
+'CXXLINK'
+     The command used to actually link a C++ program.
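+
+   A minimal sketch tying this together (the program and file names are
+only illustrative): 'configure.ac' calls 'AC_PROG_CXX', and the
+'Makefile.am' could read:
+
+     bin_PROGRAMS = hello
+     hello_SOURCES = hello.cc hello.h main.cc
+     AM_CXXFLAGS = -Wall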
+
+
+File: automake.info,  Node: Objective C Support,  Next: Objective C++ Support,  Prev: C++ Support,  Up: Programs
+
+8.10 Objective C Support
+========================
+
+Automake includes some support for Objective C.
+
+   Any package including Objective C code must define the output
+variable 'OBJC' in 'configure.ac'; the simplest way to do this is to use
+the 'AC_PROG_OBJC' macro (*note Particular Program Checks:
+(autoconf)Particular Programs.).
+
+   A few additional variables are defined when an Objective C source
+file is seen:
+
+'OBJC'
+     The name of the Objective C compiler.
+
+'OBJCFLAGS'
+     Any flags to pass to the Objective C compiler.
+
+'AM_OBJCFLAGS'
+     The maintainer's variant of 'OBJCFLAGS'.
+
+'OBJCCOMPILE'
+     The command used to actually compile an Objective C source file.
+     The file name is appended to form the complete command line.
+
+'OBJCLINK'
+     The command used to actually link an Objective C program.
+
+
+File: automake.info,  Node: Objective C++ Support,  Next: Unified Parallel C Support,  Prev: Objective C Support,  Up: Programs
+
+8.11 Objective C++ Support
+==========================
+
+Automake includes some support for Objective C++.
+
+   Any package including Objective C++ code must define the output
+variable 'OBJCXX' in 'configure.ac'; the simplest way to do this is to
+use the 'AC_PROG_OBJCXX' macro (*note Particular Program Checks:
+(autoconf)Particular Programs.).
+
+   A few additional variables are defined when an Objective C++ source
+file is seen:
+
+'OBJCXX'
+     The name of the Objective C++ compiler.
+
+'OBJCXXFLAGS'
+     Any flags to pass to the Objective C++ compiler.
+
+'AM_OBJCXXFLAGS'
+     The maintainer's variant of 'OBJCXXFLAGS'.
+
+'OBJCXXCOMPILE'
+     The command used to actually compile an Objective C++ source file.
+     The file name is appended to form the complete command line.
+
+'OBJCXXLINK'
+     The command used to actually link an Objective C++ program.
+
+
+File: automake.info,  Node: Unified Parallel C Support,  Next: Assembly Support,  Prev: Objective C++ Support,  Up: Programs
+
+8.12 Unified Parallel C Support
+===============================
+
+Automake includes some support for Unified Parallel C.
+
+   Any package including Unified Parallel C code must define the output
+variable 'UPC' in 'configure.ac'; the simplest way to do this is to use
+the 'AM_PROG_UPC' macro (*note Public Macros::).
+
+   A few additional variables are defined when a Unified Parallel C
+source file is seen:
+
+'UPC'
+     The name of the Unified Parallel C compiler.
+
+'UPCFLAGS'
+     Any flags to pass to the Unified Parallel C compiler.
+
+'AM_UPCFLAGS'
+     The maintainer's variant of 'UPCFLAGS'.
+
+'UPCCOMPILE'
+     The command used to actually compile a Unified Parallel C source
+     file.  The file name is appended to form the complete command line.
+
+'UPCLINK'
+     The command used to actually link a Unified Parallel C program.
+
+
+File: automake.info,  Node: Assembly Support,  Next: Fortran 77 Support,  Prev: Unified Parallel C Support,  Up: Programs
+
+8.13 Assembly Support
+=====================
+
+Automake includes some support for assembly code.  There are two forms
+of assembler files: normal ('*.s') and preprocessed by 'CPP' ('*.S' or
+'*.sx').
+
+   The variable 'CCAS' holds the name of the compiler used to build
+assembly code.  This compiler must work a bit like a C compiler; in
+particular it must accept '-c' and '-o'.  The values of 'CCASFLAGS' and
+'AM_CCASFLAGS' (or its per-target definition) is passed to the
+compilation.  For preprocessed files, 'DEFS', 'DEFAULT_INCLUDES',
+'INCLUDES', 'CPPFLAGS' and 'AM_CPPFLAGS' are also used.
+
+   The autoconf macro 'AM_PROG_AS' will define 'CCAS' and 'CCASFLAGS'
+for you (unless they are already set, it simply sets 'CCAS' to the C
+compiler and 'CCASFLAGS' to the C compiler flags), but you are free to
+define these variables by other means.
+
+   Only the suffixes '.s', '.S', and '.sx' are recognized by 'automake'
+as being files containing assembly code.
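+
+   For example, a minimal sketch (the file and library names are
+hypothetical), with 'AM_PROG_AS' invoked in 'configure.ac':
+
+     noinst_LIBRARIES = libstartup.a
+     libstartup_a_SOURCES = start.S helpers.c
+     AM_CCASFLAGS = -I$(top_srcdir)/include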
+
+
+File: automake.info,  Node: Fortran 77 Support,  Next: Fortran 9x Support,  Prev: Assembly Support,  Up: Programs
+
+8.14 Fortran 77 Support
+=======================
+
+Automake includes full support for Fortran 77.
+
+   Any package including Fortran 77 code must define the output variable
+'F77' in 'configure.ac'; the simplest way to do this is to use the
+'AC_PROG_F77' macro (*note Particular Program Checks:
+(autoconf)Particular Programs.).
+
+   A few additional variables are defined when a Fortran 77 source file
+is seen:
+
+'F77'
+     The name of the Fortran 77 compiler.
+
+'FFLAGS'
+     Any flags to pass to the Fortran 77 compiler.
+
+'AM_FFLAGS'
+     The maintainer's variant of 'FFLAGS'.
+
+'RFLAGS'
+     Any flags to pass to the Ratfor compiler.
+
+'AM_RFLAGS'
+     The maintainer's variant of 'RFLAGS'.
+
+'F77COMPILE'
+     The command used to actually compile a Fortran 77 source file.  The
+     file name is appended to form the complete command line.
+
+'FLINK'
+     The command used to actually link a pure Fortran 77 program or
+     shared library.
+
+   Automake can handle preprocessing Fortran 77 and Ratfor source files
+in addition to compiling them(1).  Automake also contains some support
+for creating programs and shared libraries that are a mixture of Fortran
+77 and other languages (*note Mixing Fortran 77 With C and C++::).
+
+   These issues are covered in the following sections.
+
+* Menu:
+
+* Preprocessing Fortran 77::    Preprocessing Fortran 77 sources
+* Compiling Fortran 77 Files::  Compiling Fortran 77 sources
+* Mixing Fortran 77 With C and C++::  Mixing Fortran 77 With C and C++
+
+   ---------- Footnotes ----------
+
+   (1) Much, if not most, of the information in the following sections
+pertaining to preprocessing Fortran 77 programs was taken almost
+verbatim from *note Catalogue of Rules: (make)Catalogue of Rules.
+
+
+File: automake.info,  Node: Preprocessing Fortran 77,  Next: Compiling Fortran 77 Files,  Up: Fortran 77 Support
+
+8.14.1 Preprocessing Fortran 77
+-------------------------------
+
+'N.f' is made automatically from 'N.F' or 'N.r'.  This rule runs just
+the preprocessor to convert a preprocessable Fortran 77 or Ratfor source
+file into a strict Fortran 77 source file.  The precise command used is
+as follows:
+
+'.F'
+     '$(F77) -F $(DEFS) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS)
+     $(AM_FFLAGS) $(FFLAGS)'
+
+'.r'
+     '$(F77) -F $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)'
+
+
+File: automake.info,  Node: Compiling Fortran 77 Files,  Next: Mixing Fortran 77 With C and C++,  Prev: Preprocessing Fortran 77,  Up: Fortran 77 Support
+
+8.14.2 Compiling Fortran 77 Files
+---------------------------------
+
+'N.o' is made automatically from 'N.f', 'N.F' or 'N.r' by running the
+Fortran 77 compiler.  The precise command used is as follows:
+
+'.f'
+     '$(F77) -c $(AM_FFLAGS) $(FFLAGS)'
+
+'.F'
+     '$(F77) -c $(DEFS) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS)
+     $(AM_FFLAGS) $(FFLAGS)'
+
+'.r'
+     '$(F77) -c $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)'
+
+
+File: automake.info,  Node: Mixing Fortran 77 With C and C++,  Prev: Compiling Fortran 77 Files,  Up: Fortran 77 Support
+
+8.14.3 Mixing Fortran 77 With C and C++
+---------------------------------------
+
+Automake currently provides _limited_ support for creating programs and
+shared libraries that are a mixture of Fortran 77 and C and/or C++.
+However, there are many other issues related to mixing Fortran 77 with
+other languages that are _not_ (currently) handled by Automake, but that
+are handled by other packages(1).
+
+   Automake can help in two ways:
+
+  1. Automatic selection of the linker depending on which combination of
+     languages appears in the source code.
+
+  2. Automatic selection of the appropriate linker flags (e.g., '-L' and
+     '-l') to pass to the automatically selected linker in order to link
+     in the appropriate Fortran 77 intrinsic and run-time libraries.
+
+     These extra Fortran 77 linker flags are supplied in the output
+     variable 'FLIBS' by the 'AC_F77_LIBRARY_LDFLAGS' Autoconf macro.
+     *Note Fortran Compiler Characteristics: (autoconf)Fortran Compiler.
+
+   If Automake detects that a program or shared library (as mentioned in
+some '_PROGRAMS' or '_LTLIBRARIES' primary) contains source code that is
+a mixture of Fortran 77 and C and/or C++, then it requires that the
+macro 'AC_F77_LIBRARY_LDFLAGS' be called in 'configure.ac', and that
+'$(FLIBS)' appear in the appropriate '_LDADD' (for programs) or
+'_LIBADD' (for shared libraries) variable.  It is the responsibility of
+the person writing the 'Makefile.am' to make sure that '$(FLIBS)'
+appears in the appropriate '_LDADD' or '_LIBADD' variable.
+
+   For example, consider the following 'Makefile.am':
+
+     bin_PROGRAMS = foo
+     foo_SOURCES  = main.cc foo.f
+     foo_LDADD    = libfoo.la $(FLIBS)
+
+     pkglib_LTLIBRARIES = libfoo.la
+     libfoo_la_SOURCES  = bar.f baz.c zardoz.cc
+     libfoo_la_LIBADD   = $(FLIBS)
+
+   In this case, Automake will insist that 'AC_F77_LIBRARY_LDFLAGS' is
+mentioned in 'configure.ac'.  Also, if '$(FLIBS)' hadn't been mentioned
+in 'foo_LDADD' and 'libfoo_la_LIBADD', then Automake would have issued a
+warning.
+
+* Menu:
+
+* How the Linker is Chosen::    Automatic linker selection
+
+   ---------- Footnotes ----------
+
+   (1) For example, the cfortran package
+(http://www-zeus.desy.de/~burow/cfortran/) addresses all of these
+inter-language issues, and runs under nearly all Fortran 77, C and C++
+compilers on nearly all platforms.  However, 'cfortran' is not yet Free
+Software, but it will be in the next major release.
+
+
+File: automake.info,  Node: How the Linker is Chosen,  Up: Mixing Fortran 77 With C and C++
+
+8.14.3.1 How the Linker is Chosen
+.................................
+
+When a program or library mixes several languages, Automake chooses the
+linker according to the following priorities.  (The names in parentheses
+are the variables containing the link command.)
+
+  1. Native Java ('GCJLINK')
+  2. Objective C++ ('OBJCXXLINK')
+  3. C++ ('CXXLINK')
+  4. Fortran 77 ('F77LINK')
+  5. Fortran ('FCLINK')
+  6. Objective C ('OBJCLINK')
+  7. Unified Parallel C ('UPCLINK')
+  8. C ('LINK')
+
+   For example, if Fortran 77, C and C++ source code is compiled into a
+program, then the C++ linker will be used.  In this case, if the C or
+Fortran 77 linkers required any special libraries that weren't included
+by the C++ linker, then they must be manually added to an '_LDADD' or
+'_LIBADD' variable by the user writing the 'Makefile.am'.
+
+   Automake only looks at the file names listed in '_SOURCES' variables
+to choose the linker, and defaults to the C linker.  Sometimes this is
+inconvenient because you are linking against a library written in
+another language and would like to set the linker more appropriately.
+*Note Libtool Convenience Libraries::, for a trick with
+'nodist_EXTRA_..._SOURCES'.
+
+   A per-target '_LINK' variable will override the above selection.
+Per-target link flags will cause Automake to write a per-target '_LINK'
+variable according to the language chosen as above.
+
+
+File: automake.info,  Node: Fortran 9x Support,  Next: Java Support with gcj,  Prev: Fortran 77 Support,  Up: Programs
+
+8.15 Fortran 9x Support
+=======================
+
+Automake includes support for Fortran 9x.
+
+   Any package including Fortran 9x code must define the output variable
+'FC' in 'configure.ac'; the simplest way to do this is to use the
+'AC_PROG_FC' macro (*note Particular Program Checks:
+(autoconf)Particular Programs.).
+
+   A few additional variables are defined when a Fortran 9x source file
+is seen:
+
+'FC'
+     The name of the Fortran 9x compiler.
+
+'FCFLAGS'
+     Any flags to pass to the Fortran 9x compiler.
+
+'AM_FCFLAGS'
+     The maintainer's variant of 'FCFLAGS'.
+
+'FCCOMPILE'
+     The command used to actually compile a Fortran 9x source file.  The
+     file name is appended to form the complete command line.
+
+'FCLINK'
+     The command used to actually link a pure Fortran 9x program or
+     shared library.
+
+* Menu:
+
+* Compiling Fortran 9x Files::  Compiling Fortran 9x sources
+
+
+File: automake.info,  Node: Compiling Fortran 9x Files,  Up: Fortran 9x Support
+
+8.15.1 Compiling Fortran 9x Files
+---------------------------------
+
+'FILE.o' is made automatically from 'FILE.f90', 'FILE.f95', 'FILE.f03',
+or 'FILE.f08' by running the Fortran 9x compiler.  The precise command
+used is as follows:
+
+'.f90'
+     '$(FC) $(AM_FCFLAGS) $(FCFLAGS) -c $(FCFLAGS_f90) $<'
+
+'.f95'
+     '$(FC) $(AM_FCFLAGS) $(FCFLAGS) -c $(FCFLAGS_f95) $<'
+
+'.f03'
+     '$(FC) $(AM_FCFLAGS) $(FCFLAGS) -c $(FCFLAGS_f03) $<'
+
+'.f08'
+     '$(FC) $(AM_FCFLAGS) $(FCFLAGS) -c $(FCFLAGS_f08) $<'
+
+
+File: automake.info,  Node: Java Support with gcj,  Next: Vala Support,  Prev: Fortran 9x Support,  Up: Programs
+
+8.16 Compiling Java sources using gcj
+=====================================
+
+Automake includes support for natively compiled Java, using 'gcj', the
+Java front end to the GNU Compiler Collection (rudimentary support for
+compiling Java to bytecode using the 'javac' compiler is also present,
+_albeit deprecated_; *note Java::).
+
+   Any package including Java code to be compiled must define the output
+variable 'GCJ' in 'configure.ac'; the variable 'GCJFLAGS' must also be
+defined somehow (either in 'configure.ac' or 'Makefile.am').  The
+simplest way to do this is to use the 'AM_PROG_GCJ' macro.
+
+   By default, programs including Java source files are linked with
+'gcj'.
+
+   As always, the contents of 'AM_GCJFLAGS' are passed to every
+compilation invoking 'gcj' (in its role as an ahead-of-time compiler;
+when invoking it to create '.class' files, 'AM_JAVACFLAGS' is used
+instead).  If it is necessary to pass options to 'gcj' from
+'Makefile.am', this variable, and not the user variable 'GCJFLAGS',
+should be used.
+
+   'gcj' can be used to compile '.java', '.class', '.zip', or '.jar'
+files.
+
+   When linking, 'gcj' requires that the main class be specified using
+the '--main=' option.  The easiest way to do this is to use the
+'_LDFLAGS' variable for the program.
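+
+   For example, a minimal sketch (the class and program names are
+hypothetical):
+
+     bin_PROGRAMS = hello
+     hello_SOURCES = Hello.java
+     hello_LDFLAGS = --main=Hello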
+
+
+File: automake.info,  Node: Vala Support,  Next: Support for Other Languages,  Prev: Java Support with gcj,  Up: Programs
+
+8.17 Vala Support
+=================
+
+Automake provides initial support for Vala
+(<http://www.vala-project.org/>).  This requires valac version 0.7.0 or
+later, and currently requires the user to use GNU 'make'.
+
+     foo_SOURCES = foo.vala bar.vala zardoc.c
+
+   Any '.vala' file listed in a '_SOURCES' variable will be compiled
+into C code by the Vala compiler.  The generated '.c' files are
+distributed.  The end user does not need to have a Vala compiler
+installed.
+
+   Automake ships with an Autoconf macro called 'AM_PROG_VALAC' that
+will locate the Vala compiler and optionally check its version number.
+
+ -- Macro: AM_PROG_VALAC ([MINIMUM-VERSION], [ACTION-IF-FOUND],
+          [ACTION-IF-NOT-FOUND])
+     Search for a Vala compiler in 'PATH'.  If it
+     is found, the variable 'VALAC' is set to point to it (see below for
+     more details).  This macro takes three optional arguments.  The
+     first argument, if present, is the minimum version of the Vala
+     compiler required to compile this package.  If a compiler is found
+     and satisfies MINIMUM-VERSION, then ACTION-IF-FOUND is run (this
+     defaults to doing nothing).  Otherwise, ACTION-IF-NOT-FOUND is run.
+     If ACTION-IF-NOT-FOUND is not specified, the default value is to
+     print a warning in case no compiler is found, or if a too-old
+     version of the compiler is found.
+
+   There are a few variables that are used when compiling Vala sources:
+
+'VALAC'
+     Absolute path to the Vala compiler, or simply 'valac' if no
+     suitable Vala compiler could be found at 'configure' time.
+
+'VALAFLAGS'
+     Additional arguments for the Vala compiler.
+
+'AM_VALAFLAGS'
+     The maintainer's variant of 'VALAFLAGS'.
+
+          lib_LTLIBRARIES = libfoo.la
+          libfoo_la_SOURCES = foo.vala
+
+   Note that currently, you cannot use per-target '*_VALAFLAGS' (*note
+Renamed Objects::) to produce different C files from one Vala source
+file.
+
+
+File: automake.info,  Node: Support for Other Languages,  Next: Dependencies,  Prev: Vala Support,  Up: Programs
+
+8.18 Support for Other Languages
+================================
+
+Automake currently only includes full support for C, C++ (*note C++
+Support::), Objective C (*note Objective C Support::), Objective C++
+(*note Objective C++ Support::), Fortran 77 (*note Fortran 77
+Support::), Fortran 9x (*note Fortran 9x Support::), and Java (*note
+Java Support with gcj::).  There is only rudimentary support for other
+languages, support for which will be improved based on user demand.
+
+   Some limited support for adding your own languages is available via
+the suffix rule handling (*note Suffixes::).
+
+
+File: automake.info,  Node: Dependencies,  Next: EXEEXT,  Prev: Support for Other Languages,  Up: Programs
+
+8.19 Automatic dependency tracking
+==================================
+
+As a developer it is often painful to continually update the
+'Makefile.am' whenever the include-file dependencies change in a
+project.  Automake supplies a way to automatically track dependency
+changes (*note Dependency Tracking::).
+
+   Automake always uses complete dependencies for a compilation,
+including system headers.  Automake's model is that dependency
+computation should be a side effect of the build.  To this end,
+dependencies are computed by running all compilations through a special
+wrapper program called 'depcomp'.  'depcomp' understands how to coax
+many different C and C++ compilers into generating dependency
+information in the format it requires.  'automake -a' will install
+'depcomp' into your source tree for you.  If 'depcomp' can't figure out
+how to properly invoke your compiler, dependency tracking will simply be
+disabled for your build.
+
+   Experience with earlier versions of Automake (*note Dependency
+Tracking Evolution: (automake-history)Dependency Tracking Evolution.)
+taught us that it is not reliable to generate dependencies only on the
+maintainer's system, as configurations vary too much.  So instead
+Automake implements dependency tracking at build time.
+
+   Automatic dependency tracking can be suppressed by putting
+'no-dependencies' in the variable 'AUTOMAKE_OPTIONS', or passing
+'no-dependencies' as an argument to 'AM_INIT_AUTOMAKE' (this should be
+the preferred way).  Or, you can invoke 'automake' with the '-i' option.
+Dependency tracking is enabled by default.
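+
+   As a hedged sketch of the two maintainer-side options just
+mentioned:
+
+     # In configure.ac (the preferred way):
+     AM_INIT_AUTOMAKE([no-dependencies])
+
+     # Or, in Makefile.am:
+     AUTOMAKE_OPTIONS = no-dependencies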
+
+   The person building your package also can choose to disable
+dependency tracking by configuring with '--disable-dependency-tracking'.
+
+
+File: automake.info,  Node: EXEEXT,  Prev: Dependencies,  Up: Programs
+
+8.20 Support for executable extensions
+======================================
+
+On some platforms, such as Windows, executables are expected to have an
+extension such as '.exe'.  On these platforms, some compilers (GCC among
+them) will automatically generate 'foo.exe' when asked to generate
+'foo'.
+
+   Automake provides mostly-transparent support for this.  Unfortunately
+_mostly_ doesn't yet mean _fully_.  Until the English dictionary is
+revised, you will have to assist Automake if your package must support
+those platforms.
+
+   One thing you must be aware of is that, internally, Automake rewrites
+something like this:
+
+     bin_PROGRAMS = liver
+
+   to this:
+
+     bin_PROGRAMS = liver$(EXEEXT)
+
+   The targets Automake generates are likewise given the '$(EXEEXT)'
+extension.
+
+   The variables 'TESTS' and 'XFAIL_TESTS' (*note Simple Tests::) are
+also rewritten if they contain filenames that have been declared as
+programs in the same 'Makefile'.  (This is mostly useful when some
+programs from 'check_PROGRAMS' are listed in 'TESTS'.)
+
+   However, Automake cannot apply this rewriting to 'configure'
+substitutions.  This means that if you are conditionally building a
+program using such a substitution, then your 'configure.ac' must take
+care to add '$(EXEEXT)' when constructing the output variable.
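+
+   A hedged sketch of this pattern (the program name 'helper' and the
+shell test are hypothetical):
+
+     # configure.ac
+     extra_programs=
+     if test "$want_helper" = yes; then
+       # Quote so that make, not the shell, expands $(EXEEXT).
+       extra_programs='helper$(EXEEXT)'
+     fi
+     AC_SUBST([extra_programs])
+
+     # Makefile.am
+     EXTRA_PROGRAMS = helper
+     bin_PROGRAMS = foo $(extra_programs)
+     helper_SOURCES = helper.c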
+
+   Sometimes maintainers like to write an explicit link rule for their
+program.  Without executable extension support, this is easy--you simply
+write a rule whose target is the name of the program.  However, when
+executable extension support is enabled, you must instead add the
+'$(EXEEXT)' suffix.
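+
+   For illustration, a sketch of such a hand-written rule (the link
+command is deliberately simplified and is not the one Automake would
+generate):
+
+     bin_PROGRAMS = foo
+     foo_SOURCES = foo.c
+
+     # Note the $(EXEEXT) suffix on the target name.
+     foo$(EXEEXT): foo.$(OBJEXT)
+             $(CC) $(CFLAGS) $(LDFLAGS) -o $@ foo.$(OBJEXT) $(LIBS)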
+
+   This might be a nuisance for maintainers who know their package will
+never run on a platform that has executable extensions.  For those
+maintainers, the 'no-exeext' option (*note Options::) will disable this
+feature.  This works in a fairly ugly way; if 'no-exeext' is seen, then
+the presence of a rule for a target named 'foo' in 'Makefile.am' will
+override an 'automake'-generated rule for 'foo$(EXEEXT)'.  Without the
+'no-exeext' option, this use will give a diagnostic.
+
+
+File: automake.info,  Node: Other Objects,  Next: Other GNU Tools,  Prev: Programs,  Up: Top
+
+9 Other Derived Objects
+***********************
+
+Automake can handle derived objects that are not C programs.  Sometimes
+the support for actually building such objects must be explicitly
+supplied, but Automake will still automatically handle installation and
+distribution.
+
+* Menu:
+
+* Scripts::                     Executable scripts
+* Headers::                     Header files
+* Data::                        Architecture-independent data files
+* Sources::                     Derived sources
+
+
+File: automake.info,  Node: Scripts,  Next: Headers,  Up: Other Objects
+
+9.1 Executable Scripts
+======================
+
+It is possible to define and install programs that are scripts.  Such
+programs are listed using the 'SCRIPTS' primary name.  When the script
+is distributed in its final, installable form, the 'Makefile' usually
+looks as follows:
+
+     # Install my_script in $(bindir) and distribute it.
+     dist_bin_SCRIPTS = my_script
+
+   Scripts are not distributed by default; as we have just seen, those
+that should be distributed can be specified using a 'dist_' prefix as
+with other primaries.
+
+   Scripts can be installed in 'bindir', 'sbindir', 'libexecdir',
+'pkglibexecdir', or 'pkgdatadir'.
+
+   Scripts that need not be installed can be listed in 'noinst_SCRIPTS',
+and among them, those which are needed only by 'make check' should go in
+'check_SCRIPTS'.
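+
+   For example (the script names here are hypothetical):
+
+     # Scripts that are never installed.
+     noinst_SCRIPTS = update-copyright.sh
+     # Scripts needed only by 'make check'.
+     check_SCRIPTS = run-local-tests.sh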
+
+   When a script needs to be built, the 'Makefile.am' should include the
+appropriate rules.  For instance the 'automake' program itself is a Perl
+script that is generated from 'automake.in'.  Here is how this is
+handled:
+
+     bin_SCRIPTS = automake
+     CLEANFILES = $(bin_SCRIPTS)
+     EXTRA_DIST = automake.in
+
+     do_subst = sed -e 's,[@]datadir[@],$(datadir),g' \
+                 -e 's,[@]PERL[@],$(PERL),g' \
+                 -e 's,[@]PACKAGE[@],$(PACKAGE),g' \
+                 -e 's,[@]VERSION[@],$(VERSION),g' \
+                 ...
+
+     automake: automake.in Makefile
+             $(do_subst) < $(srcdir)/automake.in > automake
+             chmod +x automake
+
+   Such scripts for which a build rule has been supplied need to be
+deleted explicitly using 'CLEANFILES' (*note Clean::), and their sources
+have to be distributed, usually with 'EXTRA_DIST' (*note Basics of
+Distribution::).
+
+   Another common way to build scripts is to process them from
+'configure' with 'AC_CONFIG_FILES'.  In this situation Automake knows
+which files should be cleaned and distributed, and what the rebuild
+rules should look like.
+
+   For instance if 'configure.ac' contains
+
+     AC_CONFIG_FILES([src/my_script], [chmod +x src/my_script])
+
+to build 'src/my_script' from 'src/my_script.in', then a
+'src/Makefile.am' to install this script in '$(bindir)' can be as simple
+as
+
+     bin_SCRIPTS = my_script
+     CLEANFILES = $(bin_SCRIPTS)
+
+There is no need for 'EXTRA_DIST' or any build rule: Automake infers
+them from 'AC_CONFIG_FILES' (*note Requirements::).  'CLEANFILES' is
+still useful, because by default Automake will clean targets of
+'AC_CONFIG_FILES' in 'distclean', not 'clean'.
+
+   Although this looks simpler, building scripts this way has one
+drawback: directory variables such as '$(datadir)' are not fully
+expanded and may refer to other directory variables.
+
+
+File: automake.info,  Node: Headers,  Next: Data,  Prev: Scripts,  Up: Other Objects
+
+9.2 Header files
+================
+
+Header files that must be installed are specified by the 'HEADERS'
+family of variables.  Headers can be installed in 'includedir',
+'oldincludedir', 'pkgincludedir' or any other directory you may have
+defined (*note Uniform::).  For instance,
+
+     include_HEADERS = foo.h bar/bar.h
+
+will install the two files as '$(includedir)/foo.h' and
+'$(includedir)/bar.h'.
+
+   The 'nobase_' prefix is also supported,
+
+     nobase_include_HEADERS = foo.h bar/bar.h
+
+will install the two files as '$(includedir)/foo.h' and
+'$(includedir)/bar/bar.h' (*note Alternative::).
+
+   Usually, only header files that accompany installed libraries need to
+be installed.  Headers used by programs or convenience libraries are not
+installed.  The 'noinst_HEADERS' variable can be used for such headers.
+However when the header actually belongs to a single convenience library
+or program, we recommend listing it in the program's or library's
+'_SOURCES' variable (*note Program Sources::) instead of in
+'noinst_HEADERS'.  This is clearer for the 'Makefile.am' reader.
+'noinst_HEADERS' would be the right variable to use in a directory
+containing only headers and no associated library or program.
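+
+   A brief sketch of both recommendations (the file names are
+hypothetical):
+
+     # Header used only by one program: list it with that program's
+     # sources rather than in noinst_HEADERS.
+     bin_PROGRAMS = frob
+     frob_SOURCES = frob.c frob.h
+
+     # Directory containing only headers, with no associated program
+     # or library:
+     noinst_HEADERS = compat.h util.h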
+
+   All header files must be listed somewhere; in a '_SOURCES' variable
+or in a '_HEADERS' variable.  Missing ones will not appear in the
+distribution.
+
+   For header files that are built and must not be distributed, use the
+'nodist_' prefix as in 'nodist_include_HEADERS' or
+'nodist_prog_SOURCES'.  If these generated headers are needed during the
+build, you must also ensure they exist before they are used (*note
+Sources::).
+
+
+File: automake.info,  Node: Data,  Next: Sources,  Prev: Headers,  Up: Other Objects
+
+9.3 Architecture-independent data files
+=======================================
+
+Automake supports the installation of miscellaneous data files using the
+'DATA' family of variables.
+
+   Such data can be installed in the directories 'datadir',
+'sysconfdir', 'sharedstatedir', 'localstatedir', or 'pkgdatadir'.
+
+   By default, data files are _not_ included in a distribution.  Of
+course, you can use the 'dist_' prefix to change this on a per-variable
+basis.
+
+   Here is how Automake declares its auxiliary data files:
+
+     dist_pkgdata_DATA = clean-kr.am clean.am ...
+
+
+File: automake.info,  Node: Sources,  Prev: Data,  Up: Other Objects
+
+9.4 Built Sources
+=================
+
+Because Automake's automatic dependency tracking works as a side-effect
+of compilation (*note Dependencies::) there is a bootstrap issue: a
+target should not be compiled before its dependencies are made, but
+these dependencies are unknown until the target is first compiled.
+
+   Ordinarily this is not a problem, because dependencies are
+distributed sources: they preexist and do not need to be built.  Suppose
+that 'foo.c' includes 'foo.h'.  When it first compiles 'foo.o', 'make'
+only knows that 'foo.o' depends on 'foo.c'.  As a side-effect of this
+compilation 'depcomp' records the 'foo.h' dependency so that following
+invocations of 'make' will honor it.  In these conditions, it's clear
+there is no problem: either 'foo.o' doesn't exist and has to be built
+(regardless of the dependencies), or accurate dependencies exist and
+they can be used to decide whether 'foo.o' should be rebuilt.
+
+   It's a different story if 'foo.h' doesn't exist by the first 'make'
+run.  For instance, there might be a rule to build 'foo.h'.  This time
+'foo.o''s build will fail because the compiler can't find 'foo.h'.
+'make' failed to trigger the rule to build 'foo.h' first for lack of
+dependency information.
+
+   The 'BUILT_SOURCES' variable is a workaround for this problem.  A
+source file listed in 'BUILT_SOURCES' is made on 'make all' or 'make
+check' (or even 'make install') before other targets are processed.
+However, such a source file is not _compiled_ unless explicitly
+requested by mentioning it in some other '_SOURCES' variable.
+
+   So, to conclude our introductory example, we could use 'BUILT_SOURCES
+= foo.h' to ensure 'foo.h' gets built before any other target (including
+'foo.o') during 'make all' or 'make check'.
+
+   'BUILT_SOURCES' is actually a bit of a misnomer, as any file which
+must be created early in the build process can be listed in this
+variable.  Moreover, not every built source has to be listed in
+'BUILT_SOURCES'.  For instance, a generated '.c' file doesn't
+need to appear in 'BUILT_SOURCES' (unless it is included by another
+source), because it's a known dependency of the associated object.
+
+   It might be important to emphasize that 'BUILT_SOURCES' is honored
+only by 'make all', 'make check' and 'make install'.  This means you
+cannot build a specific target (e.g., 'make foo') in a clean tree if it
+depends on a built source.  However it will succeed if you have run
+'make all' earlier, because accurate dependencies are already available.
+
+   The next section illustrates and discusses the handling of built
+sources on a toy example.
+
+* Menu:
+
+* Built Sources Example::       Several ways to handle built sources.
+
+
+File: automake.info,  Node: Built Sources Example,  Up: Sources
+
+9.4.1 Built Sources Example
+---------------------------
+
+Suppose that 'foo.c' includes 'bindir.h', which is
+installation-dependent and not distributed: it needs to be built.  Here
+'bindir.h' defines the preprocessor macro 'bindir' to the value of the
+'make' variable 'bindir' (inherited from 'configure').
+
+   We suggest several implementations below.  It's not meant to be an
+exhaustive listing of all ways to handle built sources, but it will give
+you a few ideas if you encounter this issue.
+
+First Try
+.........
+
+This first implementation will illustrate the bootstrap issue mentioned
+in the previous section (*note Sources::).
+
+   Here is a tentative 'Makefile.am'.
+
+     # This won't work.
+     bin_PROGRAMS = foo
+     foo_SOURCES = foo.c
+     nodist_foo_SOURCES = bindir.h
+     CLEANFILES = bindir.h
+     bindir.h: Makefile
+             echo '#define bindir "$(bindir)"' >$@
+
+   This setup doesn't work, because Automake doesn't know that 'foo.c'
+includes 'bindir.h'.  Remember, automatic dependency tracking works as a
+side-effect of compilation, so the dependencies of 'foo.o' will be known
+only after 'foo.o' has been compiled (*note Dependencies::).  The
+symptom is as follows.
+
+     % make
+     source='foo.c' object='foo.o' libtool=no \
+     depfile='.deps/foo.Po' tmpdepfile='.deps/foo.TPo' \
+     depmode=gcc /bin/sh ./depcomp \
+     gcc -I. -I. -g -O2 -c `test -f 'foo.c' || echo './'`foo.c
+     foo.c:2: bindir.h: No such file or directory
+     make: *** [foo.o] Error 1
+
+   In this example 'bindir.h' is neither distributed nor installed, and
+it is not even being built on time.  One may wonder if the
+'nodist_foo_SOURCES = bindir.h' line has any use at all.  This line
+simply states that 'bindir.h' is a source of 'foo', so for instance, it
+should be inspected while generating tags (*note Tags::).  In other
+words, it does not help our present problem, and the build would fail
+identically without it.
+
+Using 'BUILT_SOURCES'
+.....................
+
+A solution is to require 'bindir.h' to be built before anything else.
+This is what 'BUILT_SOURCES' is meant for (*note Sources::).
+
+     bin_PROGRAMS = foo
+     foo_SOURCES = foo.c
+     nodist_foo_SOURCES = bindir.h
+     BUILT_SOURCES = bindir.h
+     CLEANFILES = bindir.h
+     bindir.h: Makefile
+             echo '#define bindir "$(bindir)"' >$@
+
+   See how 'bindir.h' gets built first:
+
+     % make
+     echo '#define bindir "/usr/local/bin"' >bindir.h
+     make  all-am
+     make[1]: Entering directory `/home/adl/tmp'
+     source='foo.c' object='foo.o' libtool=no \
+     depfile='.deps/foo.Po' tmpdepfile='.deps/foo.TPo' \
+     depmode=gcc /bin/sh ./depcomp \
+     gcc -I. -I. -g -O2 -c `test -f 'foo.c' || echo './'`foo.c
+     gcc  -g -O2   -o foo  foo.o
+     make[1]: Leaving directory `/home/adl/tmp'
+
+   However, as said earlier, 'BUILT_SOURCES' applies only to the 'all',
+'check', and 'install' targets.  It still fails if you try to run 'make
+foo' explicitly:
+
+     % make clean
+     test -z "bindir.h" || rm -f bindir.h
+     test -z "foo" || rm -f foo
+     rm -f *.o
+     % : > .deps/foo.Po # Suppress previously recorded dependencies
+     % make foo
+     source='foo.c' object='foo.o' libtool=no \
+     depfile='.deps/foo.Po' tmpdepfile='.deps/foo.TPo' \
+     depmode=gcc /bin/sh ./depcomp \
+     gcc -I. -I. -g -O2 -c `test -f 'foo.c' || echo './'`foo.c
+     foo.c:2: bindir.h: No such file or directory
+     make: *** [foo.o] Error 1
+
+Recording Dependencies manually
+...............................
+
+Usually people are happy enough with 'BUILT_SOURCES' because they never
+build targets such as 'make foo' before 'make all', as in the previous
+example.  However if this matters to you, you can avoid 'BUILT_SOURCES'
+and record such dependencies explicitly in the 'Makefile.am'.
+
+     bin_PROGRAMS = foo
+     foo_SOURCES = foo.c
+     nodist_foo_SOURCES = bindir.h
+     foo.$(OBJEXT): bindir.h
+     CLEANFILES = bindir.h
+     bindir.h: Makefile
+             echo '#define bindir "$(bindir)"' >$@
+
+   You don't have to list _all_ the dependencies of 'foo.o' explicitly,
+only those that might need to be built.  If a dependency already exists,
+it will not hinder the first compilation and will be recorded by the
+normal dependency tracking code.  (Note that after this first
+compilation the dependency tracking code will also have recorded the
+dependency between 'foo.o' and 'bindir.h'; so our explicit dependency is
+really useful to the first build only.)
+
+   Adding explicit dependencies like this can be a bit dangerous if you
+are not careful enough.  This is due to the way Automake tries not to
+overwrite your rules (it assumes you know better than it).
+'foo.$(OBJEXT): bindir.h' supersedes any rule Automake may want to
+output to build 'foo.$(OBJEXT)'.  It happens to work in this case
+because Automake doesn't have to output any 'foo.$(OBJEXT):' target: it
+relies on a suffix rule instead (i.e., '.c.$(OBJEXT):').  Always check
+the generated 'Makefile.in' if you do this.
+
+Build 'bindir.h' from 'configure'
+.................................
+
+It's possible to define this preprocessor macro from 'configure', either
+in 'config.h' (*note Defining Directories: (autoconf)Defining
+Directories.), or by processing a 'bindir.h.in' file using
+'AC_CONFIG_FILES' (*note Configuration Actions: (autoconf)Configuration
+Actions.).
+
+   At this point it should be clear that building 'bindir.h' from
+'configure' works well for this example.  'bindir.h' will exist before
+you build any target, hence will not cause any dependency issue.
+
+   The Makefile can be shrunk as follows.  We do not even have to
+mention 'bindir.h'.
+
+     bin_PROGRAMS = foo
+     foo_SOURCES = foo.c
+
+   However, it's not always possible to build sources from 'configure',
+especially when these sources are generated by a tool that needs to be
+built first.
+
+Build 'bindir.c', not 'bindir.h'.
+.................................
+
+Another attractive idea is to define 'bindir' as a variable or function
+exported from 'bindir.o', and build 'bindir.c' instead of 'bindir.h'.
+
+     noinst_PROGRAMS = foo
+     foo_SOURCES = foo.c bindir.h
+     nodist_foo_SOURCES = bindir.c
+     CLEANFILES = bindir.c
+     bindir.c: Makefile
+             echo 'const char bindir[] = "$(bindir)";' >$@
+
+   'bindir.h' contains just the variable's declaration and doesn't need
+to be built, so it won't cause any trouble.  'bindir.o' is always
+dependent on 'bindir.c', so 'bindir.c' will get built first.
+
+Which is best?
+..............
+
+There is no panacea, of course.  Each solution has its merits and
+drawbacks.
+
+   You cannot use 'BUILT_SOURCES' if the ability to run 'make foo' on a
+clean tree is important to you.
+
+   You won't add explicit dependencies if you are leery of overriding an
+Automake rule by mistake.
+
+   Building files from './configure' is not always possible, nor is
+converting '.h' files into '.c' files.
+
+
+File: automake.info,  Node: Other GNU Tools,  Next: Documentation,  Prev: Other Objects,  Up: Top
+
+10 Other GNU Tools
+******************
+
+Since Automake is primarily intended to generate 'Makefile.in's for use
+in GNU programs, it tries hard to interoperate with other GNU tools.
+
+* Menu:
+
+* Emacs Lisp::                  Emacs Lisp
+* gettext::                     Gettext
+* Libtool::                     Libtool
+* Java::                        Java bytecode compilation (deprecated)
+* Python::                      Python
+
+
+File: automake.info,  Node: Emacs Lisp,  Next: gettext,  Up: Other GNU Tools
+
+10.1 Emacs Lisp
+===============
+
+Automake provides some support for Emacs Lisp.  The 'LISP' primary is
+used to hold a list of '.el' files.  Possible prefixes for this primary
+are 'lisp_' and 'noinst_'.  Note that if 'lisp_LISP' is defined, then
+'configure.ac' must run 'AM_PATH_LISPDIR' (*note Macros::).
+
+   Lisp sources are not distributed by default.  You can prefix the
+'LISP' primary with 'dist_', as in 'dist_lisp_LISP' or
+'dist_noinst_LISP', to indicate that these files should be distributed.
+
+   Automake will byte-compile all Emacs Lisp source files using the
+Emacs found by 'AM_PATH_LISPDIR', if any was found.  When performing
+such byte-compilation, the flags specified in the (developer-reserved)
+'AM_ELCFLAGS' and (user-reserved) 'ELCFLAGS' make variables will be
+passed to the Emacs invocation.
+
+   Byte-compiled Emacs Lisp files are not portable among all versions of
+Emacs, so it makes sense to turn this off if you expect sites to have
+more than one version of Emacs installed.  Furthermore, many packages
+don't actually benefit from byte-compilation.  Still, we recommend that
+you byte-compile your Emacs Lisp sources.  It is probably better for
+sites with strange setups to cope for themselves than to make the
+installation less nice for everybody else.
+
+   There are two ways to avoid byte-compiling.  Historically, we have
+recommended the following construct.
+
+     lisp_LISP = file1.el file2.el
+     ELCFILES =
+
+'ELCFILES' is an internal Automake variable that normally lists all
+'.elc' files that must be byte-compiled.  Automake defines 'ELCFILES'
+automatically from 'lisp_LISP'.  Emptying this variable explicitly
+prevents byte-compilation.
+
+   Since Automake 1.8, we now recommend using 'lisp_DATA' instead:
+
+     lisp_DATA = file1.el file2.el
+
+   Note that these two constructs are not equivalent.  '_LISP' will not
+install a file if Emacs is not installed, while '_DATA' will always
+install its files.
+
+
+File: automake.info,  Node: gettext,  Next: Libtool,  Prev: Emacs Lisp,  Up: Other GNU Tools
+
+10.2 Gettext
+============
+
+If 'AM_GNU_GETTEXT' is seen in 'configure.ac', then Automake turns on
+support for GNU gettext, a message catalog system for
+internationalization (*note Introduction: (gettext)Top.).
+
+   The 'gettext' support in Automake requires the addition of one or two
+subdirectories to the package: 'po' and possibly also 'intl'.  The
+latter is needed if 'AM_GNU_GETTEXT' is not invoked with the 'external'
+argument, or if 'AM_GNU_GETTEXT_INTL_SUBDIR' is used.  Automake ensures
+that these directories exist and are mentioned in 'SUBDIRS'.
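+
+   As a hedged sketch of a typical setup with an external gettext
+runtime (the version number is illustrative):
+
+     # configure.ac
+     AM_GNU_GETTEXT([external])
+     AM_GNU_GETTEXT_VERSION([0.19])
+
+     # top-level Makefile.am
+     SUBDIRS = po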
+
+
+File: automake.info,  Node: Libtool,  Next: Java,  Prev: gettext,  Up: Other GNU Tools
+
+10.3 Libtool
+============
+
+Automake provides support for GNU Libtool (*note Introduction:
+(libtool)Top.) with the 'LTLIBRARIES' primary.  *Note A Shared
+Library::.
+
+
+File: automake.info,  Node: Java,  Next: Python,  Prev: Libtool,  Up: Other GNU Tools
+
+10.4 Java bytecode compilation (deprecated)
+===========================================
+
+Automake provides some minimal support for Java bytecode compilation
+with the 'JAVA' primary (in addition to the support for compiling Java
+to native machine code; *note Java Support with gcj::).  Note however
+that _the interface and most features described here are deprecated_.
+Future Automake releases will strive to provide a better and cleaner
+interface, which however _won't be backward-compatible_; the present
+interface will probably be removed altogether some time after the
+introduction of the new interface (if that ever materializes).
+
+   Any '.java' files listed in a '_JAVA' variable will be compiled with
+'JAVAC' at build time.  By default, '.java' files are not included in
+the distribution; use the 'dist_' prefix to distribute them.
+
+   Here is a typical setup for distributing '.java' files and installing
+the '.class' files resulting from their compilation.
+
+     javadir = $(datadir)/java
+     dist_java_JAVA = a.java b.java ...
+
+   Currently Automake enforces the restriction that only one '_JAVA'
+primary can be used in a given 'Makefile.am'.  The reason for this
+restriction is that, in general, it isn't possible to know which
+'.class' files were generated from which '.java' files, so it would be
+impossible to know which files to install where.  For instance, a
+'.java' file can define multiple classes; the resulting '.class' file
+names cannot be predicted without parsing the '.java' file.
+
+   There are a few variables that are used when compiling Java sources:
+
+'JAVAC'
+     The name of the Java compiler.  This defaults to 'javac'.
+
+'JAVACFLAGS'
+     The flags to pass to the compiler.  This is considered to be a user
+     variable (*note User Variables::).
+
+'AM_JAVACFLAGS'
+     More flags to pass to the Java compiler.  This, and not
+     'JAVACFLAGS', should be used when it is necessary to put Java
+     compiler flags into 'Makefile.am'.
+
+'JAVAROOT'
+     The value of this variable is passed to the '-d' option to 'javac'.
+     It defaults to '$(top_builddir)'.
+
+'CLASSPATH_ENV'
+     This variable is a shell expression that is used to set the
+     'CLASSPATH' environment variable on the 'javac' command line.  (In
+     the future we will probably handle class path setting differently.)
+
+
+File: automake.info,  Node: Python,  Prev: Java,  Up: Other GNU Tools
+
+10.5 Python
+===========
+
+Automake provides support for Python compilation with the 'PYTHON'
+primary.  A typical setup is to call 'AM_PATH_PYTHON' in 'configure.ac'
+and use a line like the following in 'Makefile.am':
+
+     python_PYTHON = tree.py leave.py
+
+   Any files listed in a '_PYTHON' variable will be byte-compiled with
+'py-compile' at install time.  'py-compile' actually creates both
+standard ('.pyc') and optimized ('.pyo') byte-compiled versions of the
+source files.  Note that because byte-compilation occurs at install
+time, any files listed in 'noinst_PYTHON' will not be compiled.  Python
+source files are included in the distribution by default; prepend
+'nodist_' (as in 'nodist_python_PYTHON') to omit them.
+
+   Automake ships with an Autoconf macro called 'AM_PATH_PYTHON' that
+will determine some Python-related directory variables (see below).  If
+you have called 'AM_PATH_PYTHON' from 'configure.ac', then you may use
+the variables 'python_PYTHON' or 'pkgpython_PYTHON' to list Python
+source files in your 'Makefile.am', depending on where you want your
+files installed (see the definitions of 'pythondir' and 'pkgpythondir'
+below).
+
+ -- Macro: AM_PATH_PYTHON ([VERSION], [ACTION-IF-FOUND],
+     [ACTION-IF-NOT-FOUND])
+
+     Search for a Python interpreter on the system.  This macro takes
+     three optional arguments.  The first argument, if present, is the
+     minimum version of Python required for this package:
+     'AM_PATH_PYTHON' will skip any Python interpreter that is older
+     than VERSION.  If an interpreter is found and satisfies VERSION,
+     then ACTION-IF-FOUND is run.  Otherwise, ACTION-IF-NOT-FOUND is
+     run.
+
+     If ACTION-IF-NOT-FOUND is not specified, as in the following
+     example, the default is to abort 'configure'.
+
+          AM_PATH_PYTHON([2.2])
+
+     This is fine when Python is an absolute requirement for the
+     package.  If Python >= 2.5 was only _optional_ to the package,
+     'AM_PATH_PYTHON' could be called as follows.
+
+          AM_PATH_PYTHON([2.5],, [:])
+
+     If the 'PYTHON' variable is set when 'AM_PATH_PYTHON' is called,
+     then that will be the only Python interpreter that is tried.
+
+     'AM_PATH_PYTHON' creates the following output variables based on
+     the Python installation found during configuration.
+
+'PYTHON'
+     The name of the Python executable, or ':' if no suitable
+     interpreter could be found.
+
+     Assuming ACTION-IF-NOT-FOUND is used (otherwise './configure' will
+     abort if Python is absent), the value of 'PYTHON' can be used to
+     set up a conditional in order to disable the relevant part of a
+     build as follows.
+
+          AM_PATH_PYTHON(,, [:])
+          AM_CONDITIONAL([HAVE_PYTHON], [test "$PYTHON" != :])
+
+'PYTHON_VERSION'
+     The Python version number, in the form MAJOR.MINOR (e.g., '2.5').
+     This is currently the value of 'sys.version[:3]'.
+
+'PYTHON_PREFIX'
+     The string '${prefix}'.  This term may be used in future work that
+     needs the contents of Python's 'sys.prefix', but general consensus
+     is to always use the value from 'configure'.
+
+'PYTHON_EXEC_PREFIX'
+     The string '${exec_prefix}'.  This term may be used in future work
+     that needs the contents of Python's 'sys.exec_prefix', but general
+     consensus is to always use the value from 'configure'.
+
+'PYTHON_PLATFORM'
+     The canonical name used by Python to describe the operating system,
+     as given by 'sys.platform'.  This value is sometimes needed when
+     building Python extensions.
+
+'pythondir'
+     The directory name for the 'site-packages' subdirectory of the
+     standard Python install tree.
+
+'pkgpythondir'
+     This is the directory under 'pythondir' that is named after the
+     package.  That is, it is '$(pythondir)/$(PACKAGE)'.  It is provided
+     as a convenience.
+
+'pyexecdir'
+     This is the directory where Python extension modules (shared
+     libraries) should be installed.  An extension module written in C
+     could be declared as follows to Automake:
+
+          pyexec_LTLIBRARIES = quaternion.la
+          quaternion_la_SOURCES = quaternion.c support.c support.h
+          quaternion_la_LDFLAGS = -avoid-version -module
+
+'pkgpyexecdir'
+     This is a convenience variable that is defined as
+     '$(pyexecdir)/$(PACKAGE)'.
+
+   All of these directory variables have values that start with either
+'${prefix}' or '${exec_prefix}' unexpanded.  This works fine in
+'Makefiles', but it makes these variables hard to use in 'configure'.
+This is mandated by the GNU coding standards, so that the user can run
+'make prefix=/foo install'.  The Autoconf manual has a section with more
+details on this topic (*note Installation Directory Variables:
+(autoconf)Installation Directory Variables.).  See also *note Hard-Coded
+Install Paths::.
+
+
+File: automake.info,  Node: Documentation,  Next: Install,  Prev: Other GNU Tools,  Up: Top
+
+11 Building documentation
+*************************
+
+Currently Automake provides support for Texinfo and man pages.
+
+* Menu:
+
+* Texinfo::                     Texinfo
+* Man Pages::                   Man pages
+
+
+File: automake.info,  Node: Texinfo,  Next: Man Pages,  Up: Documentation
+
+11.1 Texinfo
+============
+
+If the current directory contains Texinfo source, you must declare it
+with the 'TEXINFOS' primary.  Generally Texinfo files are converted into
+info, and thus the 'info_TEXINFOS' variable is most commonly used here.
+Any Texinfo source file should have the '.texi' extension.  Automake
+also accepts '.txi' or '.texinfo' extensions, but their use is
+discouraged now, and will elicit runtime warnings.
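+
+   For example (the manual name is hypothetical):
+
+     info_TEXINFOS = foo.texi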
+
+   Automake generates rules to build '.info', '.dvi', '.ps', '.pdf' and
+'.html' files from your Texinfo sources.  Following the GNU Coding
+Standards, only the '.info' files are built by 'make all' and installed
+by 'make install' (unless you use 'no-installinfo', see below).
+Furthermore, '.info' files are automatically distributed so that Texinfo
+is not a prerequisite for installing your package.
+
+   It is worth noting that, contrary to what happens with the other
+formats, the generated '.info' files are by default placed in 'srcdir'
+rather than in the 'builddir'.  This can be changed with the
+'info-in-builddir' option.
+
+   Other documentation formats can be built on request by 'make dvi',
+'make ps', 'make pdf' and 'make html', and they can be installed with
+'make install-dvi', 'make install-ps', 'make install-pdf' and 'make
+install-html' explicitly.  'make uninstall' will remove everything: the
+Texinfo documentation installed by default as well as all the above
+optional formats.
+
+   All of these targets can be extended using '-local' rules (*note
+Extending::).
+
+   If the '.texi' file '@include's 'version.texi', then that file will
+be automatically generated.  The file 'version.texi' defines four
+Texinfo flags you can reference using '@value{EDITION}',
+'@value{VERSION}', '@value{UPDATED}', and '@value{UPDATED-MONTH}'.
+
+'EDITION'
+'VERSION'
+     Both of these flags hold the version number of your program.  They
+     are kept separate for clarity.
+
+'UPDATED'
+     This holds the date the primary '.texi' file was last modified.
+
+'UPDATED-MONTH'
+     This holds the name of the month in which the primary '.texi' file
+     was last modified.
+
+   The 'version.texi' support requires the 'mdate-sh' script; this
+script is supplied with Automake and automatically included when
+'automake' is invoked with the '--add-missing' option.
+
+   If you have multiple Texinfo files, and you want to use the
+'version.texi' feature, then you have to have a separate version file
+for each Texinfo file.  Automake will treat any include in a Texinfo
+file that matches 'vers*.texi' just as an automatically generated
+version file.
+
+   Sometimes an info file actually depends on more than one '.texi'
+file.  For instance, in GNU Hello, 'hello.texi' includes the file
+'fdl.texi'.  You can tell Automake about these dependencies using the
+'TEXI_TEXINFOS' variable.  Here is how GNU Hello does it:
+
+     info_TEXINFOS = hello.texi
+     hello_TEXINFOS = fdl.texi
+
+   By default, Automake requires the file 'texinfo.tex' to appear in the
+same directory as the 'Makefile.am' file that lists the '.texi' files.
+If you used 'AC_CONFIG_AUX_DIR' in 'configure.ac' (*note Finding
+'configure' Input: (autoconf)Input.), then 'texinfo.tex' is looked for
+there.  In both cases, 'automake' then supplies 'texinfo.tex' if
+'--add-missing' is given, and takes care of its distribution.  However,
+if you set the 'TEXINFO_TEX' variable (see below), it overrides the
+location of the file and turns off its installation into the source as
+well as its distribution.
+
+   The option 'no-texinfo.tex' can be used to eliminate the requirement
+for the file 'texinfo.tex'.  Use of the variable 'TEXINFO_TEX' is
+preferable, however, because that allows the 'dvi', 'ps', and 'pdf'
+targets to still work.
+
+   Automake generates an 'install-info' rule; some people apparently use
+this.  By default, info pages are installed by 'make install', so
+running 'make install-info' is pointless.  This can be prevented via the
+'no-installinfo' option.  In this case, '.info' files are not installed
+by default, and the user must request this explicitly using 'make
+install-info'.
+
+   By default, 'make install-info' and 'make uninstall-info' will try to
+run the 'install-info' program (if available) to update (or
+create/remove) the '${infodir}/dir' index.  If this is undesired, it can
+be prevented by exporting the 'AM_UPDATE_INFO_DIR' variable to "'no'".
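+
+   One way to do this from the shell, as a hedged sketch:
+
+     AM_UPDATE_INFO_DIR=no make install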
+
+   The following variables are used by the Texinfo build rules.
+
+'MAKEINFO'
+     The name of the program invoked to build '.info' files.  This
+     variable is defined by Automake.  If the 'makeinfo' program is
+     found on the system then it will be used by default; otherwise
+     'missing' will be used instead.
+
+'MAKEINFOHTML'
+     The command invoked to build '.html' files.  Automake defines this
+     to '$(MAKEINFO) --html'.
+
+'MAKEINFOFLAGS'
+     User flags passed to each invocation of '$(MAKEINFO)' and
+     '$(MAKEINFOHTML)'.  This user variable (*note User Variables::) is
+     not expected to be defined in any 'Makefile'; it can be used by
+     users to pass extra flags to suit their needs.
+
+'AM_MAKEINFOFLAGS'
+'AM_MAKEINFOHTMLFLAGS'
+     Maintainer flags passed to each 'makeinfo' invocation.  Unlike
+     'MAKEINFOFLAGS', these variables are meant to be defined by
+     maintainers in 'Makefile.am'.  '$(AM_MAKEINFOFLAGS)' is passed to
+     'makeinfo' when building '.info' files; and
+     '$(AM_MAKEINFOHTMLFLAGS)' is used when building '.html' files.
+
+     For instance, the following setting can be used to obtain one
+     single '.html' file per manual, without node separators.
+          AM_MAKEINFOHTMLFLAGS = --no-headers --no-split
+
+     'AM_MAKEINFOHTMLFLAGS' defaults to '$(AM_MAKEINFOFLAGS)'.  This
+     means that defining 'AM_MAKEINFOFLAGS' without defining
+     'AM_MAKEINFOHTMLFLAGS' will impact builds of both '.info' and
+     '.html' files.
+
+'TEXI2DVI'
+     The name of the command that converts a '.texi' file into a '.dvi'
+     file.  This defaults to 'texi2dvi', a script that ships with the
+     Texinfo package.
+
+'TEXI2PDF'
+     The name of the command that translates a '.texi' file into a
+     '.pdf' file.  This defaults to '$(TEXI2DVI) --pdf --batch'.
+
+'DVIPS'
+     The name of the command that builds a '.ps' file out of a '.dvi'
+     file.  This defaults to 'dvips'.
+
+'TEXINFO_TEX'
+
+     If your package has Texinfo files in many directories, you can use
+     the variable 'TEXINFO_TEX' to tell Automake where to find the
+     canonical 'texinfo.tex' for your package.  The value of this
+     variable should be the relative path from the current 'Makefile.am'
+     to 'texinfo.tex':
+
+          TEXINFO_TEX = ../doc/texinfo.tex
+
+
+File: automake.info,  Node: Man Pages,  Prev: Texinfo,  Up: Documentation
+
+11.2 Man Pages
+==============
+
+A package can also include man pages (but see the GNU standards on this
+matter, *note (standards)Man Pages::.)  Man pages are declared using the
+'MANS' primary.  Generally the 'man_MANS' variable is used.  Man pages
+are automatically installed in the correct subdirectory of 'mandir',
+based on the file extension.
+
+   File extensions such as '.1c' are handled by looking for the valid
+part of the extension and using that to determine the correct
+subdirectory of 'mandir'.  Valid section names are the digits '0'
+through '9', and the letters 'l' and 'n'.
+
+   Sometimes developers prefer to name a man page something like
+'foo.man' in the source, and then rename it to have the correct suffix,
+for example 'foo.1', when installing the file.  Automake also supports
+this mode.  For a valid section named SECTION, there is a corresponding
+directory named 'manSECTIONdir', and a corresponding '_MANS' variable.
+Files listed in such a variable are installed in the indicated section.
+If the file already has a valid suffix, then it is installed as-is;
+otherwise the file suffix is changed to match the section.
+
+   For instance, consider this example:
+     man1_MANS = rename.man thesame.1 alsothesame.1c
+
+In this case, 'rename.man' will be renamed to 'rename.1' when installed,
+but the other files will keep their names.
+
+   By default, man pages are installed by 'make install'.  However,
+since the GNU project does not require man pages, many maintainers do
+not expend effort to keep the man pages up to date.  In these cases, the
+'no-installman' option will prevent the man pages from being installed
+by default.  The user can still explicitly install them via 'make
+install-man'.
+
+   For fast installation with many files, it is preferable to use
+'manSECTION_MANS' rather than 'man_MANS', and to use files that do not
+need to be renamed.
+
+   Man pages are not currently considered to be source, because it is
+not uncommon for man pages to be automatically generated.  Therefore
+they are not automatically included in the distribution.  However, this
+can be changed by use of the 'dist_' prefix.  For instance here is how
+to distribute and install the two man pages of GNU 'cpio' (which
+includes both Texinfo documentation and man pages):
+
+     dist_man_MANS = cpio.1 mt.1
+
+   The 'nobase_' prefix is meaningless for man pages and is disallowed.
+
+   Executables and manpages may be renamed upon installation (*note
+Renaming::).  For manpages this can be avoided by use of the 'notrans_'
+prefix.  For instance, suppose an executable 'foo' that gives access to
+a library function 'foo' from the command line.  The way to avoid
+renaming of the 'foo.3' manpage is:
+
+     man_MANS = foo.1
+     notrans_man_MANS = foo.3
+
+   'notrans_' must be specified first when used in conjunction with
+either 'dist_' or 'nodist_' (*note Fine-grained Distribution Control::).
+For instance:
+
+     notrans_dist_man3_MANS = bar.3
+
+
+File: automake.info,  Node: Install,  Next: Clean,  Prev: Documentation,  Up: Top
+
+12 What Gets Installed
+**********************
+
+Naturally, Automake handles the details of actually installing your
+program once it has been built.  All files named by the various
+primaries are automatically installed in the appropriate places when the
+user runs 'make install'.
+
+* Menu:
+
+* Basics of Installation::      What gets installed where
+* The Two Parts of Install::    Installing data and programs separately
+* Extending Installation::      Adding your own rules for installation
+* Staged Installs::             Installation in a temporary location
+* Install Rules for the User::  Useful additional rules
+
+
+File: automake.info,  Node: Basics of Installation,  Next: The Two Parts of Install,  Up: Install
+
+12.1 Basics of Installation
+===========================
+
+A file named in a primary is installed by copying the built file into
+the appropriate directory.  The base name of the file is used when
+installing.
+
+     bin_PROGRAMS = hello subdir/goodbye
+
+   In this example, both 'hello' and 'goodbye' will be installed in
+'$(bindir)'.
+
+   Sometimes it is useful to avoid the basename step at install time.
+For instance, you might have a number of header files in subdirectories
+of the source tree that are laid out precisely how you want to install
+them.  In this situation you can use the 'nobase_' prefix to suppress
+the base name step.  For example:
+
+     nobase_include_HEADERS = stdio.h sys/types.h
+
+will install 'stdio.h' in '$(includedir)' and 'types.h' in
+'$(includedir)/sys'.
+
+   For most file types, Automake will install multiple files at once,
+while avoiding command line length issues (*note Length Limitations::).
+Since some 'install' programs will not install the same file twice in
+one invocation, you may need to ensure that file lists are unique within
+one variable such as 'nobase_include_HEADERS' above.
+
+   You should not rely on the order in which files listed in one
+variable are installed.  Likewise, to cater for parallel make, you
+should not rely on any particular file installation order even among
+different file types (library dependencies are an exception here).
+
+
+File: automake.info,  Node: The Two Parts of Install,  Next: Extending Installation,  Prev: Basics of Installation,  Up: Install
+
+12.2 The Two Parts of Install
+=============================
+
+Automake generates separate 'install-data' and 'install-exec' rules, in
+case the installer is installing on multiple machines that share
+directory structure--these targets allow the machine-independent parts
+to be installed only once.  'install-exec' installs platform-dependent
+files, and 'install-data' installs platform-independent files.  The
+'install' target depends on both of these targets.  While Automake tries
+to automatically segregate objects into the correct category, the
+'Makefile.am' author is, in the end, responsible for making sure this is
+done correctly.
+
+   Variables using the standard directory prefixes 'data', 'info',
+'man', 'include', 'oldinclude', 'pkgdata', or 'pkginclude' are installed
+by 'install-data'.
+
+   Variables using the standard directory prefixes 'bin', 'sbin',
+'libexec', 'sysconf', 'localstate', 'lib', or 'pkglib' are installed by
+'install-exec'.
+
+   For instance, 'data_DATA' files are installed by 'install-data',
+while 'bin_PROGRAMS' files are installed by 'install-exec'.
+
+   Any variable using a user-defined directory prefix with 'exec' in the
+name (e.g., 'myexecbin_PROGRAMS') is installed by 'install-exec'.  All
+other user-defined prefixes are installed by 'install-data'.
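+
+   For illustration (the directory names and files are hypothetical):
+
+     # "exec" in the prefix: handled by install-exec.
+     myexecbindir = $(libexecdir)/frob
+     myexecbin_PROGRAMS = frob-helper
+
+     # No "exec" in the prefix: handled by install-data.
+     mydatadir = $(datadir)/frob
+     mydata_DATA = frob.cfg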
+
+
+File: automake.info,  Node: Extending Installation,  Next: Staged Installs,  Prev: The Two Parts of Install,  Up: Install
+
+12.3 Extending Installation
+===========================
+
+It is possible to extend this mechanism by defining an
+'install-exec-local' or 'install-data-local' rule.  If these rules
+exist, they will be run at 'make install' time.  These rules can do
+almost anything; care is required.
+
+   Automake also supports two install hooks, 'install-exec-hook' and
+'install-data-hook'.  These hooks are run after all other install rules
+of the appropriate type, exec or data, have completed.  So, for
+instance, it is possible to perform post-installation modifications
+using an install hook.  *Note Extending::, for some examples.
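+
+   A hedged sketch of a post-installation step (the file name and mode
+are hypothetical); note the use of '$(DESTDIR)' so staged installs keep
+working:
+
+     install-data-hook:
+             chmod 600 $(DESTDIR)$(sysconfdir)/frob.conf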
+
+
+File: automake.info,  Node: Staged Installs,  Next: Install Rules for the User,  Prev: Extending Installation,  Up: Install
+
+12.4 Staged Installs
+====================
+
+Automake generates support for the 'DESTDIR' variable in all install
+rules.  'DESTDIR' is used during the 'make install' step to relocate
+install objects into a staging area.  Each object and path is prefixed
+with the value of 'DESTDIR' before being copied into the install area.
+Here is an example of typical DESTDIR usage:
+
+     mkdir /tmp/staging &&
+     make DESTDIR=/tmp/staging install
+
+   The 'mkdir' command avoids a security problem if the attacker creates
+a symbolic link from '/tmp/staging' to a victim area; then 'make' places
+install objects in a directory tree built under '/tmp/staging'.  If
+'/gnu/bin/foo' and '/gnu/share/aclocal/foo.m4' are to be installed, the
+above command would install '/tmp/staging/gnu/bin/foo' and
+'/tmp/staging/gnu/share/aclocal/foo.m4'.
+
+   This feature is commonly used to build install images and packages
+(*note DESTDIR::).
+
+   Support for 'DESTDIR' is implemented by coding it directly into the
+install rules.  If your 'Makefile.am' uses a local install rule (e.g.,
+'install-exec-local') or an install hook, then you must write that code
+to respect 'DESTDIR'.
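+
+   For instance, a hedged sketch of a local rule written to respect
+'DESTDIR' (the installed script name is hypothetical):
+
+     install-exec-local:
+             $(MKDIR_P) '$(DESTDIR)$(pkglibexecdir)'
+             $(INSTALL_SCRIPT) helper.sh '$(DESTDIR)$(pkglibexecdir)/helper'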
+
+   *Note (standards)Makefile Conventions::, for another usage example.
+
+
+File: automake.info,  Node: Install Rules for the User,  Prev: Staged Installs,  Up: Install
+
+12.5 Install Rules for the User
+===============================
+
+Automake also generates rules for targets 'uninstall', 'installdirs',
+and 'install-strip'.
+
+   Automake supports 'uninstall-local' and 'uninstall-hook'.  There is
+no notion of separate uninstalls for "exec" and "data", as these
+features would not provide additional functionality.
+
+   Note that 'uninstall' is not meant as a replacement for a real
+packaging tool.
+
+
+File: automake.info,  Node: Clean,  Next: Dist,  Prev: Install,  Up: Top
+
+13 What Gets Cleaned
+********************
+
+The GNU Makefile Standards specify a number of different clean rules.
+*Note Standard Targets for Users: (standards)Standard Targets.
+
+   Generally the files that can be cleaned are determined automatically
+by Automake.  Of course, Automake also recognizes some variables that
+can be defined to specify additional files to clean.  These variables
+are 'MOSTLYCLEANFILES', 'CLEANFILES', 'DISTCLEANFILES', and
+'MAINTAINERCLEANFILES'.
+
+   When cleaning involves more than deleting some hard-coded list of
+files, it is also possible to supplement the cleaning rules with your
+own commands.  Simply define a rule for any of the 'mostlyclean-local',
+'clean-local', 'distclean-local', or 'maintainer-clean-local' targets
+(*note Extending::).  A common case is deleting a directory, for
+instance, a directory created by the test suite:
+
+     clean-local:
+             -rm -rf testSubDir
+
+   Since 'make' allows only one set of rules for a given target, a more
+extensible way of writing this is to use a separate target listed as a
+dependency:
+
+     clean-local: clean-local-check
+     .PHONY: clean-local-check
+     clean-local-check:
+             -rm -rf testSubDir
+
+   As the GNU Standards aren't always explicit as to which files should
+be removed by which rule, we've adopted a heuristic that we believe was
+first formulated by François Pinard:
+
+   * If 'make' built it, and it is commonly something that one would
+     want to rebuild (for instance, a '.o' file), then 'mostlyclean'
+     should delete it.
+
+   * Otherwise, if 'make' built it, then 'clean' should delete it.
+
+   * If 'configure' built it, then 'distclean' should delete it.
+
+   * If the maintainer built it (for instance, a '.info' file), then
+     'maintainer-clean' should delete it.  However 'maintainer-clean'
+     should not delete anything that needs to exist in order to run
+     './configure && make'.
+
+   We recommend that you follow this same set of heuristics in your
+'Makefile.am'.
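+
+   As a hedged illustration of mapping this heuristic onto the
+variables listed above (all file names are hypothetical):
+
+     # Intermediate files one commonly wants to rebuild: mostlyclean.
+     MOSTLYCLEANFILES = scratch.tmp
+     # Built by 'make': clean.
+     CLEANFILES = generated.h
+     # Created by './configure': distclean.
+     DISTCLEANFILES = frob.cfg
+     # Built by the maintainer with special tools: maintainer-clean.
+     MAINTAINERCLEANFILES = $(srcdir)/parser.c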
+
+
+File: automake.info,  Node: Dist,  Next: Tests,  Prev: Clean,  Up: Top
+
+14 What Goes in a Distribution
+******************************
+
+* Menu:
+
+* Basics of Distribution::      Files distributed by default
+* Fine-grained Distribution Control::  'dist_' and 'nodist_' prefixes
+* The dist Hook::               A target for last-minute distribution changes
+* Checking the Distribution::   'make distcheck' explained
+* The Types of Distributions::  A variety of formats and compression methods
+
+
+File: automake.info,  Node: Basics of Distribution,  Next: Fine-grained Distribution Control,  Up: Dist
+
+14.1 Basics of Distribution
+===========================
+
+The 'dist' rule in the generated 'Makefile.in' can be used to generate a
+gzipped 'tar' file and other flavors of archive for distribution.  The
+file is named based on the 'PACKAGE' and 'VERSION' variables defined by
+'AM_INIT_AUTOMAKE' (*note Macros::); more precisely the gzipped 'tar'
+file is named 'PACKAGE-VERSION.tar.gz'.  You can use the 'make' variable
+'GZIP_ENV' to control how gzip is run.  The default setting is '--best'.
+
+   For the most part, the files to distribute are automatically found by
+Automake: all source files are automatically included in a distribution,
+as are all 'Makefile.am' and 'Makefile.in' files.  Automake also has a
+built-in list of commonly used files that are automatically included if
+they are found in the current directory (either physically, or as the
+target of a 'Makefile.am' rule); this list is printed by 'automake
+--help'.  Note that some files in this list are actually distributed
+only if certain other conditions hold (for example, the 'config.h.top'
+and 'config.h.bot' files are automatically distributed only if, e.g.,
+'AC_CONFIG_HEADERS([config.h])' is used in 'configure.ac').  Also, files
+that are read by 'configure' (i.e. the source files corresponding to the
+files specified in various Autoconf macros such as 'AC_CONFIG_FILES' and
+siblings) are automatically distributed.  Files included in a
+'Makefile.am' (using 'include') or in 'configure.ac' (using
+'m4_include'), and helper scripts installed with 'automake
+--add-missing' are also distributed.
+
+   Still, sometimes there are files that must be distributed, but which
+are not covered in the automatic rules.  These files should be listed in
+the 'EXTRA_DIST' variable.  You can mention files from subdirectories in
+'EXTRA_DIST'.
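+
+   For example (the file names are hypothetical):
+
+     EXTRA_DIST = bootstrap README-hacking doc/design-notes.txt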
+
+   You can also mention a directory in 'EXTRA_DIST'; in this case the
+entire directory will be recursively copied into the distribution.
+Please note that this will also copy _everything_ in the directory,
+including, e.g., Subversion's '.svn' private directories or CVS/RCS
+version control files; thus we recommend against using this feature
+as-is.  However, you can use the 'dist-hook' feature to ameliorate the
+problem; *note The dist Hook::.
+
+   If you define 'SUBDIRS', Automake will recursively include the
+subdirectories in the distribution.  If 'SUBDIRS' is defined
+conditionally (*note Conditionals::), Automake will normally include all
+directories that could possibly appear in 'SUBDIRS' in the distribution.
+If you need to specify the set of directories conditionally, you can set
+the variable 'DIST_SUBDIRS' to the exact list of subdirectories to
+include in the distribution (*note Conditional Subdirectories::).
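+
+   A hedged sketch of a conditionally built subdirectory that is
+nonetheless always distributed (the conditional and directory names are
+hypothetical):
+
+     if WANT_GTK
+     MAYBE_GTK = gtk
+     endif
+     SUBDIRS = lib $(MAYBE_GTK)
+     DIST_SUBDIRS = lib gtk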
+
+
+File: automake.info,  Node: Fine-grained Distribution Control,  Next: The dist Hook,  Prev: Basics of Distribution,  Up: Dist
+
+14.2 Fine-grained Distribution Control
+======================================
+
+Sometimes you need tighter control over what does _not_ go into the
+distribution; for instance, you might have source files that are
+generated and that you do not want to distribute.  In this case Automake
+gives fine-grained control using the 'dist' and 'nodist' prefixes.  Any
+primary or '_SOURCES' variable can be prefixed with 'dist_' to add the
+listed files to the distribution.  Similarly, 'nodist_' can be used to
+omit the files from the distribution.
+
+   As an example, here is how you would cause some data to be
+distributed while leaving some source code out of the distribution:
+
+     dist_data_DATA = distribute-this
+     bin_PROGRAMS = foo
+     nodist_foo_SOURCES = do-not-distribute.c
+
+
+File: automake.info,  Node: The dist Hook,  Next: Checking the Distribution,  Prev: Fine-grained Distribution Control,  Up: Dist
+
+14.3 The dist Hook
+==================
+
+Occasionally it is useful to be able to change the distribution before
+it is packaged up.  If the 'dist-hook' rule exists, it is run after the
+distribution directory is filled, but before the actual distribution
+archives are created.  One way to use this is for removing unnecessary
+files that get recursively included by specifying a directory in
+'EXTRA_DIST':
+
+     EXTRA_DIST = doc
+     dist-hook:
+             rm -rf `find $(distdir)/doc -type d -name .svn`
+
+Note that the 'dist-hook' recipe shouldn't assume that the regular files
+in the distribution directory are writable; this might not be the case
+if one is packaging from a read-only source tree, or when a 'make
+distcheck' is being done.  For similar reasons, the recipe shouldn't
+assume that the subdirectories put into the distribution directory as
+an effect of having them listed in 'EXTRA_DIST' are writable.  So, if
+the 'dist-hook' recipe wants to modify the content of an existing file
+(or 'EXTRA_DIST' subdirectory) in the distribution directory, it should
+explicitly make it writable first:
+
+     EXTRA_DIST = README doc
+     dist-hook:
+             chmod u+w $(distdir)/README $(distdir)/doc
+             echo "Distribution date: `date`" >> README
+             rm -f $(distdir)/doc/HACKING
+
+   Two variables that come handy when writing 'dist-hook' rules are
+'$(distdir)' and '$(top_distdir)'.
+
+   '$(distdir)' points to the directory where the 'dist' rule will copy
+files from the current directory before creating the tarball.  If you
+are at the top-level directory, then 'distdir = $(PACKAGE)-$(VERSION)'.
+When used from subdirectory named 'foo/', then 'distdir =
+../$(PACKAGE)-$(VERSION)/foo'.  '$(distdir)' can be a relative or
+absolute path, do not assume any form.
+
+   '$(top_distdir)' always points to the root directory of the
+distributed tree.  At the top-level it's equal to '$(distdir)'.  In the
+'foo/' subdirectory 'top_distdir = ../$(PACKAGE)-$(VERSION)'.
+'$(top_distdir)' too can be a relative or absolute path.
+
+   Note that when packages are nested using 'AC_CONFIG_SUBDIRS' (*note
+Subpackages::), then '$(distdir)' and '$(top_distdir)' are relative to
+the package where 'make dist' was run, not to any sub-packages involved.
+
+
+File: automake.info,  Node: Checking the Distribution,  Next: The Types of Distributions,  Prev: The dist Hook,  Up: Dist
+
+14.4 Checking the Distribution
+==============================
+
+Automake also generates a 'distcheck' rule that can be of help to ensure
+that a given distribution will actually work.  Simplifying a bit, we can
+say this rule first makes a distribution, and then, _operating from it_,
+takes the following steps:
+   * tries to do a 'VPATH' build (*note VPATH Builds::), with the
+     'srcdir' and all its content made _read-only_;
+   * runs the test suite (with 'make check') on this fresh build;
+   * installs the package in a temporary directory (with 'make
+     install'), and runs the test suite on the resulting
+     installation (with 'make installcheck');
+   * checks that the package can be correctly uninstalled (by 'make
+     uninstall') and cleaned (by 'make distclean');
+   * finally, makes another tarball to ensure the distribution is
+     self-contained.
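+
+   Running the check is then simply a matter of:
+
+     make distcheck
+
+which, on success, leaves the freshly built distribution archives in the
+top-level build directory.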
+
+DISTCHECK_CONFIGURE_FLAGS
+-------------------------
+
+Building the package involves running './configure'.  If you need to
+supply additional flags to 'configure', define them in the
+'AM_DISTCHECK_CONFIGURE_FLAGS' variable in your top-level 'Makefile.am'.
+The user can still extend or override the flags provided there by
+defining the 'DISTCHECK_CONFIGURE_FLAGS' variable on the command line
+when invoking 'make'.  It's worth noting that 'make distcheck' needs
+complete control over the 'configure' options '--srcdir' and '--prefix',
+so those options cannot be overridden by 'AM_DISTCHECK_CONFIGURE_FLAGS'
+nor by 'DISTCHECK_CONFIGURE_FLAGS'.
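+
+   For illustration only (the '--enable-extra-warnings' option is a
+made-up example, not one your 'configure' necessarily accepts), the
+developer-provided flags go into the top-level 'Makefile.am':
+
+     # Placeholder option; substitute one your configure really accepts.
+     AM_DISTCHECK_CONFIGURE_FLAGS = --enable-extra-warnings
+
+while a user can extend or override them when invoking 'make':
+
+     make distcheck DISTCHECK_CONFIGURE_FLAGS='--disable-extra-warnings'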
+
+   Also note that developers are encouraged to strive to make their code
+buildable without requiring any special configure option; thus, in
+general, you shouldn't define 'AM_DISTCHECK_CONFIGURE_FLAGS'.  However,
+there might be a few scenarios in which the use of this variable is
+justified.  GNU 'm4' offers an example.  GNU 'm4' configures by default
+with its experimental and seldom used "changeword" feature disabled; so
+in its case it is useful to have 'make distcheck' run configure with the
+'--with-changeword' option, to ensure that the code for changeword
+support still compiles correctly.  GNU 'm4' also employs the
+'AM_DISTCHECK_CONFIGURE_FLAGS' variable to stress-test the use of
+'--program-prefix=g', since at one point the 'm4' build system had a bug
+where 'make installcheck' was wrongly assuming it could blindly test
+"'m4'", rather than the just-installed "'gm4'".
+
+distcheck-hook
+--------------
+
+If the 'distcheck-hook' rule is defined in your top-level 'Makefile.am',
+then it will be invoked by 'distcheck' after the new distribution has
+been unpacked, but before the unpacked copy is configured and built.
+Your 'distcheck-hook' can do almost anything, though as always caution
+is advised.  Generally this hook is used to check for potential
+distribution errors not caught by the standard mechanism.  Note that
+'distcheck-hook' as well as 'AM_DISTCHECK_CONFIGURE_FLAGS' and
+'DISTCHECK_CONFIGURE_FLAGS' are not honored in a subpackage
+'Makefile.am', but the flags from 'AM_DISTCHECK_CONFIGURE_FLAGS' and
+'DISTCHECK_CONFIGURE_FLAGS' are passed down to the 'configure' script of
+the subpackage.
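+
+   For instance, a minimal 'distcheck-hook' (the check below is purely
+illustrative) could verify that a file you always want to ship actually
+made it into the distribution:
+
+     # README here is just an example of a file you expect to ship.
+     distcheck-hook:
+             test -f $(distdir)/README || \
+               { echo "README missing from the distribution" >&2; exit 1; }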
+
+distcleancheck
+--------------
+
+Speaking of potential distribution errors, 'distcheck' also ensures that
+the 'distclean' rule actually removes all built files.  This is done by
+running 'make distcleancheck' at the end of the 'VPATH' build.  By
+default, 'distcleancheck' will run 'distclean' and then make sure the
+build tree has been emptied by running '$(distcleancheck_listfiles)'.
+Usually this check will find generated files that you forgot to add to
+the 'DISTCLEANFILES' variable (*note Clean::).
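+
+   Such a leftover is typically dealt with by listing it in
+'Makefile.am' (the file name below is only an example):
+
+     # generated-config.h stands for whatever file your rules create.
+     DISTCLEANFILES = generated-config.h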
+
+   The 'distcleancheck' behavior should be OK for most packages; if it
+is not, you can override the definition of either the 'distcleancheck'
+rule or the '$(distcleancheck_listfiles)' variable.  For instance, to
+disable 'distcleancheck' completely, add the
+following rule to your top-level 'Makefile.am':
+
+     distcleancheck:
+             @:
+
+   If you want 'distcleancheck' to ignore built files that have not been
+cleaned because they are also part of the distribution, add the
+following definition instead:
+
+     distcleancheck_listfiles = \
+       find . -type f -exec sh -c 'test -f $(srcdir)/$$1 || echo $$1' \
+            sh '{}' ';'
+
+   The above definition is not the default because it's usually an error
+if your Makefiles cause some distributed files to be rebuilt when the
+user builds the package.  (Think about the user missing the tool required
+to build the file; or if the required tool is built by your package,
+consider the cross-compilation case where it can't be run.)  There is an
+entry in the FAQ about this (*note Errors with distclean::); make sure
+you read it before playing with 'distcleancheck_listfiles'.
+
+distuninstallcheck
+------------------
+
+'distcheck' also checks that the 'uninstall' rule works properly, both
+for ordinary and 'DESTDIR' builds.  It does this by invoking 'make
+uninstall', and then it checks the install tree to see if any files are
+left over.  This check will make sure that you correctly coded your
+'uninstall'-related rules.
+
+   By default, the checking is done by the 'distuninstallcheck' rule,
+and the list of files in the install tree is generated by
+'$(distuninstallcheck_listfiles)' (this is a variable whose value is a
+shell command to run that prints the list of files to stdout).
+
+   Either of these can be overridden to modify the behavior of
+'distcheck'.  For instance, to disable this check completely, you would
+write:
+
+     distuninstallcheck:
+             @:
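+
+   Alternatively, if your package knowingly leaves a file behind (the
+Info 'dir' file updated by 'install-info' is a common case), you could
+instead override '$(distuninstallcheck_listfiles)' to filter it out of
+the listing.  A sketch, to be adapted to your own install layout:
+
+     # Adjust the pattern to the file(s) your package leaves installed.
+     distuninstallcheck_listfiles = \
+       find . -type f -print | grep -v '/share/info/dir$$'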
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info-2 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info-2
new file mode 100644
index 0000000..213968d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/automake.info-2
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/coreutils.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/coreutils.info
new file mode 100644
index 0000000..27bba64
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/coreutils.info
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/dir b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/dir
new file mode 100644
index 0000000..b5b43ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/dir
@@ -0,0 +1,56 @@
+This is the file .../info/dir, which contains the
+topmost node of the Info hierarchy, called (dir)Top.
+The first time you invoke Info you start off looking at this node.
+
+File: dir,	Node: Top	This is the top of the INFO tree
+
+  This (the Directory node) gives a menu of major topics.
+  Typing "q" exits, "?" lists all Info commands, "d" returns here,
+  "h" gives a primer for first-timers,
+  "mEmacs<Return>" visits the Emacs manual, etc.
+
+  In Emacs, you can click mouse button 2 on a menu item or cross reference
+  to select it.
+
+* Menu:
+
+GNU organization
+* Standards: (standards).       GNU coding standards.
+
+GNU programming tools
+* Libtool: (libtool).           Generic shared library support script.
+
+Individual utilities
+* aclocal-invocation: (automake)aclocal Invocation.
+                                                Generating aclocal.m4.
+* autoconf-invocation: (autoconf)autoconf Invocation.
+                                                How to create configuration 
+                                                  scripts
+* autoheader: (autoconf)autoheader Invocation.  How to create configuration 
+                                                  templates
+* autom4te: (autoconf)autom4te Invocation.      The Autoconf executables 
+                                                  backbone
+* automake-invocation: (automake)automake Invocation.
+                                                Generating Makefile.in.
+* autoreconf: (autoconf)autoreconf Invocation.  Remaking multiple `configure' 
+                                                  scripts
+* autoscan: (autoconf)autoscan Invocation.      Semi-automatic `configure.ac' 
+                                                  writing
+* autoupdate: (autoconf)autoupdate Invocation.  Automatic update of 
+                                                  `configure.ac'
+* config.status: (autoconf)config.status Invocation.
+                                                Recreating configurations.
+* configure: (autoconf)configure Invocation.    Configuring a package.
+* ifnames: (autoconf)ifnames Invocation.        Listing conditionals in source.
+* libtool-invocation: (libtool)Invoking libtool.
+                                                Running the `libtool' script.
+* libtoolize: (libtool)Invoking libtoolize.     Adding libtool support.
+* testsuite: (autoconf)testsuite Invocation.    Running an Autotest test suite.
+
+Software development
+* Autoconf: (autoconf).         Create source code configuration scripts.
+* Automake: (automake).         Making GNU standards-compliant Makefiles.
+* Make: (make).                 Remake files automatically.
+
+Text creation and manipulation
+* M4: (m4).                     A powerful macro processor.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info
new file mode 100644
index 0000000..9a1263a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info
@@ -0,0 +1,140 @@
+This is doc/libtool.info, produced by makeinfo version 4.13 from
+./doc/libtool.texi.
+
+INFO-DIR-SECTION GNU programming tools
+START-INFO-DIR-ENTRY
+* Libtool: (libtool).           Generic shared library support script.
+END-INFO-DIR-ENTRY
+
+INFO-DIR-SECTION Individual utilities
+START-INFO-DIR-ENTRY
+* libtool-invocation: (libtool)Invoking libtool.
+                                                Running the `libtool' script.
+* libtoolize: (libtool)Invoking libtoolize.     Adding libtool support.
+END-INFO-DIR-ENTRY
+
+   This file documents GNU Libtool 2.4.2
+
+   Copyright (C) 1996-2011 Free Software Foundation, Inc.
+
+   Permission is granted to copy, distribute and/or modify this document
+under the terms of the GNU Free Documentation License, Version 1.3 or
+any later version published by the Free Software Foundation; with no
+Invariant Sections, with no Front-Cover Texts, and with no Back-Cover
+Texts.  A copy of the license is included in the section entitled "GNU
+Free Documentation License".
+
+
+Indirect:
+libtool.info-1: 999
+libtool.info-2: 284584
+
+Tag Table:
+(Indirect)
+Node: Top999
+Node: Introduction8193
+Node: Motivation10020
+Node: Issues11340
+Node: Other implementations12818
+Node: Postmortem13361
+Node: Libtool paradigm14981
+Node: Using libtool15926
+Node: Creating object files18029
+Node: Linking libraries21766
+Ref: Linking libraries-Footnote-125593
+Node: Linking executables25734
+Ref: Linking executables-Footnote-130985
+Ref: Linking executables-Footnote-231278
+Node: Wrapper executables31358
+Node: Debugging executables33581
+Node: Installing libraries36404
+Ref: Installing libraries-Footnote-139566
+Node: Installing executables39637
+Node: Static libraries40473
+Node: Invoking libtool43750
+Node: Compile mode49393
+Node: Link mode52354
+Node: Execute mode61728
+Node: Install mode62508
+Node: Finish mode64881
+Node: Uninstall mode65743
+Node: Clean mode66184
+Node: Integrating libtool66643
+Node: Autoconf macros69477
+Node: Makefile rules73324
+Node: Using Automake74427
+Ref: Using Automake-Footnote-176008
+Node: Configuring76408
+Node: LT_INIT77660
+Ref: LT_INIT-Footnote-192591
+Node: Configure notes92844
+Node: Distributing96023
+Node: Invoking libtoolize96940
+Node: Autoconf and LTLIBOBJS103128
+Node: Static-only libraries103872
+Ref: Static-only libraries-Footnote-1105199
+Node: Other languages105308
+Node: C++ libraries106016
+Node: Tags107454
+Node: Versioning108868
+Node: Interfaces110236
+Node: Libtool versioning110869
+Node: Updating version info113082
+Node: Release numbers116110
+Node: Library tips117966
+Node: C header files120772
+Ref: C header files-Footnote-1124446
+Node: Inter-library dependencies124655
+Node: Dlopened modules127372
+Node: Building modules129259
+Node: Dlpreopening130460
+Node: Linking with dlopened modules136117
+Node: Finding the dlname141046
+Ref: Finding the dlname-Footnote-1142364
+Node: Dlopen issues142417
+Node: Using libltdl143472
+Node: Libltdl interface145314
+Ref: Libltdl interface-Footnote-1158964
+Node: Modules for libltdl159258
+Node: Thread Safety in libltdl161784
+Node: User defined module data162797
+Node: Module loaders for libltdl170290
+Ref: Module loaders for libltdl-Footnote-1179561
+Node: Distributing libltdl179667
+Ref: Distributing libltdl-Footnote-1193478
+Ref: Distributing libltdl-Footnote-2193774
+Node: Trace interface193924
+Node: FAQ194760
+Node: Stripped link flags195098
+Node: Troubleshooting196544
+Node: Libtool test suite197067
+Node: Test descriptions197845
+Node: When tests fail210238
+Node: Reporting bugs211242
+Node: Maintaining212860
+Node: New ports213603
+Node: Information sources214296
+Node: Porting inter-library dependencies216764
+Node: Tested platforms219490
+Node: Platform quirks227920
+Node: References229101
+Node: Compilers229951
+Ref: Compilers-Footnote-1231527
+Node: Reloadable objects231843
+Node: Multiple dependencies232202
+Node: Archivers233099
+Node: Cross compiling233689
+Node: File name conversion239701
+Node: File Name Conversion Failure242540
+Node: Native MinGW File Name Conversion243788
+Node: Cygwin/Windows File Name Conversion245351
+Node: Unix/Windows File Name Conversion246724
+Node: LT_CYGPATH247493
+Node: Cygwin to MinGW Cross250738
+Node: Windows DLLs255042
+Node: libtool script contents262320
+Node: Cheap tricks282713
+Node: GNU Free Documentation License284584
+Node: Combined Index309759
+
+End Tag Table
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info-1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info-1
new file mode 100644
index 0000000..6d16648
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info-1
@@ -0,0 +1,6698 @@
+This is doc/libtool.info, produced by makeinfo version 4.13 from
+./doc/libtool.texi.
+
+INFO-DIR-SECTION GNU programming tools
+START-INFO-DIR-ENTRY
+* Libtool: (libtool).           Generic shared library support script.
+END-INFO-DIR-ENTRY
+
+INFO-DIR-SECTION Individual utilities
+START-INFO-DIR-ENTRY
+* libtool-invocation: (libtool)Invoking libtool.
+                                                Running the `libtool' script.
+* libtoolize: (libtool)Invoking libtoolize.     Adding libtool support.
+END-INFO-DIR-ENTRY
+
+   This file documents GNU Libtool 2.4.2
+
+   Copyright (C) 1996-2011 Free Software Foundation, Inc.
+
+   Permission is granted to copy, distribute and/or modify this document
+under the terms of the GNU Free Documentation License, Version 1.3 or
+any later version published by the Free Software Foundation; with no
+Invariant Sections, with no Front-Cover Texts, and with no Back-Cover
+Texts.  A copy of the license is included in the section entitled "GNU
+Free Documentation License".
+
+
+File: libtool.info,  Node: Top,  Next: Introduction,  Prev: (dir),  Up: (dir)
+
+Shared library support for GNU
+******************************
+
+This file documents GNU Libtool, a script that allows package developers
+to provide generic shared library support.  This edition documents
+version 2.4.2.
+
+   *Note Reporting bugs::, for information on how to report problems
+with GNU Libtool.
+
+* Menu:
+
+* Introduction::                What the heck is libtool?
+* Libtool paradigm::            How libtool's view of libraries is different.
+* Using libtool::               Example of using libtool to build libraries.
+* Invoking libtool::            Running the `libtool' script.
+* Integrating libtool::         Using libtool in your own packages.
+* Other languages::             Using libtool without a C compiler.
+* Versioning::                  Using library interface versions.
+* Library tips::                Tips for library interface design.
+* Inter-library dependencies::  Libraries that depend on other libraries.
+* Dlopened modules::            `dlopen'ing libtool-created libraries.
+* Using libltdl::               Libtool's portable `dlopen' wrapper library.
+* Trace interface::             Libtool's trace interface.
+* FAQ::                         Frequently Asked Questions
+* Troubleshooting::             When libtool doesn't work as advertised.
+* Maintaining::                 Information used by the libtool maintainer.
+* GNU Free Documentation License::  License for this manual.
+* Combined Index::              Full index.
+
+ --- The Detailed Node Listing ---
+
+Introduction
+
+* Motivation::                  Why does GNU need a libtool?
+* Issues::                      The problems that need to be addressed.
+* Other implementations::       How other people have solved these issues.
+* Postmortem::                  Learning from past difficulties.
+
+Using libtool
+
+* Creating object files::       Compiling object files for libraries.
+* Linking libraries::           Creating libraries from object files.
+* Linking executables::         Linking object files against libtool libraries.
+* Debugging executables::       Running GDB on libtool-generated programs.
+* Installing libraries::        Making libraries available to users.
+* Installing executables::      Making programs available to users.
+* Static libraries::            When shared libraries are not wanted.
+
+Linking executables
+
+* Wrapper executables::         Wrapper executables for some platforms.
+
+Invoking `libtool'
+
+* Compile mode::                Creating library object files.
+* Link mode::                   Generating executables and libraries.
+* Execute mode::                Debugging libtool-generated programs.
+* Install mode::                Making libraries and executables public.
+* Finish mode::                 Completing a library installation.
+* Uninstall mode::              Removing installed executables and libraries.
+* Clean mode::                  Removing uninstalled executables and libraries.
+
+Integrating libtool with your package
+
+* Autoconf macros::             Autoconf macros exported by libtool.
+* Makefile rules::              Writing `Makefile' rules for libtool.
+* Using Automake::              Automatically supporting libtool.
+* Configuring::                 Configuring libtool for a host system.
+* Distributing::                What files to distribute with your package.
+* Static-only libraries::       Sometimes shared libraries are just a pain.
+
+Configuring libtool
+
+* LT_INIT::                     Configuring `libtool' in `configure.ac'.
+* Configure notes::             Platform-specific notes for configuration.
+
+Including libtool in your package
+
+* Invoking libtoolize::         `libtoolize' command line options.
+* Autoconf and LTLIBOBJS::      Autoconf automates LTLIBOBJS generation.
+
+Using libtool with other languages
+
+* C++ libraries::               Writing libraries for C++
+* Tags::                        Tags
+
+Library interface versions
+
+* Interfaces::                  What are library interfaces?
+* Libtool versioning::          Libtool's versioning system.
+* Updating version info::       Changing version information before releases.
+* Release numbers::             Breaking binary compatibility for aesthetics.
+
+Tips for interface design
+
+* C header files::              How to write portable include files.
+
+Dlopened modules
+
+* Building modules::            Creating dlopenable objects and libraries.
+* Dlpreopening::                Dlopening that works on static platforms.
+* Linking with dlopened modules::  Using dlopenable modules in libraries.
+* Finding the dlname::          Choosing the right file to `dlopen'.
+* Dlopen issues::               Unresolved problems that need your attention.
+
+Using libltdl
+
+* Libltdl interface::           How to use libltdl in your programs.
+* Modules for libltdl::         Creating modules that can be `dlopen'ed.
+* Thread Safety in libltdl::    Registering callbacks for multi-thread safety.
+* User defined module data::    Associating data with loaded modules.
+* Module loaders for libltdl::  Creating user defined module loaders.
+* Distributing libltdl::        How to distribute libltdl with your package.
+
+Frequently Asked Questions about libtool
+
+* Stripped link flags::         Dropped flags when creating a library
+
+Troubleshooting
+
+* Libtool test suite::          Libtool's self-tests.
+* Reporting bugs::              How to report problems with libtool.
+
+The libtool test suite
+
+* Test descriptions::           The contents of the old test suite.
+* When tests fail::             What to do when a test fails.
+
+Maintenance notes for libtool
+
+* New ports::                   How to port libtool to new systems.
+* Tested platforms::            When libtool was last tested.
+* Platform quirks::             Information about different library systems.
+* libtool script contents::     Configuration information that libtool uses.
+* Cheap tricks::                Making libtool maintainership easier.
+
+Porting libtool to new systems
+
+* Information sources::         Where to find relevant documentation
+* Porting inter-library dependencies::  Implementation details explained
+
+Platform quirks
+
+* References::                  Finding more information.
+* Compilers::                   Creating object files from source files.
+* Reloadable objects::          Binding object files together.
+* Multiple dependencies::       Removing duplicate dependent libraries.
+* Archivers::                   Programs that create static archives.
+* Cross compiling::             Issues that arise when cross compiling.
+* File name conversion::        Converting file names between platforms.
+* Windows DLLs::                Windows header defines.
+
+File name conversion
+
+* File Name Conversion Failure::  What happens when file name conversion fails
+* Native MinGW File Name Conversion::  MSYS file name conversion idiosyncrasies
+* Cygwin/Windows File Name Conversion::  Using `cygpath' to convert Cygwin file names
+* Unix/Windows File Name Conversion::  Using Wine to convert Unix paths
+* LT_CYGPATH::                  Invoking `cygpath' from other environments
+* Cygwin to MinGW Cross::       Other notes concerning MinGW cross
+
+
+File: libtool.info,  Node: Introduction,  Next: Libtool paradigm,  Prev: Top,  Up: Top
+
+1 Introduction
+**************
+
+In the past, if you were a source code package developer and wanted to
+take advantage of the power of shared libraries, you needed to write
+custom support code for each platform on which your package ran.  You
+also had to design a configuration interface so that the package
+installer could choose what sort of libraries were built.
+
+   GNU Libtool simplifies your job by encapsulating both the
+platform-specific dependencies, and the user interface, in a single
+script.  GNU Libtool is designed so that the complete functionality of
+each host type is available via a generic interface, but nasty quirks
+are hidden from the programmer.
+
+   GNU Libtool's consistent interface is reassuring... users don't need
+to read obscure documentation in order to have their favorite source
+package build shared libraries.  They just run your package `configure'
+script (or equivalent), and libtool does all the dirty work.
+
+   There are several examples throughout this document.  All assume the
+same environment: we want to build a library, `libhello', in a generic
+way.
+
+   `libhello' could be a shared library, a static library, or both...
+whatever is available on the host system, as long as libtool has been
+ported to it.
+
+   This chapter explains the original design philosophy of libtool.
+Feel free to skip to the next chapter, unless you are interested in
+history, or want to write code to extend libtool in a consistent way.
+
+* Menu:
+
+* Motivation::                  Why does GNU need a libtool?
+* Issues::                      The problems that need to be addressed.
+* Other implementations::       How other people have solved these issues.
+* Postmortem::                  Learning from past difficulties.
+
+
+File: libtool.info,  Node: Motivation,  Next: Issues,  Up: Introduction
+
+1.1 Motivation for writing libtool
+==================================
+
+Since early 1995, several different GNU developers have recognized the
+importance of having shared library support for their packages.  The
+primary motivation for such a change is to encourage modularity and
+reuse of code (both conceptually and physically) in GNU programs.
+
+   Such a demand means that the way libraries are built in GNU packages
+needs to be general, to allow for any library type the package installer
+might want.  The problem is compounded by the absence of a standard
+procedure for creating shared libraries on different platforms.
+
+   The following sections outline the major issues facing shared library
+support in GNU, and how shared library support could be standardized
+with libtool.
+
+   The following specifications were used in developing and evaluating
+this system:
+
+  1. The system must be as elegant as possible.
+
+  2. The system must be fully integrated with the GNU Autoconf and
+     Automake utilities, so that it will be easy for GNU maintainers to
+     use.  However, the system must not require these tools, so that it
+     can be used by non-GNU packages.
+
+  3. Portability to other (non-GNU) architectures and tools is
+     desirable.
+
+
+File: libtool.info,  Node: Issues,  Next: Other implementations,  Prev: Motivation,  Up: Introduction
+
+1.2 Implementation issues
+=========================
+
+The following issues need to be addressed in any reusable shared library
+system, specifically libtool:
+
+  1. The package installer should be able to control what sort of
+     libraries are built.
+
+  2. It can be tricky to run dynamically linked programs whose
+     libraries have not yet been installed.  `LD_LIBRARY_PATH' must be
+     set properly (if it is supported), or programs fail to run.
+
+  3. The system must operate consistently even on hosts that don't
+     support shared libraries.
+
+  4. The commands required to build shared libraries may differ wildly
+     from host to host.  These need to be determined at configure time
+     in a consistent way.
+
+  5. It is not always obvious with what prefix or suffix a shared
+     library should be installed.  This makes it difficult for
+     `Makefile' rules, since they generally assume that file names are
+     the same from host to host.
+
+  6. The system needs a simple library version number abstraction, so
+     that shared libraries can be upgraded in place.  The programmer
+     should be informed how to design the interfaces to the library to
+     maximize binary compatibility.
+
+  7. The install `Makefile' target should warn the package installer to
+     set the proper environment variables (`LD_LIBRARY_PATH' or
+     equivalent), or run `ldconfig'.
+
+
+File: libtool.info,  Node: Other implementations,  Next: Postmortem,  Prev: Issues,  Up: Introduction
+
+1.3 Other implementations
+=========================
+
+Even before libtool was developed, many free software packages built and
+installed their own shared libraries.  At first, these packages were
+examined to avoid reinventing existing features.
+
+   Now it is clear that none of these packages have documented the
+details of shared library systems that libtool requires.  So, other
+packages have been more or less abandoned as influences.
+
+
+File: libtool.info,  Node: Postmortem,  Prev: Other implementations,  Up: Introduction
+
+1.4 A postmortem analysis of other implementations
+==================================================
+
+In all fairness, each of the implementations that were examined do the
+job that they were intended to do, for a number of different host
+systems.  However, none of these solutions seem to function well as a
+generalized, reusable component.
+
+   Most were too complex to use (much less modify) without understanding
+exactly what the implementation does, and they were generally not
+documented.
+
+   The main difficulty is that different vendors have different views of
+what libraries are, and none of the packages that were examined seemed
+to be confident enough to settle on a single paradigm that just _works_.
+
+   Ideally, libtool would be a standard that would be implemented as
+series of extensions and modifications to existing library systems to
+make them work consistently.  However, it is not an easy task to
+convince operating system developers to mend their evil ways, and
+people want to build shared libraries right now, even on buggy, broken,
+confused operating systems.
+
+   For this reason, libtool was designed as an independent shell script.
+It isolates the problems and inconsistencies in library building that
+plague `Makefile' writers by wrapping the compiler suite on different
+platforms with a consistent, powerful interface.
+
+   With luck, libtool will be useful to and used by the GNU community,
+and the lessons that were learned in writing it will be taken up by
+designers of future library systems.
+
+
+File: libtool.info,  Node: Libtool paradigm,  Next: Using libtool,  Prev: Introduction,  Up: Top
+
+2 The libtool paradigm
+**********************
+
+At first, libtool was designed to support an arbitrary number of library
+object types.  After libtool was ported to more platforms, a new
+paradigm gradually developed for describing the relationship between
+libraries and programs.
+
+   In summary, "libraries are programs with multiple entry points, and
+more formally defined interfaces."
+
+   Version 0.7 of libtool was a complete redesign and rewrite of
+libtool to reflect this new paradigm.  So far, it has proved to be
+successful: libtool is simpler and more useful than before.
+
+   The best way to introduce the libtool paradigm is to contrast it with
+the paradigm of existing library systems, with examples from each.  It
+is a new way of thinking, so it may take a little time to absorb, but
+when you understand it, the world becomes simpler.
+
+
+File: libtool.info,  Node: Using libtool,  Next: Invoking libtool,  Prev: Libtool paradigm,  Up: Top
+
+3 Using libtool
+***************
+
+It makes little sense to talk about using libtool in your own packages
+until you have seen how it makes your life simpler.  The examples in
+this chapter introduce the main features of libtool by comparing the
+standard library building procedure to libtool's operation on two
+different platforms:
+
+`a23'
+     An Ultrix 4.2 platform with only static libraries.
+
+`burger'
+     A NetBSD/i386 1.2 platform with shared libraries.
+
+   You can follow these examples on your own platform, using the
+preconfigured libtool script that was installed with libtool (*note
+Configuring::).
+
+   Source files for the following examples are taken from the `demo'
+subdirectory of the libtool distribution.  Assume that we are building a
+library, `libhello', out of the files `foo.c' and `hello.c'.
+
+   Note that the `foo.c' source file uses the `cos' math library
+function, which is usually found in the standalone math library, and not
+the C library (*note Trigonometric Functions: (libc)Trig Functions.).
+So, we need to add `-lm' to the end of the link line whenever we link
+`foo.lo' into an executable or a library (*note Inter-library
+dependencies::).
+
+   The same rule applies whenever you use functions that don't appear in
+the standard C library... you need to add the appropriate `-lNAME' flag
+to the end of the link line when you link against those objects.
+
+   After we have built that library, we want to create a program by
+linking `main.o' against `libhello'.
+
+* Menu:
+
+* Creating object files::       Compiling object files for libraries.
+* Linking libraries::           Creating libraries from object files.
+* Linking executables::         Linking object files against libtool libraries.
+* Debugging executables::       Running GDB on libtool-generated programs.
+* Installing libraries::        Making libraries available to users.
+* Installing executables::      Making programs available to users.
+* Static libraries::            When shared libraries are not wanted.
+
+
+File: libtool.info,  Node: Creating object files,  Next: Linking libraries,  Up: Using libtool
+
+3.1 Creating object files
+=========================
+
+To create an object file from a source file, the compiler is invoked
+with the `-c' flag (and any other desired flags):
+
+     burger$ gcc -g -O -c main.c
+     burger$
+
+   The above compiler command produces an object file, usually named
+`main.o', from the source file `main.c'.
+
+   For most library systems, creating object files that become part of a
+static library is as simple as creating object files that are linked to
+form an executable:
+
+     burger$ gcc -g -O -c foo.c
+     burger$ gcc -g -O -c hello.c
+     burger$
+
+   Shared libraries, however, may only be built from
+"position-independent code" (PIC).  So, special flags must be passed to
+the compiler to tell it to generate PIC rather than the standard
+position-dependent code.
+
+   Since this is a library implementation detail, libtool hides the
+complexity of PIC compiler flags and uses separate library object files
+(the PIC one lives in the `.libs' subdirectory and the static one lives
+in the current directory).  On systems without shared libraries, the
+PIC library object files are not created, whereas on systems where all
+code is PIC, such as AIX, the static ones are not created.
+
+   To create library object files for `foo.c' and `hello.c', simply
+invoke libtool with the standard compilation command as arguments
+(*note Compile mode::):
+
+     a23$ libtool --mode=compile gcc -g -O -c foo.c
+     gcc -g -O -c foo.c -o foo.o
+     a23$ libtool --mode=compile gcc -g -O -c hello.c
+     gcc -g -O -c hello.c -o hello.o
+     a23$
+
+   Note that libtool silently creates an additional control file on each
+`compile' invocation.  The `.lo' file is the libtool object, which
+Libtool uses to determine what object file may be built into a shared
+library.  On `a23', only static libraries are supported so the library
+objects look like this:
+
+     # foo.lo - a libtool object file
+     # Generated by ltmain.sh (GNU libtool) 2.4.2
+     #
+     # Please DO NOT delete this file!
+     # It is necessary for linking the library.
+
+     # Name of the PIC object.
+     pic_object=none
+
+     # Name of the non-PIC object.
+     non_pic_object='foo.o'
+
+   On shared library systems, libtool automatically generates an
+additional PIC object by inserting the appropriate PIC generation flags
+into the compilation command:
+
+     burger$ libtool --mode=compile gcc -g -O -c foo.c
+     mkdir .libs
+     gcc -g -O -c foo.c  -fPIC -DPIC -o .libs/foo.o
+     gcc -g -O -c foo.c -o foo.o >/dev/null 2>&1
+     burger$
+
+   Note that Libtool automatically created the `.libs' directory upon its
+first execution, where PIC library object files will be stored.
+
+   Since `burger' supports shared libraries, and requires PIC objects
+to build them, Libtool has compiled a PIC object this time, and made a
+note of it in the libtool object:
+
+     # foo.lo - a libtool object file
+     # Generated by ltmain.sh (GNU libtool) 2.4.2
+     #
+     # Please DO NOT delete this file!
+     # It is necessary for linking the library.
+
+     # Name of the PIC object.
+     pic_object='.libs/foo.o'
+
+     # Name of the non-PIC object.
+     non_pic_object='foo.o'
+
+   Notice that the second run of GCC has its output discarded.  This is
+done so that compiler warnings aren't annoyingly duplicated.  If you
+need to see both sets of warnings (you might have conditional code
+inside `#ifdef PIC' for example), you can turn off suppression with the
+`-no-suppress' option to libtool's compile mode:
+
+     burger$ libtool --mode=compile gcc -no-suppress -g -O -c hello.c
+     gcc -g -O -c hello.c  -fPIC -DPIC -o .libs/hello.o
+     gcc -g -O -c hello.c -o hello.o
+     burger$
+
+
+File: libtool.info,  Node: Linking libraries,  Next: Linking executables,  Prev: Creating object files,  Up: Using libtool
+
+3.2 Linking libraries
+=====================
+
+Without libtool, the programmer would invoke the `ar' command to create
+a static library:
+
+     burger$ ar cru libhello.a hello.o foo.o
+     burger$
+
+   But of course, that would be too simple, so many systems require that
+you run the `ranlib' command on the resulting library (to give it
+better karma, or something):
+
+     burger$ ranlib libhello.a
+     burger$
+
+   It seems more natural to use the C compiler for this task, given
+libtool's "libraries are programs" approach.  So, on platforms without
+shared libraries, libtool simply acts as a wrapper for the system `ar'
+(and possibly `ranlib') commands.
+
+   Again, the libtool control file name (`.la' suffix) differs from the
+standard library name (`.a' suffix).  The arguments to libtool are the
+same ones you would use to produce an executable named `libhello.la'
+with your compiler (*note Link mode::):
+
+     a23$ libtool --mode=link gcc -g -O -o libhello.la foo.o hello.o
+     *** Warning: Linking the shared library libhello.la against the
+     *** non-libtool objects foo.o hello.o is not portable!
+     ar cru .libs/libhello.a
+     ranlib .libs/libhello.a
+     creating libhello.la
+     (cd .libs && rm -f libhello.la && ln -s ../libhello.la libhello.la)
+     a23$
+
+   Aha!  Libtool caught a common error... trying to build a library
+from standard objects instead of special `.lo' object files.  This
+doesn't matter so much for static libraries, but on shared library
+systems, it is of great importance.  (Note that you may replace
+`libhello.la' with `libhello.a' in which case libtool won't issue the
+warning any more.  But although this method works, this is not intended
+to be used because it makes you lose the benefits of using Libtool.)
+
+   So, let's try again, this time with the library object files.
+Remember also that we need to add `-lm' to the link command line because
+`foo.c' uses the `cos' math library function (*note Using libtool::).
+
+   Another complication in building shared libraries is that we need to
+specify the path to the directory in which they (eventually) will be
+installed (in this case, `/usr/local/lib')(1):
+
+     a23$ libtool --mode=link gcc -g -O -o libhello.la foo.lo hello.lo \
+                     -rpath /usr/local/lib -lm
+     ar cru .libs/libhello.a foo.o hello.o
+     ranlib .libs/libhello.a
+     creating libhello.la
+     (cd .libs && rm -f libhello.la && ln -s ../libhello.la libhello.la)
+     a23$
+
+   Now, let's try the same trick on the shared library platform:
+
+     burger$ libtool --mode=link gcc -g -O -o libhello.la foo.lo hello.lo \
+                     -rpath /usr/local/lib -lm
+     rm -fr  .libs/libhello.a .libs/libhello.la
+     ld -Bshareable -o .libs/libhello.so.0.0 .libs/foo.o .libs/hello.o -lm
+     ar cru .libs/libhello.a foo.o hello.o
+     ranlib .libs/libhello.a
+     creating libhello.la
+     (cd .libs && rm -f libhello.la && ln -s ../libhello.la libhello.la)
+     burger$
+
+   Now that's significantly cooler... Libtool just ran an obscure `ld'
+command to create a shared library, as well as the static library.
+
+   Note how libtool creates extra files in the `.libs' subdirectory,
+rather than the current directory.  This feature is to make it easier
+to clean up the build directory, and to help ensure that other programs
+fail horribly if you accidentally forget to use libtool when you should.
+
+   Again, you may want to have a look at the `.la' file in order to see
+what Libtool stores in it.  In particular, you will see that Libtool
+uses this file to remember the destination directory for the library
+(the argument to `-rpath') as well as the dependency on the math
+library (`-lm').
+
+   ---------- Footnotes ----------
+
+   (1) If you don't specify an `rpath', then libtool builds a libtool
+convenience archive, not a shared library (*note Static libraries::).
+
+
+File: libtool.info,  Node: Linking executables,  Next: Debugging executables,  Prev: Linking libraries,  Up: Using libtool
+
+3.3 Linking executables
+=======================
+
+If you choose at this point to "install" the library (put it in a
+permanent location) before linking executables against it, then you
+don't need to use libtool to do the linking.  Simply use the appropriate
+`-L' and `-l' flags to specify the library's location.
+
+   Some system linkers insist on encoding the full directory name of
+each shared library in the resulting executable.  Libtool has to work
+around this misfeature by special magic to ensure that only permanent
+directory names are put into installed executables.
+
+   The importance of this bug must not be overlooked: it won't cause
+programs to crash in obvious ways.  It creates a security hole, and
+possibly even worse, if you are modifying the library source code after
+you have installed the package, you will change the behaviour of the
+installed programs!
+
+   So, if you want to link programs against the library before you
+install it, you must use libtool to do the linking.
+
+   Here's the old way of linking against an uninstalled library:
+
+     burger$ gcc -g -O -o hell.old main.o libhello.a -lm
+     burger$
+
+   Libtool's way is almost the same(1) (*note Link mode::):
+
+     a23$ libtool --mode=link gcc -g -O -o hell main.o libhello.la
+     gcc -g -O -o hell main.o  ./.libs/libhello.a -lm
+     a23$
+
+   That looks too simple to be true.  All libtool did was transform
+`libhello.la' to `./.libs/libhello.a', but remember that `a23' has no
+shared libraries.  Notice that Libtool also remembered that
+`libhello.la' depends on `-lm', so even though we didn't specify `-lm'
+on the libtool command line(2) Libtool has added it to the `gcc' link
+line for us.
+
+   On `burger' Libtool links against the uninstalled shared library:
+
+     burger$ libtool --mode=link gcc -g -O -o hell main.o libhello.la
+     gcc -g -O -o .libs/hell main.o -L./.libs -R/usr/local/lib -lhello -lm
+     creating hell
+     burger$
+
+   Now assume `libhello.la' had already been installed, and you want to
+link a new program with it.  You could figure out where it lives by
+yourself, then run:
+
+     burger$ gcc -g -O -o test test.o -L/usr/local/lib -lhello -lm
+
+   However, unless `/usr/local/lib' is in the standard library search
+path, you won't be able to run `test'.  However, if you use libtool to
+link the already-installed libtool library, it will do The Right Thing
+(TM) for you:
+
+     burger$ libtool --mode=link gcc -g -O -o test test.o \
+                     /usr/local/lib/libhello.la
+     gcc -g -O -o .libs/test test.o -Wl,--rpath \
+             -Wl,/usr/local/lib /usr/local/lib/libhello.a -lm
+     creating test
+     burger$
+
+   Note that libtool added the necessary run-time path flag, as well as
+`-lm', the library libhello.la depended upon.  Nice, huh?
+
+   Notice that the executable, `hell', was actually created in the
+`.libs' subdirectory.  Then, a wrapper script (or, on certain
+platforms, a wrapper executable *note Wrapper executables::) was
+created in the current directory.
+
+   Since libtool created a wrapper script, you should use libtool to
+install it and debug it too.  However, since the program does not depend
+on any uninstalled libtool library, it is probably usable even without
+the wrapper script.
+
+   On NetBSD 1.2, libtool encodes the installation directory of
+`libhello', by using the `-R/usr/local/lib' compiler flag.  Then, the
+wrapper script guarantees that the executable finds the correct shared
+library (the one in `./.libs') until it is properly installed.
+
+   Let's compare the two different programs:
+
+     burger$ time ./hell.old
+     Welcome to GNU Hell!
+     ** This is not GNU Hello.  There is no built-in mail reader. **
+             0.21 real         0.02 user         0.08 sys
+     burger$ time ./hell
+     Welcome to GNU Hell!
+     ** This is not GNU Hello.  There is no built-in mail reader. **
+             0.63 real         0.09 user         0.59 sys
+     burger$
+
+   The wrapper script takes significantly longer to execute, but at
+least the results are correct, even though the shared library hasn't
+been installed yet.
+
+   So, what about all the space savings that shared libraries are
+supposed to yield?
+
+     burger$ ls -l hell.old libhello.a
+     -rwxr-xr-x  1 gord  gord  15481 Nov 14 12:11 hell.old
+     -rw-r--r--  1 gord  gord   4274 Nov 13 18:02 libhello.a
+     burger$ ls -l .libs/hell .libs/libhello.*
+     -rwxr-xr-x  1 gord  gord  11647 Nov 14 12:10 .libs/hell
+     -rw-r--r--  1 gord  gord   4274 Nov 13 18:44 .libs/libhello.a
+     -rwxr-xr-x  1 gord  gord  12205 Nov 13 18:44 .libs/libhello.so.0.0
+     burger$
+
+   Well, that sucks.  Maybe I should just scrap this project and take up
+basket weaving.
+
+   Actually, it just proves an important point: shared libraries incur
+overhead because of their (relative) complexity.  In this situation, the
+price of being dynamic is eight kilobytes, and the payoff is about four
+kilobytes.  So, having a shared `libhello' won't be an advantage until
+we link it against at least a few more programs.
+
+* Menu:
+
+* Wrapper executables::         Wrapper executables for some platforms.
+
+   ---------- Footnotes ----------
+
+   (1) However, you should avoid using `-L' or `-l' flags to link
+against an uninstalled libtool library.  Just specify the relative path
+to the `.la' file, such as `../intl/libintl.la'.  This is a design
+decision to eliminate any ambiguity when linking against uninstalled
+shared libraries.
+
+   (2) And why should we? `main.o' doesn't directly depend on `-lm'
+after all.
+
+
+File: libtool.info,  Node: Wrapper executables,  Up: Linking executables
+
+3.3.1 Wrapper executables for uninstalled programs
+--------------------------------------------------
+
+Some platforms, notably those hosted on Windows such as Cygwin and
+MinGW, use a wrapper executable rather than a wrapper script to ensure
+proper operation of uninstalled programs linked by libtool against
+uninstalled shared libraries. The wrapper executable thus performs the
+same function as the wrapper script used on other platforms, but makes it
+possible to satisfy the `make' rules for the program, whose name ends in
+`$(EXEEXT)'.  The actual program executable is created below `.libs', and
+its name will end in `$(EXEEXT)' and may or may not contain an `lt-'
+prefix.  This wrapper executable sets various environment values so
+that the program executable may locate its (uninstalled) shared
+libraries, and then launches the program executable.
+
+   The wrapper executable provides a debug mode, enabled by passing the
+command-line option `--lt-debug' (see below). When executing in debug
+mode, diagnostic information will be printed to `stderr' before the
+program executable is launched.
+
+   Finally, the wrapper executable supports a number of command line
+options that may be useful when debugging the operation of the wrapper
+system. All of these options begin with `--lt-', and if present they
+and their arguments will be removed from the argument list passed on to
+the program executable.  Therefore, the program executable may not
+employ command line options that begin with `--lt-'. (In fact, the
+wrapper executable will detect any command line options that begin with
+`--lt-' and abort with an error message if the option is not
+recognized). If this presents a problem, please contact the Libtool
+team at the Libtool bug reporting address <bug-libtool@gnu.org>.
+
+   These command line options include:
+
+`--lt-dump-script'
+     Causes the wrapper to print a copy of the wrapper _script_ to
+     `stdout', and exit.
+
+`--lt-debug'
+     Causes the wrapper to print diagnostic information to `stdout',
+     before launching the program executable.
+
+
+   For consistency, both the wrapper _script_ and the wrapper
+_executable_ support these options.
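+
+   For example (assuming a MinGW or Cygwin build whose wrapper is named
+`hell.exe'; the output file name is arbitrary), a copy of the wrapper
+script can be captured with:
+
+     $ ./hell.exe --lt-dump-script > hell-wrapper.txt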
+
+
+File: libtool.info,  Node: Debugging executables,  Next: Installing libraries,  Prev: Linking executables,  Up: Using libtool
+
+3.4 Debugging executables
+=========================
+
+If `hell' was a complicated program, you would certainly want to test
+and debug it before installing it on your system.  In the above
+section, you saw how the libtool wrapper script makes it possible to run
+the program directly, but unfortunately, this mechanism interferes with
+the debugger:
+
+     burger$ gdb hell
+     GDB is free software and you are welcome to distribute copies of it
+      under certain conditions; type "show copying" to see the conditions.
+     There is no warranty for GDB; type "show warranty" for details.
+     GDB 4.16 (i386-unknown-netbsd), (C) 1996 Free Software Foundation, Inc.
+
+     "hell": not in executable format: File format not recognized
+
+     (gdb) quit
+     burger$
+
+   Sad.  It doesn't work because GDB doesn't know where the executable
+lives.  So, let's try again, by invoking GDB directly on the executable:
+
+     burger$ gdb .libs/hell
+     GNU gdb 5.3 (i386-unknown-netbsd)
+     Copyright 2002 Free Software Foundation, Inc.
+     GDB is free software, covered by the GNU General Public License,
+     and you are welcome to change it and/or distribute copies of it
+     under certain conditions.  Type "show copying" to see the conditions.
+     There is no warranty for GDB.  Type "show warranty" for details.
+     (gdb) break main
+     Breakpoint 1 at 0x8048547: file main.c, line 29.
+     (gdb) run
+     Starting program: /home/src/libtool/demo/.libs/hell
+     /home/src/libtool/demo/.libs/hell: can't load library 'libhello.so.0'
+
+     Program exited with code 020.
+     (gdb) quit
+     burger$
+
+   Argh.  Now GDB complains because it cannot find the shared library
+that `hell' is linked against.  So, we must use libtool in order to
+properly set the library path and run the debugger.  Fortunately, we can
+forget all about the `.libs' directory, and just run it on the
+executable wrapper (*note Execute mode::):
+
+     burger$ libtool --mode=execute gdb hell
+     GNU gdb 5.3 (i386-unknown-netbsd)
+     Copyright 2002 Free Software Foundation, Inc.
+     GDB is free software, covered by the GNU General Public License,
+     and you are welcome to change it and/or distribute copies of it
+     under certain conditions.  Type "show copying" to see the conditions.
+     There is no warranty for GDB.  Type "show warranty" for details.
+     (gdb) break main
+     Breakpoint 1 at 0x8048547: file main.c, line 29.
+     (gdb) run
+     Starting program: /home/src/libtool/demo/.libs/hell
+
+     Breakpoint 1, main (argc=1, argv=0xbffffc40) at main.c:29
+     29        printf ("Welcome to GNU Hell!\n");
+     (gdb) quit
+     The program is running.  Quit anyway (and kill it)? (y or n) y
+     burger$
+
+
+File: libtool.info,  Node: Installing libraries,  Next: Installing executables,  Prev: Debugging executables,  Up: Using libtool
+
+3.5 Installing libraries
+========================
+
+Installing libraries on a non-libtool system is quite
+straightforward... just copy them into place:(1)
+
+     burger$ su
+     Password: ********
+     burger# cp libhello.a /usr/local/lib/libhello.a
+     burger#
+
+   Oops, don't forget the `ranlib' command:
+
+     burger# ranlib /usr/local/lib/libhello.a
+     burger#
+
+   Libtool installation is quite simple, as well.  Just use the
+`install' or `cp' command that you normally would (*note Install
+mode::):
+
+     a23# libtool --mode=install cp libhello.la /usr/local/lib/libhello.la
+     cp libhello.la /usr/local/lib/libhello.la
+     cp .libs/libhello.a /usr/local/lib/libhello.a
+     ranlib /usr/local/lib/libhello.a
+     a23#
+
+   Note that the libtool library `libhello.la' is also installed, to
+help libtool with uninstallation (*note Uninstall mode::) and linking
+(*note Linking executables::) and to help programs with dlopening
+(*note Dlopened modules::).
+
+   Here is the shared library example:
+
+     burger# libtool --mode=install install -c libhello.la \
+                     /usr/local/lib/libhello.la
+     install -c .libs/libhello.so.0.0 /usr/local/lib/libhello.so.0.0
+     install -c libhello.la /usr/local/lib/libhello.la
+     install -c .libs/libhello.a /usr/local/lib/libhello.a
+     ranlib /usr/local/lib/libhello.a
+     burger#
+
+   It is safe to specify the `-s' (strip symbols) flag if you use a
+BSD-compatible install program when installing libraries.  Libtool will
+either ignore the `-s' flag, or will run a program that will strip only
+debugging and compiler symbols from the library.
+
+   Once the libraries have been put in place, there may be some
+additional configuration that you need to do before using them.  First,
+you must make sure that where the library is installed actually agrees
+with the `-rpath' flag you used to build it.
+
+   Then, running `libtool -n finish LIBDIR' can give you further hints
+on what to do (*note Finish mode::):
+
+     burger# libtool -n finish /usr/local/lib
+     PATH="$PATH:/sbin" ldconfig -m /usr/local/lib
+     -----------------------------------------------------------------
+     Libraries have been installed in:
+        /usr/local/lib
+
+     To link against installed libraries in a given directory, LIBDIR,
+     you must use the `-LLIBDIR' flag during linking.
+
+      You will also need to do one of the following:
+        - add LIBDIR to the `LD_LIBRARY_PATH' environment variable
+          during execution
+        - add LIBDIR to the `LD_RUN_PATH' environment variable
+          during linking
+        - use the `-RLIBDIR' linker flag
+
+     See any operating system documentation about shared libraries for
+     more information, such as the ld and ld.so manual pages.
+     -----------------------------------------------------------------
+     burger#
+
+   After you have completed these steps, you can go on to begin using
+the installed libraries.  You may also install any executables that
+depend on libraries you created.
+
+   ---------- Footnotes ----------
+
+   (1) Don't strip static libraries though, or they will be unusable.
+
+
+File: libtool.info,  Node: Installing executables,  Next: Static libraries,  Prev: Installing libraries,  Up: Using libtool
+
+3.6 Installing executables
+==========================
+
+If you used libtool to link any executables against uninstalled libtool
+libraries (*note Linking executables::), you need to use libtool to
+install the executables after the libraries have been installed (*note
+Installing libraries::).
+
+   So, for our Ultrix example, we would run:
+
+     a23# libtool --mode=install -c hell /usr/local/bin/hell
+     install -c hell /usr/local/bin/hell
+     a23#
+
+   On shared library systems that require wrapper scripts, libtool just
+ignores the wrapper script and installs the correct binary:
+
+     burger# libtool --mode=install -c hell /usr/local/bin/hell
+     install -c .libs/hell /usr/local/bin/hell
+     burger#
+
+
+File: libtool.info,  Node: Static libraries,  Prev: Installing executables,  Up: Using libtool
+
+3.7 Linking static libraries
+============================
+
+Why return to `ar' and `ranlib' silliness when you've had a taste of
+libtool?  Well, sometimes it is desirable to create a static archive
+that can never be shared.  The most frequent case is when you have a
+set of object files that you use to build several different libraries.
+You can create a "convenience library" out of those objects, and link
+against that with the other libraries, instead of listing all the
+object files every time.
+
+   If you just want to link this convenience library into programs, then
+you could just ignore libtool entirely, and use the old `ar' and
+`ranlib' commands (or the corresponding GNU Automake `_LIBRARIES'
+rules).  You can even install a convenience library using GNU Libtool,
+though you probably don't want to and hence GNU Automake doesn't allow
+you to do so.
+
+     burger$ libtool --mode=install ./install-sh -c libhello.a \
+                     /local/lib/libhello.a
+     ./install-sh -c libhello.a /local/lib/libhello.a
+     ranlib /local/lib/libhello.a
+     burger$
+
+   Using libtool for static library installation protects your library
+from being accidentally stripped (if the installer used the `-s' flag),
+and automatically runs the correct `ranlib' command.
+
+   But libtool libraries are more than just collections of object files:
+they can also carry library dependency information, which old archives
+do not.  If you want to create a libtool static convenience library, you
+can omit the `-rpath' flag and use `-static' to indicate that you're
+only interested in a static library.  When you link a program with such
+a library, libtool will actually link all object files and dependency
+libraries into the program.
+
+   If you omit both `-rpath' and `-static', libtool will create a
+convenience library that can be used to create other libtool libraries,
+even shared ones.  Just like in the static case, the library behaves as
+an alias to a set of object files and dependency libraries, but in this
+case the object files are suitable for inclusion in shared libraries.
+But be careful not to link a single convenience library, directly or
+indirectly, more than once into a single program or library, otherwise
+you may get errors about symbol redefinitions.
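+
+   For instance (a sketch with hypothetical file names), you might
+build a PIC convenience library and then fold it into a shared libtool
+library like this:
+
+     burger$ libtool --mode=link gcc -o libhelp.la help.lo
+     burger$ libtool --mode=link gcc -o libhello.la hello.lo libhelp.la \
+                     -rpath /usr/local/lib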
+
+   The key is remembering that a convenience library contains PIC
+objects, and can be linked where a list of PIC objects makes sense;
+i.e. into a shared library.  A static convenience library contains
+non-PIC objects, so can be linked into an old static library, or a
+program.
+
+   When GNU Automake is used, you should use `noinst_LTLIBRARIES'
+instead of `lib_LTLIBRARIES' for convenience libraries, so that the
+`-rpath' option is not passed when they are linked.
+
+   As a rule of thumb, link a libtool convenience library into at most
+one libtool library and never into a program; link libtool static
+convenience libraries only into programs, and only if you need to carry
+library dependency information to the user of the static convenience
+library.
+
+   Another common situation where static linking is desirable is in
+creating a standalone binary.  Use libtool to do the linking and add the
+`-all-static' flag.
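+
+   For example (a sketch reusing the demo's `hell' program), a fully
+static binary might be linked as:
+
+     burger$ libtool --mode=link gcc -all-static -o hell \
+                     main.o libhello.la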
+
+
+File: libtool.info,  Node: Invoking libtool,  Next: Integrating libtool,  Prev: Using libtool,  Up: Top
+
+4 Invoking `libtool'
+********************
+
+The `libtool' program has the following synopsis:
+
+     libtool [OPTION]... [MODE-ARG]...
+
+and accepts the following options:
+
+`--config'
+     Display libtool configuration variables and exit.
+
+`--debug'
+     Dump a trace of shell script execution to standard output.  This
+     produces a lot of output, so you may wish to pipe it to `less' (or
+     `more') or redirect to a file.
+
+`-n'
+`--dry-run'
+     Don't create, modify, or delete any files, just show what commands
+     would be executed by libtool.
+
+`--features'
+     Display basic configuration options.  This provides a way for
+     packages to determine whether shared or static libraries will be
+     built.
+
+`--finish'
+     Same as `--mode=finish'.
+
+`-h'
+     Display short help message.
+
+`--help'
+     Display a help message and exit.  If `--mode=MODE' is specified,
+     then detailed help for MODE is displayed.
+
+`--help-all'
+     Display help for the general options as well as detailed help for
+     each operation mode, and exit.
+
+`--mode=MODE'
+     Use MODE as the operation mode.  When using libtool from the
+     command line, you can give just MODE (or a unique abbreviation of
+     it) as the first argument as a shorthand for the full
+     `--mode=MODE'.  For example, the following are equivalent:
+
+          $ libtool --mode=execute --dry-run gdb prog.exe
+          $ libtool        execute --dry-run gdb prog.exe
+          $ libtool        exe     --dry-run gdb prog.exe
+          $ libtool        e       --dry-run gdb prog.exe
+
+     MODE must be set to one of the following:
+
+    `compile'
+          Compile a source file into a libtool object.
+
+    `execute'
+          Automatically set the library path so that another program
+          can use uninstalled libtool-generated programs or libraries.
+
+    `link'
+          Create a library or an executable.
+
+    `install'
+          Install libraries or executables.
+
+    `finish'
+          Complete the installation of libtool libraries on the system.
+
+    `uninstall'
+          Delete installed libraries or executables.
+
+    `clean'
+          Delete uninstalled libraries or executables.
+
+`--tag=TAG'
+     Use configuration variables from tag TAG (*note Tags::).
+
+`--preserve-dup-deps'
+     Do not remove duplicate dependencies in libraries.  When building
+     packages with static libraries, the libraries may depend
+     circularly on each other (shared libs can too, but for those it
+     doesn't matter), so there are situations where `-la -lb -la' is
+     required, and the second `-la' may not be stripped or the link
+     will fail.  In cases where these duplications are required, this
+     option will preserve them, stripping only the duplicates that
+     libtool knows can safely be removed.
+
+`--quiet'
+`--silent'
+     Do not print out any progress or informational messages.
+
+`-v'
+`--verbose'
+     Print out progress and informational messages (enabled by default),
+     as well as additional messages not ordinarily seen by default.
+
+`--no-quiet'
+`--no-silent'
+     Print out the progress and informational messages that are seen by
+     default. This option has no effect on whether the additional
+     messages seen in `--verbose' mode are shown.
+
+`--no-verbose'
+     Do not print out any additional informational messages beyond
+     those ordinarily seen by default. This option has no effect on
+     whether the ordinary progress and informational messages enabled
+     by `--no-quiet' are shown.
+
+     Thus, there are now three different message levels (not counting
+     `--debug'), depending on whether the normal messages and/or the
+     additional verbose messages are displayed.  Note that there is no
+     mechanism to display verbose messages without also displaying
+     normal messages.
+
+    *default*
+          Normal messages are displayed, verbose messages are not
+          displayed.  In addition to being the default mode, it can be
+          forcibly achieved by using both option `--no-verbose' and
+          either option `--no-silent' or option `--no-quiet'.
+
+    *silent*
+          Neither normal messages nor verbose messages are displayed.
+          This mode can be achieved using either option `--silent' or
+          option `--quiet'.
+
+    *verbose*
+          Both normal messages and verbose messages are displayed. This
+          mode can be achieved using either option `-v' or option
+          `--verbose'.
+
+`--version'
+     Print libtool version information and exit.
+
+   The current `libtool' implementation is a shell script that needs
+to be invoked by the shell that `configure' chose for configuring
+`libtool' (*note The Autoconf Manual:
+(autoconf)config.status Invocation.).  This shell is set in the
+shebang (`#!') line of the `libtool' script.  Using a different shell
+may cause undefined behavior.
+
+   The MODE-ARGS are a variable number of arguments, depending on the
+selected operation mode.  In general, each MODE-ARG is interpreted by
+programs libtool invokes, rather than libtool itself.
+
+* Menu:
+
+* Compile mode::                Creating library object files.
+* Link mode::                   Generating executables and libraries.
+* Execute mode::                Debugging libtool-generated programs.
+* Install mode::                Making libraries and executables public.
+* Finish mode::                 Completing a library installation.
+* Uninstall mode::              Removing installed executables and libraries.
+* Clean mode::                  Removing uninstalled executables and libraries.
+
+
+File: libtool.info,  Node: Compile mode,  Next: Link mode,  Up: Invoking libtool
+
+4.1 Compile mode
+================
+
+For "compile" mode, MODE-ARGS is a compiler command to be used in
+creating a "standard" object file.  These arguments should begin with
+the name of the C compiler, and contain the `-c' compiler flag so that
+only an object file is created.
+
+   Libtool determines the name of the output file by removing the
+directory component from the source file name, then substituting the
+source code suffix (e.g. `.c' for C source code) with the library
+object suffix, `.lo'.
+
+   If shared libraries are being built, any necessary PIC generation
+flags are substituted into the compilation command.
+
+   The following components of MODE-ARGS are treated specially:
+
+`-o'
+     Note that the `-o' option is now fully supported.  It is emulated
+     on the platforms that don't support it (by locking and moving the
+     objects), so using libtool requires only minor modifications to
+     your Makefiles.  For example, typing
+          libtool --mode=compile gcc -c foo/x.c -o foo/x.lo
+     will do what you expect.
+
+     Note, however, that if the compiler does not support `-c' and
+     `-o', it is impossible to compile `foo/x.c' without overwriting an
+     existing `./x.o'.  Therefore, if you do have a source file
+     `./x.c', make sure you introduce dependencies in your `Makefile'
+     so that `./x.o' (or `./x.lo') is re-created after any
+     sub-directory's `x.lo':
+
+          x.o x.lo: foo/x.lo bar/x.lo
+
+     This will also ensure that make won't try to use a temporarily
+     corrupted `x.o' to create a program or library.  It may cause
+     needless recompilation on platforms that support `-c' and `-o'
+     together, but it's the only way to make it safe for those that
+     don't.
+
+`-no-suppress'
+     If both PIC and non-PIC objects are being built, libtool will
+     normally suppress the compiler output for the PIC object
+     compilation to save showing very similar, if not identical
+     duplicate output for each object.  If the `-no-suppress' option is
+     given in compile mode, libtool will show the compiler output for
+     both objects.
+
+`-prefer-pic'
+     Libtool will try to build only PIC objects.
+
+`-prefer-non-pic'
+     Libtool will try to build only non-PIC objects.
+
+`-shared'
+     Even if Libtool was configured with `--enable-static', the object
+     file Libtool builds will not be suitable for static linking.
+     Libtool will signal an error if it was configured with
+     `--disable-shared', or if the host does not support shared
+     libraries.
+
+`-static'
+     Even if libtool was configured with `--disable-static', the object
+     file Libtool builds *will* be suitable for static linking.
+
+`-Wc,FLAG'
+`-Xcompiler FLAG'
+     Pass a flag directly to the compiler.  With `-Wc,', multiple flags
+     may be separated by commas, whereas `-Xcompiler ' passes through
+     commas unchanged.
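+
+     For example, the following invocations are equivalent ways of
+     passing a single flag (here the illustrative flag `-pipe') through
+     to the compiler:
+
+          libtool --mode=compile gcc -Wc,-pipe -c foo.c
+          libtool --mode=compile gcc -Xcompiler -pipe -c foo.c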
+
+
+File: libtool.info,  Node: Link mode,  Next: Execute mode,  Prev: Compile mode,  Up: Invoking libtool
+
+4.2 Link mode
+=============
+
+"Link" mode links together object files (including library objects) to
+form another library or to create an executable program.
+
+   MODE-ARGS consist of a command using the C compiler to create an
+output file (with the `-o' flag) from several object files.
+
+   The following components of MODE-ARGS are treated specially:
+
+`-all-static'
+     If OUTPUT-FILE is a program, then do not link it against any
+     shared libraries at all.  If OUTPUT-FILE is a library, then only
+     create a static library.  In general, this flag cannot be used
+     together with `disable-static' (*note LT_INIT::).
+
+`-avoid-version'
+     Tries to avoid versioning (*note Versioning::) for libraries and
+     modules, i.e. no version information is stored and no symbolic
+     links are created.  If the platform requires versioning, this
+     option has no effect.
+
+`-bindir BINDIR'
+     Pass the absolute name of the directory for installing executable
+     programs (*note Directory Variables: (standards)Directory
+     Variables.).  `libtool' may use this value to install shared
+     libraries there on systems that do not provide for any library
+     hardcoding and use the directory of a program and the `PATH'
+     variable as library search path.  This is typically used for DLLs
+     on Windows or other systems using the PE (Portable Executable)
+     format.  On other systems, `-bindir' is ignored.  The default
+     value used is `LIBDIR/../bin' for libraries installed to `LIBDIR'.
+     You should not use `-bindir' for modules.
+
+`-dlopen FILE'
+     Same as `-dlpreopen FILE', if native dlopening is not supported on
+     the host platform (*note Dlopened modules::) or if the program is
+     linked with `-static', `-static-libtool-libs', or `-all-static'.
+     Otherwise, no effect.  If FILE is `self' Libtool will make sure
+     that the program can `dlopen' itself, either by enabling
+     `-export-dynamic' or by falling back to `-dlpreopen self'.
+
+`-dlpreopen FILE'
+     Link FILE into the output program, and add its symbols to the list
+     of preloaded symbols (*note Dlpreopening::).  If FILE is `self',
+     the symbols of the program itself will be added to preloaded
+     symbol lists.  If FILE is `force' Libtool will make sure that a
+     preloaded symbol list is always _defined_, regardless of whether
+     it's empty or not.
+
+`-export-dynamic'
+     Allow symbols from OUTPUT-FILE to be resolved with `dlsym' (*note
+     Dlopened modules::).
+
+`-export-symbols SYMFILE'
+     Tells the linker to export only the symbols listed in SYMFILE.
+     The symbol file should end in `.sym' and must contain the name of
+     one symbol per line.  This option has no effect on some platforms.
+     By default all symbols are exported.
+
+`-export-symbols-regex REGEX'
+     Same as `-export-symbols', except that only symbols matching the
+     regular expression REGEX are exported.  By default all symbols are
+     exported.
+
+`-LLIBDIR'
+     Search LIBDIR for required libraries that have already been
+     installed.
+
+`-lNAME'
+     OUTPUT-FILE requires the installed library `libNAME'.  This option
+     is required even when OUTPUT-FILE is not an executable.
+
+`-module'
+     Creates a library that can be dlopened (*note Dlopened modules::).
+     This option doesn't work for programs.  Module names don't need to
+     be prefixed with `lib'.  In order to prevent name clashes,
+     however, `libNAME' and `NAME' must not be used at the same time in
+     your package.
+
+`-no-fast-install'
+     Disable fast-install mode for the executable OUTPUT-FILE.  Useful
+     if the program won't necessarily be installed.
+
+`-no-install'
+     Link an executable OUTPUT-FILE that can't be installed and
+     therefore doesn't need a wrapper script on systems that allow
+     hardcoding of library paths.  Useful if the program is only used
+     in the build tree, e.g., for testing or generating other files.
+
+`-no-undefined'
+     Declare that OUTPUT-FILE does not depend on any libraries other
+     than the ones listed on the command line, i.e., after linking, it
+     will not have unresolved symbols.  Some platforms require all
+     symbols in shared libraries to be resolved at library creation
+     (*note Inter-library dependencies::), and using this parameter
+     allows `libtool' to assume that this will not happen.
+
+`-o OUTPUT-FILE'
+     Create OUTPUT-FILE from the specified objects and libraries.
+
+`-objectlist FILE'
+     Use a list of object files found in FILE to specify objects.
+
+`-precious-files-regex REGEX'
+     Prevents removal of files from the temporary output directory whose
+     names match this regular expression.  You might specify `\.bbg?$'
+     to keep those files created with `gcc -ftest-coverage' for example.
+
+`-release RELEASE'
+     Specify that the library was generated by release RELEASE of your
+     package, so that users can easily tell which versions are newer
+     than others.  Be warned that no two releases of your package will
+     be binary compatible if you use this flag.  If you want binary
+     compatibility, use the `-version-info' flag instead (*note
+     Versioning::).
+
+`-rpath LIBDIR'
+     If OUTPUT-FILE is a library, it will eventually be installed in
+     LIBDIR.  If OUTPUT-FILE is a program, add LIBDIR to the run-time
+     path of the program.  On platforms that don't support hardcoding
+     library paths into executables and only search PATH for shared
+     libraries, such as when OUTPUT-FILE is a Windows (or other PE
+     platform) DLL, the `.la' control file will be installed in LIBDIR,
+     but see `-bindir' above for the eventual destination of the `.dll'
+     or other library file itself.
+
+`-R LIBDIR'
+     If OUTPUT-FILE is a program, add LIBDIR to its run-time path.  If
+     OUTPUT-FILE is a library, add `-RLIBDIR' to its DEPENDENCY_LIBS,
+     so that, whenever the library is linked into a program, LIBDIR
+     will be added to its run-time path.
+
+`-shared'
+     If OUTPUT-FILE is a program, then link it against any uninstalled
+     shared libtool libraries (this is the default behavior).  If
+     OUTPUT-FILE is a library, then only create a shared library.  In
+     the latter case, libtool will signal an error if it was configured
+     with `--disable-shared', or if the host does not support shared
+     libraries.
+
+`-shrext SUFFIX'
+     If OUTPUT-FILE is a libtool library, replace the system's standard
+     file name extension for shared libraries with SUFFIX (most systems
+     use `.so' here).  This option is helpful in certain cases where an
+     application requires that shared libraries (typically modules)
+     have an extension other than the default one.  Please note you
+     must supply the full file name extension including any leading dot.
+
+`-static'
+     If OUTPUT-FILE is a program, then do not link it against any
+     uninstalled shared libtool libraries.  If OUTPUT-FILE is a
+     library, then only create a static library.
+
+`-static-libtool-libs'
+     If OUTPUT-FILE is a program, then do not link it against any
+     shared libtool libraries.  If OUTPUT-FILE is a library, then only
+     create a static library.
+
+`-version-info CURRENT[:REVISION[:AGE]]'
+     If OUTPUT-FILE is a libtool library, use interface version
+     information CURRENT, REVISION, and AGE to build it (*note
+     Versioning::).  Do *not* use this flag to specify package release
+     information, rather see the `-release' flag.
+
+`-version-number MAJOR[:MINOR[:REVISION]]'
+     If OUTPUT-FILE is a libtool library, compute interface version
+     information so that the resulting library uses the specified
+     major, minor and revision numbers.  This is designed to permit
+     libtool to be used with existing projects where identical version
+     numbers are already used across operating systems.  New projects
+     should use the `-version-info' flag instead.
+
+`-weak LIBNAME'
+     If OUTPUT-FILE is a libtool library, declare that it provides a
+     weak LIBNAME interface.  This is a hint to libtool that there is
+     no need to append LIBNAME to the list of dependency libraries of
+     OUTPUT-FILE, because linking against OUTPUT-FILE already supplies
+     the same interface (*note Linking with dlopened modules::).
+
+`-Wc,FLAG'
+`-Xcompiler FLAG'
+     Pass a linker-specific flag directly to the compiler.  With `-Wc,',
+     multiple flags may be separated by commas, whereas `-Xcompiler '
+     passes through commas unchanged.
+
+`-Wl,FLAG'
+`-Xlinker FLAG'
+     Pass a linker-specific flag directly to the linker.
+
+`-XCClinker FLAG'
+     Pass a link-specific flag to the compiler driver (`CC') during
+     linking.
+
+   If the OUTPUT-FILE ends in `.la', then a libtool library is created,
+which must be built only from library objects (`.lo' files).  The
+`-rpath' option is required.  In the current implementation, libtool
+libraries may not depend on other uninstalled libtool libraries (*note
+Inter-library dependencies::).
+
+   If the OUTPUT-FILE ends in `.a', then a standard library is created
+using `ar' and possibly `ranlib'.
+
+   If OUTPUT-FILE ends in `.o' or `.lo', then a reloadable object file
+is created from the input files (generally using `ld -r').  This method
+is often called "partial linking".
+
+   Otherwise, an executable program is created.
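+
+   For example (a sketch using hypothetical libtool objects), a shared
+libtool library could be linked with a command such as:
+
+     burger$ libtool --mode=link gcc -o libhello.la hello.lo foo.lo \
+                     -rpath /usr/local/lib -version-info 3:12:1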
+
+
+File: libtool.info,  Node: Execute mode,  Next: Install mode,  Prev: Link mode,  Up: Invoking libtool
+
+4.3 Execute mode
+================
+
+For "execute" mode, the library path is automatically set, then a
+program is executed.
+
+   The first of the MODE-ARGS is treated as a program name, with the
+rest as arguments to that program.
+
+   The following components of MODE-ARGS are treated specially:
+
+`-dlopen FILE'
+     Add the directory containing FILE to the library path.
+
+   This mode sets the library path environment variable according to any
+`-dlopen' flags.
+
+   If any of the ARGS are libtool executable wrappers, then they are
+translated into the name of their corresponding uninstalled binary, and
+any of their required library directories are added to the library path.
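+
+   For example, to debug the uninstalled `hell' program from the demo
+under `gdb' (a sketch; any other program may be substituted):
+
+     burger$ libtool --mode=execute gdb hell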
+
+
+File: libtool.info,  Node: Install mode,  Next: Finish mode,  Prev: Execute mode,  Up: Invoking libtool
+
+4.4 Install mode
+================
+
+In "install" mode, libtool interprets most of the elements of MODE-ARGS
+as an installation command beginning with `cp', or a BSD-compatible
+`install' program.
+
+   The following components of MODE-ARGS are treated specially:
+
+`-inst-prefix-dir INST-PREFIX-DIR'
+     When installing into a temporary staging area, rather than the
+     final `prefix', this argument is used to reflect the temporary
+     path, in much the same way `automake' uses `DESTDIR'.  For
+     instance, if `prefix' is `/usr/local', but INST-PREFIX-DIR is
+     `/tmp', then the object will be installed under `/tmp/usr/local/'.
+     If the installed object is a libtool library, then the internal
+     fields of that library will reflect only `prefix', not
+     INST-PREFIX-DIR:
+
+          # Directory that this library needs to be installed in:
+          libdir='/usr/local/lib'
+
+     not
+
+          # Directory that this library needs to be installed in:
+          libdir='/tmp/usr/local/lib'
+
+     `inst-prefix' is also used to ensure that, if the installed object
+     must be relinked upon installation, it is relinked against
+     the libraries in INST-PREFIX-DIR/`prefix', not `prefix'.
+
+     In truth, this option is not really intended for use when calling
+     libtool directly; it is automatically used when `libtool
+     --mode=install' calls `libtool --mode=relink'.  Libtool does this
+     by analyzing the destination path given in the original `libtool
+     --mode=install' command and comparing it to the expected
+     installation path established during `libtool --mode=link'.
+
+     Thus, end-users need change nothing, and `automake'-style `make
+     install DESTDIR=/tmp' will Just Work(tm) most of the time.  For
+     systems where fast installation cannot be turned on, relinking
+     may be needed.  In this case, a `DESTDIR' install will fail.
+
+     Currently it is not generally possible to install into a temporary
+     staging area that contains needed third-party libraries which are
+     not yet visible at their final location.
+
+   The rest of the MODE-ARGS are interpreted as arguments to the `cp'
+or `install' command.
+
+   The command is run, and any necessary unprivileged post-installation
+commands are also completed.
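+
+   For example (a sketch using the demo's `libhello.la'), an install
+invocation might look like:
+
+     burger# libtool --mode=install install -c libhello.la \
+                     /usr/local/lib/libhello.la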
+
+
+File: libtool.info,  Node: Finish mode,  Next: Uninstall mode,  Prev: Install mode,  Up: Invoking libtool
+
+4.5 Finish mode
+===============
+
+"Finish" mode has two functions.  One is to help system administrators
+install libtool libraries so that they can be located and linked into
+user programs.  To invoke this functionality, pass the name of a library
+directory as MODE-ARG.  Running this command may require superuser
+privileges, and the `--dry-run' option may be useful.
+
+   The second is to facilitate transferring libtool libraries to a
+native compilation environment after they were built in a
+cross-compilation environment.  Cross-compilation environments may rely
+on recent libtool features, and running libtool in finish mode will
+make it easier to work with older versions of libtool.  This task is
+performed whenever the MODE-ARG is a `.la' file.
+
+
+File: libtool.info,  Node: Uninstall mode,  Next: Clean mode,  Prev: Finish mode,  Up: Invoking libtool
+
+4.6 Uninstall mode
+==================
+
+"Uninstall" mode deletes installed libraries, executables and objects.
+
+   The first MODE-ARG is the name of the program to use to delete files
+(typically `/bin/rm').
+
+   The remaining MODE-ARGS are either flags for the deletion program
+(beginning with a `-'), or the names of files to delete.
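+
+   For example (a sketch, assuming `libhello.la' was installed in
+`/usr/local/lib'):
+
+     burger# libtool --mode=uninstall /bin/rm /usr/local/lib/libhello.la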
+
+
+File: libtool.info,  Node: Clean mode,  Prev: Uninstall mode,  Up: Invoking libtool
+
+4.7 Clean mode
+==============
+
+"Clean" mode deletes uninstalled libraries, executables, objects and
+libtool's temporary files associated with them.
+
+   The first MODE-ARG is the name of the program to use to delete files
+(typically `/bin/rm').
+
+   The remaining MODE-ARGS are either flags for the deletion program
+(beginning with a `-'), or the names of files to delete.
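+
+   For example (a sketch), removing an uninstalled program and library
+from the build tree might look like:
+
+     burger$ libtool --mode=clean rm -f hell libhello.la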
+
+
+File: libtool.info,  Node: Integrating libtool,  Next: Other languages,  Prev: Invoking libtool,  Up: Top
+
+5 Integrating libtool with your package
+***************************************
+
+This chapter describes how to integrate libtool with your packages so
+that your users can install hassle-free shared libraries.
+
+   There are several ways in which Libtool may be integrated in your
+package, described in the following sections.  Typically, the Libtool
+macro files as well as `ltmain.sh' are copied into your package using
+`libtoolize' and `aclocal' after setting up the `configure.ac' and
+toplevel `Makefile.am', then `autoconf' adds the needed tests to the
+`configure' script.  These individual steps are often automated with
+`autoreconf'.
+
+   Here is a diagram showing how such a typical Libtool configuration
+works when preparing a package for distribution, assuming that `m4' has
+been chosen as location for additional Autoconf macros, and `build-aux'
+as location for auxiliary build tools (*note The Autoconf Manual:
+(autoconf)Input.):
+
+     libtool.m4 -----.                .--> aclocal.m4 -----.
+     ltoptions.m4 ---+  .-> aclocal* -+                    +--> autoconf*
+     ltversion.m4 ---+--+             `--> [copy in m4/] --+       |
+     ltsugar.m4 -----+  |                    ^             |       \/
+     lt~obsolete.m4 -+  +-> libtoolize* -----'             |    configure
+     [ltdl.m4] ------+  |                                  |
+                        `----------------------------------'
+
+     ltmain.sh -----------> libtoolize* -> [copy in build-aux/]
+
+   During configuration, the `libtool' script is generated either
+through `config.status' or `config.lt':
+
+                  .--> config.status* --.
+     configure* --+                     +--> libtool
+                  `--> [config.lt*] ----'      ^
+                                               |
+     ltmain.sh --------------------------------'
+
+   At `make' run time, `libtool' is then invoked as needed as a wrapper
+around compilers, linkers, install and cleanup programs.
+
+   There are alternative choices for several parts of the setup; for
+example, the Libtool macro files can either be copied or symlinked into
+the package, or copied into `aclocal.m4'.  As another example, an
+external, pre-configured `libtool' script may be used, bypassing most
+of the tests and package-specific setup for Libtool.
+
+* Menu:
+
+* Autoconf macros::             Autoconf macros exported by libtool.
+* Makefile rules::              Writing `Makefile' rules for libtool.
+* Using Automake::              Automatically supporting libtool.
+* Configuring::                 Configuring libtool for a host system.
+* Distributing::                What files to distribute with your package.
+* Static-only libraries::       Sometimes shared libraries are just a pain.
+
+
+File: libtool.info,  Node: Autoconf macros,  Next: Makefile rules,  Up: Integrating libtool
+
+5.1 Autoconf macros exported by libtool
+=======================================
+
+Libtool uses a number of macros to interrogate the host system when it
+is being built, and you can use some of them yourself too.  Although
+there are a great many other macros in the libtool installed m4 files,
+these do not form part of the published interface, and are subject to
+change between releases.
+
+Macros in the `LT_CMD_' namespace check for various shell commands:
+
+ -- Macro: LT_CMD_MAX_LEN
+     Finds the longest command line that can be safely passed to
+     `$SHELL' without being truncated, and stores it in the shell variable
+     `$max_cmd_len'.  It is only an approximate value, but command
+     lines of this length or shorter are guaranteed not to be truncated.
+
+Macros in the `LT_FUNC_' namespace check characteristics of library
+functions:
+
+ -- Macro: LT_FUNC_DLSYM_USCORE
+     `AC_DEFINE' the preprocessor symbol `DLSYM_USCORE' if we have to
+     add an underscore to symbol-names passed in to `dlsym'.
+
+Macros in the `LT_LIB_' namespace check characteristics of system
+libraries:
+
+ -- Macro: LT_LIB_M
+     Set `LIBM' to the math library or libraries required on this
+     machine, if any.
+
+ -- Macro: LT_LIB_DLLOAD
+     This is the macro used by `libltdl' to determine which dlloaders
+     to use on this machine, if any.  Several shell variables are set
+     (and `AC_SUBST'ed) depending on which dlload interfaces are
+     available on this machine.  `LT_DLLOADERS' contains a list of
+     libtool libraries that can be used; `LIBADD_DLOPEN' is set if
+     additional system libraries are required by the `dlopen' loader,
+     and `LIBADD_SHL_LOAD' if additional system libraries are required
+     by the `shl_load' loader.
+     Finally some symbols are set in `config.h' depending on the
+     loaders that are found to work: `HAVE_LIBDL', `HAVE_SHL_LOAD',
+     `HAVE_DYLD', `HAVE_DLD'.
+
+Macros in the `LT_PATH_' namespace search the system for the full path
+to particular system commands:
+
+ -- Macro: LT_PATH_LD
+     Add a `--with-gnu-ld' option to `configure'.  Try to find the path
+     to the linker used by `$CC', and whether it is the GNU linker.
+     The result is stored in the shell variable `$LD', which is
+     `AC_SUBST'ed.
+
+ -- Macro: LT_PATH_NM
+     Try to find a BSD-compatible `nm' or a MS-compatible `dumpbin'
+     command on this machine.  The result is stored in the shell
+     variable `$NM', which is `AC_SUBST'ed.
+
+Macros in the `LT_SYS_' namespace probe for system characteristics:
+
+ -- Macro: LT_SYS_DLOPEN_SELF
+     Tests whether a program can dlopen itself, and then also whether
+     the same program can still dlopen itself when statically linked.
+     Results are stored in the shell variables `$enable_dlopen_self' and
+     `$enable_dlopen_self_static' respectively.
+
+ -- Macro: LT_SYS_DLOPEN_DEPLIBS
+     Define the preprocessor symbol `LTDL_DLOPEN_DEPLIBS' if the OS
+     needs help to load dependent libraries for `dlopen' (or
+     equivalent).
+
+ -- Macro: LT_SYS_DLSEARCH_PATH
+     Define the preprocessor symbol `LT_DLSEARCH_PATH' to the system
+     default library search path.
+
+ -- Macro: LT_SYS_MODULE_EXT
+     Define the preprocessor symbol `LT_MODULE_EXT' to the extension
+     used for runtime loadable modules.  If you use libltdl to open
+     modules, then you can simply use the libtool library extension,
+     `.la'.
+
+ -- Macro: LT_SYS_MODULE_PATH
+     Define the preprocessor symbol `LT_MODULE_PATH_VAR' to the name of
+     the shell environment variable that determines the run-time module
+     search path.
+
+ -- Macro: LT_SYS_SYMBOL_USCORE
+     Set the shell variable `sys_symbol_underscore' to `no' unless the
+     compiler prefixes global symbols with an underscore.
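+
+   As an illustration (a sketch only), a `configure.ac' might invoke a
+few of these published macros after `LT_INIT' and then use the shell
+variables they set in later tests:
+
+     LT_INIT
+     LT_CMD_MAX_LEN
+     LT_LIB_M
+     LT_SYS_DLOPEN_SELF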
+
+
+File: libtool.info,  Node: Makefile rules,  Next: Using Automake,  Prev: Autoconf macros,  Up: Integrating libtool
+
+5.2 Writing `Makefile' rules for libtool
+========================================
+
+Libtool is fully integrated with Automake (*note Introduction:
+(automake)Top.), starting with Automake version 1.2.
+
+   If you want to use libtool in a regular `Makefile' (or
+`Makefile.in'), you are on your own.  If you're not using Automake, and
+you don't know how to incorporate libtool into your package, you need to
+do one of the following:
+
+  1. Download the latest Automake distribution from your nearest GNU
+     mirror, install it, and start using it.
+
+  2. Learn how to write `Makefile' rules by hand.  They're sometimes
+     complex, but if you're clever enough to write rules for compiling
+     your old libraries, then you should be able to figure out new
+     rules for libtool libraries (hint: examine the `Makefile.in' in
+     the `tests/demo' subdirectory of the libtool distribution... note
+     especially that it was automatically generated from the
+     `Makefile.am' by Automake).
+
+
+File: libtool.info,  Node: Using Automake,  Next: Configuring,  Prev: Makefile rules,  Up: Integrating libtool
+
+5.3 Using Automake with libtool
+===============================
+
+Libtool library support is implemented under the `LTLIBRARIES' primary.
+
+   Here are some samples from the Automake `Makefile.am' in the libtool
+distribution's `demo' subdirectory.
+
+   First, to link a program against a libtool library, just use the
+`program_LDADD'(1) variable:
+
+     bin_PROGRAMS = hell hell_static
+
+     # Build hell from main.c and libhello.la
+     hell_SOURCES = main.c
+     hell_LDADD = libhello.la
+
+     # Create a statically linked version of hell.
+     hell_static_SOURCES = main.c
+     hell_static_LDADD = libhello.la
+     hell_static_LDFLAGS = -static
+
+   You may use the `program_LDFLAGS' variable to stuff in any flags you
+want to pass to libtool while linking `program' (such as `-static' to
+avoid linking uninstalled shared libtool libraries).
+
+   Building a libtool library is almost as trivial... note the use of
+`libhello_la_LDFLAGS' to pass the `-version-info' (*note Versioning::)
+option to libtool:
+
+     # Build a libtool library, libhello.la for installation in libdir.
+     lib_LTLIBRARIES = libhello.la
+     libhello_la_SOURCES = hello.c foo.c
+     libhello_la_LDFLAGS = -version-info 3:12:1
+
+   The `-rpath' option is passed automatically by Automake (except for
+libraries listed as `noinst_LTLIBRARIES'), so you should not specify it.
+
+   *Note Building a Shared Library: (automake)A Shared Library, for
+more information.
+
+   ---------- Footnotes ----------
+
+   (1) Since GNU Automake 1.5, the flags `-dlopen' or `-dlpreopen'
+(*note Link mode::) can be employed with the `program_LDADD' variable.
+Unfortunately, older releases didn't accept these flags, so if you are
+stuck with an ancient Automake, we recommend quoting the flag itself,
+and setting `program_DEPENDENCIES' too:
+
+     program_LDADD = "-dlopen" libfoo.la
+     program_DEPENDENCIES = libfoo.la
+
+
+File: libtool.info,  Node: Configuring,  Next: Distributing,  Prev: Using Automake,  Up: Integrating libtool
+
+5.4 Configuring libtool
+=======================
+
+Libtool requires intimate knowledge of your compiler suite and operating
+system in order to be able to create shared libraries and link against
+them properly.  When you install the libtool distribution, a
+system-specific libtool script is installed into your binary directory.
+
+   However, when you distribute libtool with your own packages (*note
+Distributing::), you do not always know the compiler suite and
+operating system that are used to compile your package.
+
+   For this reason, libtool must be "configured" before it can be used.
+This idea should be familiar to anybody who has used a GNU `configure'
+script.  `configure' runs a number of tests for system features, then
+generates the `Makefile's (and possibly a `config.h' header file),
+after which you can run `make' and build the package.
+
+   Libtool adds its own tests to your `configure' script in order to
+generate a libtool script for the installer's host machine.
+
+* Menu:
+
+* LT_INIT::                     Configuring `libtool' in `configure.ac'.
+* Configure notes::             Platform-specific notes for configuration.
+
+
+File: libtool.info,  Node: LT_INIT,  Next: Configure notes,  Up: Configuring
+
+5.4.1 The `LT_INIT' macro
+-------------------------
+
+If you are using GNU Autoconf (or Automake), you should add a call to
+`LT_INIT' to your `configure.ac' file.  This macro adds many new tests
+to the `configure' script so that the generated libtool script will
+understand the characteristics of the host.  It's the most important of
+a number of macros defined by Libtool:
+
+ -- Macro: LT_PREREQ (VERSION)
+     Ensure that a recent enough version of Libtool is being used.  If
+     the version of Libtool used for `LT_INIT' is earlier than VERSION,
+     print an error message to the standard error output and exit with
+     failure (exit status is 63).  For example:
+
+          LT_PREREQ([2.4.2])
+
+ -- Macro: LT_INIT (OPTIONS)
+ -- Macro: AC_PROG_LIBTOOL
+ -- Macro: AM_PROG_LIBTOOL
+     Add support for the `--enable-shared', `--disable-shared',
+     `--enable-static', `--disable-static', `--with-pic', and
+     `--without-pic' `configure' flags.(1)  `AC_PROG_LIBTOOL' and
+     `AM_PROG_LIBTOOL' are deprecated names for older versions of this
+     macro; `autoupdate' will upgrade your `configure.ac' files.
+
+     By default, this macro turns on shared libraries if they are
+     available, and also enables static libraries if they don't
+     conflict with the shared libraries.  You can modify these defaults
+     by passing either `disable-shared' or `disable-static' in the
+     option list to `LT_INIT', or using `AC_DISABLE_SHARED' or
+     `AC_DISABLE_STATIC'.
+
+          # Turn off shared libraries during beta-testing, since they
+          # make the build process take too long.
+          LT_INIT([disable-shared])
+
+     The user may specify modified forms of the configure flags
+     `--enable-shared' and `--enable-static' to choose whether shared
+     or static libraries are built based on the name of the package.
+     For example, to have shared `bfd' and `gdb' libraries built, but
+     not shared `libg++', you can run all three `configure' scripts as
+     follows:
+
+          trick$ ./configure --enable-shared=bfd,gdb
+
+     In general, specifying `--enable-shared=PKGS' is the same as
+     configuring with `--enable-shared' every package named in the
+     comma-separated PKGS list, and every other package with
+     `--disable-shared'.  The `--enable-static=PKGS' flag behaves
+     similarly, but it uses `--enable-static' and `--disable-static'.
+     The same applies to the `--enable-fast-install=PKGS' flag, which
+     uses `--enable-fast-install' and `--disable-fast-install'.
+
+     The package name `default' matches any packages that have not set
+     their name in the `PACKAGE' environment variable.
+
+     The `--with-pic' and `--without-pic' configure flags can be used
+     to specify whether or not `libtool' uses PIC objects.  By default,
+     `libtool' uses PIC objects for shared libraries and non-PIC
+     objects for static libraries.  The `--with-pic' option also
+     accepts a comma-separated list of package names.  Specifying
+     `--with-pic=PKGS' is the same as configuring every package in PKGS
+     with `--with-pic' and every other package with the default
+     configuration.  The package name `default' is treated the same as
+     for `--enable-shared' and `--enable-static'.
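+
+     For instance, with hypothetical package names `foo' and `baz':
+
+          trick$ ./configure --with-pic=foo,baz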
+
+     This macro also sets the shell variable `LIBTOOL_DEPS', which you
+     can use to automatically update the libtool script if it becomes
+     out-of-date.  In order to do that, add to your `configure.ac':
+
+          LT_INIT
+          AC_SUBST([LIBTOOL_DEPS])
+
+     and, to `Makefile.in' or `Makefile.am':
+
+          LIBTOOL_DEPS = @LIBTOOL_DEPS@
+          libtool: $(LIBTOOL_DEPS)
+                  $(SHELL) ./config.status libtool
+
+     If you are using GNU Automake, you can omit the assignment, as
+     Automake will take care of it.  You'll obviously have to create
+     some dependency on `libtool'.
+
+     Aside from `disable-static' and `disable-shared', there are other
+     options that you can pass to `LT_INIT' to modify its behaviour.
+     Here is a full list:
+
+    `dlopen'
+          Enable checking for dlopen support.  This option should be
+          used if the package makes use of the `-dlopen' and
+          `-dlpreopen' libtool flags, otherwise libtool will assume
+          that the system does not support dlopening.
+
+    `win32-dll'
+          This option should be used if the package has been ported to
+          build clean dlls on win32 platforms.  Usually this means that
+          any library data items are exported with
+          `__declspec(dllexport)' and imported with
+          `__declspec(dllimport)'.  If this macro is not used, libtool
+          will assume that the package libraries are not dll clean and
+          will build only static libraries on win32 hosts.
+
+          Provision must be made to pass `-no-undefined' to `libtool'
+          in link mode from the package `Makefile'.  Naturally, if you
+          pass `-no-undefined', you must ensure that all the library
+          symbols *really are* defined at link time!
+
+    `disable-fast-install'
+          Change the default behaviour for `LT_INIT' to disable
+          optimization for fast installation.  The user may still
+          override this default, depending on platform support, by
+          specifying `--enable-fast-install' to `configure'.
+
+    `shared'
+          Change the default behaviour for `LT_INIT' to enable shared
+          libraries.  This is the default on all systems where Libtool
+          knows how to create shared libraries.  The user may still
+          override this default by specifying `--disable-shared' to
+          `configure'.
+
+    `disable-shared'
+          Change the default behaviour for `LT_INIT' to disable shared
+          libraries.  The user may still override this default by
+          specifying `--enable-shared' to `configure'.
+
+    `static'
+          Change the default behaviour for `LT_INIT' to enable static
+          libraries.  This is the default on all systems where shared
+          libraries have been disabled for some reason, and on most
+          systems where shared libraries have been enabled.  If shared
+          libraries are enabled, the user may still override this
+          default by specifying `--disable-static' to `configure'.
+
+    `disable-static'
+          Change the default behaviour for `LT_INIT' to disable static
+          libraries.  The user may still override this default by
+          specifying `--enable-static' to `configure'.
+
+    `pic-only'
+          Change the default behaviour for `libtool' to try to use only
+          PIC objects.  The user may still override this default by
+          specifying `--without-pic' to `configure'.
+
+    `no-pic'
+          Change the default behaviour of `libtool' to try to use only
+          non-PIC objects.  The user may still override this default by
+          specifying `--with-pic' to `configure'.
+
+
+
+ -- Macro: LT_LANG (LANGUAGE)
+     Enable `libtool' support for the given language if it has not
+     already been enabled.  Languages accepted are "C++", "Fortran 77",
+     "Java", "Go", and "Windows Resource".
+
+     If Autoconf language support macros such as `AC_PROG_CXX' are used
+     in your `configure.ac', Libtool language support will automatically
+     be enabled.
+
+     Conversely using `LT_LANG' to enable language support for Libtool
+     will automatically enable Autoconf language support as well.
+
+     Both of the following examples are therefore valid ways of adding
+     C++ language support to Libtool.
+
+          LT_INIT
+          LT_LANG([C++])
+
+          LT_INIT
+          AC_PROG_CXX
+
+
+ -- Macro: AC_LIBTOOL_DLOPEN
+     This macro is deprecated, the `dlopen' option to `LT_INIT' should
+     be used instead.
+
+ -- Macro: AC_LIBTOOL_WIN32_DLL
+     This macro is deprecated, the `win32-dll' option to `LT_INIT'
+     should be used instead.
+
+ -- Macro: AC_DISABLE_FAST_INSTALL
+     This macro is deprecated, the `disable-fast-install' option to
+     `LT_INIT' should be used instead.
+
+ -- Macro: AC_DISABLE_SHARED
+ -- Macro: AM_DISABLE_SHARED
+     Change the default behaviour for `LT_INIT' to disable shared
+     libraries.  The user may still override this default by specifying
+     `--enable-shared'.  The option `disable-shared' to `LT_INIT' is a
+     shorthand for this.  `AM_DISABLE_SHARED' is a deprecated alias for
+     `AC_DISABLE_SHARED'.
+
+ -- Macro: AC_ENABLE_SHARED
+ -- Macro: AM_ENABLE_SHARED
+     Change the default behaviour for `LT_INIT' to enable shared
+     libraries.  This is the default on all systems where Libtool knows
+     how to create shared libraries.  The user may still override this
+     default by specifying `--disable-shared'.  The option `shared' to
+     `LT_INIT' is a shorthand for this.  `AM_ENABLE_SHARED' is a
+     deprecated alias for `AC_ENABLE_SHARED'.
+
+ -- Macro: AC_DISABLE_STATIC
+ -- Macro: AM_DISABLE_STATIC
+     Change the default behaviour for `LT_INIT' to disable static
+     libraries.  The user may still override this default by specifying
+     `--enable-static'.  The option `disable-static' to `LT_INIT' is a
+     shorthand for this.  `AM_DISABLE_STATIC' is a deprecated alias for
+     `AC_DISABLE_STATIC'.
+
+ -- Macro: AC_ENABLE_STATIC
+ -- Macro: AM_ENABLE_STATIC
+     Change the default behaviour for `LT_INIT' to enable static
+     libraries.  This is the default on all systems where shared
+     libraries have been disabled for some reason, and on most systems
+     where shared libraries have been enabled.  If shared libraries are
+     enabled, the user may still override this default by specifying
+     `--disable-static'.  The option `static' to `LT_INIT' is a
+     shorthand for this.  `AM_ENABLE_STATIC' is a deprecated alias for
+     `AC_ENABLE_STATIC'.
+
+   The tests in `LT_INIT' also recognize the following environment
+variables:
+
+ -- Variable: CC
+     The C compiler that will be used by the generated `libtool'.  If
+     this is not set, `LT_INIT' will look for `gcc' or `cc'.
+
+ -- Variable: CFLAGS
+     Compiler flags used to generate standard object files.  If this is
+     not set, `LT_INIT' will not use any such flags.  It affects only
+     the way `LT_INIT' runs tests, not the produced `libtool'.
+
+ -- Variable: CPPFLAGS
+     C preprocessor flags.  If this is not set, `LT_INIT' will not use
+     any such flags.  It affects only the way `LT_INIT' runs tests, not
+     the produced `libtool'.
+
+ -- Variable: LD
+     The system linker to use (if the generated `libtool' requires one).
+     If this is not set, `LT_INIT' will try to find out which linker
+     is used by `CC'.
+
+ -- Variable: LDFLAGS
+     The flags to be used by `libtool' when it links a program.  If
+     this is not set, `LT_INIT' will not use any such flags.  It
+     affects only the way `LT_INIT' runs tests, not the produced
+     `libtool'.
+
+ -- Variable: LIBS
+     The libraries to be used by `LT_INIT' when it links a program.  If
+     this is not set, `LT_INIT' will not use any such libraries.  It
+     affects only the way `LT_INIT' runs tests, not the produced
+     `libtool'.
+
+ -- Variable: NM
+     Program to use rather than checking for `nm'.
+
+ -- Variable: RANLIB
+     Program to use rather than checking for `ranlib'.
+
+ -- Variable: LN_S
+     A command that creates a link to a program: a soft link if
+     possible, a hard link otherwise.  `LT_INIT' will check for a
+     suitable program if this variable is not set.
+
+ -- Variable: DLLTOOL
+     Program to use rather than checking for `dlltool'.  Only meaningful
+     for Cygwin/MS-Windows.
+
+ -- Variable: OBJDUMP
+     Program to use rather than checking for `objdump'.  Only meaningful
+     for Cygwin/MS-Windows.
+
+ -- Variable: AS
+     Program to use rather than checking for `as'.  Only used on
+     Cygwin/MS-Windows at the moment.
+
+ -- Variable: MANIFEST_TOOL
+     Program to use rather than checking for `mt', the Manifest Tool.
+     Only used on Cygwin/MS-Windows at the moment.
+
+   With 1.3 era libtool, if you wanted to know any details of what
+libtool had discovered about your architecture and environment, you had
+to run the script with `--config' and grep through the results.  This
+idiom was supported up to and including 1.5.x era libtool, where it was
+possible to call the generated libtool script from `configure.ac' as
+soon as `LT_INIT' had completed.  However, one of the features of
+libtool 1.4 was that the libtool configuration was migrated out of a
+separate `ltconfig' file, and added to the `LT_INIT' macro (nee
+`AC_PROG_LIBTOOL'), so the results of the configuration tests were
+available directly to code in `configure.ac', rendering the call out to
+the generated libtool script obsolete.
+
+   Starting with libtool 2.0, the multipass generation of the libtool
+script has been consolidated into a single `config.status' pass, which
+happens after all the code in `configure.ac' has completed.  The
+implication of this is that the libtool script does not exist during
+execution of code from `configure.ac', and so obviously it cannot be
+called for `--config' details anymore.  If you are upgrading projects
+that used this idiom to libtool 2.0 or newer, you should replace those
+calls with direct references to the equivalent Autoconf shell variables
+that are set by the configure time tests before being passed to
+`config.status' for inclusion in the generated libtool script.
+
+ -- Macro: LT_OUTPUT
+     By default, the configured `libtool' script is generated by the
+     call to `AC_OUTPUT', and there is rarely any need to use
+     `libtool' from `configure'.  However, sometimes it is necessary to
+     run configure time compile and link tests using `libtool'.  You
+     can add `LT_OUTPUT' to your `configure.ac' any time after
+     `LT_INIT' and any `LT_LANG' calls; that done, `libtool' will be
+     created by a specially generated `config.lt' file, and available
+     for use in later tests.
+
+     Also, when `LT_OUTPUT' is used, for backwards compatibility with
+     Automake regeneration rules, `config.status' will call `config.lt'
+     to regenerate `libtool', rather than generating the file itself.
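+
+     A minimal sketch of this arrangement in `configure.ac' (the
+     comment marks where configure-time `libtool' tests could run):
+
+          LT_INIT
+          LT_OUTPUT
+          # ./libtool now exists and can be used in later tests.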
+
+   When you invoke the `libtoolize' program (*note Invoking
+libtoolize::), it will tell you where to find a definition of
+`LT_INIT'.  If you use Automake, the `aclocal' program will
+automatically add `LT_INIT' support to your `configure' script when it
+sees the invocation of `LT_INIT' in `configure.ac'.
+
+   Because of these changes, and the runtime version compatibility
+checks Libtool now executes, we now advise *against* including a copy of
+`libtool.m4' (and brethren) in `acinclude.m4'.  Instead, you should set
+your project macro directory with `AC_CONFIG_MACRO_DIR'.  When you
+`libtoolize' your project, a copy of the relevant macro definitions
+will be placed in your `AC_CONFIG_MACRO_DIR', where `aclocal' can
+reference them directly from `aclocal.m4'.
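+
+   For example (a sketch assuming the conventional `m4' subdirectory),
+`configure.ac' would contain:
+
+     AC_CONFIG_MACRO_DIR([m4])
+
+and `Makefile.am' would pass the same directory to `aclocal':
+
+     ACLOCAL_AMFLAGS = -I m4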
+
+   ---------- Footnotes ----------
+
+   (1) `LT_INIT' requires that you define the `Makefile' variable
+`top_builddir' in your `Makefile.in'.  Automake does this
+automatically, but Autoconf users should set it to the relative path to
+the top of your build directory (`../..', for example).
+
+
+File: libtool.info,  Node: Configure notes,  Prev: LT_INIT,  Up: Configuring
+
+5.4.2 Platform-specific configuration notes
+-------------------------------------------
+
+While Libtool tries to hide as many platform-specific features as
+possible, some have to be taken into account when configuring either
+the Libtool package or a libtoolized package.
+
+   * You currently need GNU make to build the Libtool package itself.
+
+   * On AIX there are two different styles of shared linking, one in
+     which symbols are bound at link-time and one in which symbols are
+     bound at runtime only, similar to ELF.  In case of doubt use
+     `LDFLAGS=-Wl,-brtl' for the latter style.
+
+   * On AIX, native tools are to be preferred over binutils; especially
+     for C++ code, if using the AIX Toolbox GCC 4.0 and binutils,
+     configure with `AR=/usr/bin/ar LD=/usr/bin/ld NM='/usr/bin/nm -B''.
+
+   * On AIX, the `/bin/sh' is very slow due to its inefficient handling
+     of here-documents.  A modern shell is preferable:
+          CONFIG_SHELL=/bin/bash; export CONFIG_SHELL
+          $CONFIG_SHELL ./configure [...]
+
+   * For C++ code with templates, it may be necessary to specify the
+     way the compiler will generate the instantiations.  For Portland
+     pgCC version 5, use `CXX='pgCC --one_instantiation_per_object'' and
+     avoid parallel `make'.
+
+   * On Darwin, for C++ code with templates you need two level shared
+     libraries.  Libtool builds these by default if
+     `MACOSX_DEPLOYMENT_TARGET' is set to 10.3 or later at `configure'
+     time.  See `rdar://problem/4135857' for more information on this
+     issue.
+
+   * The default shell on UNICOS 9, a ksh 88e variant, is too buggy to
+     correctly execute the libtool script.  Users are advised to
+     install a modern shell such as GNU bash.
+
+   * Some HP-UX `sed' programs are horribly broken, and cannot handle
+     libtool's requirements, so users may report unusual problems.
+     There is no workaround except to install a working `sed' (such as
+     GNU sed) on these systems.
+
+   * The vendor-distributed NCR MP-RAS `cc' program emits a copyright
+     notice on standard error that confuses tests on the size of
+     `conftest.err'.  The workaround is to specify `CC' when running
+     configure, as in `CC='cc -Hnocopyr''.
+
+   * Any earlier DG/UX system with ELF executables, such as R3.10 or
+     R4.10, is also likely to work, but hasn't been explicitly tested.
+
+   * On Reliant Unix libtool has only been tested with the Siemens
+     C-compiler and an old version of `gcc' provided by Marco Walther.
+
+   * `libtool.m4', `ltdl.m4' and the `configure.ac' files are marked to
+     use autoconf-mode, which is distributed with GNU Emacs 21,
+     Autoconf itself, and all recent releases of XEmacs.
+
+   * When building for multilib targets on some GNU/Linux systems,
+     `libtool' sometimes guesses the wrong paths that the linker and
+     dynamic linker search by default.  If this occurs, you may override
+     libtool's guesses at `configure' time by setting the `autoconf'
+     cache variables `lt_cv_sys_lib_search_path_spec' and
+     `lt_cv_sys_lib_dlsearch_path_spec' respectively to the correct
+     search paths.
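+
+   For the last item above, the override can be given on the
+`configure' command line (a sketch with hypothetical multilib paths):
+
+     ./configure \
+       lt_cv_sys_lib_search_path_spec="/usr/lib64 /lib64" \
+       lt_cv_sys_lib_dlsearch_path_spec="/usr/lib64 /lib64"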
+
+
+
+File: libtool.info,  Node: Distributing,  Next: Static-only libraries,  Prev: Configuring,  Up: Integrating libtool
+
+5.5 Including libtool in your package
+=====================================
+
+In order to use libtool, you need to include the following files with
+your package:
+
+`config.guess'
+     Attempt to guess a canonical system name.
+
+`config.sub'
+     Canonical system name validation subroutine script.
+
+`install-sh'
+     BSD-compatible `install' replacement script.
+
+`ltmain.sh'
+     A generic script implementing basic libtool functionality.
+
+   Note that the libtool script itself should _not_ be included with
+your package.  *Note Configuring::.
+
+   You should use the `libtoolize' program, rather than manually
+copying these files into your package.
+
+* Menu:
+
+* Invoking libtoolize::         `libtoolize' command line options.
+* Autoconf and LTLIBOBJS::      Autoconf automates LTLIBOBJS generation.
+
+
+File: libtool.info,  Node: Invoking libtoolize,  Next: Autoconf and LTLIBOBJS,  Up: Distributing
+
+5.5.1 Invoking `libtoolize'
+---------------------------
+
+The `libtoolize' program provides a standard way to add libtool support
+to your package.  In the future, it may implement better usage
+checking, or other features to make libtool even easier to use.
+
+   The `libtoolize' program has the following synopsis:
+
+     libtoolize [OPTION]...
+
+and accepts the following options:
+
+`--copy'
+`-c'
+     Copy files from the libtool data directory rather than creating
+     symlinks.
+
+`--debug'
+     Dump a trace of shell script execution to standard output.  This
+     produces a lot of output, so you may wish to pipe it to `less' (or
+     `more') or redirect to a file.
+
+`--dry-run'
+`-n'
+     Don't run any commands that modify the file system, just print them
+     out.
+
+`--force'
+`-f'
+     Replace existing libtool files.  By default, `libtoolize' won't
+     overwrite existing files.
+
+`--help'
+     Display a help message and exit.
+
+`--ltdl [TARGET-DIRECTORY-NAME]'
+     Install libltdl in the TARGET-DIRECTORY-NAME subdirectory of your
+     package.  Normally, the directory is extracted from the argument
+     to `LT_CONFIG_LTDL_DIR' in `configure.ac', though you can also
+     specify a subdirectory name here, if for example you are not
+     using Autoconf.  If `libtoolize' can't determine the target
+     directory, `libltdl' is used as the default (see the usage
+     example following this list of options).
+
+`--no-warn'
+     Normally, Libtoolize tries to diagnose use of deprecated libtool
+     macros and other stylistic issues.  If you are deliberately using
+     outdated calling conventions, this option prevents Libtoolize from
+     explaining how to update your project's Libtool conventions.
+
+`--nonrecursive'
+     If passed in conjunction with `--ltdl', this option will cause the
+     `libltdl' installed by `libtoolize' to be set up for use with a
+     non-recursive `automake' build.  To make use of it, you will need
+     to add the following to the `Makefile.am' of the parent project:
+
+          ## libltdl/Makefile.inc appends to the following variables
+          ## so we set them here before including it:
+          BUILT_SOURCES   =
+
+          AM_CPPFLAGS        =
+          AM_LDFLAGS         =
+
+          include_HEADERS    =
+          noinst_LTLIBRARIES =
+          lib_LTLIBRARIES    =
+          EXTRA_LTLIBRARIES  =
+
+          EXTRA_DIST   =
+
+          CLEANFILES   =
+          MOSTLYCLEANFILES   =
+
+          include libltdl/Makefile.inc
+
+
+`--quiet'
+`-q'
+     Work silently.  `libtoolize --quiet' is used by GNU Automake to
+     add libtool files to your package if necessary.
+
+`--recursive'
+     If passed in conjunction with `--ltdl', this option will cause the
+     `libltdl' installed by `libtoolize' to be set up for use with a
+     recursive `automake' build.  To make use of it, you will need to
+     adjust the parent project's `configure.ac':
+
+          AC_CONFIG_FILES([libltdl/Makefile])
+
+     and `Makefile.am':
+
+          SUBDIRS += libltdl
+
+`--subproject'
+     If passed in conjunction with `--ltdl', this option will cause the
+     `libltdl' installed by `libtoolize' to be set up for independent
+     configuration and compilation as a self-contained subproject.  To
+     make use of it, you should arrange for your build to call
+     `libltdl/configure', and then run `make' in the `libltdl'
+     directory (or the subdirectory you put libltdl into).  If your
+     project uses Autoconf, you can use the supplied `LT_WITH_LTDL'
+     macro, or else call `AC_CONFIG_SUBDIRS' directly.
+
+     Previous releases of `libltdl' built exclusively in this mode, but
+     now it is the default mode both for backwards compatibility and
+     because, for example, it is suitable for use in projects that wish
+     to use `libltdl', but not use the Autotools for their own build
+     process.
+
+`--verbose'
+`-v'
+     Work noisily!  Give a blow by blow account of what `libtoolize' is
+     doing.
+
+`--version'
+     Print `libtoolize' version information and exit.
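+
+   For example, to add libtool support to a package and embed a copy of
+`libltdl' in its default `libltdl' subdirectory (the usage example
+referred to under `--ltdl' above; the invocation is only a sketch), you
+might run:
+
+     trick$ libtoolize --copy --ltdl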
+
+   Sometimes it can be useful to pass options to `libtoolize' even
+though it is called by another program, such as `autoreconf'.  A
+limited number of options are parsed from the environment variable
+`LIBTOOLIZE_OPTIONS': currently `--debug', `--no-warn', `--quiet' and
+`--verbose'.  Multiple options passed in `LIBTOOLIZE_OPTIONS' must be
+separated with a space, comma or a colon.
+
+   By default, a warning is issued for unknown options found in
+`LIBTOOLIZE_OPTIONS', unless the first such option is `--no-warn'.
+Note that while `libtoolize' has always quit on receipt of an unknown
+option at the command line, unknown options in `LIBTOOLIZE_OPTIONS'
+never make it stop: this and all previous releases of `libtoolize'
+continue unabated whatever their content (modulo some possible warning
+messages).
+
+     trick$ LIBTOOLIZE_OPTIONS=--no-warn,--quiet autoreconf --install
+
+   If `libtoolize' detects an explicit call to `AC_CONFIG_MACRO_DIR'
+(*note The Autoconf Manual: (autoconf)Input.) in your `configure.ac',
+it will put the Libtool macros in the specified directory.
+
+   In the future other Autotools will automatically check the contents
+of `AC_CONFIG_MACRO_DIR', but at the moment it is more portable to add
+the macro directory to `ACLOCAL_AMFLAGS' in `Makefile.am', which is
+where the tools currently look.  If `libtoolize' doesn't see
+`AC_CONFIG_MACRO_DIR', it too will honour the first `-I' argument in
+`ACLOCAL_AMFLAGS' when choosing a directory to store libtool
+configuration macros in.  It is perfectly sensible to use both
+`AC_CONFIG_MACRO_DIR' and `ACLOCAL_AMFLAGS', as long as they are kept
+in synchronisation.
+
+     ACLOCAL_AMFLAGS = -I m4
+
+   When you bootstrap your project with `aclocal', you will need to
+pass the same macro directory explicitly with `aclocal''s `-I' flag:
+
+     trick$ aclocal -I m4
+
+   If `libtoolize' detects an explicit call to `AC_CONFIG_AUX_DIR'
+(*note The Autoconf Manual: (autoconf)Input.) in your `configure.ac', it
+will put the other support files in the specified directory.  Otherwise
+they too end up in the project root directory.
+
+   Unless `--no-warn' is passed, `libtoolize' displays hints for adding
+libtool support to your package, as well.
+
+
+File: libtool.info,  Node: Autoconf and LTLIBOBJS,  Prev: Invoking libtoolize,  Up: Distributing
+
+5.5.2 Autoconf and `LTLIBOBJS'
+------------------------------
+
+People used to add code like the following to their `configure.ac':
+
+     LTLIBOBJS=`echo "$LIBOBJS" | sed 's/\.[^.]* /.lo /g;s/\.[^.]*$/.lo/'`
+     AC_SUBST([LTLIBOBJS])
+
+This is no longer required (since Autoconf 2.54).  Moreover, the
+snippet above doesn't take Automake's deansification support into
+account either, so it doesn't work correctly even with ancient
+Autoconfs!
+
+   Provided you are using a recent (2.54 or better) incarnation of
+Autoconf, the call to `AC_OUTPUT' takes care of setting `LTLIBOBJS' up
+correctly, so you can simply delete such snippets from your
+`configure.ac' if you had them.
+
+
+File: libtool.info,  Node: Static-only libraries,  Prev: Distributing,  Up: Integrating libtool
+
+5.6 Static-only libraries
+=========================
+
+When you are developing a package, it is often worthwhile to configure
+your package with the `--disable-shared' flag, or to override the
+defaults for `LT_INIT' by using the `disable-shared' option (*note The
+`LT_INIT' macro: LT_INIT.).  This prevents libtool from building shared
+libraries, which has several advantages:
+
+   * compilation is twice as fast, which can speed up your development
+     cycle,
+
+   * debugging is easier because you don't need to deal with any
+     complexities added by shared libraries, and
+
+   * you can see how libtool behaves on static-only platforms.
+
+   You may want to put a small note in your package `README' to let
+other developers know that `--disable-shared' can save them time.  The
+following example note is taken from the GIMP(1) distribution `README':
+
+     The GIMP uses GNU Libtool in order to build shared libraries on a
+     variety of systems.  While this is very nice for making usable
+     binaries, it can be a pain when trying to debug a program.  For that
+     reason, compilation of shared libraries can be turned off by
+     specifying the `--disable-shared' option to `configure'.
+
+   ---------- Footnotes ----------
+
+   (1) GNU Image Manipulation Program, for those who haven't taken the
+plunge.  See `http://www.gimp.org/'.
+
+
+File: libtool.info,  Node: Other languages,  Next: Versioning,  Prev: Integrating libtool,  Up: Top
+
+6 Using libtool with other languages
+************************************
+
+Libtool was first implemented in order to add support for writing shared
+libraries in the C language.  However, over time, libtool is being
+integrated with other languages, so that programmers are free to reap
+the benefits of shared libraries in their favorite programming language.
+
+   This chapter describes how libtool interacts with other languages,
+and what special considerations you need to make if you do not use C.
+
+* Menu:
+
+* C++ libraries::               Writing libraries for C++
+* Tags::                        Tags
+
+
+File: libtool.info,  Node: C++ libraries,  Next: Tags,  Up: Other languages
+
+6.1 Writing libraries for C++
+=============================
+
+Creating libraries of C++ code should be a fairly straightforward
+process, because its object files differ from C ones in only three ways:
+
+  1. Because of name mangling, C++ libraries are only usable by the C++
+     compiler that created them.  This decision was made by the
+     designers of C++ in order to protect users from conflicting
+     implementations of features such as constructors, exception
+     handling, and RTTI.
+
+  2. On some systems, the C++ compiler must take special actions for the
+     dynamic linker to run dynamic (i.e., run-time) initializers.  This
+     means that we should not call `ld' directly to link such
+     libraries, and we should use the C++ compiler instead.
+
+  3. C++ compilers will link some Standard C++ library in by default,
+     but libtool does not know which these libraries are, so it cannot
+     even run the inter-library dependence analyzer to check how to
+     link them in.  Therefore, running `ld' directly to link a C++
+     program or library is doomed to fail.
+
+   Because of these three issues, Libtool has been designed to always
+use the C++ compiler to compile and link C++ programs and libraries.  In
+some instances the `main()' function of a program must also be compiled
+with the C++ compiler for static C++ objects to be properly initialized.
+
+
+File: libtool.info,  Node: Tags,  Prev: C++ libraries,  Up: Other languages
+
+6.2 Tags
+========
+
+Libtool supports multiple languages through the use of tags.
+Technically a tag corresponds to a set of configuration variables
+associated with a language.  These variables tell `libtool' how it
+should create objects and libraries for each language.
+
+   Tags are defined at `configure'-time for each language activated in
+the package (see `LT_LANG' in *note LT_INIT::).  Here is the
+correspondence between language names and tag names.
+
+Language name      Tag name
+C                  CC
+C++                CXX
+Java               GCJ
+Fortran 77         F77
+Fortran            FC
+Go                 GO
+Windows Resource   RC
+
+   `libtool' tries to automatically infer which tag to use from the
+compiler command being used to compile or link.  If it can't infer a
+tag, then it defaults to the configuration for the `C' language.
+
+   The tag can also be specified using `libtool''s `--tag=TAG' option
+(*note Invoking libtool::).  It is a good idea to do so in `Makefile'
+rules, because that will allow users to substitute the compiler without
+relying on `libtool' inference heuristics.  When no tag is specified,
+`libtool' will default to `CC'; this tag always exists.
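+
+   For instance, a C++ libtool library might be compiled and linked
+with an explicit tag like this (a sketch only; the file and library
+names are illustrative):
+
+     burger$ libtool --tag=CXX --mode=compile g++ -g -O -c foo.cpp
+     burger$ libtool --tag=CXX --mode=link g++ -g -O -o libfoo.la foo.lo \
+                     -rpath /usr/local/lib
+     burger$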
+
+   Finally, the set of tags available in a particular project can be
+retrieved by tracing for the `LT_SUPPORTED_TAG' macro (*note Trace
+interface::).
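+
+   For instance, assuming `LT_SUPPORTED_TAG' receives the tag name as
+its first argument, the enabled tags might be listed with Autoconf's
+trace facility; for a package enabling C and C++ this sketch would
+print something like:
+
+     trick$ autoconf --trace='LT_SUPPORTED_TAG:$1'
+     CC
+     CXX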
+
+
+File: libtool.info,  Node: Versioning,  Next: Library tips,  Prev: Other languages,  Up: Top
+
+7 Library interface versions
+****************************
+
+The most difficult issue introduced by shared libraries is that of
+creating and resolving runtime dependencies.  Dependencies on programs
+and libraries are often described in terms of a single name, such as
+`sed'.  So, one may say "libtool depends on sed," and that is good
+enough for most purposes.
+
+   However, when an interface changes regularly, we need to be more
+specific: "Gnus 5.1 requires Emacs 19.28 or above."  Here, the
+description of an interface consists of a name, and a "version number."
+
+   Even that sort of description is not accurate enough for some
+purposes.  What if Emacs 20 changes enough to break Gnus 5.1?
+
+   The same problem exists in shared libraries: we require a formal
+version system to describe the sorts of dependencies that programs have
+on shared libraries, so that the dynamic linker can guarantee that
+programs are linked only against libraries that provide the interface
+they require.
+
+* Menu:
+
+* Interfaces::                  What are library interfaces?
+* Libtool versioning::          Libtool's versioning system.
+* Updating version info::       Changing version information before releases.
+* Release numbers::             Breaking binary compatibility for aesthetics.
+
+
+File: libtool.info,  Node: Interfaces,  Next: Libtool versioning,  Up: Versioning
+
+7.1 What are library interfaces?
+================================
+
+Interfaces for libraries may be any of the following (and more):
+
+   * global variables: both names and types
+
+   * global functions: argument types and number, return types, and
+     function names
+
+   * standard input, standard output, standard error, and file formats
+
+   * sockets, pipes, and other inter-process communication protocol
+     formats
+
+   Note that static functions do not count as interfaces, because they
+are not directly available to the user of the library.
+
+
+File: libtool.info,  Node: Libtool versioning,  Next: Updating version info,  Prev: Interfaces,  Up: Versioning
+
+7.2 Libtool's versioning system
+===============================
+
+Libtool has its own formal versioning system.  It is not as flexible as
+some, but it is definitely the simplest of the more powerful versioning
+systems.
+
+   Think of a library as exporting several sets of interfaces,
+arbitrarily represented by integers.  When a program is linked against
+a library, it may use any subset of those interfaces.
+
+   Libtool's description of the interfaces that a program uses is
+simple: it encodes the least and the greatest interface numbers in the
+resulting binary (FIRST-INTERFACE, LAST-INTERFACE).
+
+   The dynamic linker is guaranteed that if a library supports _every_
+interface number between FIRST-INTERFACE and LAST-INTERFACE, then the
+program can be relinked against that library.
+
+   Note that this can cause problems because libtool's compatibility
+requirements are actually stricter than is necessary.
+
+   Say `libhello' supports interfaces 5, 16, 17, 18, and 19, and that
+libtool is used to link `test' against `libhello'.
+
+   Libtool encodes the numbers 5 and 19 in `test', and the dynamic
+linker will only link `test' against libraries that support _every_
+interface between 5 and 19.  So, the dynamic linker refuses to link
+`test' against `libhello'!
+
+   In order to eliminate this problem, libtool only allows libraries to
+declare consecutive interface numbers.  So, `libhello' can declare at
+most that it supports interfaces 16 through 19.  Then, the dynamic
+linker will link `test' against `libhello'.
+
+   So, libtool library versions are described by three integers:
+
+CURRENT
+     The most recent interface number that this library implements.
+
+REVISION
+     The implementation number of the CURRENT interface.
+
+AGE
+     The difference between the newest and oldest interfaces that this
+     library implements.  In other words, the library implements all the
+     interface numbers in the range from number `CURRENT - AGE' to
+     `CURRENT'.
+
+   If two libraries have identical CURRENT and AGE numbers, then the
+dynamic linker chooses the library with the greater REVISION number.
+
+
+File: libtool.info,  Node: Updating version info,  Next: Release numbers,  Prev: Libtool versioning,  Up: Versioning
+
+7.3 Updating library version information
+========================================
+
+If you want to use libtool's versioning system, then you must specify
+the version information to libtool using the `-version-info' flag
+during link mode (*note Link mode::).
+
+   This flag accepts an argument of the form
+`CURRENT[:REVISION[:AGE]]'.  So, passing `-version-info 3:12:1' sets
+CURRENT to 3, REVISION to 12, and AGE to 1.
+
+   If either REVISION or AGE is omitted, it defaults to 0.  Also note
+that AGE must be less than or equal to the CURRENT interface number.
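+
+   For example, a library could be linked with that version information
+as follows (a sketch, reusing the `libhello' example used elsewhere in
+this manual):
+
+     burger$ libtool --mode=link gcc -g -O -o libhello.la foo.lo hello.lo \
+                     -rpath /usr/local/lib -version-info 3:12:1
+     burger$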
+
+   Here is a set of rules to help you update your library version
+information (a worked example follows the list):
+
+  1. Start with version information of `0:0:0' for each libtool library.
+
+  2. Update the version information only immediately before a public
+     release of your software.  More frequent updates are unnecessary,
+     and only guarantee that the current interface number gets larger
+     faster.
+
+  3. If the library source code has changed at all since the last
+     update, then increment REVISION (`C:R:A' becomes `C:R+1:A').
+
+  4. If any interfaces have been added, removed, or changed since the
+     last update, increment CURRENT, and set REVISION to 0.
+
+  5. If any interfaces have been added since the last public release,
+     then increment AGE.
+
+  6. If any interfaces have been removed or changed since the last
+     public release, then set AGE to 0.
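+
+   As an illustration, here is how a hypothetical library's version
+information might evolve under these rules (the numbers are invented):
+
+     2:3:1    last public release
+     2:4:1    bug fix only, no interface change (rule 3)
+     3:0:2    a new interface added (rules 3, 4 and 5)
+     4:0:0    an existing interface removed or changed (rules 3, 4 and 6)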
+
+   *_Never_* try to set the interface numbers so that they correspond
+to the release number of your package.  This is an abuse that only
+fosters misunderstanding of the purpose of library versions.  Instead,
+use the `-release' flag (*note Release numbers::), but be warned that
+no release of your package will then be binary compatible with any
+other release.
+
+   The following explanation may help to understand the above rules a
+bit better: consider that there are three possible kinds of reactions
+from users of your library to changes in a shared library:
+
+  1. Programs using the previous version may use the new version as a
+     drop-in replacement, and programs using the new version can also
+     work with the previous one.  In other words, no recompiling or
+     relinking is needed.  In this case, bump REVISION only; don't
+     touch CURRENT or AGE.
+
+  2. Programs using the previous version may use the new version as
+     a drop-in replacement, but programs using the new version may use
+     APIs not present in the previous one.  In other words, a program
+     linking against the new version may fail with "unresolved symbols"
+     if linking against the old version at runtime: set REVISION to 0,
+     bump CURRENT and AGE.
+
+  3. Programs may need to be changed, recompiled, and relinked in
+     order to use the new version.  Bump CURRENT, set REVISION and AGE
+     to 0.
+
+In the above description, _programs_ using the library in question may
+also be replaced by other libraries using it.
+
+
+File: libtool.info,  Node: Release numbers,  Prev: Updating version info,  Up: Versioning
+
+7.4 Managing release information
+================================
+
+Often, people want to encode the name of the package release into the
+shared library so that it is obvious to the user which package their
+programs are linked against.  This convention is used especially on
+GNU/Linux:
+
+     trick$ ls /usr/lib/libbfd*
+     /usr/lib/libbfd.a           /usr/lib/libbfd.so.2.7.0.2
+     /usr/lib/libbfd.so
+     trick$
+
+   On `trick', `/usr/lib/libbfd.so' is a symbolic link to
+`libbfd.so.2.7.0.2', which was distributed as a part of
+`binutils-2.7.0.2'.
+
+   Unfortunately, this convention conflicts directly with libtool's
+idea of library interface versions, because the library interface
+rarely changes at the same time that the release number does, and the
+library suffix is never the same across all platforms.
+
+   So, in order to accommodate both views, you can use the `-release'
+flag in order to set release information for libraries for which you do
+not want to use `-version-info'.  For the `libbfd' example, the next
+release that uses libtool should be built with `-release 2.9.0', which
+will produce the following files on GNU/Linux:
+
+     trick$ ls /usr/lib/libbfd*
+     /usr/lib/libbfd-2.9.0.so     /usr/lib/libbfd.a
+     /usr/lib/libbfd.so
+     trick$
+
+   In this case, `/usr/lib/libbfd.so' is a symbolic link to
+`libbfd-2.9.0.so'.  This makes it obvious that the user is dealing with
+`binutils-2.9.0', without compromising libtool's idea of interface
+versions.
+
+   Note that this option causes a modification of the library name, so
+do not use it unless you want to break binary compatibility with any
+past library releases.  In general, you should only use `-release' for
+package-internal libraries or for ones whose interfaces change very
+frequently.
+
+
+File: libtool.info,  Node: Library tips,  Next: Inter-library dependencies,  Prev: Versioning,  Up: Top
+
+8 Tips for interface design
+***************************
+
+Writing a good library interface takes a lot of practice and thorough
+understanding of the problem that the library is intended to solve.
+
+   If you design a good interface, it won't have to change often, you
+won't have to keep updating documentation, and users won't have to keep
+relearning how to use the library.
+
+   Here is a brief list of tips for library interface design that may
+help you in your exploits:
+
+Plan ahead
+     Try to make every interface truly minimal, so that you won't need
+     to delete entry points very often.
+
+Avoid interface changes
+     Some people love redesigning and changing entry points just for
+     the heck of it (note: _renaming_ a function is considered changing
+     an entry point).  Don't be one of those people.  If you must
+     redesign an interface, then try to leave compatibility functions
+     behind so that users don't need to rewrite their existing code.
+
+Use opaque data types
+     The fewer data type definitions a library user has access to, the
+     better.  If possible, design your functions to accept a generic
+     pointer (that you can cast to an internal data type), and provide
+     access functions rather than allowing the library user to directly
+     manipulate the data.  That way, you have the freedom to change the
+     data structures without changing the interface.
+
+     This is essentially the same thing as using abstract data types and
+     inheritance in an object-oriented system.
+
+Use header files
+     If you are careful to document each of your library's global
+     functions and variables in header files, and include them in your
+     library source files, then the compiler will let you know if you
+     make any interface changes by accident (*note C header files::).
+
+Use the `static' keyword (or equivalent) whenever possible
+     The fewer global functions your library has, the more flexibility
+     you'll have in changing them.  Static functions and variables may
+     change forms as often as you like... your users cannot access
+     them, so they aren't interface changes.
+
+Be careful with array dimensions
+     The number of elements in a global array is part of an interface,
+     even if the header just declares `extern int foo[];'.  This is
+     because on i386 and some other SVR4/ELF systems, when an
+     application references data in a shared library the size of that
+     data (whatever its type) is included in the application
+     executable.  If you might want to change the size of an array or
+     string then provide a pointer not the actual array.
+
+* Menu:
+
+* C header files::              How to write portable include files.
+
+
+File: libtool.info,  Node: C header files,  Up: Library tips
+
+8.1 Writing C header files
+==========================
+
+Writing portable C header files can be difficult, since they may be read
+by different types of compilers:
+
+C++ compilers
+     C++ compilers require that functions be declared with full
+     prototypes, since C++ is more strongly typed than C.  C functions
+     and variables also need to be declared with the `extern "C"'
+     directive, so that the names aren't mangled.  *Note C++
+     libraries::, for other issues relevant to using C++ with libtool.
+
+ANSI C compilers
+     ANSI C compilers are not as strict as C++ compilers, but functions
+     should be prototyped to avoid unnecessary warnings when the header
+     file is `#include'd.
+
+non-ANSI C compilers
+     Non-ANSI compilers will report errors if functions are prototyped.
+
+   These complications mean that your library interface headers must use
+some C preprocessor magic in order to be usable by each of the above
+compilers.
+
+   `foo.h' in the `tests/demo' subdirectory of the libtool distribution
+serves as an example for how to write a header file that can be safely
+installed in a system directory.
+
+   Here are the relevant portions of that file:
+
+     /* BEGIN_C_DECLS should be used at the beginning of your declarations,
+        so that C++ compilers don't mangle their names.  Use END_C_DECLS at
+        the end of C declarations. */
+     #undef BEGIN_C_DECLS
+     #undef END_C_DECLS
+     #ifdef __cplusplus
+     # define BEGIN_C_DECLS extern "C" {
+     # define END_C_DECLS }
+     #else
+     # define BEGIN_C_DECLS /* empty */
+     # define END_C_DECLS /* empty */
+     #endif
+
+     /* PARAMS is a macro used to wrap function prototypes, so that
+        compilers that don't understand ANSI C prototypes still work,
+        and ANSI C compilers can issue warnings about type mismatches. */
+     #undef PARAMS
+     #if defined (__STDC__) || defined (_AIX) \
+             || (defined (__mips) && defined (_SYSTYPE_SVR4)) \
+             || defined(WIN32) || defined(__cplusplus)
+     # define PARAMS(protos) protos
+     #else
+     # define PARAMS(protos) ()
+     #endif
+
+   These macros are used in `foo.h' as follows:
+
+     #ifndef FOO_H
+     #define FOO_H 1
+
+     /* The above macro definitions. */
+     #include "..."
+
+     BEGIN_C_DECLS
+
+     int foo PARAMS((void));
+     int hello PARAMS((void));
+
+     END_C_DECLS
+
+     #endif /* !FOO_H */
+
+   Note that the `#ifndef FOO_H' prevents the body of `foo.h' from
+being read more than once in a given compilation.
+
+   Also, the only things that must go outside the
+`BEGIN_C_DECLS'/`END_C_DECLS' pair are `#include' lines.  Strictly
+speaking it is only C symbol names that need to be protected, but your
+header files will be more maintainable if you have a single pair of
+these macros around the majority of the header contents.
+
+   You should copy these definitions of `PARAMS', `BEGIN_C_DECLS', and
+`END_C_DECLS' into your own headers.  Then, you may use them to create
+header files that are valid for C++, ANSI, and non-ANSI compilers(1).
+
+   Do not be naive about writing portable code.  Following the tips
+given above will help you avoid the most obvious problems, but there are
+definitely other subtle portability issues.  You may need to cope with
+some of the following issues:
+
+   * Pre-ANSI compilers do not always support the `void *' generic
+     pointer type, and so need to use `char *' in its place.
+
+   * The `const', `inline' and `signed' keywords are not supported by
+     some compilers, especially pre-ANSI compilers.
+
+   * The `long double' type is not supported by many compilers.
+
+   ---------- Footnotes ----------
+
+   (1) We used to recommend `__P', `__BEGIN_DECLS' and `__END_DECLS'.
+This was bad advice since symbols (even preprocessor macro names) that
+begin with an underscore are reserved for the use of the compiler.
+
+
+File: libtool.info,  Node: Inter-library dependencies,  Next: Dlopened modules,  Prev: Library tips,  Up: Top
+
+9 Inter-library dependencies
+****************************
+
+By definition, every shared library system provides a way for
+executables to depend on libraries, so that symbol resolution is
+deferred until runtime.
+
+   An "inter-library dependency" is one in which a library depends on
+other libraries.  For example, if the libtool library `libhello' uses
+the `cos' function, then it has an inter-library dependency on `libm',
+the math library that implements `cos'.
+
+   Some shared library systems provide this feature in an
+internally-consistent way: these systems allow chains of dependencies of
+potentially infinite length.
+
+   However, most shared library systems are restricted in that they only
+allow a single level of dependencies.  In these systems, programs may
+depend on shared libraries, but shared libraries may not depend on other
+shared libraries.
+
+   In any event, libtool provides a simple mechanism for you to declare
+inter-library dependencies: for every library `libNAME' that your own
+library depends on, simply add a corresponding `-lNAME' option to the
+link line when you create your library.  To make an example of our
+`libhello' that depends on `libm':
+
+     burger$ libtool --mode=link gcc -g -O -o libhello.la foo.lo hello.lo \
+                     -rpath /usr/local/lib -lm
+     burger$
+
+   When you link a program against `libhello', you don't need to
+specify the same `-l' options again: libtool will do that for you, in
+order to guarantee that all the required libraries are found.  This
+restriction is only necessary to preserve compatibility with static
+library systems and simple dynamic library systems.
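+
+   For example, a program that uses `libhello' can be linked without
+repeating `-lm' (a sketch; the program name is illustrative):
+
+     burger$ libtool --mode=link gcc -g -O -o hell main.o libhello.la
+     burger$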
+
+   Some platforms, such as Windows, do not even allow you this
+flexibility.  In order to build a shared library, it must be entirely
+self-contained or it must have dependencies known at link time (that is,
+have references only to symbols that are found in the `.lo' files or
+the specified `-l' libraries), and you need to specify the
+`-no-undefined' flag.  By default, libtool builds only static libraries
+on these kinds of platforms.
+
+   The simple-minded inter-library dependency tracking code of libtool
+releases prior to 1.2 was disabled because it was not clear when it was
+possible to link one library with another, and complex failures would
+occur.  A more complex implementation of this concept was re-introduced
+before release 1.3, but it has not been ported to all platforms that
+libtool supports.  The default, conservative behavior is to avoid
+linking one library with another, introducing their inter-dependencies
+only when a program is linked with them.
+
+
+File: libtool.info,  Node: Dlopened modules,  Next: Using libltdl,  Prev: Inter-library dependencies,  Up: Top
+
+10 Dlopened modules
+*******************
+
+It can sometimes be confusing to discuss "dynamic linking", because the
+term is used to refer to two different concepts:
+
+  1. Compiling and linking a program against a shared library, which is
+     resolved automatically at run time by the dynamic linker.  In this
+     process, dynamic linking is transparent to the application.
+
+  2. The application calling functions such as `dlopen' that load
+     arbitrary, user-specified modules at runtime.  This type of dynamic
+     linking is explicitly controlled by the application.
+
+   To mitigate confusion, this manual refers to the second type of
+dynamic linking as "dlopening" a module.
+
+   The main benefit to dlopening object modules is the ability to access
+compiled object code to extend your program, rather than using an
+interpreted language.  In fact, dlopen calls are frequently used in
+language interpreters to provide an efficient way to extend the
+language.
+
+   Libtool provides support for dlopened modules.  However, you should
+indicate that your package is willing to use such support, by using the
+`LT_INIT' option `dlopen' in `configure.ac'.  If this option is not
+given, libtool will assume no dlopening mechanism is available, and
+will try to simulate it.
+
+   This chapter discusses how you as a dlopen application developer
+might use libtool to generate dlopen-accessible modules.
+
+* Menu:
+
+* Building modules::            Creating dlopenable objects and libraries.
+* Dlpreopening::                Dlopening that works on static platforms.
+* Linking with dlopened modules::  Using dlopenable modules in libraries.
+* Finding the dlname::          Choosing the right file to `dlopen'.
+* Dlopen issues::               Unresolved problems that need your attention.
+
+
+File: libtool.info,  Node: Building modules,  Next: Dlpreopening,  Up: Dlopened modules
+
+10.1 Building modules to dlopen
+===============================
+
+On some operating systems, a program symbol must be specially declared
+in order to be dynamically resolved with the `dlsym' (or equivalent)
+function.  Libtool provides the `-export-dynamic' and `-module' link
+flags (*note Link mode::), for you to make that declaration.  You need
+to use these flags if you are linking an application program that
+dlopens other modules or a libtool library that will also be dlopened.
+
+   For example, if we wanted to build a shared library, `hello', that
+would later be dlopened by an application, we would add `-module' to
+the other link flags:
+
+     burger$ libtool --mode=link gcc -module -o hello.la foo.lo \
+                     hello.lo -rpath /usr/local/lib -lm
+     burger$
+
+   If symbols from your _executable_ are needed to satisfy unresolved
+references in a library you want to dlopen, you will have to use the flag
+`-export-dynamic'.  You should use `-export-dynamic' while linking the
+executable that calls dlopen:
+
+     burger$ libtool --mode=link gcc -export-dynamic -o helldl main.o
+     burger$
+
+
+File: libtool.info,  Node: Dlpreopening,  Next: Linking with dlopened modules,  Prev: Building modules,  Up: Dlopened modules
+
+10.2 Dlpreopening
+=================
+
+Libtool provides special support for dlopening libtool object and
+libtool library files, so that their symbols can be resolved _even on
+platforms without any `dlopen' and `dlsym' functions_.
+
+   Consider the following alternative ways of loading code into your
+program, in order of increasing "laziness":
+
+  1. Linking against object files that become part of the program
+     executable, whether or not they are referenced.  If an object file
+     cannot be found, then the compile time linker refuses to create
+     the executable.
+
+  2. Declaring a static library to the linker, so that it is searched
+     at link time in order to satisfy any undefined references in the
+     above object files.  If the static library cannot be found, then
+     the compile time linker refuses to create the executable.
+
+  3. Declaring a shared library to the runtime linker, so that it is
+     searched at runtime in order to satisfy any undefined references
+     in the above files.  If the shared library cannot be found, then
+     the dynamic linker aborts the program before it runs.
+
+  4. Dlopening a module, so that the application can resolve its own,
+     dynamically-computed references.  If there is an error opening the
+     module, or the module is not found, then the application can
+     recover without crashing.
+
+   Libtool emulates `-dlopen' on static platforms by linking objects
+into the program at compile time, and creating data structures that
+represent the program's symbol table.  In order to use this feature,
+you must declare the objects you want your application to dlopen by
+using the `-dlopen' or `-dlpreopen' flags when you link your program
+(*note Link mode::).
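+
+   For example, the `hello.la' module built in the previous section
+could be preloaded into a program like this (a sketch; the program name
+is illustrative):
+
+     burger$ libtool --mode=link gcc -g -O -o helldl main.o \
+                     -dlpreopen hello.la
+     burger$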
+
+ -- Data Type: lt_dlsymlist typedef struct { const char *NAME;
+          void *ADDRESS; } lt_dlsymlist
+     The NAME attribute is a null-terminated character string of the
+     symbol name, such as `"fprintf"'.  The ADDRESS attribute is a
+     generic pointer to the appropriate object, such as `&fprintf'.
+
+ -- Variable: const lt_dlsymlist  lt_preloaded_symbols[]
+     An array of `lt_dlsymlist' structures, representing all the
+     preloaded symbols linked into the program proper.  For each module
+     `-dlpreopen'ed by the Libtool linked program there is an element
+     with the NAME of the module and an ADDRESS of `0', followed by all
+     symbols exported from this file.  For the executable itself the
+     special name `@PROGRAM@' is used.  The last element of all has a
+     NAME and ADDRESS of `0'.
+
+     To facilitate inclusion of symbol lists into libraries,
+     `lt_preloaded_symbols' is `#define'd to a suitably unique name in
+     `ltdl.h'.
+
+     This variable may not be declared `const' on some systems due to
+     relocation issues.
+
+   Some compilers may allow identifiers that are not valid in ANSI C,
+such as dollar signs.  Libtool only recognizes valid ANSI C symbols (an
+initial ASCII letter or underscore, followed by zero or more ASCII
+letters, digits, and underscores), so non-ANSI symbols will not appear
+in `lt_preloaded_symbols'.
+
+ -- Function: int lt_dlpreload (const lt_dlsymlist *PRELOADED)
+     Register the list of preloaded modules PRELOADED.  If PRELOADED is
+     `NULL', then all previously registered symbol lists, except the
+     list set by `lt_dlpreload_default', are deleted.  Return 0 on
+     success.
+
+ -- Function: int lt_dlpreload_default (const lt_dlsymlist *PRELOADED)
+     Set the default list of preloaded modules to PRELOADED, which
+     won't be deleted by `lt_dlpreload'.  Note that this function does
+     _not_ require libltdl to be initialized using `lt_dlinit' and can
+     be used in the program to register the default preloaded modules.
+     Instead of calling this function directly, most programs will use
+     the macro `LTDL_SET_PRELOADED_SYMBOLS'.
+
+     Return 0 on success.
+
+ -- Macro: LTDL_SET_PRELOADED_SYMBOLS
+     Set the default list of preloaded symbols.  Should be used in your
+     program to initialize libltdl's list of preloaded modules.
+
+          #include <ltdl.h>
+
+          int main() {
+            /* ... */
+            LTDL_SET_PRELOADED_SYMBOLS();
+            /* ... */
+          }
+
+ -- Function Type: int lt_dlpreload_callback_func (lt_dlhandle HANDLE)
+     Functions of this type can be passed to `lt_dlpreload_open', which
+     in turn will call back into a function thus passed for each
+     preloaded module that it opens.
+
+ -- Function: int lt_dlpreload_open (const char *ORIGINATOR,
+          lt_dlpreload_callback_func *FUNC)
+     Load all of the preloaded modules for ORIGINATOR.  For every
+     module opened in this way, call FUNC.
+
+     To open all of the modules preloaded into `libhell.la' (presumably
+     from within the `libhell.a' initialisation code):
+
+          #define preloaded_symbols lt_libhell_LTX_preloaded_symbols
+
+          static int hell_preload_callback (lt_dlhandle handle);
+
+          int
+          hell_init (void)
+          {
+            ...
+            if (lt_dlpreload (&preloaded_symbols) == 0)
+              {
+                lt_dlpreload_open ("libhell", hell_preload_callback);
+              }
+            ...
+          }
+
+     Note that to prevent clashes between multiple preloaded modules,
+     the preloaded symbols are accessed via a mangled symbol name: to
+     get the symbols preloaded into `libhell', you must prefix
+     `preloaded_symbols' with `lt_'; the originator name, `libhell' in
+     this case; and `_LTX_'.  That is,
+     `lt_libhell_LTX_preloaded_symbols' here.
+
+
+File: libtool.info,  Node: Linking with dlopened modules,  Next: Finding the dlname,  Prev: Dlpreopening,  Up: Dlopened modules
+
+10.3 Linking with dlopened modules
+==================================
+
+When, say, an interpreter application uses dlopened modules to extend
+the list of methods it provides, an obvious abstraction for the
+maintainers of the interpreter is to have all methods (including the
+built in ones supplied with the interpreter) accessed through dlopen.
+For one thing, the dlopening functionality will be tested even during
+routine invocations.  For another, only one subsystem has to be written
+for getting methods into the interpreter.
+
+   The downside of this abstraction is, of course, that environments
+that provide only static linkage can't even load the intrinsic
+interpreter methods.  Not so!  We can statically link those methods by
+*dlpreopening* them.
+
+   Unfortunately, since platforms such as AIX and cygwin require that
+all library symbols be resolved at compile time, the interpreter
+maintainers will need to provide a library to both their own dlpreopened
+modules and to third-party modules loaded by dlopen.  In itself, that is
+not so bad, except that the interpreter too must provide those same
+symbols otherwise it will be impossible to resolve all the symbols
+required by the modules as they are loaded.  Things are even worse if
+the code that loads the modules for the interpreter is itself in a
+library - and that is usually the case for any non-trivial application.
+Modern platforms take care of this by automatically loading all of a
+module's dependency libraries as the module is loaded (libltdl can do
+this even on platforms that can't do it by themselves).  In the end,
+this leads to problems with duplicated symbols and prevents modules
+from loading, and prevents the application from compiling when modules
+are preloaded.
+
+     ,-------------.    ,------------------.    ,-----------------.
+     | Interpreter |---->     Module------------>   Third-party   |
+     `-------------'    |     Loader       |    |Dlopened Modules |
+                        |        |         |    `-----------------'
+                        |,-------v--------.|             |
+                        ||  Dlpreopened   ||             |
+                        ||    Modules     ||             |
+                        |`----------------'|             |
+                        |        |         |             |
+                        |,-------v--------.|    ,--------v--------.
+                        ||Module Interface||    |Module Interface |
+                        ||    Library     ||    |     Library     |
+                        |`----------------'|    `-----------------'
+                        `------------------'
+
+   Libtool has the concept of "weak library interfaces" to circumvent
+this problem.  Recall that the code that dlopens method-provider
+modules for the interpreter application resides in a library: All of
+the modules and the dlopener library itself should be linked against
+the common library that resolves the module symbols at compile time.
+To guard against duplicate symbol definitions, and for dlpreopened
+modules to work at all in this scenario, the dlopener library must
+declare that it provides a weak library interface to the common symbols
+in the library it shares with the modules.  That way, when `libtool'
+links the *Module Loader* library with some *Dlpreopened Modules* that
+were in turn linked against the *Module Interface Library*, it knows
+that the *Module Loader* provides an already loaded *Module Interface
+Library* to resolve symbols for the *Dlpreopened Modules*, and doesn't
+ask the compiler driver to link an identical *Module Interface Library*
+dependency library too.
+
+   In conjunction with Automake, the `Makefile.am' for the *Module
+Loader* might look like this:
+
+     lib_LTLIBRARIES = libinterface.la libloader.la
+
+     libinterface_la_SOURCES = interface.c interface.h
+     libinterface_la_LDFLAGS = -version-info 3:2:1
+
+     libloader_la_SOURCES    = loader.c
+     libloader_la_LDFLAGS    = -weak libinterface.la \
+                               -version-info 3:2:1 \
+                               -dlpreopen ../modules/intrinsics.la
+     libloader_la_LIBADD     = $(libinterface_la_OBJECTS)
+
+   And the `Makefile.am' for the `intrinsics.la' module in a sibling
+`modules' directory might look like this:
+
+     AM_CPPFLAGS             = -I$(srcdir)/../libloader
+     AM_LDFLAGS              = -no-undefined -module -avoid-version \
+                               -export-dynamic
+
+     noinst_LTLIBRARIES      = intrinsics.la
+
+     intrinsics_la_LIBADD    = ../libloader/libinterface.la
+
+     ../libloader/libinterface.la:
+             cd ../libloader && $(MAKE) $(AM_MAKEFLAGS) libinterface.la
+
+   For a more complex example, see the sources of `libltdl' in the
+Libtool distribution, which is built with the help of the `-weak'
+option.
+
+
+File: libtool.info,  Node: Finding the dlname,  Next: Dlopen issues,  Prev: Linking with dlopened modules,  Up: Dlopened modules
+
+10.4 Finding the correct name to dlopen
+=======================================
+
+After a library has been linked with `-module', it can be dlopened.
+Unfortunately, because of the variation in library names, your package
+needs to determine the correct file to dlopen.
+
+   The most straightforward and flexible implementation is to determine
+the name at runtime, by finding the installed `.la' file, and searching
+it for the following lines:
+
+     # The name that we can `dlopen'.
+     dlname='DLNAME'
+
+   If DLNAME is empty, then the library cannot be dlopened.  Otherwise,
+it gives the dlname of the library.  So, if the library was installed
+as `/usr/local/lib/libhello.la', and the DLNAME was `libhello.so.3',
+then `/usr/local/lib/libhello.so.3' should be dlopened.
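+
+   For example, the dlname can be extracted from an installed `.la'
+file with standard shell tools (a sketch only; real code should also
+handle the empty-dlname case):
+
+     trick$ sed -n "s/^dlname='\(.*\)'/\1/p" /usr/local/lib/libhello.la
+     libhello.so.3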
+
+   If your program uses this approach, then it should search the
+directories listed in the `LD_LIBRARY_PATH'(1) environment variable, as
+well as the directory where libraries will eventually be installed.
+Searching this variable (or equivalent) will guarantee that your
+program can find its dlopened modules, even before installation,
+provided you have linked them using libtool.
+
+   ---------- Footnotes ----------
+
+   (1) `LIBPATH' on AIX, and `SHLIB_PATH' on HP-UX.
+
+
+File: libtool.info,  Node: Dlopen issues,  Prev: Finding the dlname,  Up: Dlopened modules
+
+10.5 Unresolved dlopen issues
+=============================
+
+The following problems are not solved by using libtool's dlopen support:
+
+   * Dlopen functions are generally only available on shared library
+     platforms.  If you want your package to be portable to static
+     platforms, you have to use either libltdl (*note Using libltdl::)
+     or develop your own alternatives to dlopening dynamic code.  Most
+     reasonable solutions involve writing wrapper functions for the
+     `dlopen' family, which do package-specific tricks when dlopening
+     is unsupported or not available on a given platform.
+
+   * There are major differences in implementations of the `dlopen'
+     family of functions.  Some platforms do not even use the same
+     function names (notably HP-UX, with its `shl_load' family).
+
+   * The application developer must write a custom search function in
+     order to discover the correct module filename to supply to
+     `dlopen'.
+
+
+File: libtool.info,  Node: Using libltdl,  Next: Trace interface,  Prev: Dlopened modules,  Up: Top
+
+11 Using libltdl
+****************
+
+Libtool provides a small library, called `libltdl', that aims at hiding
+the various difficulties of dlopening libraries from programmers.  It
+consists of a few headers and small C source files that can be
+distributed with applications that need dlopening functionality.  On
+some platforms, whose dynamic linkers are too limited for a simple
+implementation of `libltdl' services, it requires GNU DLD, or it will
+only emulate dynamic linking with libtool's dlpreopening mechanism.
+
+libltdl currently supports the following dynamic linking mechanisms:
+
+   * `dlopen' (POSIX compliant systems, GNU/Linux, etc.)
+
+   * `shl_load' (HP-UX)
+
+   * `LoadLibrary' (Win16 and Win32)
+
+   * `load_add_on' (BeOS)
+
+   * `NSAddImage' or `NSLinkModule' (Darwin and Mac OS X)
+
+   * GNU DLD (emulates dynamic linking for static libraries)
+
+   * libtool's dlpreopen (see *note Dlpreopening::)
+
+libltdl is licensed under the terms of the GNU Lesser General Public
+License, with the following exception:
+
+     As a special exception to the GNU Lesser General Public License,
+     if you distribute this file as part of a program or library that
+     is built using GNU Libtool, you may include it under the same
+     distribution terms that you use for the rest of that program.
+
+* Menu:
+
+* Libltdl interface::           How to use libltdl in your programs.
+* Modules for libltdl::         Creating modules that can be `dlopen'ed.
+* Thread Safety in libltdl::    Registering callbacks for multi-thread safety.
+* User defined module data::    Associating data with loaded modules.
+* Module loaders for libltdl::  Creating user defined module loaders.
+* Distributing libltdl::        How to distribute libltdl with your package.
+
+
+File: libtool.info,  Node: Libltdl interface,  Next: Modules for libltdl,  Up: Using libltdl
+
+11.1 How to use libltdl in your programs
+========================================
+
+The libltdl API is similar to the POSIX dlopen interface, which is very
+simple but powerful.
+
+To use libltdl in your program you have to include the header file
+`ltdl.h':
+
+     #include <ltdl.h>
+
+The early releases of libltdl used some symbols that violated the POSIX
+namespace conventions.  These symbols are now deprecated, and have been
+replaced by those described here.  If you have code that relies on the
+old deprecated symbol names, defining `LT_NON_POSIX_NAMESPACE' before
+you include `ltdl.h' provides conversion macros.  Whichever set of
+symbols you use, the new API is not binary compatible with the last, so
+you will need to recompile your application in order to use this
+version of libltdl.
+
+Note that libltdl is not well tested in a multithreaded environment,
+though the intention is that it should work (*note Using libltdl in a
+multi threaded environment: Thread Safety in libltdl.).  It was
+reported that GNU/Linux's glibc 2.0's `dlopen' with `RTLD_LAZY' (which
+libltdl uses by default) is not thread-safe, but this problem is
+supposed to be fixed in glibc 2.1.  On the other hand, `RTLD_NOW' was
+reported to introduce problems in multi-threaded applications on
+FreeBSD.  Working around these problems is left as an exercise for the
+reader; contributions are certainly welcome.
+
+The following macros are defined by including `ltdl.h':
+
+ -- Macro: LT_PATHSEP_CHAR
+     `LT_PATHSEP_CHAR' is the system-dependent path separator, that is,
+     `;' on Windows and `:' everywhere else.
+
+ -- Macro: LT_DIRSEP_CHAR
+     If `LT_DIRSEP_CHAR' is defined, it can be used as directory
+     separator in addition to `/'.  On Windows, this contains `\'.
+
+The following types are defined in `ltdl.h':
+
+ -- Type: lt_dlhandle
+     `lt_dlhandle' is a module "handle".  Every lt_dlopened module has
+     a handle associated with it.
+
+ -- Type: lt_dladvise
+     `lt_dladvise' is used to control optional module loading modes.
+     If it is not used, the default mode of the underlying system module
+     loader is used.
+
+ -- Type: lt_dlsymlist
+     `lt_dlsymlist' is a symbol list for dlpreopened modules.  This
+     structure is described in *note Dlpreopening::.
+
+libltdl provides the following functions:
+
+ -- Function: int lt_dlinit (void)
+     Initialize libltdl.  This function must be called before using
+     libltdl and may be called several times.  Return 0 on success,
+     otherwise the number of errors.
+
+ -- Function: int lt_dlexit (void)
+     Shut down libltdl and close all modules.  This function actually
+     shuts down libltdl only when it has been called as many times as
+     `lt_dlinit' has been successfully called.  Return 0 on success,
+     otherwise the number of errors.
+
+ -- Function: lt_dlhandle lt_dlopen (const char *FILENAME)
+     Open the module with the file name FILENAME and return a handle
+     for it.  `lt_dlopen' is able to open libtool dynamic modules,
+     preloaded static modules, the program itself and native dynamic
+     modules(1).
+
+     Unresolved symbols in the module are resolved using its dependency
+     libraries and previously dlopened modules.  If the executable using
+     this module was linked with the `-export-dynamic' flag, then the
+     global symbols in the executable will also be used to resolve
+     references in the module.
+
+     If FILENAME is `NULL' and the program was linked with
+     `-export-dynamic' or `-dlopen self', `lt_dlopen' will return a
+     handle for the program itself, which can be used to access its
+     symbols.
+
+     If libltdl cannot find the library and the file name FILENAME does
+     not have a directory component, it will additionally look in the
+     following search paths for the module (in the following order):
+
+       1. user-defined search path: This search path can be changed by
+          the program using the functions `lt_dlsetsearchpath',
+          `lt_dladdsearchdir' and `lt_dlinsertsearchdir'.
+
+       2. libltdl's search path: This search path is the value of the
+          environment variable `LTDL_LIBRARY_PATH'.
+
+       3. system library search path: The system dependent library
+          search path (e.g. on GNU/Linux it is `LD_LIBRARY_PATH').
+
+     Each search path must be a list of absolute directories separated
+     by `LT_PATHSEP_CHAR', for example, `"/usr/lib/mypkg:/lib/foo"'.
+     The directory names may not contain the path separator.
+
+     If the same module is loaded several times, the same handle is
+     returned.  If `lt_dlopen' fails for any reason, it returns `NULL'.
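+
+     For example, a user could make extra modules visible to a program
+     at run time through libltdl's search path (a sketch; the directory
+     and program names are illustrative):
+
+          trick$ LTDL_LIBRARY_PATH=/opt/mypkg/modules:/usr/lib/mypkg ./myprog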
+
+ -- Function: lt_dlhandle lt_dlopenext (const char *FILENAME)
+     The same as `lt_dlopen', except that it tries to append different
+     file name extensions to the file name.  If the file with the file
+     name FILENAME cannot be found libltdl tries to append the
+     following extensions:
+
+       1. the libtool archive extension `.la'
+
+       2. the extension used for native dynamically loadable modules on
+          the host platform, e.g., `.so', `.sl', etc.
+
+     This lookup strategy was designed to allow programs that don't
+     have knowledge about native dynamic library naming conventions
+     to be able to `dlopen' such libraries as well as libtool modules
+     transparently.
+
+ -- Function: lt_dlhandle lt_dlopenadvise (const char *FILENAME,
+          lt_dladvise ADVISE)
+     The same as `lt_dlopen', except that it also requires an additional
+     argument which may contain additional hints to the underlying
+     system module loader.  The ADVISE parameter is opaque and can only
+     be accessed with the functions documented below.
+
+     Note that this function does not change the content of ADVISE, so
+     unlike the other calls in this API it takes a direct `lt_dladvise'
+     type, and not a pointer to the same.
+
+ -- Function: int lt_dladvise_init (lt_dladvise *ADVISE)
+     The ADVISE parameter can be used to pass hints to the module
+     loader when using `lt_dlopenadvise' to perform the loading.  The
+     ADVISE parameter needs to be initialised by this function before
+     it can be used.  Any memory used by ADVISE needs to be recycled
+     with `lt_dladvise_destroy' when it is no longer needed.
+
+     On failure, `lt_dladvise_init' returns non-zero and sets an error
+     message that can be retrieved with `lt_dlerror'.
+
+ -- Function: int lt_dladvise_destroy (lt_dladvise *ADVISE)
+     Recycle the memory used by ADVISE.  For an example, see the
+     documentation for `lt_dladvise_ext'.
+
+     On failure, `lt_dladvise_destroy' returns non-zero and sets an
+     error message that can be retrieved with `lt_dlerror'.
+
+ -- Function: int lt_dladvise_ext (lt_dladvise *ADVISE)
+     Set the `ext' hint on ADVISE.  Passing an ADVISE parameter to
+     `lt_dlopenadvise' with this hint set causes it to try to append
+     different file name extensions like `lt_dlopenext'.
+
+     The following example is equivalent to calling `lt_dlopenext
+     (filename)':
+
+          lt_dlhandle
+          my_dlopenext (const char *filename)
+          {
+            lt_dlhandle handle = 0;
+            lt_dladvise advise;
+
+            if (!lt_dladvise_init (&advise) && !lt_dladvise_ext (&advise))
+              handle = lt_dlopenadvise (filename, advise);
+
+            lt_dladvise_destroy (&advise);
+
+            return handle;
+          }
+
+     On failure, `lt_dladvise_ext' returns non-zero and sets an error
+     message that can be retrieved with `lt_dlerror'.
+
+ -- Function: int lt_dladvise_global (lt_dladvise *ADVISE)
+     Set the `symglobal' hint on ADVISE.  Passing an ADVISE parameter
+     to `lt_dlopenadvise' with this hint set causes it to try to make
+     the loaded module's symbols globally available for resolving
+     unresolved symbols in subsequently loaded modules.
+
+     If neither the `symglobal' nor the `symlocal' hints are set, or if
+     a module is loaded without using the `lt_dlopenadvise' call in any
+     case, then the visibility of the module's symbols will be as per
+     the default for the underlying module loader and OS.  Even if a
+     suitable hint is passed, not all loaders are able to act upon it,
+     in which case `lt_dlgetinfo' will reveal whether the hint was
+     actually followed.
+
+     On failure, `lt_dladvise_global' returns non-zero and sets an error
+     message that can be retrieved with `lt_dlerror'.
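+
+     The following sketch, modelled on the `lt_dladvise_ext' example
+     above, opens a module with the `symglobal' hint set:
+
+          lt_dlhandle
+          my_dlopen_global (const char *filename)
+          {
+            lt_dlhandle handle = 0;
+            lt_dladvise advise;
+
+            if (!lt_dladvise_init (&advise) && !lt_dladvise_global (&advise))
+              handle = lt_dlopenadvise (filename, advise);
+
+            lt_dladvise_destroy (&advise);
+
+            return handle;
+          }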
+
+ -- Function: int lt_dladvise_local (lt_dladvise *ADVISE)
+     Set the `symlocal' hint on ADVISE.  Passing an ADVISE parameter to
+     `lt_dlopenadvise' with this hint set causes it to try to keep the
+     loaded module's symbols hidden so that they are not visible to
+     subsequently loaded modules.
+
+     If neither the `symglobal' nor the `symlocal' hints are set, or if
+     a module is loaded without using the `lt_dlopenadvise' call in any
+     case, then the visibility of the module's symbols will be as per
+     the default for the underlying module loader and OS.  Even if a
+     suitable hint is passed, not all loaders are able to act upon it,
+     in which case `lt_dlgetinfo' will reveal whether the hint was
+     actually followed.
+
+     On failure, `lt_dladvise_local' returns non-zero and sets an error
+     message that can be retrieved with `lt_dlerror'.
+
+ -- Function: int lt_dladvise_resident (lt_dladvise *ADVISE)
+     Set the `resident' hint on ADVISE.  Passing an ADVISE parameter to
+     `lt_dlopenadvise' with this hint set causes it to try to make the
+     loaded module resident in memory, so that it cannot be unloaded
+     with a later call to `lt_dlclose'.
+
+     On failure, `lt_dladvise_resident' returns non-zero and sets an
+     error message that can be retrieved with `lt_dlerror'.
+
+ -- Function: int lt_dladvise_preload (lt_dladvise *ADVISE)
+     Set the `preload' hint on ADVISE.  Passing an ADVISE parameter to
+     `lt_dlopenadvise' with this hint set causes it to load only
+     preloaded modules, so that if a suitable preloaded module is not
+     found, `lt_dlopenadvise' will return `NULL'.
+
+ -- Function: int lt_dlclose (lt_dlhandle HANDLE)
+     Decrement the reference count on the module HANDLE.  If it drops
+     to zero and no other module depends on this module, then the
+     module is unloaded.  Return 0 on success.
+
+ -- Function: void * lt_dlsym (lt_dlhandle HANDLE, const char *NAME)
+     Return the address in the module HANDLE, where the symbol given by
+     the null-terminated string NAME is loaded.  If the symbol cannot
+     be found, `NULL' is returned.
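+
+     For example, a minimal sketch that looks up and calls a
+     hypothetical module entry point named "run", and then drops the
+     module again:
+
+          typedef int entry_fn (void);
+          entry_fn *run = (entry_fn *) lt_dlsym (handle, "run");
+
+          if (run)
+            run ();
+          lt_dlclose (handle);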
+
+ -- Function: const char * lt_dlerror (void)
+     Return a human readable string describing the most recent error
+     that occurred from any of libltdl's functions.  Return `NULL' if
+     no errors have occurred since initialization or since it was last
+     called.
+
+ -- Function: int lt_dladdsearchdir (const char *SEARCH_DIR)
+     Append the search directory SEARCH_DIR to the current user-defined
+     library search path.  Return 0 on success.
+
+ -- Function: int lt_dlinsertsearchdir (const char *BEFORE,
+          const char *SEARCH_DIR)
+     Insert the search directory SEARCH_DIR into the user-defined
+     library search path, immediately before the element starting at
+     address BEFORE.  If BEFORE is `NULL', then SEARCH_DIR is appended
+     as if `lt_dladdsearchdir' had been called.  Return 0 on success.
+
+ -- Function: int lt_dlsetsearchpath (const char *SEARCH_PATH)
+     Replace the current user-defined library search path with
+     SEARCH_PATH, which must be a list of absolute directories separated
+     by `LT_PATHSEP_CHAR'.  Return 0 on success.
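+
+     For example, a sketch that replaces the user-defined search path
+     and then appends one more directory (both directory names are
+     hypothetical):
+
+          lt_dlsetsearchpath ("/usr/lib/mypkg");
+          lt_dladdsearchdir ("/opt/mypkg/modules");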
+
+ -- Function: const char * lt_dlgetsearchpath (void)
+     Return the current user-defined library search path.
+
+ -- Function: int lt_dlforeachfile (const char *SEARCH_PATH,
+          int (*FUNC) (const char *FILENAME, void * DATA), void * DATA)
+     In some applications you may not want to load individual modules
+     with known names, but rather find all of the modules in a set of
+     directories and load them all during initialisation.  With this
+     function you can have libltdl scan the `LT_PATHSEP_CHAR'-delimited
+     directory list in SEARCH_PATH for candidates, and pass them, along
+     with DATA to your own callback function, FUNC.  If SEARCH_PATH is
+     `NULL', then search all of the standard locations that `lt_dlopen'
+     would examine.  This function will continue to make calls to FUNC
+     for each file that it discovers in SEARCH_PATH until one of these
+     calls returns non-zero, or until the files are exhausted.
+     `lt_dlforeachfile' returns the value returned by the last call
+     made to FUNC.
+
+     For example you could define FUNC to build an ordered "argv"-like
+     vector of files using DATA to hold the address of the start of the
+     vector.
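+
+     A simpler sketch, which merely prints each candidate file name it
+     is given, might look like this:
+
+          int
+          print_candidate (const char *filename, void *data)
+          {
+            (void) data;          /* unused in this sketch */
+            puts (filename);
+            return 0;             /* keep scanning */
+          }
+
+          ...
+          lt_dlforeachfile (NULL, print_candidate, NULL);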
+
+ -- Function: int lt_dlmakeresident (lt_dlhandle HANDLE)
+     Mark a module so that it cannot be `lt_dlclose'd.  This can be
+     useful if a module implements some core functionality in your
+     project that would cause your code to crash if removed.  Return 0
+     on success.
+
+     If you use `lt_dlopen (NULL)' to get a HANDLE for the running
+     binary, that handle will always be marked as resident, and
+     consequently cannot be successfully `lt_dlclose'd.
+
+ -- Function: int lt_dlisresident (lt_dlhandle HANDLE)
+     Check whether a particular module has been marked as resident,
+     returning 1 if it has or 0 otherwise.  If there is an error while
+     executing this function, return -1 and set an error message for
+     retrieval with `lt_dlerror'.
+
+   ---------- Footnotes ----------
+
+   (1) Some platforms, notably Mac OS X, differentiate between a
+runtime library that cannot be opened by `lt_dlopen' and a dynamic
+module that can.  For maximum portability you should try to ensure that
+you only pass `lt_dlopen' objects that have been compiled with libtool's
+`-module' flag.
+
+
+File: libtool.info,  Node: Modules for libltdl,  Next: Thread Safety in libltdl,  Prev: Libltdl interface,  Up: Using libltdl
+
+11.2 Creating modules that can be `dlopen'ed
+============================================
+
+Libtool modules are created like normal libtool libraries with a few
+exceptions:
+
+   You have to link the module with libtool's `-module' switch, and you
+should link any program that is intended to dlopen the module with
+`-dlopen MODULENAME.LA' where possible, so that libtool can dlpreopen
+the module on platforms that do not support dlopening.  If the module
+depends on any other libraries, make sure you specify them either when
+you link the module or when you link programs that dlopen it.  If you
+want to disable versioning (*note Versioning::) for a specific module
+you should link it with the `-avoid-version' switch.  Note that libtool
+modules don't need to have a "lib" prefix.  However, Automake 1.4 or
+higher is required to build such modules.
+
+   Usually a set of modules provides the same interface, i.e. exports
+the same symbols, so that a program can dlopen them without having to
+know more about their internals.  In order to avoid symbol conflicts, all
+exported symbols must be prefixed with "modulename_LTX_" (MODULENAME is
+the name of the module).  Internal symbols must be named in such a way
+that they won't conflict with other modules, for example, by prefixing
+them with "_modulename_".  Although some platforms support having the
+same symbols defined more than once it is generally not portable and it
+makes it impossible to dlpreopen such modules.
+
+   libltdl will automatically cut the prefix off to get the real name of
+the symbol.  Additionally, it supports modules that do not use a prefix
+so that you can also dlopen non-libtool modules.
+
+   `foo1.c' gives an example of a portable libtool module.  Exported
+symbols are prefixed with "foo1_LTX_", internal symbols with "_foo1_".
+Aliases are defined at the beginning so that the code is more readable.
+
+     /* aliases for the exported symbols */
+     #define foo  foo1_LTX_foo
+     #define bar  foo1_LTX_bar
+
+     /* a global variable definition */
+     int bar = 1;
+
+     /* a private function */
+     int _foo1_helper() {
+       return bar;
+     }
+
+     /* an exported function */
+     int foo() {
+       return _foo1_helper();
+     }
+
+The `Makefile.am' contains the necessary rules to build the module
+`foo1.la':
+
+     ...
+     lib_LTLIBRARIES = foo1.la
+
+     foo1_la_SOURCES = foo1.c
+     foo1_la_LDFLAGS = -module
+     ...
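+
+   A program that is linked with `-dlopen foo1.la' can then load the
+module through libltdl at run time.  The following minimal sketch
+(error handling mostly omitted) opens `foo1' and calls its exported
+`foo' function; remember that libltdl strips the "foo1_LTX_" prefix
+automatically:
+
+     #include <ltdl.h>
+
+     int
+     run_foo1 (void)
+     {
+       int result = -1;
+       lt_dlhandle module;
+       int (*foo) (void);
+
+       lt_dlinit ();
+       module = lt_dlopenext ("foo1");
+       if (module && (foo = (int (*) (void)) lt_dlsym (module, "foo")))
+         result = foo ();
+       if (module)
+         lt_dlclose (module);
+       lt_dlexit ();
+
+       return result;
+     }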
+
+
+File: libtool.info,  Node: Thread Safety in libltdl,  Next: User defined module data,  Prev: Modules for libltdl,  Up: Using libltdl
+
+11.3 Using libltdl in a multi threaded environment
+==================================================
+
+Libltdl provides a wrapper around whatever dynamic run-time object
+loading mechanisms are provided by the host system, many of which are
+themselves not thread safe.  Consequently libltdl cannot itself be
+consistently thread safe.
+
+   If you wish to use libltdl in a multithreaded environment, then you
+must mutex lock around libltdl calls, since they may in turn be calling
+non-thread-safe system calls on some target hosts.
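+
+   For example, a minimal sketch (assuming POSIX threads) that
+serialises calls to `lt_dlopen' behind a single mutex:
+
+     #include <pthread.h>
+     #include <ltdl.h>
+
+     static pthread_mutex_t ltdl_lock = PTHREAD_MUTEX_INITIALIZER;
+
+     lt_dlhandle
+     locked_dlopen (const char *filename)
+     {
+       lt_dlhandle handle;
+
+       pthread_mutex_lock (&ltdl_lock);
+       handle = lt_dlopen (filename);
+       pthread_mutex_unlock (&ltdl_lock);
+
+       return handle;
+     }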
+
+   Some old releases of libtool provided a mutex locking API that was
+unusable with POSIX threads, so callers were forced to lock around all
+libltdl API calls anyway.  That mutex locking API was next to useless,
+and is not present in current releases.
+
+   Some future release of libtool may provide a new POSIX thread
+compliant mutex locking API.
+
+
+File: libtool.info,  Node: User defined module data,  Next: Module loaders for libltdl,  Prev: Thread Safety in libltdl,  Up: Using libltdl
+
+11.4 Data associated with loaded modules
+========================================
+
+Some of the internal information about each loaded module that is
+maintained by libltdl is available to the user, in the form of this
+structure:
+
+ -- Type: struct lt_dlinfo { char *FILENAME; char *NAME; int REF_COUNT;
+          int IS_RESIDENT; int IS_SYMGLOBAL; int IS_SYMLOCAL;}
+     `lt_dlinfo' is used to store information about a module.  The
+     FILENAME attribute is a null-terminated character string of the
+     real module file name.  If the module is a libtool module then
+     NAME is its module name (e.g. `"libfoo"' for `"dir/libfoo.la"'),
+     otherwise it is set to `NULL'.  The REF_COUNT attribute is a
+     reference counter that describes how often the same module is
+     currently loaded. The remaining fields can be compared to any
+     hints that were passed to `lt_dlopenadvise' to determine whether
+     the underlying loader was able to follow them.
+
+   The following function will return a pointer to libltdl's internal
+copy of this structure for the given HANDLE:
+
+ -- Function: const lt_dlinfo * lt_dlgetinfo (lt_dlhandle HANDLE)
+     Return a pointer to a struct that contains some information about
+     the module HANDLE.  The contents of the struct must not be
+     modified.  Return `NULL' on failure.
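+
+     For example, a short sketch that reports how often a module is
+     currently loaded (assuming `<stdio.h>' is included):
+
+          const lt_dlinfo *info = lt_dlgetinfo (handle);
+
+          if (info)
+            printf ("%s loaded %d time(s)\n",
+                    info->filename, info->ref_count);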
+
+   Furthermore, in order to save you from having to keep a list of the
+handles of all the modules you have loaded, these functions allow you to
+iterate over libltdl's list of loaded modules:
+
+ -- Type: lt_dlinterface_id
+     The opaque type used to hold the module interface details for each
+     registered libltdl client.
+
+ -- Type: int lt_dlhandle_interface (lt_dlhandle HANDLE,
+          const char *ID_STRING)
+     Functions of this type are called to check that a handle conforms
+     to a library's expected module interface when iterating over the
+     global handle list.  You should be careful to write a callback
+     function of this type that can correctly identify modules that
+     belong to this client, both to prevent other clients from
+     accidentally finding your loaded modules with the iterator
+     functions below, and vice versa.  The best way to do this is to
+     check that module HANDLE conforms to the interface specification
+     of your loader using `lt_dlsym'.
+
+     The callback may be given *every* module loaded by all the libltdl
+     module clients in the current address space, including any modules
+     loaded by other libraries such as libltdl itself, and should
+     return non-zero if that module does not fulfill the interface
+     requirements of your loader.
+
+          int
+          my_interface_cb (lt_dlhandle handle, const char *id_string)
+          {
+            char *(*module_id) (void) = NULL;
+
+            /* A valid my_module must provide all of these symbols.  */
+            module_id = (char*(*)(void)) lt_dlsym (handle, "module_version");
+            if (!module_id || !lt_dlsym (handle, "my_module_entrypoint"))
+                return 1;
+
+            if (strcmp (id_string, module_id()) != 0)
+                return 1;
+
+            return 0;
+          }
+
+ -- Function: lt_dlinterface_id lt_dlinterface_register
+          (const char *ID_STRING, lt_dlhandle_interface *IFACE)
+     Use this function to register your interface validator with
+     libltdl, and in return obtain a unique key to store and retrieve
+     per-module data.  You supply an ID_STRING and IFACE so that the
+     resulting `lt_dlinterface_id' can be used to filter the module
+     handles returned by the iteration functions below.  If IFACE is
+     `NULL', all modules will be matched.
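+
+     For example, registering the `my_interface_cb' validator shown
+     above (the identification string "my_module_v1.0" is
+     hypothetical):
+
+          lt_dlinterface_id my_interface_id =
+            lt_dlinterface_register ("my_module_v1.0", my_interface_cb);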
+
+ -- Function: void lt_dlinterface_free (lt_dlinterface_id IFACE)
+     Release the data associated with IFACE.
+
+ -- Function: int lt_dlhandle_map (lt_dlinterface_id IFACE,
+          int (*FUNC) (lt_dlhandle HANDLE, void * DATA), void * DATA)
+     For each module that matches IFACE, call the function FUNC.  When
+     writing the FUNC callback function, the argument HANDLE is the
+     handle of a loaded module, and DATA is the last argument passed to
+     `lt_dlhandle_map'. As soon as FUNC returns a non-zero value for
+     one of the handles, `lt_dlhandle_map' will stop calling FUNC and
+     immediately return that non-zero value.  Otherwise 0 is eventually
+     returned when FUNC has been successfully called for all matching
+     modules.
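+
+     For example, a sketch that counts the loaded modules matching
+     IFACE by accumulating into an `int' addressed through DATA:
+
+          int
+          count_module (lt_dlhandle handle, void *data)
+          {
+            (void) handle;        /* only the count matters here */
+            ++*(int *) data;
+            return 0;             /* visit every matching module */
+          }
+
+          ...
+          int n_modules = 0;
+          lt_dlhandle_map (my_interface_id, count_module, &n_modules);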
+
+ -- Function: lt_dlhandle lt_dlhandle_iterate
+          (lt_dlinterface_id  IFACE, lt_dlhandle PLACE)
+     Iterate over the module handles loaded by IFACE, returning the
+     first matching handle in the list if PLACE is `NULL', and the next
+     one on subsequent calls.  If PLACE is the last element in the list
+     of eligible modules, this function returns `NULL'.
+
+          lt_dlhandle handle = 0;
+          lt_dlinterface_id iface = my_interface_id;
+
+          while ((handle = lt_dlhandle_iterate (iface, handle)))
+            {
+              ...
+            }
+
+ -- Function: lt_dlhandle lt_dlhandle_fetch (lt_dlinterface_id IFACE,
+          const char *MODULE_NAME)
+     Search through the module handles loaded by IFACE for a module
+     named MODULE_NAME, returning its handle if found or else `NULL' if
+     no such named module has been loaded by IFACE.
+
+   You might still be tempted to maintain your own list of loaded
+module handles (in parallel with the list maintained inside libltdl) if
+there is any other data that your application wants to associate with
+each open module.  Instead, you can use the following API calls to
+do that for you.  You must first obtain a unique interface id from
+libltdl as described above, and subsequently always use it to retrieve
+the data you stored earlier.  This allows different libraries to each
+store their own data against loaded modules, without interfering with
+one another.
+
+ -- Function: void * lt_dlcaller_set_data (lt_dlinterface_id KEY,
+          lt_dlhandle HANDLE, void * DATA)
+     Set DATA as the set of data uniquely associated with KEY and
+     HANDLE for later retrieval.  This function returns the DATA
+     previously associated with KEY and HANDLE, if any.  A result of 0
+     may indicate that a diagnostic for the last error (if any) is
+     available from `lt_dlerror()'.
+
+     For example, to correctly remove some associated data:
+
+          void *stale = lt_dlcaller_set_data (key, handle, 0);
+          if (stale != NULL)
+            {
+              free (stale);
+            }
+          else
+            {
+              const char *error_msg = lt_dlerror ();
+
+              if (error_msg != NULL)
+                {
+                  my_error_handler (error_msg);
+                  return STATUS_FAILED;
+                }
+            }
+
+ -- Function: void * lt_dlcaller_get_data (lt_dlinterface_id KEY,
+          lt_dlhandle HANDLE)
+     Return the address of the data associated with KEY and HANDLE, or
+     else `NULL' if there is none.
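+
+     For example, retrieving data that was stored earlier with
+     `lt_dlcaller_set_data' (the `struct my_module_data' type is
+     hypothetical):
+
+          struct my_module_data *stored = (struct my_module_data *)
+            lt_dlcaller_get_data (key, handle);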
+
+   Old versions of libltdl also provided a simpler, but similar, API
+based around `lt_dlcaller_id'.  Unfortunately, it had no provision for
+detecting whether a module belonged to a particular interface as
+libltdl didn't support multiple loaders in the same address space at
+that time.  Those APIs are no longer supported as there would be no way
+to stop clients of the old APIs from seeing (and accidentally altering)
+modules loaded by other libraries.
+
+
+File: libtool.info,  Node: Module loaders for libltdl,  Next: Distributing libltdl,  Prev: User defined module data,  Up: Using libltdl
+
+11.5 How to create and register new module loaders
+==================================================
+
+Sometimes libltdl's many ways of gaining access to modules are not
+sufficient for the purposes of a project.  You can write your own
+loader, and register it with libltdl so that `lt_dlopen' will be able
+to use it.
+
+   Writing a loader involves writing at least three functions that can
+be called by `lt_dlopen', `lt_dlsym' and `lt_dlclose'.  Optionally, you
+can provide a finalisation function to perform any cleanup operations
+when `lt_dlexit' executes, and a symbol prefix string that will be
+prepended to any symbols passed to `lt_dlsym'.  These functions must
+match the function pointer types below, after which they can be
+allocated to an instance of `lt_user_dlloader' and registered.
+
+   Registering the loader requires that you choose a name for it, so
+that it can be recognised by `lt_dlloader_find' and removed with
+`lt_dlloader_remove'.  The name you choose must be unique, and not
+already in use by libltdl's builtin loaders:
+
+"dlopen"
+     The system dynamic library loader, if one exists.
+
+"dld"
+     The GNU dld loader, if `libdld' was installed when libltdl was
+     built.
+
+"dlpreload"
+     The loader for `lt_dlopen'ing of preloaded static modules.
+
+   The prefix "dl" is reserved for loaders supplied with future
+versions of libltdl, so you should not use that for your own loader
+names.
+
+The following types are defined in `ltdl.h':
+
+ -- Type: lt_module
+     `lt_module' is a dlloader dependent module.  The dynamic module
+     loader extensions communicate using these low level types.
+
+ -- Type: lt_dlloader
+     `lt_dlloader' is a handle for module loader types.
+
+ -- Type: lt_user_data
+     `lt_user_data' is used for specifying loader instance data.
+
+ -- Type: struct lt_user_dlloader {const char *SYM_PREFIX;
+          lt_module_open *MODULE_OPEN; lt_module_close *MODULE_CLOSE;
+          lt_find_sym *FIND_SYM; lt_dlloader_exit *DLLOADER_EXIT;
+          lt_user_data DLLOADER_DATA; }
+     If you want to define a new way to open dynamic modules, and have
+     the `lt_dlopen' API use it, you need to instantiate one of these
+     structures and pass it to `lt_dlloader_add'.  You can pass whatever
+     you like in the DLLOADER_DATA field, and it will be passed back as
+     the value of the first parameter to each of the functions
+     specified in the function pointer fields.
+
+ -- Type: lt_module lt_module_open (lt_user_data LOADER_DATA,
+          const char *FILENAME)
+     The type of the loader function for an `lt_dlloader' module
+     loader.  The value set in the dlloader_data field of the `struct
+     lt_user_dlloader' structure will be passed into this function in
+     the LOADER_DATA parameter.  Implementation of such a function
+     should attempt to load the named module, and return an `lt_module'
+     suitable for passing in to the associated `lt_module_close' and
+     `lt_sym_find' function pointers.  If the function fails it should
+     return `NULL', and set the error message with `lt_dlseterror'.
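+
+     For example, a minimal sketch of such a loader function built on
+     the POSIX `dlopen' API (assuming `<dlfcn.h>' is included); the
+     name `myloader_open' matches the registration example below:
+
+          static lt_module
+          myloader_open (lt_user_data loader_data, const char *filename)
+          {
+            void *module = dlopen (filename, RTLD_NOW);
+
+            (void) loader_data;   /* no instance data in this sketch */
+            if (!module)
+              lt_dlseterror (lt_dladderror ("cannot open module"));
+
+            return (lt_module) module;
+          }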
+
+ -- Type: int lt_module_close (lt_user_data LOADER_DATA,
+          lt_module MODULE)
+     The type of the unloader function for a user defined module loader.
+     Implementation of such a function should attempt to release any
+     resources tied up by the MODULE module, and then unload it from
+     memory.  If the function fails for some reason, set the error
+     message with `lt_dlseterror' and return non-zero.
+
+ -- Type: void * lt_find_sym (lt_module MODULE, const char *SYMBOL)
+     The type of the symbol lookup function for a user defined module
+     loader.  Implementation of such a function should return the
+     address of the named SYMBOL in the module MODULE, or else set the
+     error message with `lt_dlseterror' and return `NULL' if lookup
+     fails.
+
+ -- Type: int lt_dlloader_exit (lt_user_data LOADER_DATA)
+     The type of the finalisation function for a user defined module
+     loader.  Implementation of such a function should free any
+     resources associated with the loader, including any user specified
+     data in the `dlloader_data' field of the `lt_user_dlloader'.  If
+     non-`NULL', the function will be called by `lt_dlexit', and
+     `lt_dlloader_remove'.
+
+   For example:
+
+     int
+     register_myloader (void)
+     {
+       lt_user_dlloader dlloader;
+
+       /* User modules are responsible for their own initialisation. */
+       if (myloader_init () != 0)
+         return MYLOADER_INIT_ERROR;
+
+       dlloader.sym_prefix    = NULL;
+       dlloader.module_open   = myloader_open;
+       dlloader.module_close  = myloader_close;
+       dlloader.find_sym      = myloader_find_sym;
+       dlloader.dlloader_exit = myloader_exit;
+       dlloader.dlloader_data = (lt_user_data)myloader_function;
+
+       /* Add my loader as the default module loader. */
+       if (lt_dlloader_add (lt_dlloader_next (NULL), &dlloader,
+                            "myloader") != 0)
+         return ERROR;
+
+       return OK;
+     }
+
+   Note that if there is any initialisation required for the loader, it
+must be performed manually before the loader is registered - libltdl
+doesn't handle user loader initialisation.
+
+   Finalisation _is_ handled by libltdl however, and it is important to
+ensure the `dlloader_exit' callback releases any resources claimed
+during the initialisation phase.
+
+libltdl provides the following functions for writing your own module
+loaders:
+
+ -- Function: int lt_dlloader_add (lt_dlloader *PLACE,
+          lt_user_dlloader *DLLOADER, const char *LOADER_NAME)
+     Add a new module loader to the list of all loaders, either as the
+     last loader (if PLACE is `NULL'), else immediately before the
+     loader passed as PLACE.  LOADER_NAME will be returned by
+     `lt_dlloader_name' if it is subsequently passed a newly registered
+     loader.  These LOADER_NAMEs must be unique, or
+     `lt_dlloader_remove' and `lt_dlloader_find' cannot work.  Returns
+     0 for success.
+
+          /* Make myloader be the last one. */
+          if (lt_dlloader_add (NULL, myloader, "myloader") != 0)
+            perror (lt_dlerror ());
+
+ -- Function: int lt_dlloader_remove (const char *LOADER_NAME)
+     Remove the loader identified by the unique name, LOADER_NAME.
+     Before this can succeed, all modules opened by the named loader
+     must have been closed.  Returns 0 for success, otherwise an error
+     message can be obtained from `lt_dlerror'.
+
+          /* Remove myloader. */
+          if (lt_dlloader_remove ("myloader") != 0)
+            perror (lt_dlerror ());
+
+ -- Function: lt_dlloader * lt_dlloader_next (lt_dlloader *PLACE)
+     Iterate over the module loaders, returning the first loader if
+     PLACE is `NULL', and the next one on subsequent calls.  The handle
+     is for use with `lt_dlloader_add'.
+
+          /* Make myloader be the first one. */
+          if (lt_dlloader_add (lt_dlloader_next (NULL), myloader,
+                               "myloader") != 0)
+            return ERROR;
+
+ -- Function: lt_dlloader * lt_dlloader_find (const char *LOADER_NAME)
+     Return the first loader with a matching LOADER_NAME identifier, or
+     else `NULL', if the identifier is not found.
+
+     The identifiers that may be used by libltdl itself, if the host
+     architecture supports them, are "dlopen"(1), "dld" and "dlpreload".
+
+          /* Add a user loader as the next module loader to be tried if
+             the standard dlopen loader were to fail when lt_dlopening. */
+          if (lt_dlloader_add (lt_dlloader_find ("dlopen"), myloader,
+                               "myloader") != 0)
+            return ERROR;
+
+ -- Function: const char * lt_dlloader_name (lt_dlloader *PLACE)
+     Return the identifying name of PLACE, as obtained from
+     `lt_dlloader_next' or `lt_dlloader_find'.  If this function fails,
+     it will return `NULL' and set an error for retrieval with
+     `lt_dlerror'.
+
+ -- Function: lt_user_data * lt_dlloader_data (lt_dlloader *PLACE)
+     Return the address of the `dlloader_data' of PLACE, as obtained
+     from `lt_dlloader_next' or `lt_dlloader_find'.  If this function
+     fails, it will return `NULL' and set an error for retrieval with
+     `lt_dlerror'.
+
+11.5.1 Error handling within user module loaders
+------------------------------------------------
+
+ -- Function: int lt_dladderror (const char *DIAGNOSTIC)
+     This function allows you to integrate your own error messages into
+     `lt_dlerror'.  Pass in a suitable diagnostic message for return by
+     `lt_dlerror', and an error identifier for use with `lt_dlseterror'
+     is returned.
+
+     If the allocation of an identifier fails, this function returns -1.
+
+          int myerror = lt_dladderror ("Doh!");
+          if (myerror < 0)
+            perror (lt_dlerror ());
+
+ -- Function: int lt_dlseterror (int ERRORCODE)
+     When writing your own module loaders, you should use this function
+     to raise errors so that they are propagated through the
+     `lt_dlerror' interface.  All of the standard errors used by
+     libltdl are declared in `ltdl.h', or you can add more of your own
+     with `lt_dladderror'.  This function returns 0 on success.
+
+          if (lt_dlseterror (LTDL_ERROR_NO_MEMORY) != 0)
+            perror (lt_dlerror ());
+
+---------- Footnotes ----------
+
+   (1) This is used for the host dependent module loading API -
+`shl_load' and `LoadLibrary', for example.
+
+
+File: libtool.info,  Node: Distributing libltdl,  Prev: Module loaders for libltdl,  Up: Using libltdl
+
+11.6 How to distribute libltdl with your package
+================================================
+
+Even though libltdl is installed together with libtool, you may wish to
+include libltdl in the distribution of your package, for the
+convenience of users of your package that don't have libtool or libltdl
+installed, or if you are using features of a very new version of
+libltdl that you don't expect your users to have yet.  In such cases,
+you must decide which flavor of libltdl you want to use: a convenience
+library or an installable libtool library.
+
+   The most simplistic way to add `libltdl' to your package is to copy
+all the `libltdl' source files to a subdirectory within your package
+and to build and link them along with the rest of your sources.  To
+help you do this, the m4 macros for Autoconf are available in
+`ltdl.m4'.  You must ensure that they are available in `aclocal.m4'
+before you run Autoconf(1).  Having made the macros available, you must
+add a call to the `LTDL_INIT' macro (after the call to `LT_INIT') to
+your package's `configure.ac' to perform the configure time checks
+required to build the library correctly.  Unfortunately, this method
+has problems if you then try to link the package binaries with an
+installed libltdl, or a library that depends on libltdl, because of the
+duplicate symbol definitions.  For example, ultimately linking against
+two different versions of libltdl, or against both a local convenience
+library and an installed libltdl is bad.  Ensuring that only one copy
+of the libltdl sources is linked into any program is left as an
+exercise for the reader.
+
+ -- Macro: LT_CONFIG_LTDL_DIR (DIRECTORY)
+     Declare DIRECTORY to be the location of the `libltdl' source
+     files, for `libtoolize --ltdl' to place them. *Note Invoking
+     libtoolize::, for more details.  Provided that you add an
+     appropriate `LT_CONFIG_LTDL_DIR' call in your `configure.ac'
+     before calling `libtoolize', the appropriate `libltdl' files will
+     be installed automatically.
+
+ -- Macro: LTDL_INIT (OPTIONS)
+ -- Macro: LT_WITH_LTDL
+ -- Macro: AC_WITH_LTDL
+     `AC_WITH_LTDL' and `LT_WITH_LTDL' are deprecated names for older
+     versions of this macro; `autoupdate' will update your
+     `configure.ac' file.
+
+     This macro adds the following options to the `configure' script:
+
+    `--with-ltdl-include INSTALLED-LTDL-HEADER-DIR'
+          The `LTDL_INIT' macro will look in the standard header file
+          locations to find the installed `libltdl' headers.  If
+          `LTDL_INIT' can't find them by itself, the person who builds
+          your package can use this option to tell `configure' where
+          the installed `libltdl' headers are.
+
+    `--with-ltdl-lib INSTALLED-LTDL-LIBRARY-DIR'
+          Similarly, the person building your package can use this
+          option to help `configure' find the installed `libltdl.la'.
+
+    `--with-included-ltdl'
+          If there is no installed `libltdl', or in any case if the
+          person building your package would rather use the `libltdl'
+          sources shipped with the package in the subdirectory named by
+          `LT_CONFIG_LTDL_DIR', they should pass this option to
+          `configure'.
+
+     If the `--with-included-ltdl' is not passed at configure time, and
+     an installed `libltdl' is not found(2), then `configure' will exit
+     immediately with an error that asks the user to either specify the
+     location of an installed `libltdl' using the `--with-ltdl-include'
+     and `--with-ltdl-lib' options, or to build with the `libltdl'
+     sources shipped with the package by passing `--with-included-ltdl'.
+
+     If an installed `libltdl' is found, then `LIBLTDL' is set to the
+     link flags needed to use it, and `LTDLINCL' to the preprocessor
+     flags needed to find the installed headers, and `LTDLDEPS' will be
+     empty.  Note, however, that no version checking is performed.  You
+     should manually check for the `libltdl' features you need in
+     `configure.ac':
+
+          LT_INIT([dlopen])
+          LTDL_INIT
+
+          # The lt_dladvise_init symbol was added with libtool-2.2
+          if test "x$with_included_ltdl" != "xyes"; then
+            save_CFLAGS="$CFLAGS"
+            save_LDFLAGS="$LDFLAGS"
+            CFLAGS="$CFLAGS $LTDLINCL"
+            LDFLAGS="$LDFLAGS $LIBLTDL"
+            AC_CHECK_LIB([ltdl], [lt_dladvise_init],
+                          [],
+                  [AC_MSG_ERROR([installed libltdl is too old])])
+            LDFLAGS="$save_LDFLAGS"
+            CFLAGS="$save_CFLAGS"
+          fi
+
+     OPTIONS may include no more than one of the following build modes
+     depending on how you want your project to build `libltdl':
+     `nonrecursive', `recursive', or `subproject'.  In order for
+     `libtoolize' to detect this option correctly, if you supply one of
+     these arguments, they must be given literally (i.e., macros or
+     shell variables that expand to the correct ltdl mode will not
+     work).
+
+    `nonrecursive'
+          This is how the Libtool project distribution builds the
+          `libltdl' we ship and install.  If you wish to use Automake
+          to build `libltdl' without invoking a recursive make to
+          descend into the `libltdl' subdirectory, then use this
+          option.  You will need to set your configuration up carefully
+          to make this work properly, and you will need releases of
+          Autoconf and Automake that support `subdir-objects' and
+          `LIBOBJDIR' properly.  In your `configure.ac', add:
+
+               AM_INIT_AUTOMAKE([subdir-objects])
+               AC_CONFIG_HEADERS([config.h])
+               LT_CONFIG_LTDL_DIR([libltdl])
+               LT_INIT([dlopen])
+               LTDL_INIT([nonrecursive])
+
+          You _have to_ use a config header, but it may have a name
+          different than `config.h'.
+
+          Also, add the following near the top of your `Makefile.am':
+
+               AM_CPPFLAGS =
+               AM_LDFLAGS =
+
+               BUILT_SOURCES =
+               EXTRA_DIST =
+               CLEANFILES =
+               MOSTLYCLEANFILES =
+
+               include_HEADERS =
+               noinst_LTLIBRARIES =
+               lib_LTLIBRARIES =
+               EXTRA_LTLIBRARIES =
+
+               include libltdl/Makefile.inc
+
+          If you build any other libraries from this `Makefile.am',
+          you will also need to change `lib_LTLIBRARIES' to assign
+          with `+=' so that the `libltdl' targets declared in
+          `Makefile.inc' are not overwritten.
+
+    `recursive'
+          This build mode still requires that you use Automake, but (in
+          contrast with `nonrecursive') uses the more usual device of
+          starting another `make' process in the `libltdl'
+          subdirectory.  To use this mode, you should add to your
+          `configure.ac':
+
+               AM_INIT_AUTOMAKE
+               AC_CONFIG_HEADERS([config.h])
+               LT_CONFIG_LTDL_DIR([libltdl])
+               LT_INIT([dlopen])
+               LTDL_INIT([recursive])
+               AC_CONFIG_FILES([libltdl/Makefile])
+
+          Again, you _have to_ use a config header, but it may have a
+          name different than `config.h' if you like.
+
+          Also, add this to your `Makefile.am':
+
+               SUBDIRS = libltdl
+
+    `subproject'
+          This mode is the default unless you explicitly add
+          `recursive' or `nonrecursive' to your `LTDL_INIT' options;
+          `subproject' is the only mode supported by previous releases
+          of libltdl.  Even if you do not use Autoconf in the parent
+          project, in `subproject' mode `libltdl' still contains all
+          the necessary files to configure and build itself - you
+          just need to arrange for your build system to call
+          `libltdl/configure' with appropriate options, and then run
+          `make' in the `libltdl' subdirectory.
+
+          If you _are_ using Autoconf and Automake, then you will need
+          to add the following to your `configure.ac':
+
+               LT_CONFIG_LTDL_DIR([libltdl])
+               LTDL_INIT
+
+          and to `Makefile.am':
+
+               SUBDIRS = libltdl
+
+     Aside from setting the libltdl build mode, there are other keywords
+     that you can pass to `LTDL_INIT' to modify its behavior when
+     `--with-included-ltdl' has been given:
+
+    `convenience'
+          This is the default unless you explicitly add `installable' to
+          your `LTDL_INIT' options.
+
+          This keyword will cause options to be passed to the
+          `configure' script in the subdirectory named by
+          `LT_CONFIG_LTDL_DIR' in order to cause it to be built as a
+          convenience library.  If you're not using automake, you will
+          need to define `top_build_prefix', `top_builddir', and
+          `top_srcdir' in your makefile so that `LIBLTDL', `LTDLDEPS',
+          and `LTDLINCL' expand correctly.
+
+          One advantage of the convenience library is that it is not
+          installed, so the fact that you use `libltdl' will not be
+          apparent to the user, and it won't overwrite a pre-installed
+          version of `libltdl' the system might already have in the
+          installation directory.  On the other hand, if you want to
+          upgrade `libltdl' for any reason (e.g. a bugfix) you'll have
+          to recompile your package instead of just replacing the
+          shared installed version of `libltdl'.  However, if your
+          programs or libraries are linked with other libraries that
+          use such a pre-installed version of `libltdl', you may get
+          linker errors or run-time crashes.  Another problem is that
+          you cannot link the convenience library into more than one
+          libtool library, then link a single program with those
+          libraries, because you may get duplicate symbols.  In general
+          you can safely use the convenience library in programs that
+          don't depend on other libraries that might use `libltdl' too.
+
+    `installable'
+          This keyword will pass options to the `configure' script in
+          the subdirectory named by `LT_CONFIG_LTDL_DIR' in order to
+          cause it to be built as an installable library.  If you're not
+          using automake, you will need to define `top_build_prefix',
+          `top_builddir' and `top_srcdir' in your makefile so that
+          `LIBLTDL', `LTDLDEPS', and `LTDLINCL' are expanded properly.
+
+          Be aware that you could overwrite another `libltdl' already
+          installed to the same directory if you use this option.
+
+   Whatever method you use, `LTDL_INIT' will define the shell variable
+`LIBLTDL' to the link flag that you should use to link with `libltdl',
+the shell variable `LTDLDEPS' to the files that can be used as a
+dependency in `Makefile' rules, and the shell variable `LTDLINCL' to
+the preprocessor flag that you should use to compile programs that
+include `ltdl.h'. So, when you want to link a program with libltdl, be
+it a convenience, installed or installable library, just use
+`$(LTDLINCL)' for preprocessing and compilation, and `$(LIBLTDL)' for
+linking.
+
+   * If your package is built using an installed version of `libltdl',
+     `LIBLTDL' will be set to the compiler flags needed to link against
+     the installed library, `LTDLDEPS' will be empty, and `LTDLINCL'
+     will be set to the compiler flags needed to find the `libltdl'
+     header files.
+
+   * If your package is built using the convenience libltdl, `LIBLTDL'
+     and `LTDLDEPS' will be the pathname for the convenience version of
+     libltdl (starting with `${top_builddir}/' or
+     `${top_build_prefix}') and `LTDLINCL' will be `-I' followed by the
+     directory that contains `ltdl.h' (starting with `${top_srcdir}/').
+
+   * If an installable version of the included `libltdl' is being
+     built, its pathname starting with `${top_builddir}/' or
+     `${top_build_prefix}', will be stored in `LIBLTDL' and `LTDLDEPS',
+     and `LTDLINCL' will be set just as in the case of the convenience
+     library.
+
+   You should probably also use the `dlopen' option to `LT_INIT' in
+your `configure.ac', otherwise libtool will assume no dlopening
+mechanism is supported, and revert to dlpreopening, which is probably
+not what you want.  Avoid using the `-static', `-static-libtool-libs',
+or `-all-static' switches when linking programs with libltdl.  This
+will not work on all platforms, because the dlopening functions may not
+be available for static linking.
+
+   The following example shows you how to embed an installable libltdl
+in your package.  In order to use the convenience variant, just replace
+the `LTDL_INIT' option `installable' with `convenience'.  We assume
+that libltdl was embedded using `libtoolize --ltdl'.
+
+   configure.ac:
+     ...
+     # Name the subdirectory that contains libltdl sources
+     LT_CONFIG_LTDL_DIR([libltdl])
+
+     # Configure libtool with dlopen support if possible
+     LT_INIT([dlopen])
+
+     # Enable building of the installable libltdl library
+     LTDL_INIT([installable])
+     ...
+
+   Makefile.am:
+     ...
+     SUBDIRS = libltdl
+
+     AM_CPPFLAGS = $(LTDLINCL)
+
+     myprog_LDFLAGS = -export-dynamic
+     myprog_LDADD = $(LIBLTDL) -dlopen self -dlopen foo1.la
+     myprog_DEPENDENCIES = $(LTDLDEPS) foo1.la
+     ...
+
+ -- Macro: LTDL_INSTALLABLE
+ -- Macro: AC_LIBLTDL_INSTALLABLE
+     These macros are deprecated, the `installable' option to
+     `LTDL_INIT' should be used instead.
+
+ -- Macro: LTDL_CONVENIENCE
+ -- Macro: AC_LIBLTDL_CONVENIENCE
+     These macros are deprecated, the `convenience' option to
+     `LTDL_INIT' should be used instead.
+
+   ---------- Footnotes ----------
+
+   (1) We used to recommend adding the contents of `ltdl.m4' to
+`acinclude.m4', but with `aclocal' from a modern Automake (1.8 or
+newer) and this release of libltdl that is not only unnecessary but
+makes it easy to forget to upgrade `acinclude.m4' if you move to a
+different release of libltdl.
+
+   (2) Even if libltdl is installed, `LTDL_INIT' may fail to detect it
+if libltdl depends on symbols provided by libraries other than the C
+library.
+
+
+File: libtool.info,  Node: Trace interface,  Next: FAQ,  Prev: Using libltdl,  Up: Top
+
+12 Libtool's trace interface
+****************************
+
+This section describes macros whose sole purpose is to be traced using
+Autoconf's `--trace' option (*note The Autoconf Manual:
+(autoconf)autoconf Invocation.) to query the Libtool configuration of a
+project.  These macros are called by Libtool internals and should never
+be called by user code; they should only be traced.
+
+ -- Macro: LT_SUPPORTED_TAG (TAG)
+     This macro is called once for each language enabled in the
+     package.  Its only argument, TAG, is the tag-name corresponding to
+     the language (*note Tags::).
+
+     You can therefore retrieve the list of all tags enabled in a
+     project using the following command:
+          autoconf --trace 'LT_SUPPORTED_TAG:$1'
+
+
+File: libtool.info,  Node: FAQ,  Next: Troubleshooting,  Prev: Trace interface,  Up: Top
+
+13 Frequently Asked Questions about libtool
+*******************************************
+
+This chapter covers some questions that often come up on the mailing
+lists.
+
+* Menu:
+
+* Stripped link flags::         Dropped flags when creating a library
+
+
+File: libtool.info,  Node: Stripped link flags,  Up: FAQ
+
+13.1 Why does libtool strip link flags when creating a library?
+===============================================================
+
+When creating a shared library, but not when compiling or creating a
+program, `libtool' drops some flags from the command line provided by
+the user.  This is done because flags unknown to `libtool' may
+interfere with library creation or require additional support from
+`libtool', and because omitting flags is usually the conservative
+choice for a successful build.
+
+   If you encounter flags that you think are useful to pass, as a
+work-around you can prepend flags with `-Wc,' or `-Xcompiler ' to allow
+them to be passed through to the compiler driver (*note Link mode::).
+Another possibility is to add the flags to the compiler command itself
+at `configure' time:
+
+     ./configure CC='gcc -m64'
+
+   If you think `libtool' should let some flag through by default,
+here's how you can test such an inclusion: grab the Libtool development
+tree, edit the `ltmain.m4sh' file in the `libltdl/config' subdirectory
+to pass through the flag (search for `Flags to be passed through'),
+re-bootstrap and build with the flags in question added to `LDFLAGS',
+`CFLAGS', `CXXFLAGS', etc. on the `configure' command line as
+appropriate.  Run the testsuite as described in the `README' file and
+report results to the Libtool bug reporting address
+<bug-libtool@gnu.org>.
+
+
+File: libtool.info,  Node: Troubleshooting,  Next: Maintaining,  Prev: FAQ,  Up: Top
+
+14 Troubleshooting
+******************
+
+Libtool is under constant development, changing to remain up-to-date
+with modern operating systems.  If libtool doesn't work the way you
+think it should on your platform, you should read this chapter to help
+determine what the problem is, and how to resolve it.
+
+* Menu:
+
+* Libtool test suite::          Libtool's self-tests.
+* Reporting bugs::              How to report problems with libtool.
+
+
+File: libtool.info,  Node: Libtool test suite,  Next: Reporting bugs,  Up: Troubleshooting
+
+14.1 The libtool test suite
+===========================
+
+Libtool comes with two integrated sets of tests that check that your
+build is sane, test its capabilities, and report obvious bugs in the
+libtool program.  These tests, too, are constantly evolving, based on
+past problems with libtool, and known deficiencies in other operating
+systems.
+
+   As described in the `README' file, you may run `make -k check' after
+you have built libtool (possibly before you install it) in order to
+make sure that it meets basic functional requirements.
+
+* Menu:
+
+* Test descriptions::           The contents of the old test suite.
+* When tests fail::             What to do when a test fails.
+
+
+File: libtool.info,  Node: Test descriptions,  Next: When tests fail,  Up: Libtool test suite
+
+14.1.1 Description of test suite
+--------------------------------
+
+Here is a list of the current programs in the old test suite, and what
+they test for:
+
+`cdemo-conf.test'
+`cdemo-make.test'
+`cdemo-exec.test'
+`cdemo-static.test'
+`cdemo-static-make.test'
+`cdemo-static-exec.test'
+`cdemo-shared.test'
+`cdemo-shared-make.test'
+`cdemo-shared-exec.test'
+`cdemo-undef.test'
+`cdemo-undef-make.test'
+`cdemo-undef-exec.test'
+     These programs check to see that the `tests/cdemo' subdirectory of
+     the libtool distribution can be configured and built correctly.
+
+     The `tests/cdemo' subdirectory contains a demonstration of libtool
+     convenience libraries, a mechanism that allows build-time static
+     libraries to be created, in a way that their components can be
+     later linked into programs or other libraries, even shared ones.
+
+     The tests matching `cdemo-*make.test' and `cdemo-*exec.test' are
+     executed three times, under three different libtool configurations:
+     `cdemo-conf.test' configures `cdemo/libtool' to build both static
+     and shared libraries (the default for platforms that support
+     both), `cdemo-static.test' builds only static libraries
+     (`--disable-shared'), and `cdemo-shared.test' builds only shared
+     libraries (`--disable-static').
+
+     The test `cdemo-undef.test' tests the generation of shared
+     libraries with undefined symbols on systems that allow this.
+
+`demo-conf.test'
+`demo-make.test'
+`demo-exec.test'
+`demo-inst.test'
+`demo-unst.test'
+`demo-static.test'
+`demo-static-make.test'
+`demo-static-exec.test'
+`demo-static-inst.test'
+`demo-static-unst.test'
+`demo-shared.test'
+`demo-shared-make.test'
+`demo-shared-exec.test'
+`demo-shared-inst.test'
+`demo-shared-unst.test'
+`demo-nofast.test'
+`demo-nofast-make.test'
+`demo-nofast-exec.test'
+`demo-nofast-inst.test'
+`demo-nofast-unst.test'
+`demo-pic.test'
+`demo-pic-make.test'
+`demo-pic-exec.test'
+`demo-nopic.test'
+`demo-nopic-make.test'
+`demo-nopic-exec.test'
+     These programs check to see that the `tests/demo' subdirectory of
+     the libtool distribution can be configured, built, installed, and
+     uninstalled correctly.
+
+     The `tests/demo' subdirectory contains a demonstration of a trivial
+     package that uses libtool.  The tests matching `demo-*make.test',
+     `demo-*exec.test', `demo-*inst.test' and `demo-*unst.test' are
+     executed four times, under four different libtool configurations:
+     `demo-conf.test' configures `demo/libtool' to build both static
+     and shared libraries, `demo-static.test' builds only static
+     libraries (`--disable-shared'), and `demo-shared.test' builds only
+     shared libraries (`--disable-static').  `demo-nofast.test'
+     configures `demo/libtool' to disable the fast-install mode
+     (`--enable-fast-install=no').  `demo-pic.test' configures
+     `demo/libtool' to prefer building PIC code (`--with-pic'),
+     `demo-nopic.test' to prefer non-PIC code (`--without-pic').
+
+`demo-deplibs.test'
+     Many systems cannot link static libraries into shared libraries.
+     libtool uses a `deplibs_check_method' to prevent such cases.  This
+     test checks whether libtool's `deplibs_check_method' works
+     properly.
+
+`demo-hardcode.test'
+     On all systems with shared libraries, the location of the library
+     can be encoded in executables that are linked against it (*note
+     Linking executables::).  This test checks the conditions under
+     which your system linker hardcodes the library location, and
+     guarantees that they correspond to libtool's own notion of how
+     your linker behaves.
+
+`demo-relink.test'
+`depdemo-relink.test'
+     These tests check whether variable `shlibpath_overrides_runpath' is
+     properly set.  If the test fails, it will indicate what the
+     variable should have been set to.
+
+`demo-noinst-link.test'
+     Checks whether libtool will not try to link with a previously
+     installed version of a library when it should be linking with a
+     just-built one.
+
+`depdemo-conf.test'
+`depdemo-make.test'
+`depdemo-exec.test'
+`depdemo-inst.test'
+`depdemo-unst.test'
+`depdemo-static.test'
+`depdemo-static-make.test'
+`depdemo-static-exec.test'
+`depdemo-static-inst.test'
+`depdemo-static-unst.test'
+`depdemo-shared.test'
+`depdemo-shared-make.test'
+`depdemo-shared-exec.test'
+`depdemo-shared-inst.test'
+`depdemo-shared-unst.test'
+`depdemo-nofast.test'
+`depdemo-nofast-make.test'
+`depdemo-nofast-exec.test'
+`depdemo-nofast-inst.test'
+`depdemo-nofast-unst.test'
+     These programs check to see that the `tests/depdemo' subdirectory
+     of the libtool distribution can be configured, built, installed,
+     and uninstalled correctly.
+
+     The `tests/depdemo' subdirectory contains a demonstration of
+     inter-library dependencies with libtool.  The test programs link
+     some interdependent libraries.
+
+     The tests matching `depdemo-*make.test', `depdemo-*exec.test',
+     `depdemo-*inst.test' and `depdemo-*unst.test' are executed four
+     times, under four different libtool configurations:
+     `depdemo-conf.test' configures `depdemo/libtool' to build both
+     static and shared libraries, `depdemo-static.test' builds only
+     static libraries (`--disable-shared'), and `depdemo-shared.test'
+     builds only shared libraries (`--disable-static').
+     `depdemo-nofast.test' configures `depdemo/libtool' to disable the
+     fast-install mode (`--enable-fast-install=no').
+
+`mdemo-conf.test'
+`mdemo-make.test'
+`mdemo-exec.test'
+`mdemo-inst.test'
+`mdemo-unst.test'
+`mdemo-static.test'
+`mdemo-static-make.test'
+`mdemo-static-exec.test'
+`mdemo-static-inst.test'
+`mdemo-static-unst.test'
+`mdemo-shared.test'
+`mdemo-shared-make.test'
+`mdemo-shared-exec.test'
+`mdemo-shared-inst.test'
+`mdemo-shared-unst.test'
+     These programs check to see that the `tests/mdemo' subdirectory of
+     the libtool distribution can be configured, built, installed, and
+     uninstalled correctly.
+
+     The `tests/mdemo' subdirectory contains a demonstration of a
+     package that uses libtool and the system independent dlopen wrapper
+     `libltdl' to load modules.  The library `libltdl' provides a
+     dlopen wrapper for various platforms (POSIX) including support for
+     dlpreopened modules (*note Dlpreopening::).
+
+     The tests matching `mdemo-*make.test', `mdemo-*exec.test',
+     `mdemo-*inst.test' and `mdemo-*unst.test' are executed three
+     times, under three different libtool configurations:
+     `mdemo-conf.test' configures `mdemo/libtool' to build both static
+     and shared libraries, `mdemo-static.test' builds only static
+     libraries (`--disable-shared'), and `mdemo-shared.test' builds
+     only shared libraries (`--disable-static').
+
+`mdemo-dryrun.test'
+     This test checks whether libtool's `--dry-run' mode works properly.
+
+`mdemo2-conf.test'
+`mdemo2-exec.test'
+`mdemo2-make.test'
+     These programs check to see that the `tests/mdemo2' subdirectory of
+     the libtool distribution can be configured, built, and executed
+     correctly.
+
+     The `tests/mdemo2' directory contains a demonstration of a package
+     that attempts to link with a library (from the `tests/mdemo'
+     directory) that itself does dlopening of libtool modules.
+
+`link.test'
+     This test guarantees that linking directly against a non-libtool
+     static library works properly.
+
+`link-2.test'
+     This test makes sure that files ending in `.lo' are never linked
+     directly into a program file.
+
+`nomode.test'
+     Check whether we can actually get help for libtool.
+
+`objectlist.test'
+     Check that a nonexistent objectlist file is properly detected.
+
+`pdemo-conf.test'
+`pdemo-make.test'
+`pdemo-exec.test'
+`pdemo-inst.test'
+     These programs check to see that the `tests/pdemo' subdirectory of
+     the libtool distribution can be configured, built, and executed
+     correctly.
+
+     The `pdemo-conf.test' lowers the `max_cmd_len' variable in the
+     generated libtool script to test the measures to evade command line
+     length limitations.
+
+`quote.test'
+     This program checks libtool's metacharacter quoting.
+
+`sh.test'
+     Checks for some nonportable or dubious or undesired shell
+     constructs in shell scripts.
+
+`suffix.test'
+     When other programming languages are used with libtool (*note
+     Other languages::), the source files may end in suffixes other
+     than `.c'.  This test validates that libtool can handle suffixes
+     for all the file types that it supports, and that it fails when
+     the suffix is invalid.
+
+`tagdemo-conf.test'
+`tagdemo-make.test'
+`tagdemo-exec.test'
+`tagdemo-static.test'
+`tagdemo-static-make.test'
+`tagdemo-static-exec.test'
+`tagdemo-shared.test'
+`tagdemo-shared-make.test'
+`tagdemo-shared-exec.test'
+`tagdemo-undef.test'
+`tagdemo-undef-make.test'
+`tagdemo-undef-exec.test'
+     These programs check to see that the `tests/tagdemo' subdirectory
+     of the libtool distribution can be configured, built, and executed
+     correctly.
+
+     The `tests/tagdemo' directory contains a demonstration of a package
+     that uses libtool's multi-language support through configuration
+     tags.  It generates a library from C++ sources, which is then
+     linked to a C++ program.
+
+`f77demo-conf.test'
+`f77demo-make.test'
+`f77demo-exec.test'
+`f77demo-static.test'
+`f77demo-static-make.test'
+`f77demo-static-exec.test'
+`f77demo-shared.test'
+`f77demo-shared-make.test'
+`f77demo-shared-exec.test'
+     These programs check to see that the `tests/f77demo' subdirectory
+     of the libtool distribution can be configured, built, and executed
+     correctly.
+
+     The `tests/f77demo' tests exercise Fortran 77 support in libtool
+     by creating a library from Fortran 77 sources, a library from
+     mixed Fortran and C sources, a Fortran 77 program that uses the
+     former library, and a C program that uses the latter.
+
+`fcdemo-conf.test'
+`fcdemo-make.test'
+`fcdemo-exec.test'
+`fcdemo-static.test'
+`fcdemo-static-make.test'
+`fcdemo-static-exec.test'
+`fcdemo-shared.test'
+`fcdemo-shared-make.test'
+`fcdemo-shared-exec.test'
+     These programs check to see that the `tests/fcdemo' subdirectory
+     of the libtool distribution can be configured, built, and executed
+     correctly.
+
+     The `tests/fcdemo' directory is similar to `tests/f77demo',
+     except that Fortran 90 is used in combination with the `FC'
+     interface provided by Autoconf and Automake.
+
+
+   The new, Autotest-based test suite uses keywords to classify certain
+test groups:
+
+`CXX'
+`F77'
+`FC'
+`GCJ'
+     The test group exercises one of these `libtool' language tags.
+
+`autoconf'
+`automake'
+     These keywords denote that the respective external program is
+     needed by the test group.  The tests are typically skipped if the
+     program is not installed.  The `automake' keyword may also denote
+     use of the `aclocal' program.
+
+`interactive'
+     This test group may require user interaction on some systems.
+     Typically, this means closing a popup window about a DLL load
+     error on Windows.
+
+`libltdl'
+     Denote that the `libltdl' library is exercised by the test group.
+
+`libtool'
+`libtoolize'
+     Denote that the `libtool' or `libtoolize' scripts are exercised by
+     the test group, respectively.
+
+`recursive'
+     Denote that this test group may recursively re-invoke the test
+     suite itself, with changed settings and maybe a changed `libtool'
+     script.  You may use the `INNER_TESTSUITEFLAGS' variable to pass
+     additional settings to this recursive invocation.  Typically,
+     recursive invocations delimit the set of tests with another
+     keyword, for example by passing `-k libtool' right before the
+     expansion of the `INNER_TESTSUITEFLAGS' variable (without an
+     intervening space, so you get the chance for further delimitation).
+
+     Test groups with the keyword `recursive' should not themselves be
+     marked with the other keywords, in order to avoid infinite
+     recursion.  As a consequence, recursive test groups themselves
+     should never require user interaction, while the test groups they
+     invoke may do so.
+
+   There is a convenience target `check-noninteractive' that runs all
+tests from both test suites that do not cause user interaction on
+Windows.  Conversely, the target `check-interactive' runs the
+complement of tests and might require closing popup windows about DLL
+load errors on Windows.
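+
+   For example, on a Windows machine where an unattended run is
+desired, one might invoke only the noninteractive subset (an
+illustrative command, run from the top-level build directory):
+
+     make check-noninteractive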
+
+
+File: libtool.info,  Node: When tests fail,  Prev: Test descriptions,  Up: Libtool test suite
+
+14.1.2 When tests fail
+----------------------
+
+When the tests in the old test suite are run via `make check', output
+is caught in per-test `tests/TEST-NAME.log' files and summarized in the
+`test-suite.log' file.  The exit status of each program tells the
+`Makefile' whether or not the test succeeded.
+
+   If a test fails, it means that there is either a programming error in
+libtool, or in the test program itself.
+
+   To investigate a particular test, you may run it directly, as you
+would a normal program.  When the test is invoked in this way, it
+produces output that may be useful in determining what the problem is.
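+
+   Alternatively, since the per-test log files and `test-suite.log'
+summary described above come from the standard Automake test harness,
+a single old-suite test can usually be re-run through `make' as well.
+This is only a sketch; the directory whose `Makefile' drives the old
+tests may vary:
+
+     make check TESTS='link.test' VERBOSE=yes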
+
+   The new, Autotest-based test suite produces as output a file
+`tests/testsuite.log' which contains information about failed tests.
+
+   You can pass options to the test suite through the `make' variable
+`TESTSUITEFLAGS' (*note The Autoconf Manual: (autoconf)testsuite
+Invocation.).
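+
+   For example, to run only the test groups marked with the `libtool'
+keyword, with verbose output (an illustrative invocation; see the
+Autoconf manual for the complete list of options):
+
+     make check TESTSUITEFLAGS='-v -k libtool'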
+
+
+File: libtool.info,  Node: Reporting bugs,  Prev: Libtool test suite,  Up: Troubleshooting
+
+14.2 Reporting bugs
+===================
+
+If you think you have discovered a bug in libtool, you should think
+twice: the libtool maintainer is notorious for passing the buck (or
+maybe that should be "passing the bug").  Libtool was invented to fix
+known deficiencies in shared library implementations, so, in a way, most
+of the bugs in libtool are actually bugs in other operating systems.
+However, the libtool maintainer would definitely be happy to add support
+for somebody else's buggy operating system.  [I wish there was a good
+way to do winking smiley-faces in Texinfo.]
+
+   Genuine bugs in libtool include problems with shell script
+portability, documentation errors, and failures in the test suite
+(*note Libtool test suite::).
+
+   First, check the documentation and help screens to make sure that the
+behaviour you think is a problem is not already mentioned as a feature.
+
+   Then, you should read the Emacs guide to reporting bugs (*note
+Reporting Bugs: (emacs)Bugs.).  Some of the details listed there are
+specific to Emacs, but the principle behind them is a general one.
+
+   Finally, send a bug report to the Libtool bug reporting address
+<bug-libtool@gnu.org> with any appropriate _facts_, such as test suite
+output (*note When tests fail::), all the details needed to reproduce
+the bug, and a brief description of why you think the behaviour is a
+bug.  Be sure to include the word "libtool" in the subject line, as
+well as the version number you are using (which can be found by typing
+`libtool --version').
+
+
+File: libtool.info,  Node: Maintaining,  Next: GNU Free Documentation License,  Prev: Troubleshooting,  Up: Top
+
+15 Maintenance notes for libtool
+********************************
+
+This chapter contains information that the libtool maintainer finds
+important.  It will be of no use to you unless you are considering
+porting libtool to new systems, or writing your own libtool.
+
+* Menu:
+
+* New ports::                   How to port libtool to new systems.
+* Tested platforms::            When libtool was last tested.
+* Platform quirks::             Information about different library systems.
+* libtool script contents::     Configuration information that libtool uses.
+* Cheap tricks::                Making libtool maintainership easier.
+
+
+File: libtool.info,  Node: New ports,  Next: Tested platforms,  Up: Maintaining
+
+15.1 Porting libtool to new systems
+===================================
+
+Before you embark on porting libtool to an unsupported system, it is
+worthwhile to send e-mail to the Libtool mailing list
+<libtool@gnu.org>, to make sure that you are not duplicating existing
+work.
+
+   If you find that any porting documentation is missing, please
+complain!  Complaints with patches and improvements to the
+documentation, or to libtool itself, are more than welcome.
+
+* Menu:
+
+* Information sources::         Where to find relevant documentation
+* Porting inter-library dependencies::  Implementation details explained
+
+
+File: libtool.info,  Node: Information sources,  Next: Porting inter-library dependencies,  Up: New ports
+
+15.1.1 Information sources
+--------------------------
+
+Once it is clear that a new port is necessary, you'll generally need the
+following information:
+
+canonical system name
+     You need the output of `config.guess' for this system, so that you
+     can make changes to the libtool configuration process without
+     affecting other systems.
+
+man pages for `ld' and `cc'
+     These generally describe what flags are used to generate PIC, to
+     create shared libraries, and to link against only static
+     libraries.  You may need to follow some cross references to find
+     the information that is required.
+
+man pages for `ld.so', `rtld', or equivalent
+     These are a valuable resource for understanding how shared
+     libraries are loaded on the system.
+
+man page for `ldconfig', or equivalent
+     This page usually describes how to install shared libraries.
+
+output from `ls -l /lib /usr/lib'
+     This shows the naming convention for shared libraries on the
+     system, including which names should be symbolic links.
+
+any additional documentation
+     Some systems have special documentation on how to build and install
+     shared libraries.
+
+   If you know how to program the Bourne shell, then you can complete
+the port yourself; otherwise, you'll have to find somebody with the
+relevant skills who will do the work.  People on the libtool mailing
+list are usually willing to volunteer to help you with new ports, so
+you can send the information to them.
+
+   To do the port yourself, you'll definitely need to modify the
+`libtool.m4' macros in order to make platform-specific changes to the
+configuration process.  You should search that file for the `PORTME'
+keyword, which will give you some hints on what you'll need to change.
+In general, all that is involved is modifying the appropriate
+configuration variables (*note libtool script contents::).
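+
+   For instance, the places that usually need attention can be located
+with a simple search (the exact location of `libtool.m4' within the
+source tree may vary):
+
+     grep -n PORTME libtool.m4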
+
+   Your best bet is to find an already-supported system that is similar
+to yours, and make your changes based on that.  In some cases, however,
+your system will differ significantly from every other supported system,
+and it may be necessary to add new configuration variables, and modify
+the `ltmain.in' script accordingly.  Be sure to write to the mailing
+list before you make changes to `ltmain.in', since they may have advice
+on the most effective way of accomplishing what you want.
+
+
+File: libtool.info,  Node: Porting inter-library dependencies,  Prev: Information sources,  Up: New ports
+
+15.1.2 Porting inter-library dependencies support
+-------------------------------------------------
+
+Since version 1.2c, libtool has re-introduced the ability to do
+inter-library dependency on some platforms, thanks to a patch by Toshio
+Kuratomi <badger@prtr-13.ucsc.edu>.  Here's a shortened version of the
+message that contained his patch:
+
+   The basic architecture is this: in `libtool.m4', the person who
+writes libtool makes sure `$deplibs' is included in `$archive_cmds'
+somewhere and also sets the variable `$deplibs_check_method', and maybe
+`$file_magic_cmd' when `deplibs_check_method' is file_magic.
+
+   `deplibs_check_method' can be one of five things:
+`file_magic [REGEX]'
+     looks in the library link path for libraries that have the right
+     libname.  Then it runs `$file_magic_cmd' on the library and checks
+     for a match against the extended regular expression REGEX.  When
+     `file_magic_test_file' is set by `libtool.m4', it is used as an
+     argument to `$file_magic_cmd' in order to verify whether the
+     regular expression matches its output, and warn the user otherwise.
+
+`test_compile'
+     just checks whether it is possible to link a program out of a list
+     of libraries, and checks which of those are listed in the output of
+     `ldd'.  It is currently unused, and will probably be dropped in the
+     future.
+
+`pass_all'
+     will pass everything without any checking.  This may work on
+     platforms in which code is position-independent by default and
+     inter-library dependencies are properly supported by the dynamic
+     linker, for example, on DEC OSF/1 3 and 4.
+
+`none'
+     It causes deplibs to be reassigned `deplibs=""'.  That way
+     `archive_cmds' can contain deplibs on all platforms, but not have
+     deplibs used unless needed.
+
+`unknown'
+     is the default for all systems unless overridden in `libtool.m4'.
+     It is the same as `none', but it documents that we really don't
+     know what the correct value should be, and we welcome patches that
+     improve it.
+
+   Then in `ltmain.in' we have the real workhorse: a little
+initialization and postprocessing (to setup/release variables for use
+with eval echo libname_spec etc.) and a case statement that decides the
+method that is being used.  This is the real code... I wish I could
+condense it a little more, but I don't think I can without function
+calls.  I've mostly optimized it (moved things out of loops, etc.) but
+there is probably some fat left.  I thought I should stop while I was
+ahead, work on whatever bugs you discover, etc. before thinking about
+more than obvious optimizations.
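+
+   As a rough illustration of the mechanism described above, a port
+might end up setting variables along the following lines in
+`libtool.m4' (the values shown are invented for this illustration and
+are not taken from any real port):
+
+     deplibs_check_method='file_magic ELF [0-9]+-bit [LM]SB shared object'
+     file_magic_cmd='file'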
+
+
+File: libtool.info,  Node: Tested platforms,  Next: Platform quirks,  Prev: New ports,  Up: Maintaining
+
+15.2 Tested platforms
+=====================
+
+This table describes when libtool was last known to be tested on
+platforms where it claims to support shared libraries:
+
+     -------------------------------------------------------
+     canonical host name          compiler  libtool results
+       (tools versions)                     release
+     -------------------------------------------------------
+     alpha-dec-osf5.1		cc	 1.3e	  ok (1.910)
+     alpha-dec-osf4.0f               gcc      1.3e     ok (1.910)
+     alpha-dec-osf4.0f               cc       1.3e     ok (1.910)
+     alpha-dec-osf3.2                gcc      0.8      ok
+     alpha-dec-osf3.2                cc       0.8      ok
+     alpha-dec-osf2.1                gcc      1.2f     NS
+     alpha*-unknown-linux-gnu        gcc      1.3b     ok
+       (egcs-1.1.2, GNU ld 2.9.1.0.23)
+     hppa2.0w-hp-hpux11.00           cc       1.2f     ok
+     hppa2.0-hp-hpux10.20            cc       1.3.2    ok
+     hppa1.1-hp-hpux10.20            gcc      1.2f     ok
+     hppa1.1-hp-hpux10.20            cc       1.3c     ok (1.821)
+     hppa1.1-hp-hpux10.10            gcc      1.2f     ok
+     hppa1.1-hp-hpux10.10            cc       1.2f     ok
+     hppa1.1-hp-hpux9.07             gcc      1.2f     ok
+     hppa1.1-hp-hpux9.07             cc       1.2f     ok
+     hppa1.1-hp-hpux9.05             gcc      1.2f     ok
+     hppa1.1-hp-hpux9.05             cc       1.2f     ok
+     hppa1.1-hp-hpux9.01             gcc      1.2f     ok
+     hppa1.1-hp-hpux9.01             cc       1.2f     ok
+     i*86-*-beos                     gcc      1.2f     ok
+     i*86-*-bsdi4.0.1                gcc      1.3c     ok
+       (gcc-2.7.2.1)
+     i*86-*-bsdi4.0                  gcc      1.2f     ok
+     i*86-*-bsdi3.1                  gcc      1.2e     NS
+     i*86-*-bsdi3.0                  gcc      1.2e     NS
+     i*86-*-bsdi2.1                  gcc      1.2e     NS
+     i*86-pc-cygwin                  gcc      1.3b     NS
+       (egcs-1.1 stock b20.1 compiler)
+     i*86-*-dguxR4.20MU01            gcc      1.2      ok
+     i*86-*-freebsd4.3		gcc      1.3e     ok (1.912)
+     i*86-*-freebsdelf4.0            gcc      1.3c     ok
+       (egcs-1.1.2)
+     i*86-*-freebsdelf3.2            gcc      1.3c     ok
+       (gcc-2.7.2.1)
+     i*86-*-freebsdelf3.1            gcc      1.3c     ok
+       (gcc-2.7.2.1)
+     i*86-*-freebsdelf3.0            gcc      1.3c     ok
+     i*86-*-freebsd3.0               gcc      1.2e     ok
+     i*86-*-freebsd2.2.8             gcc      1.3c     ok
+       (gcc-2.7.2.1)
+     i*86-*-freebsd2.2.6             gcc      1.3b     ok
+       (egcs-1.1 & gcc-2.7.2.1, native ld)
+     i*86-*-freebsd2.1.5             gcc      0.5      ok
+     i*86-*-netbsd1.5                gcc      1.3e     ok (1.901)
+       (egcs-1.1.2)
+     i*86-*-netbsd1.4                gcc      1.3c     ok
+       (egcs-1.1.1)
+     i*86-*-netbsd1.4.3A             gcc      1.3e     ok (1.901)
+     i*86-*-netbsd1.3.3              gcc      1.3c     ok
+       (gcc-2.7.2.2+myc2)
+     i*86-*-netbsd1.3.2              gcc      1.2e     ok
+     i*86-*-netbsd1.3I               gcc      1.2e     ok
+       (egcs 1.1?)
+     i*86-*-netbsd1.2                gcc      0.9g     ok
+     i*86-*-linux-gnu		gcc	 1.3e	  ok (1.901)
+       (Red Hat 7.0, gcc "2.96")
+     i*86-*-linux-gnu		gcc	 1.3e	  ok (1.911)
+       (SuSE 7.0, gcc 2.95.2)
+     i*86-*-linux-gnulibc1           gcc      1.2f     ok
+     i*86-*-openbsd2.5               gcc      1.3c     ok
+       (gcc-2.8.1)
+     i*86-*-openbsd2.4               gcc      1.3c     ok
+       (gcc-2.8.1)
+     i*86-*-solaris2.7               gcc      1.3b     ok
+       (egcs-1.1.2, native ld)
+     i*86-*-solaris2.6               gcc      1.2f     ok
+     i*86-*-solaris2.5.1             gcc      1.2f     ok
+     i*86-ncr-sysv4.3.03             gcc      1.2f     ok
+     i*86-ncr-sysv4.3.03             cc       1.2e     ok
+       (cc -Hnocopyr)
+     i*86-pc-sco3.2v5.0.5		cc	 1.3c	  ok
+     i*86-pc-sco3.2v5.0.5		gcc	 1.3c	  ok
+       (gcc 95q4c)
+     i*86-pc-sco3.2v5.0.5		gcc	 1.3c	  ok
+       (egcs-1.1.2)
+     i*86-sco-sysv5uw7.1.1		gcc	 1.3e	  ok (1.901)
+       (gcc-2.95.2, SCO linker)
+     i*86-UnixWare7.1.0-sysv5	cc	 1.3c	  ok
+     i*86-UnixWare7.1.0-sysv5	gcc	 1.3c	  ok
+       (egcs-1.1.1)
+     m68k-next-nextstep3             gcc      1.2f     NS
+     m68k-sun-sunos4.1.1             gcc      1.2f     NS
+       (gcc-2.5.7)
+     m88k-dg-dguxR4.12TMU01          gcc      1.2      ok
+     m88k-motorola-sysv4             gcc      1.3      ok
+       (egcs-1.1.2)
+     mips-sgi-irix6.5                gcc      1.2f     ok
+       (gcc-2.8.1)
+     mips-sgi-irix6.4                gcc      1.2f     ok
+     mips-sgi-irix6.3                gcc      1.3b     ok
+       (egcs-1.1.2, native ld)
+     mips-sgi-irix6.3                cc       1.3b     ok
+       (cc 7.0)
+     mips-sgi-irix6.2                gcc      1.2f     ok
+     mips-sgi-irix6.2                cc       0.9      ok
+     mips-sgi-irix5.3                gcc      1.2f     ok
+       (egcs-1.1.1)
+     mips-sgi-irix5.3                gcc      1.2f     NS
+       (gcc-2.6.3)
+     mips-sgi-irix5.3                cc       0.8      ok
+     mips-sgi-irix5.2                gcc      1.3b     ok
+       (egcs-1.1.2, native ld)
+     mips-sgi-irix5.2                cc       1.3b     ok
+       (cc 3.18)
+     mips-sni-sysv4			cc       1.3.5    ok
+       (Siemens C-compiler)
+     mips-sni-sysv4			gcc      1.3.5    ok
+       (gcc-2.7.2.3, GNU assembler 2.8.1, native ld)
+     mipsel-unknown-openbsd2.1       gcc      1.0      ok
+     powerpc-apple-darwin6.4         gcc      1.5      ok
+       (apple dev tools released 12/2002)
+     powerpc-ibm-aix4.3.1.0          gcc      1.2f     ok
+       (egcs-1.1.1)
+     powerpc-ibm-aix4.2.1.0          gcc      1.2f     ok
+       (egcs-1.1.1)
+     powerpc-ibm-aix4.1.5.0          gcc      1.2f     ok
+       (egcs-1.1.1)
+     powerpc-ibm-aix4.1.5.0          gcc      1.2f     NS
+       (gcc-2.8.1)
+     powerpc-ibm-aix4.1.4.0          gcc      1.0      ok
+     powerpc-ibm-aix4.1.4.0          xlc      1.0i     ok
+     rs6000-ibm-aix4.1.5.0           gcc      1.2f     ok
+       (gcc-2.7.2)
+     rs6000-ibm-aix4.1.4.0           gcc      1.2f     ok
+       (gcc-2.7.2)
+     rs6000-ibm-aix3.2.5             gcc      1.0i     ok
+     rs6000-ibm-aix3.2.5             xlc      1.0i     ok
+     sparc-sun-solaris2.8		gcc	 1.3e	  ok (1.913)
+       (gcc-2.95.3 & native ld)
+     sparc-sun-solaris2.7            gcc      1.3e     ok (1.913)
+       (gcc-2.95.3 & native ld)
+     sparc-sun-solaris2.6            gcc      1.3e     ok (1.913)
+       (gcc-2.95.3 & native ld)
+     sparc-sun-solaris2.5.1          gcc      1.3e     ok (1.911)
+     sparc-sun-solaris2.5            gcc      1.3b     ok
+       (egcs-1.1.2, GNU ld 2.9.1 & native ld)
+     sparc-sun-solaris2.5            cc       1.3b     ok
+       (SC 3.0.1)
+     sparc-sun-solaris2.4            gcc      1.0a     ok
+     sparc-sun-solaris2.4            cc       1.0a     ok
+     sparc-sun-solaris2.3            gcc      1.2f     ok
+     sparc-sun-sunos4.1.4            gcc      1.2f     ok
+     sparc-sun-sunos4.1.4            cc       1.0f     ok
+     sparc-sun-sunos4.1.3_U1         gcc      1.2f     ok
+     sparc-sun-sunos4.1.3C           gcc      1.2f     ok
+     sparc-sun-sunos4.1.3            gcc      1.3b     ok
+       (egcs-1.1.2, GNU ld 2.9.1 & native ld)
+     sparc-sun-sunos4.1.3            cc       1.3b     ok
+     sparc-unknown-bsdi4.0           gcc      1.2c     ok
+     sparc-unknown-linux-gnulibc1    gcc      1.2f     ok
+     sparc-unknown-linux-gnu         gcc      1.3b     ok
+       (egcs-1.1.2, GNU ld 2.9.1.0.23)
+     sparc64-unknown-linux-gnu       gcc      1.2f     ok
+
+     Notes:
+     - "ok" means "all tests passed".
+     - "NS" means "Not Shared", but OK for static libraries
+
+   Note: The vendor-distributed HP-UX `sed'(1) programs are horribly
+broken, and cannot handle libtool's requirements, so users may report
+unusual problems.  There is no workaround except to install a working
+`sed' (such as GNU `sed') on these systems.
+
+   Note: The vendor-distributed NCR MP-RAS `cc' program emits a
+copyright notice on standard error that confuses tests on the size of
+`conftest.err'.  The workaround is to specify `CC' when running
+`configure', as in `CC='cc -Hnocopyr''.
+
+
+File: libtool.info,  Node: Platform quirks,  Next: libtool script contents,  Prev: Tested platforms,  Up: Maintaining
+
+15.3 Platform quirks
+====================
+
+This section is dedicated to the sanity of the libtool maintainers.  It
+describes the programs that libtool uses, how they vary from system to
+system, and how to test for them.
+
+   Because libtool is a shell script, it can be difficult to understand
+just by reading it from top to bottom.  This section helps show why
+libtool does things a certain way.  Combined with the scripts
+themselves, you should have a better sense of how to improve libtool, or
+write your own.
+
+* Menu:
+
+* References::                  Finding more information.
+* Compilers::                   Creating object files from source files.
+* Reloadable objects::          Binding object files together.
+* Multiple dependencies::       Removing duplicate dependent libraries.
+* Archivers::                   Programs that create static archives.
+* Cross compiling::             Issues that arise when cross compiling.
+* File name conversion::        Converting file names between platforms.
+* Windows DLLs::                Windows header defines.
+
+
+File: libtool.info,  Node: References,  Next: Compilers,  Up: Platform quirks
+
+15.3.1 References
+-----------------
+
+The following is a list of valuable documentation references:
+
+   * SGI's IRIX Manual Pages can be found at
+     `http://techpubs.sgi.com/cgi-bin/infosrch.cgi?cmd=browse&db=man'.
+
+   * Sun's free service area
+     (`http://www.sun.com/service/online/free.html') and documentation
+     server (`http://docs.sun.com/').
+
+   * Compaq's Tru64 UNIX online documentation is at
+     (`http://tru64unix.compaq.com/faqs/publications/pub_page/doc_list.html')
+     with C++ documentation at
+     (`http://tru64unix.compaq.com/cplus/docs/index.htm').
+
+   * Hewlett-Packard has online documentation at
+     (`http://docs.hp.com/index.html').
+
+   * IBM has online documentation at
+     (`http://www.rs6000.ibm.com/resource/aix_resource/Pubs/').
+
+
+File: libtool.info,  Node: Compilers,  Next: Reloadable objects,  Prev: References,  Up: Platform quirks
+
+15.3.2 Compilers
+----------------
+
+The only compiler characteristics that affect libtool are the flags
+needed (if any) to generate PIC objects.  In general, if a C compiler
+supports certain PIC flags, then any derivative compilers support the
+same flags.  Until there are some noteworthy exceptions to this rule,
+this section will document only C compilers.
+
+   The following C compilers have standard command line options,
+regardless of the platform:
+
+`gcc'
+     This is the GNU C compiler, which is also the system compiler for
+     many free operating systems (FreeBSD, GNU/Hurd, GNU/Linux, Lites,
+     NetBSD, and OpenBSD, to name a few).
+
+     The `-fpic' or `-fPIC' flags can be used to generate
+     position-independent code.  `-fPIC' is guaranteed to generate
+     working code, but the code is slower on m68k, m88k, and Sparc
+     chips.  However, using `-fpic' on those chips imposes arbitrary
+     size limits on the shared libraries.
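+
+     A minimal illustration with `gcc' (file names are arbitrary):
+
+          gcc -fPIC -c foo.c -o foo.o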
+
+   The rest of this subsection lists compilers by the operating system
+that they are bundled with:
+
+`aix3*'
+`aix4*'
+     Most AIX compilers have no PIC flags, since AIX (with the
+     exception of AIX for IA-64) runs on PowerPC and RS/6000 chips. (1)
+
+`hpux10*'
+     Use `+Z' to generate PIC.
+
+`osf3*'
+     Digital/UNIX 3.x does not have PIC flags, at least not on the
+     PowerPC platform.
+
+`solaris2*'
+     Use `-KPIC' to generate PIC.
+
+`sunos4*'
+     Use `-PIC' to generate PIC.
+
+   ---------- Footnotes ----------
+
+   (1) All code compiled for the PowerPC and RS/6000 chips
+(`powerpc-*-*', `powerpcle-*-*', and `rs6000-*-*') is
+position-independent, regardless of the operating system or compiler
+suite.  So, "regular objects" can be used to build shared libraries on
+these systems and no special PIC compiler flags are required.
+
+
+File: libtool.info,  Node: Reloadable objects,  Next: Multiple dependencies,  Prev: Compilers,  Up: Platform quirks
+
+15.3.3 Reloadable objects
+-------------------------
+
+On all known systems, a reloadable object can be created by running `ld
+-r -o OUTPUT.o INPUT1.o INPUT2.o'.  This reloadable object may be
+treated as exactly equivalent to other objects.
+
+
+File: libtool.info,  Node: Multiple dependencies,  Next: Archivers,  Prev: Reloadable objects,  Up: Platform quirks
+
+15.3.4 Multiple dependencies
+----------------------------
+
+On most modern platforms the order in which dependent libraries are
+listed has no effect on object generation.  In theory, however, there
+are platforms that require a library that provides symbols missing from
+another library to be listed after the library that needs those symbols.
+
+   Particularly, if a pair of static archives each resolve some of the
+other's symbols, it might be necessary to list one of those archives
+both before and after the other one.  Libtool does not currently cope
+with this situation well, since duplicate libraries are removed from
+the link line by default.  Libtool provides the command line option
+`--preserve-dup-deps' to preserve all duplicate dependencies in cases
+where it is necessary.
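+
+   A sketch of such an invocation (program, object, and library names
+are invented; the option is accepted by the `libtool' script itself,
+though its exact placement may vary between libtool versions):
+
+     libtool --preserve-dup-deps --mode=link gcc -o prog main.o \
+             libfirst.la libsecond.la libfirst.la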
+
+
+File: libtool.info,  Node: Archivers,  Next: Cross compiling,  Prev: Multiple dependencies,  Up: Platform quirks
+
+15.3.5 Archivers
+----------------
+
+On all known systems, building a static library can be accomplished by
+running `ar cru libNAME.a OBJ1.o OBJ2.o ...', where the `.a' file is
+the output library, and each `.o' file is an object file.
+
+   On all known systems, if there is a program named `ranlib', then it
+must be used to "bless" the created library before linking against it,
+with the `ranlib libNAME.a' command.  Some systems, like Irix, use the
+`ar ts' command, instead.
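+
+   For instance, a static library might be created and "blessed" like
+this (file names are arbitrary):
+
+     ar cru libfoo.a foo1.o foo2.o
+     ranlib libfoo.a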
+
+
+File: libtool.info,  Node: Cross compiling,  Next: File name conversion,  Prev: Archivers,  Up: Platform quirks
+
+15.3.6 Cross compiling
+----------------------
+
+Most build systems support the ability to compile libraries and
+applications on one platform for use on a different platform, provided
+a compiler capable of generating the appropriate output is available.
+In such cross compiling scenarios, the platform on which the libraries
+or applications are compiled is called the "build platform", while the
+platform on which the libraries or applications are intended to be used
+or executed is called the "host platform".  *note The GNU Build System:
+(automake)GNU Build System, of which libtool is a part, supports cross
+compiling via arguments passed to the configure script: `--build=...'
+and `--host=...'. However, when the build platform and host platform
+are very different, libtool is required to make certain accommodations
+to support these scenarios.
+
+   In most cases, because the build platform and host platform differ,
+the cross-compiled libraries and executables can't be executed or
+tested on the build platform where they were compiled.  The testsuites
+of most build systems will often skip any tests that involve executing
+such foreign executables when cross-compiling.  However, if the build
+platform and host platform are sufficiently similar, it is often
+possible to run cross-compiled applications.  Libtool's own testsuite
+often attempts to execute cross-compiled tests, but will mark any
+failures as _skipped_ since the failure might simply be due to the
+differences between the two platforms.
+
+   In addition to cases where the host platform and build platform are
+extremely similar (e.g. `i586-pc-linux-gnu' and `i686-pc-linux-gnu'),
+there is another case in which cross-compiled host applications may be
+executed on the build platform.  This is possible when the build
+platform supports an emulation or API-enhanced environment for the host
+platform.  One example of this situation would be if the build platform
+were MinGW, and the host platform were Cygwin (or vice versa).  Both of
+these platforms can actually operate within a single Windows instance,
+so Cygwin applications can be launched from a MinGW context, and vice
+versa--provided certain care is taken.  Another example would be if the
+build platform were GNU/Linux on an x86 32bit processor, and the host
+platform were MinGW.  In this situation, the Wine
+(http://www.winehq.org/) environment can be used to launch Windows
+applications from the GNU/Linux operating system; again, provided
+certain care is taken.
+
+   One particular issue occurs when a Windows platform such as MinGW,
+Cygwin, or MSYS is the host or build platform, while the other platform
+is a Unix-style system.  In these cases, there are often conflicts
+between the format of the file names and paths expected within host
+platform libraries and executables, and those employed on the build
+platform.
+
+   This situation is best described using a concrete example: suppose
+the build platform is GNU/Linux with canonical triplet
+`i686-pc-linux-gnu'.  Suppose further that the host platform is MinGW
+with canonical triplet `i586-pc-mingw32'.  On the GNU/Linux platform
+there is a cross compiler following the usual naming conventions of
+such compilers, where the compiler name is prefixed by the host
+canonical triplet (or suitable alias).  (For more information
+concerning canonical triplets and platform aliases, see *note
+Specifying Target Triplets: (autoconf)Specifying Target Triplets. and
+*note Canonicalizing: (autoconf)Canonicalizing.)  In this case, the C
+compiler is named `i586-pc-mingw32-gcc'.
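+
+   A configure invocation for this example setup might look like the
+following (a sketch only; real packages often require additional
+variables or options):
+
+     ./configure --build=i686-pc-linux-gnu --host=i586-pc-mingw32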
+
+   As described in *note Wrapper executables::, for the MinGW host
+platform libtool uses a wrapper executable to set various environment
+variables before launching the actual program executable.  Like the
+program executable, the wrapper executable is cross-compiled for the
+host platform (that is, for MinGW).  As described above, ordinarily a
+host platform executable cannot be executed on the build platform, but
+in this case the Wine environment could be used to launch the MinGW
+application from GNU/Linux.  However, the wrapper executable, as a host
+platform (MinGW) application, must set the `PATH' variable so that the
+true application's dependent libraries can be located--but the contents
+of the `PATH' variable must be structured for MinGW.  Libtool must use
+the Wine file name mapping facilities to determine the correct value so
+that the wrapper executable can set the `PATH' variable to point to the
+correct location.
+
+   For example, suppose we are compiling an application in `/var/tmp' on
+GNU/Linux, using separate source code and build directories:
+
+     `/var/tmp/foo-1.2.3/app/'          (application source code)
+     `/var/tmp/foo-1.2.3/lib/'          (library source code)
+     `/var/tmp/BUILD/app/'              (application build objects here)
+     `/var/tmp/BUILD/lib/'              (library build objects here)
+
+   Since the library will be built in `/var/tmp/BUILD/lib', the wrapper
+executable (which will be in `/var/tmp/BUILD/app') must add that
+directory to `PATH' (actually, it must add the directory named OBJDIR
+under `/var/tmp/BUILD/lib', but we'll ignore that detail for now).
+However, Windows does not have a concept of Unix-style file or
+directory names such as `/var/tmp/BUILD/lib'.  Therefore, Wine provides
+a mapping from Windows file names such as `C:\Program Files' to specific
+Unix-style file names.  Wine also provides a utility that can be used
+to map Unix-style file names to Windows file names.
+
+   In this case, the wrapper executable should actually add the value
+
+     Z:\var\tmp\BUILD\lib
+
+to the `PATH'.  libtool contains support for path conversions of this
+type, for a certain limited set of build and host platform
+combinations. In this case, libtool will invoke Wine's `winepath'
+utility to ensure that the correct `PATH' value is used.  For more
+information, see *note File name conversion::.
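+
+   For instance, the mapping for the directory discussed above can be
+obtained manually like this (the `Z:' drive is only the usual Wine
+default; the actual mapping depends on the local Wine configuration):
+
+     $ winepath -w /var/tmp/BUILD/lib
+     Z:\var\tmp\BUILD\lib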
+
+
+File: libtool.info,  Node: File name conversion,  Next: Windows DLLs,  Prev: Cross compiling,  Up: Platform quirks
+
+15.3.7 File name conversion
+---------------------------
+
+In certain situations, libtool must convert file names and paths between
+formats appropriate to different platforms.  Usually this occurs when
+cross-compiling, and affects only the ability to launch host platform
+executables on the build platform using an emulation or API-enhancement
+environment such as Wine.  Failure to convert paths (*note File Name
+Conversion Failure::) will cause a warning to be issued, but rarely
+causes the build to fail--and should have no effect on the compiled
+products, once installed properly on the host platform.  For more
+information, *note Cross compiling::.
+
+   However, file name conversion may also occur in another scenario:
+when using a Unix emulation system on Windows (such as Cygwin or MSYS),
+combined with a native Windows compiler such as MinGW or MSVC.  Only a
+limited set of such scenarios are currently supported; in other cases
+file name conversion is skipped.  The lack of file name conversion
+usually means that uninstalled executables can't be launched, but only
+rarely causes the build to fail (*note File Name Conversion Failure::).
+
+   libtool supports file name conversion in the following scenarios:
+
+build platform     host platform      Notes
+--------------------------------------------------------------------------- 
+MinGW (MSYS)       MinGW (Windows)    *note Native MinGW File Name
+                                      Conversion::
+Cygwin             MinGW (Windows)    *note Cygwin/Windows File Name
+                                      Conversion::
+Unix + Wine        MinGW (Windows)    Requires Wine. *note Unix/Windows
+                                      File Name Conversion::
+MinGW (MSYS)       Cygwin             Requires `LT_CYGPATH'. *note
+                                      LT_CYGPATH::. Provided for testing
+                                      purposes only.
+Unix + Wine        Cygwin             Requires both Wine and
+                                      `LT_CYGPATH', but does not yet work
+                                      with Cygwin 1.7.7 and Wine-1.2.
+                                      See *note Unix/Windows File Name
+                                      Conversion:: and *note LT_CYGPATH::.
+
+* Menu:
+
+* File Name Conversion Failure::  What happens when file name conversion fails
+* Native MinGW File Name Conversion::  MSYS file name conversion idiosyncrasies
+* Cygwin/Windows File Name Conversion::  Using `cygpath' to convert Cygwin file names
+* Unix/Windows File Name Conversion::  Using Wine to convert Unix paths
+* LT_CYGPATH::                  Invoking `cygpath' from other environments
+* Cygwin to MinGW Cross::       Other notes concerning MinGW cross
+
+
+File: libtool.info,  Node: File Name Conversion Failure,  Next: Native MinGW File Name Conversion,  Up: File name conversion
+
+15.3.7.1 File Name Conversion Failure
+.....................................
+
+In most cases, file name conversion is not needed or attempted.
+However, when libtool detects that a specific combination of build and
+host platform does require file name conversion, it is possible that
+the conversion may fail.  In these cases, you may see a warning such as
+the following:
+
+     Could not determine the host file name corresponding to
+       `... a file name ...'
+     Continuing, but uninstalled executables may not work.
+
+or
+
+     Could not determine the host path corresponding to
+       `... a path ...'
+     Continuing, but uninstalled executables may not work.
+
+This should not cause the build to fail.  At worst, it means that the
+wrapper executable will specify file names or paths appropriate for the
+build platform.  Since those are not appropriate for the host platform,
+the uninstalled executables would not operate correctly, even when the
+wrapper executable is launched via the appropriate emulation or
+API-enhancement (e.g. Wine).  Simply install the executables on the
+host platform, and execute them there.
+
+
+File: libtool.info,  Node: Native MinGW File Name Conversion,  Next: Cygwin/Windows File Name Conversion,  Prev: File Name Conversion Failure,  Up: File name conversion
+
+15.3.7.2 Native MinGW File Name Conversion
+..........................................
+
+MSYS is a Unix emulation environment for Windows, and is specifically
+designed such that in normal usage it _pretends_ to be MinGW or native
+Windows, but understands Unix-style file names and paths, and supports
+standard Unix tools and shells.  Thus, "native" MinGW builds are
+actually an odd sort of cross-compile, from an MSYS Unix emulation
+environment "pretending" to be MinGW, to actual native Windows.
+
+   When an MSYS shell launches a native Windows executable (as opposed
+to other _MSYS_ executables), it uses a system of heuristics to detect
+any command-line arguments that contain file names or paths.  It
+automatically converts these file names from the MSYS (Unix-like)
+format, to the corresponding Windows file name, before launching the
+executable.  However, this auto-conversion facility is only available
+when using the MSYS runtime library.  The wrapper executable itself is
+a MinGW application (that is, it does not use the MSYS runtime
+library).  The wrapper executable must set `PATH' to, and call
+`_spawnv' with, values that have already been converted from MSYS
+format to Windows.  Thus, when libtool writes the source code for the
+wrapper executable, it must manually convert MSYS paths to Windows
+format, so that the Windows values can be hard-coded into the wrapper
+executable.
+
+
+File: libtool.info,  Node: Cygwin/Windows File Name Conversion,  Next: Unix/Windows File Name Conversion,  Prev: Native MinGW File Name Conversion,  Up: File name conversion
+
+15.3.7.3 Cygwin/Windows File Name Conversion
+............................................
+
+Cygwin provides a Unix emulation environment for Windows.  As part of
+that emulation, it provides a file system mapping that presents the
+Windows file system in a Unix-compatible manner.  Cygwin also provides
+a utility `cygpath' that can be used to convert file names and paths
+between the two representations.  In a correctly configured Cygwin
+installation, `cygpath' is always present, and is in the `PATH'.
+
+   Libtool uses `cygpath' to convert from Cygwin (Unix-style) file names
+and paths to Windows format when the build platform is Cygwin and the
+host platform is MinGW.
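+
+   For example, such a conversion can be performed manually as follows
+(the output shown assumes a default Cygwin installation under
+`C:\cygwin'):
+
+     $ cygpath -w /usr/local/lib
+     C:\cygwin\usr\local\lib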
+
+   When the host platform is Cygwin, but the build platform is MSYS or
+some Unix system, libtool also uses `cygpath' to convert from Windows
+to Cygwin format (after first converting from the build platform format
+to Windows format; see *note Native MinGW File Name Conversion:: and
+*note Unix/Windows File Name Conversion::).  Because the build platform
+is not Cygwin, `cygpath' is not (and should not be) in the `PATH'.
+Therefore, in this configuration the environment variable `LT_CYGPATH'
+is required. *Note LT_CYGPATH::.
+
+
+File: libtool.info,  Node: Unix/Windows File Name Conversion,  Next: LT_CYGPATH,  Prev: Cygwin/Windows File Name Conversion,  Up: File name conversion
+
+15.3.7.4 Unix/Windows File Name Conversion
+..........................................
+
+Wine (http://www.winehq.org/) provides an interpretation environment for
+some Unix platforms in which Windows applications can be executed.  It
+provides a mapping between the Unix file system and a virtual Windows
+file system used by the Windows programs.  For the file name conversion
+to work, Wine must be installed and properly configured on the build
+platform, and the `winepath' application must be in the build
+platform's `PATH'.  In addition, on 32bit GNU/Linux it is usually
+helpful if the binfmt extension is enabled.
+
+
+File: libtool.info,  Node: LT_CYGPATH,  Next: Cygwin to MinGW Cross,  Prev: Unix/Windows File Name Conversion,  Up: File name conversion
+
+15.3.7.5 LT_CYGPATH
+...................
+
+For some cross-compile configurations (where the host platform is
+Cygwin), the `cygpath' program is used to convert file names from the
+build platform notation to the Cygwin form (technically, this
+conversion is from Windows notation to Cygwin notation; the conversion
+from the build platform format to Windows notation is performed via
+other means).  However, because the `cygpath' program is not (and
+should not be) in the `PATH' on the build platform, `LT_CYGPATH' must
+specify the full build platform file name (that is, the full Unix or
+MSYS file name) of the `cygpath' program.
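+
+   A sketch of how this might be set when configuring from MSYS (the
+installation path shown is purely an example):
+
+     export LT_CYGPATH=/c/cygwin/bin/cygpath.exe
+     ./configure --build=i686-pc-mingw32 --host=i686-pc-cygwin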
+
+   The reason `cygpath' should not be in the build platform `PATH' is
+twofold: first, `cygpath' is usually installed in the same directory as
+many other Cygwin executables, such as `sed', `cp', etc.  If the build
+platform environment had this directory in its `PATH', then these
+Cygwin versions of common Unix utilities might be used in preference to
+the ones provided by the build platform itself, with deleterious
+effects.  Second, especially when Cygwin-1.7 or later is used, multiple
+Cygwin installations can coexist within the same Windows instance.
+Each installation will have separate "mount tables" specified in
+`CYGROOT-N/etc/fstab'.  These "mount tables" control how that instance
+of Cygwin will map Windows file names and paths to Cygwin form.  Each
+installation's `cygpath' utility automatically deduces the appropriate
+`/etc/fstab' file.  Since each `CYGROOT-N/etc/fstab' mount table may
+specify different mappings, it matters which `cygpath' is used.
+
+   Note that `cygpath' is a Cygwin application; to execute this tool
+from Unix requires a working and properly configured Wine installation,
+as well as enabling the GNU/Linux `binfmt' extension.  Furthermore, the
+Cygwin `setup.exe' tool should have been used, via Wine, to properly
+install Cygwin into the Wine file system (and registry).
+
+   Unfortunately, Wine support for Cygwin is intermittent.  Recent
+releases of Cygwin (1.7 and above) appear to require more Windows API
+support than Wine provides (as of Wine version 1.2); most Cygwin
+applications fail to execute.  This includes `cygpath' itself.  Hence,
+it is best _not_ to use the LT_CYGPATH machinery in libtool when
+performing Unix to Cygwin cross-compiles.  Similarly, it is best _not_
+to enable the GNU/Linux binfmt support in this configuration, because
+while Wine will fail to execute the compiled Cygwin applications, it
+will still exit with status zero.  This tends to confuse build systems
+and test suites (including libtool's own testsuite, resulting in
+spurious reported failures).  Wine support for the older Cygwin-1.5
+series appears satisfactory, but the Cygwin team no longer supports
+Cygwin-1.5.  It is hoped that Wine will eventually be improved such that
+Cygwin-1.7 will again operate correctly under Wine.  Until then,
+libtool will report warnings as described in *note File Name Conversion
+Failure:: in these scenarios.
+
+   However, `LT_CYGPATH' is also used for the MSYS to Cygwin cross
+compile scenario, and operates as expected.
+
+
+File: libtool.info,  Node: Cygwin to MinGW Cross,  Prev: LT_CYGPATH,  Up: File name conversion
+
+15.3.7.6 Cygwin to MinGW Cross
+..............................
+
+There are actually three different scenarios that could all
+legitimately be called a "Cygwin to MinGW" cross compile.  The current
+(and standard) definition is when there is a compiler that produces
+native Windows libraries and applications, but which itself is a Cygwin
+application, just as would be expected in any other cross compile setup.
+
+   However, historically there were two other definitions, which we
+will refer to as the _fake_ one, and the _lying_ one.
+
+   In the _fake_ Cygwin to MinGW cross compile case, you actually use a
+native MinGW compiler, but you do so from within a Cygwin environment:
+
+     export PATH="/c/MinGW/bin:${PATH}"
+     configure --build=i686-pc-cygwin \
+     	--host=mingw32 \
+     	NM=/c/MinGW/bin/nm.exe
+
+   In this way, the build system "knows" that you are cross compiling,
+and the file name conversion logic will be used.  However, because the
+tools (`mingw32-gcc', `nm', `ar') used are actually native Windows
+applications, they will not understand any Cygwin (that is, Unix-like)
+absolute file names passed as command line arguments (and, unlike MSYS,
+Cygwin does not automatically convert such arguments).  However, so
+long as only relative file names are used in the build system, and
+non-Windows-supported Unix idioms such as symlinks and mount points are
+avoided, this scenario should work.
+
+   If you must use absolute file names, you will have to force Libtool
+to convert file names for the toolchain in this case, by doing the
+following before you run configure:
+
+     export lt_cv_to_tool_file_cmd=func_convert_file_cygwin_to_w32
+   
+   In the _lying_ Cygwin to MinGW cross compile case, you lie to the
+build system:
+
+     export PATH="/c/MinGW/bin:${PATH}"
+     configure --build=i686-pc-mingw32 \
+     	--host=i686-pc-mingw32 \
+     	--disable-dependency-tracking
+
+and claim that the build platform is MinGW, even though you are actually
+running under _Cygwin_ and not MinGW.  In this case, libtool does _not_
+know that you are performing a cross compile, and thinks instead that
+you are performing a native MinGW build.  However, as described in
+(*note Native MinGW File Name Conversion::), that scenario triggers an
+"MSYS to Windows" file name conversion.  This, of course, is the wrong
+conversion since we are actually running under Cygwin.  Also, the
+toolchain expects Windows file names (not Cygwin ones), but unless told
+otherwise Libtool will feed Cygwin file names to the toolchain in this
+case.
+To force the correct file name conversions in this situation, you
+should do the following _before_ running configure:
+
+     export lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+     export lt_cv_to_tool_file_cmd=func_convert_file_cygwin_to_w32
+   
+   Note that this relies on internal implementation details of libtool,
+and is subject to change.  Also, `--disable-dependency-tracking' is
+required, because otherwise the MinGW GCC will generate dependency
+files that contain Windows file names.  This, in turn, will confuse the
+Cygwin `make' program, which does not accept Windows file names:
+
+     Makefile:1: *** target pattern contains no `%'.  Stop.
+
+   There have also always been a number of other details required for
+the _lying_ case to operate correctly, such as the use of so-called
+"identity mounts":
+
+     # CYGWIN-ROOT/etc/fstab
+     D:/foo    /foo     some_fs binary 0 0
+     D:/bar    /bar     some_fs binary 0 0
+     E:/grill  /grill   some_fs binary 0 0
+
+   In this way, top-level directories of each drive are available using
+identical names within Cygwin.
+
+   Note that you also need to ensure that the standard Unix directories
+(like `/bin', `/lib', `/usr', `/etc') appear in the root of a drive.
+This means that you must install Cygwin itself into the `C:/' root
+directory (or `D:/', or `E:/', etc)--instead of the recommended
+installation into `C:/cygwin/'.  In addition, all file names used in
+the build system must be relative, symlinks should not be used within
+the source or build directory trees, and all `-M*' options to `gcc'
+except `-MMD' must be avoided.
+
+   This is quite a fragile setup, but it has been in historical use,
+and so is documented here.
+
+
+File: libtool.info,  Node: Windows DLLs,  Prev: File name conversion,  Up: Platform quirks
+
+15.3.8 Windows DLLs
+-------------------
+
+This topic describes a couple of ways to portably create Windows Dynamic
+Link Libraries (DLLs).  Libtool knows how to create DLLs using GNU tools
+and using Microsoft tools.
+
+   A typical library has a "hidden" implementation with an interface
+described in a header file.  On just about every system, the interface
+could be something like this:
+
+   Example `foo.h':
+
+     #ifndef FOO_H
+     #define FOO_H
+
+     int one (void);
+     int two (void);
+     extern int three;
+
+     #endif /* FOO_H */
+
+And the implementation could be something like this:
+
+   Example `foo.c':
+
+     #include "foo.h"
+
+     int one (void)
+     {
+       return 1;
+     }
+
+     int two (void)
+     {
+       return three - one ();
+     }
+
+     int three = 3;
+
+   When using contemporary GNU tools to create the Windows DLL, the
+above code will work there too, thanks to its auto-import/auto-export
+features.  But that is not the case when using older GNU tools or
+perhaps, more interestingly, when using proprietary tools.  In those
+cases the interface symbols need additional decoration with
+`__declspec(dllimport)' and `__declspec(dllexport)', depending on
+whether the library is being built or consumed and on how it is built
+and consumed.  It should be noted, however, that the code would also
+have worked with Microsoft tools if only the variable `three' had not
+been there, because the Microsoft tools automatically import functions
+(but sadly not variables) and Libtool automatically exports non-static
+symbols, as described next.
+
+   With Microsoft tools, Libtool digs through the object files that
+make up the library, looking for non-static symbols to automatically
+export.  I.e., Libtool with Microsoft tools tries to mimic the
+auto-export feature of contemporary GNU tools.  It should be noted that
+the GNU auto-export feature is turned off when an explicit
+`__declspec(dllexport)' is seen.  The GNU tools do this to not make
+more symbols visible for projects that have already taken the trouble
+to decorate symbols.  There is no similar way to limit which symbols
+are visible in the code when Libtool is using Microsoft tools.  In
+order to limit symbol visibility in that case you need to use one of
+the options `-export-symbols' or `-export-symbols-regex'.
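+
+   For example, a link line that restricts the exported symbols to
+those matching a prefix might look like this (library name, objects,
+and regular expression are invented; the same options apply regardless
+of toolchain):
+
+     libtool --mode=link gcc -no-undefined -o libfoo.la foo.lo \
+             -rpath /usr/local/lib -export-symbols-regex '^foo_'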
+
+   No matching help with auto-import is provided by Libtool, which is
+why variables must be decorated to import them from a DLL for
+everything but contemporary GNU tools.  As stated above, functions are
+automatically imported by both contemporary GNU tools and Microsoft
+tools, but for other proprietary tools the auto-import status of
+functions is unknown.
+
+   When the objects that form the library are built, there are generally
+two copies built for each object.  One copy is used when linking the DLL
+and one copy is used for the static library.  On Windows systems, a pair
+of defines are commonly used to discriminate how the interface symbols
+should be decorated.  The first define is `-DDLL_EXPORT' which is
+automatically provided by Libtool when `libtool' builds the copy of the
+object that is destined for the DLL.  The second define is
+`-DLIBFOO_BUILD' (or similar) which is often added by the package
+providing the library and is used when building the library, but not
+when consuming the library.
+
+   However, the matching double compile is not performed when consuming
+libraries.  It is therefore not possible to reliably distinguish if the
+consumer is importing from a DLL or if it is going to use a static
+library.
+
+   With contemporary GNU tools, auto-import often saves the day, but see
+the GNU ld documentation and its `--enable-auto-import' option for some
+corner cases when it does not (*note `--enable-auto-import':
+(ld)Options.).
+
+   With Microsoft tools you typically get away with always compiling the
+code such that variables are expected to be imported from a DLL and
+functions are expected to be found in a static library.  The tools will
+then automatically import the function from a DLL if that is where they
+are found.  If the variables are not imported from a DLL as expected,
+but are found in a static library that is otherwise pulled in by some
+function, the linker will issue a warning (LNK4217) that a locally
+defined symbol is imported, but it still works.  In other words, this
+scheme will not work to only consume variables from a library.  There is
+also a price connected to this liberal use of imports in that an extra
+indirection is introduced when you are consuming the static version of
+the library.  That extra indirection is unavoidable when the DLL is
+consumed, but it is not needed when consuming the static library.
+
+   For older GNU tools and other proprietary tools there is no generic
+way to make it possible to consume either the DLL or the static library
+without user intervention; the tools need to be told what is intended.
+One common assumption is that if a DLL is being built
+(`DLL_EXPORT' is defined) then that DLL is going to consume any
+dependent libraries as DLLs.  If that assumption is made everywhere, it
+is possible to select how an end-user application is consuming
+libraries by adding a single flag `-DDLL_EXPORT' when a DLL build is
+required.  This is of course an all or nothing deal, either everything
+as DLLs or everything as static libraries.
+
+   To sum up the above, the header file of the foo library needs to be
+changed into something like this:
+
+   Modified `foo.h':
+
+     #ifndef FOO_H
+     #define FOO_H
+
+     #if defined _WIN32 && !defined __GNUC__
+     # ifdef LIBFOO_BUILD
+     #  ifdef DLL_EXPORT
+     #   define LIBFOO_SCOPE            __declspec (dllexport)
+     #   define LIBFOO_SCOPE_VAR extern __declspec (dllexport)
+     #  endif
+     # elif defined _MSC_VER
+     #  define LIBFOO_SCOPE
+     #  define LIBFOO_SCOPE_VAR  extern __declspec (dllimport)
+     # elif defined DLL_EXPORT
+     #  define LIBFOO_SCOPE             __declspec (dllimport)
+     #  define LIBFOO_SCOPE_VAR  extern __declspec (dllimport)
+     # endif
+     #endif
+     #ifndef LIBFOO_SCOPE
+     # define LIBFOO_SCOPE
+     # define LIBFOO_SCOPE_VAR extern
+     #endif
+
+     LIBFOO_SCOPE     int one (void);
+     LIBFOO_SCOPE     int two (void);
+     LIBFOO_SCOPE_VAR int three;
+
+     #endif /* FOO_H */
+
+   When the targets are limited to contemporary GNU tools and Microsoft
+tools, the above can be simplified to the following:
+
+   Simplified `foo.h':
+
+     #ifndef FOO_H
+     #define FOO_H
+
+     #if defined _WIN32 && !defined __GNUC__ && !defined LIBFOO_BUILD
+     # define LIBFOO_SCOPE_VAR extern __declspec (dllimport)
+     #else
+     # define LIBFOO_SCOPE_VAR extern
+     #endif
+
+     int one (void);
+     int two (void);
+     LIBFOO_SCOPE_VAR int three;
+
+     #endif /* FOO_H */
+
+   This last simplified version can of course only work when Libtool is
+used to build the DLL, as no symbols would be exported otherwise (i.e.,
+when using Microsoft tools).
+
+   It should be noted that there are various projects that attempt to
+relax these requirements by various low level tricks, but they are not
+discussed here.  Examples are FlexDLL
+(http://alain.frisch.fr/flexdll.html) and edll
+(http://edll.sourceforge.net/).
+
+
+File: libtool.info,  Node: libtool script contents,  Next: Cheap tricks,  Prev: Platform quirks,  Up: Maintaining
+
+15.4 `libtool' script contents
+==============================
+
+Since version 1.4, the `libtool' script is generated by `configure'
+(*note Configuring::).  In earlier versions, `configure' achieved this
+by calling a helper script called `ltconfig'.  From libtool version 0.7
+to 1.0, this script simply set shell variables, then sourced the
+libtool backend, `ltmain.sh'.  `ltconfig' from libtool version 1.1
+through 1.3 inlined the contents of `ltmain.sh' into the generated
+`libtool', which improved performance on many systems.  The tests that
+`ltconfig' used to perform are now kept in `libtool.m4' where they can
+be written using Autoconf.  This has the runtime performance benefits
+of inlined `ltmain.sh', _and_ improves the build time a little, while
+considerably reducing the amount of raw shell code that needs to be
+maintained.
+
+   The convention used for naming variables that hold shell commands for
+delayed evaluation, is to use the suffix `_cmd' where a single line of
+valid shell script is needed, and the suffix `_cmds' where multiple
+lines of shell script *may* be delayed for later evaluation.  By
+convention, `_cmds' variables delimit the evaluation units with the `~'
+character where necessary.
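+
+   For example, a `_cmds' variable holding two commands to be evaluated
+in sequence could look like this (the value is purely illustrative; the
+real values are platform-dependent):
+
+     old_archive_cmds='$AR cru $oldlib$oldobjs~$RANLIB $oldlib'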
+
+   Here is a listing of each of the configuration variables, and how
+they are used within `ltmain.sh' (*note Configuring::):
+
+ -- Variable: AR
+     The name of the system library archiver.
+
+ -- Variable: CC
+     The name of the compiler used to configure libtool.  This will
+     always contain the compiler for the current language (*note
+     Tags::).
+
+ -- Variable: ECHO
+     An `echo' program that does not interpret backslashes as an escape
+     character.  It may be given only one argument, so due quoting is
+     necessary.
+
+ -- Variable: LD
+     The name of the linker that libtool should use internally for
+     reloadable linking and possibly shared libraries.
+
+ -- Variable: LTCC
+ -- Variable: LTCFLAGS
+     The name of the C compiler and C compiler flags used to configure
+     libtool.
+
+ -- Variable: NM
+     The name of a BSD- or MS-compatible program that produces listings
+     of global symbols.  For BSD `nm', the symbols should be in one of the
+     following formats:
+
+          ADDRESS C GLOBAL-VARIABLE-NAME
+          ADDRESS D GLOBAL-VARIABLE-NAME
+          ADDRESS T GLOBAL-FUNCTION-NAME
+
+     For MS `dumpbin', the symbols should be in one of the following
+     formats:
+
+          COUNTER SIZE    UNDEF    notype       External     | GLOBAL-VAR
+          COUNTER ADDRESS SECTION  notype       External     | GLOBAL-VAR
+          COUNTER ADDRESS SECTION  notype ()    External     | GLOBAL-FUNC
+
+     The SIZE of the global variables is not zero and the SECTION of
+     the global functions is not "UNDEF".  Symbols in "pick any"
+     sections ("pick any" appears in the section header) are not global
+     either.
+
+ -- Variable: RANLIB
+     Set to the name of the `ranlib' program, if any.
+
+ -- Variable: allow_undefined_flag
+     The flag that is used by `archive_cmds' in order to declare that
+     there will be unresolved symbols in the resulting shared library.
+     Empty, if no such flag is required.  Set to `unsupported' if there
+     is no way to generate a shared library with references to symbols
+     that aren't defined in that library.
+
+ -- Variable: always_export_symbols
+     Whether libtool should automatically generate a list of exported
+     symbols using `export_symbols_cmds' before linking an archive.
+     Set to `yes' or `no'.  Default is `no'.
+
+ -- Variable: archive_cmds
+ -- Variable: archive_expsym_cmds
+ -- Variable: old_archive_cmds
+     Commands used to create shared libraries, shared libraries with
+     `-export-symbols' and static libraries, respectively.
+
+ -- Variable: archiver_list_spec
+     Specify filename containing input files for `AR'.
+
+ -- Variable: old_archive_from_new_cmds
+     If the shared library depends on a static library,
+     `old_archive_from_new_cmds' contains the commands used to create
+     that static library.  If this variable is not empty,
+     `old_archive_cmds' is not used.
+
+ -- Variable: old_archive_from_expsyms_cmds
+     If a static library must be created from the export symbol list in
+     order to correctly link with a shared library,
+     `old_archive_from_expsyms_cmds' contains the commands needed to
+     create that static library.  When these commands are executed, the
+     variable `soname' contains the name of the shared library in
+     question, and the `$objdir/$newlib' contains the path of the
+     static library these commands should build.  After executing these
+     commands, libtool will proceed to link against `$objdir/$newlib'
+     instead of `soname'.
+
+ -- Variable: lock_old_archive_extraction
+     Set to `yes' if the extraction of a static library requires locking
+     the library file.  This is required on Darwin.
+
+ -- Variable: build
+ -- Variable: build_alias
+ -- Variable: build_os
+     Set to the specified and canonical names of the system that
+     libtool was built on.
+
+ -- Variable: build_libtool_libs
+     Whether libtool should build shared libraries on this system.  Set
+     to `yes' or `no'.
+
+ -- Variable: build_old_libs
+     Whether libtool should build static libraries on this system.  Set
+     to `yes' or `no'.
+
+ -- Variable: compiler_c_o
+     Whether the compiler supports the `-c' and `-o' options
+     simultaneously.  Set to `yes' or `no'.
+
+ -- Variable: compiler_needs_object
+     Whether the compiler has to see an object listed on the command
+     line in order to successfully invoke the linker.  If `no', then a
+     set of convenience archives or a set of object file names can be
+     passed via linker-specific options or linker scripts.
+
+ -- Variable: dlopen_support
+     Whether `dlopen' is supported on the platform.  Set to `yes' or
+     `no'.
+
+ -- Variable: dlopen_self
+     Whether it is possible to `dlopen' the executable itself.  Set to
+     `yes' or `no'.
+
+ -- Variable: dlopen_self_static
+     Whether it is possible to `dlopen' the executable itself, when it
+     is linked statically (`-all-static').  Set to `yes' or `no'.
+
+ -- Variable: exclude_expsyms
+     List of symbols that should not be listed in the preloaded symbols.
+
+ -- Variable: export_dynamic_flag_spec
+     Compiler link flag that allows a dlopened shared library to
+     reference symbols that are defined in the program.
+
+ -- Variable: export_symbols_cmds
+     Commands to extract exported symbols from `libobjs' to the file
+     `export_symbols'.
+
+ -- Variable: extract_expsyms_cmds
+     Commands to extract the exported symbols list from a shared
+     library.  These commands are executed if there is no file
+     `$objdir/$soname-def', and should write the names of the exported
+     symbols to that file, for the use of
+     `old_archive_from_expsyms_cmds'.
+
+ -- Variable: fast_install
+     Determines whether libtool will privilege the installer or the
+     developer.  The assumption is that installers will seldom run
+     programs in the build tree, and the developer will seldom install.
+     This is only meaningful on platforms where
+     `shlibpath_overrides_runpath' is not `yes', so `fast_install' will
+     be set to `needless' in this case.  If `fast_install' is set to
+     `yes', libtool will create programs that search for installed
+     libraries, and, if a program is run in the build tree, a new copy
+     will be linked on-demand to use the yet-to-be-installed libraries.
+     If set to `no', libtool will create programs that use the
+     yet-to-be-installed libraries, and will link a new copy of the
+     program at install time.  The default value is `yes' or
+     `needless', depending on platform and configuration flags, and it
+     can be turned from `yes' to `no' with the configure flag
+     `--disable-fast-install'.
+
+     On some systems, the linker always hardcodes paths to dependent
+     libraries into the output.  In this case, `fast_install' is never
+     set to `yes', and relinking at install time is triggered.  This
+     also means that `DESTDIR' installation does not work as expected.
+
+ -- Variable: file_magic_glob
+     How to find potential files when `deplibs_check_method' is
+     `file_magic'. `file_magic_glob' is a `sed' expression, and the
+     `sed' instance is fed potential file names that are transformed by
+     the `file_magic_glob' expression. Useful when the shell does not
+     support the shell option `nocaseglob', making `want_nocaseglob'
+     inappropriate. Normally disabled (i.e.  `file_magic_glob' is
+     empty).
+
+ -- Variable: finish_cmds
+     Commands to tell the dynamic linker how to find shared libraries
+     in a specific directory.
+
+ -- Variable: finish_eval
+     Same as `finish_cmds', except the commands are not displayed.
+
+ -- Variable: global_symbol_pipe
+     A pipeline that takes the output of `NM', and produces a listing of
+     raw symbols followed by their C names.  For example:
+
+          $ eval "$NM progname | $global_symbol_pipe"
+          D SYMBOL1 C-SYMBOL1
+          T SYMBOL2 C-SYMBOL2
+          C SYMBOL3 C-SYMBOL3
+          ...
+          $
+
+     The first column contains the symbol type (used to tell data from
+     code) but its meaning is system dependent.
+
+ -- Variable: global_symbol_to_cdecl
+     A pipeline that translates the output of `global_symbol_pipe' into
+     proper C declarations.  Since some platforms, such as HP/UX, have
+     linkers that differentiate code from data, data symbols are
+     declared as data, and code symbols are declared as functions.
+
+ -- Variable: hardcode_action
+     Either `immediate' or `relink', depending on whether shared
+     library paths can be hardcoded into executables before they are
+     installed, or if they need to be relinked.
+
+ -- Variable: hardcode_direct
+     Set to `yes' or `no', depending on whether the linker hardcodes
+     directories if a library is directly specified on the command line
+     (such as `DIR/libNAME.a') when `hardcode_libdir_flag_spec' is
+     specified.
+
+ -- Variable: hardcode_direct_absolute
+     Some architectures hardcode "absolute" library directories that
+     can not be overridden by `shlibpath_var' when `hardcode_direct' is
+     `yes'.  In that case set `hardcode_direct_absolute' to `yes', or
+     otherwise `no'.
+
+ -- Variable: hardcode_into_libs
+     Whether the platform supports hardcoding of run-paths into
+     libraries.  If enabled, linking of programs will be much simpler
+     but libraries will need to be relinked during installation.  Set
+     to `yes' or `no'.
+
+ -- Variable: hardcode_libdir_flag_spec
+     Flag to hardcode a `libdir' variable into a binary, so that the
+     dynamic linker searches `libdir' for shared libraries at runtime.
+     If it is empty, libtool will try to use some other hardcoding
+     mechanism.
+
+ -- Variable: hardcode_libdir_separator
+     If the compiler only accepts a single `hardcode_libdir_flag', then
+     this variable contains the string that should separate multiple
+     arguments to that flag.
+
+ -- Variable: hardcode_minus_L
+     Set to `yes' or `no', depending on whether the linker hardcodes
+     directories specified by `-L' flags into the resulting executable
+     when `hardcode_libdir_flag_spec' is specified.
+
+ -- Variable: hardcode_shlibpath_var
+     Set to `yes' or `no', depending on whether the linker hardcodes
+     directories by writing the contents of `$shlibpath_var' into the
+     resulting executable when `hardcode_libdir_flag_spec' is
+     specified.  Set to `unsupported' if directories specified by
+     `$shlibpath_var' are searched at run time, but not at link time.
+
+ -- Variable: host
+ -- Variable: host_alias
+ -- Variable: host_os
+     Set to the specified and canonical names of the system that
+     libtool was configured for.
+
+ -- Variable: include_expsyms
+     List of symbols that must always be exported when using
+     `export_symbols'.
+
+ -- Variable: inherit_rpath
+     Whether the linker adds runtime paths of dependency libraries to
+     the runtime path list, requiring libtool to relink the output when
+     installing.  Set to `yes' or `no'.  Default is `no'.
+
+ -- Variable: install_override_mode
+     Permission mode override for installation of shared libraries.  If
+     the runtime linker fails to load libraries with wrong permissions,
+     then it may fail to execute programs that are needed during
+     installation, because these need the library that has just been
+     installed.  In this case, it is necessary to pass the mode to
+     `install' with `-m INSTALL_OVERRIDE_MODE'.
+
+ -- Variable: libext
+     The standard old archive suffix (normally `a').
+
+ -- Variable: libname_spec
+     The format of a library name prefix.  On all Unix systems, static
+     libraries are called `libNAME.a', but on some systems (such as
+     OS/2 or MS-DOS), the library is just called `NAME.a'.
+
+ -- Variable: library_names_spec
+     A list of shared library names.  The first is the name of the file,
+     the rest are symbolic links to the file.  The last name in the list
+     is the file name that the linker finds when given `-lNAME'.
+
+ -- Variable: link_all_deplibs
+     Whether libtool must link a program against all its dependency
+     libraries.  Set to `yes' or `no'.  Default is `unknown', which is
+     a synonym for `yes'.
+
+ -- Variable: link_static_flag
+     Linker flag (passed through the C compiler) used to prevent dynamic
+     linking.
+
+ -- Variable: macro_version
+ -- Variable: macro_revision
+     The release and revision from which the libtool.m4 macros were
+     taken.  This is used to ensure that macros and `ltmain.sh'
+     correspond to the same Libtool version.
+
+ -- Variable: max_cmd_len
+     The approximate longest command line that can be passed to `$SHELL'
+     without being truncated, as computed by `LT_CMD_MAX_LEN'.
+
+ -- Variable: need_lib_prefix
+     Whether modules need a `lib' prefix to be `dlopen'ed.  Set to
+     `yes' or `no'.  By default, it is `unknown', which means the same
+     as `yes', but documents that we are not really sure about it.
+     `no' means that it is possible to `dlopen' a module without the
+     `lib' prefix.
+
+ -- Variable: need_version
+     Whether versioning is required for libraries, i.e. whether the
+     dynamic linker requires a version suffix for all libraries.  Set
+     to `yes' or `no'.  By default, it is `unknown', which means the
+     same as `yes', but documents that we are not really sure about it.
+
+ -- Variable: need_locks
+     Whether files must be locked to prevent conflicts when compiling
+     simultaneously.  Set to `yes' or `no'.
+
+ -- Variable: nm_file_list_spec
+     Specify filename containing input files for `NM'.
+
+ -- Variable: no_builtin_flag
+     Compiler flag to disable builtin functions that conflict with
+     declaring external global symbols as `char'.
+
+ -- Variable: no_undefined_flag
+     The flag that is used by `archive_cmds' in order to declare that
+     there will be no unresolved symbols in the resulting shared
+     library.  Empty, if no such flag is required.
+
+ -- Variable: objdir
+     The name of the directory that contains temporary libtool files.
+
+ -- Variable: objext
+     The standard object file suffix (normally `o').
+
+ -- Variable: pic_flag
+     Any additional compiler flags for building library object files.
+
+ -- Variable: postinstall_cmds
+ -- Variable: old_postinstall_cmds
+     Commands run after installing a shared or static library,
+     respectively.
+
+ -- Variable: postuninstall_cmds
+ -- Variable: old_postuninstall_cmds
+     Commands run after uninstalling a shared or static library,
+     respectively.
+
+ -- Variable: postlink_cmds
+     Commands necessary for finishing linking programs. `postlink_cmds'
+     are executed immediately after the program is linked.  Any
+     occurrence of the string `@OUTPUT@' in `postlink_cmds' is replaced
+     by the name of the created executable (i.e. not the wrapper, if a
+     wrapper is generated) prior to execution.  Similarly,
+     `@TOOL_OUTPUT@' is replaced by the toolchain format of `@OUTPUT@'.
+     Normally disabled (i.e. `postlink_cmds' empty).
+
+ -- Variable: reload_cmds
+ -- Variable: reload_flag
+     Commands to create a reloadable object.  Set `reload_cmds' to
+     `false' on systems that cannot create reloadable objects.
+
+ -- Variable: runpath_var
+     The environment variable that tells the linker which directories to
+     hardcode in the resulting executable.
+
+ -- Variable: shlibpath_overrides_runpath
+     Indicates whether it is possible to override the hard-coded library
+     search path of a program with an environment variable.  If this is
+     set to no, libtool may have to create two copies of a program in
+     the build tree, one to be installed and one to be run in the build
+     tree only.  When each of these copies is created depends on the
+     value of `fast_install'.  The default value is `unknown', which is
+     equivalent to `no'.
+
+ -- Variable: shlibpath_var
+     The environment variable that tells the dynamic linker where to
+     find shared libraries.
+
+ -- Variable: soname_spec
+     The name coded into shared libraries, if different from the real
+     name of the file.
+
+ -- Variable: striplib
+ -- Variable: old_striplib
+     Command to strip a shared (`striplib') or static (`old_striplib')
+     library, respectively.  If these variables are empty, the strip
+     flag in the install mode will be ignored for libraries (*note
+     Install mode::).
+
+ -- Variable: sys_lib_dlsearch_path_spec
+     Expression to get the run-time system library search path.
+     Directories that appear in this list are never hard-coded into
+     executables.
+
+ -- Variable: sys_lib_search_path_spec
+     Expression to get the compile-time system library search path.
+     This variable is used by libtool when it has to test whether a
+     certain library is shared or static.  The directories listed in
+     `shlibpath_var' are automatically appended to this list, every time
+     libtool runs (i.e., not at configuration time), because some
+     linkers use this variable to extend the library search path.
+     Linker switches such as `-L' also augment the search path.
+
+ -- Variable: thread_safe_flag_spec
+     Linker flag (passed through the C compiler) used to generate
+     thread-safe libraries.
+
+ -- Variable: to_host_file_cmd
+     If the toolchain is not native to the build platform (e.g. if you
+     are using MSYS to drive the scripting, but are using the MinGW
+     native Windows compiler) this variable describes how to convert
+     file names from the format used by the build platform to the
+     format used by host platform.  Normally set to
+     `func_convert_file_noop', libtool will autodetect most cases in
+     which other values should be used.  On rare occasions, it may be
+     necessary to override the autodetected value (*note Cygwin to
+     MinGW Cross::).
+
+ -- Variable: to_tool_file_cmd
+     If the toolchain is not native to the build platform (e.g. if you
+     are using some Unix to drive the scripting together with a Windows
+     toolchain running in Wine) this variable describes how to convert
+     file names from the format used by the build platform to the
+     format used by the toolchain.  Normally set to
+     `func_convert_file_noop'.
+
+ -- Variable: version_type
+     The library version numbering type.  One of `libtool',
+     `freebsd-aout', `freebsd-elf', `irix', `linux', `osf', `sunos',
+     `windows', or `none'.
+
+ -- Variable: want_nocaseglob
+     Find potential files using the shell option `nocaseglob', when
+     `deplibs_check_method' is `file_magic'. Normally set to `no'. Set
+     to `yes' to enable the `nocaseglob' shell option when looking for
+     potential file names in a case-insensitive manner.
+
+ -- Variable: whole_archive_flag_spec
+     Compiler flag to generate shared objects from convenience archives.
+
+ -- Variable: wl
+     The C compiler flag that allows libtool to pass a flag directly to
+     the linker.  Used as: `${wl}SOME-FLAG'.
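+
+     For example, where `wl' is `-Wl,' (a common value with GCC; other
+     toolchains differ), a flag intended for the linker is composed
+     roughly as:
+
+          $CC ... ${wl}--export-dynamic ...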
+
+   Variables ending in `_cmds' or `_eval' contain a `~'-separated list
+of commands that are `eval'ed one after another.  If any of the
+commands return a nonzero exit status, libtool generally exits with an
+error message.
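+
+   As a minimal sketch of that evaluation (the real logic lives in helper
+functions inside `ltmain.sh' and takes considerably more care with
+quoting and error reporting):
+
+     save_ifs=$IFS; IFS='~'
+     for cmd in $old_archive_cmds; do
+       IFS=$save_ifs
+       eval "$cmd" || exit $?
+     done
+     IFS=$save_ifs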
+
+   Variables ending in `_spec' are `eval'ed before being used by
+libtool.
+
+
+File: libtool.info,  Node: Cheap tricks,  Prev: libtool script contents,  Up: Maintaining
+
+15.5 Cheap tricks
+=================
+
+Here are a few tricks that you can use in order to make maintainership
+easier:
+
+   * When people report bugs, ask them to use the `--config',
+     `--debug', or `--features' flags, if you think they will help you.
+     These flags are there to help you get information directly, rather
+     than having to trust second-hand observation.  (Sample invocations
+     appear at the end of this node.)
+
+   * Rather than reconfiguring libtool every time I make a change to
+     `ltmain.in', I keep a permanent `libtool' script in my `PATH',
+     which sources `ltmain.in' directly.
+
+     The following steps describe how to create such a script, where
+     `/home/src/libtool' is the directory containing the libtool source
+     tree, `/home/src/libtool/libtool' is a libtool script that has been
+     configured for your platform, and `~/bin' is a directory in your
+     `PATH':
+
+          trick$ cd ~/bin
+          trick$ sed 's%^\(macro_version=\).*$%\1@VERSION@%;
+                      s%^\(macro_revision=\).*$%\1@package_revision@%;
+                      /^# ltmain\.sh/q' /home/src/libtool/libtool > libtool
+          trick$ echo '. /home/src/libtool/ltmain.in' >> libtool
+          trick$ chmod +x libtool
+          trick$ libtool --version
+          ltmain.sh (GNU @PACKAGE@@TIMESTAMP@) @VERSION@
+
+          Copyright (C) 2011 Free Software Foundation, Inc.
+          This is free software; see the source for copying conditions.  There is NO
+          warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+          trick$
+
+   The output of the final `libtool --version' command shows that the
+`ltmain.in' script is being used directly.  Now, modify `~/bin/libtool'
+or `/home/src/libtool/ltmain.in' directly in order to test new changes
+without having to rerun `configure'.
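+
+   The `--config' and `--features' flags mentioned in the first trick are
+plain `libtool' invocations, for example:
+
+     $ ./libtool --config
+     $ ./libtool --features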
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info-2 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info-2
new file mode 100644
index 0000000..607fef5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/libtool.info-2
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info
new file mode 100644
index 0000000..a75386b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info
@@ -0,0 +1,133 @@
+This is m4.info, produced by makeinfo version 5.1 from m4.texi.
+
+This manual (22 September 2013) is for GNU M4 (version 1.4.17), a
+package containing an implementation of the m4 macro language.
+
+   Copyright (C) 1989-1994, 2004-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, no Front-Cover Texts, and
+     no Back-Cover Texts.  A copy of the license is included in the
+     section entitled "GNU Free Documentation License."
+INFO-DIR-SECTION Text creation and manipulation
+START-INFO-DIR-ENTRY
+* M4: (m4).                     A powerful macro processor.
+END-INFO-DIR-ENTRY
+
+
+Indirect:
+m4.info-1: 813
+m4.info-2: 301580
+
+Tag Table:
+(Indirect)
+Node: Top813
+Node: Preliminaries9617
+Node: Intro10303
+Node: History11934
+Node: Bugs16028
+Node: Manual17279
+Node: Invoking m420671
+Node: Operation modes22815
+Node: Preprocessor features25782
+Node: Limits control28878
+Node: Frozen state32775
+Node: Debugging options33574
+Node: Command line files35554
+Node: Syntax37127
+Node: Names38246
+Node: Quoted strings38708
+Node: Comments39357
+Node: Other tokens40234
+Node: Input processing40812
+Ref: Input processing-Footnote-148738
+Node: Macros48933
+Node: Invocation49427
+Node: Inhibiting Invocation50228
+Node: Macro Arguments54364
+Node: Quoting Arguments57423
+Node: Macro expansion59546
+Node: Definitions60247
+Node: Define61032
+Node: Arguments63468
+Node: Pseudo Arguments67138
+Node: Undefine70680
+Node: Defn71810
+Node: Pushdef76254
+Node: Indir78879
+Node: Builtin81002
+Node: Conditionals83219
+Node: Ifdef84161
+Node: Ifelse85022
+Node: Shift88335
+Node: Forloop98803
+Node: Foreach101440
+Node: Stacks106928
+Node: Composition109968
+Node: Debugging115907
+Node: Dumpdef116492
+Node: Trace117845
+Node: Debug Levels121402
+Node: Debug Output126093
+Node: Input Control127372
+Node: Dnl127909
+Node: Changequote129808
+Node: Changecom135890
+Node: Changeword139568
+Node: M4wrap145009
+Node: File Inclusion149005
+Node: Include149322
+Node: Search Path152030
+Node: Diversions152947
+Node: Divert154630
+Node: Undivert157155
+Node: Divnum160477
+Node: Cleardivert160941
+Node: Text handling162145
+Node: Len162868
+Node: Index macro163253
+Node: Regexp164125
+Node: Substr166656
+Node: Translit167700
+Node: Patsubst170420
+Node: Format174943
+Node: Arithmetic178134
+Node: Incr178583
+Node: Eval179356
+Node: Shell commands187288
+Node: Platform macros188210
+Node: Syscmd190325
+Node: Esyscmd192605
+Node: Sysval194114
+Node: Mkstemp195796
+Node: Miscellaneous199746
+Node: Errprint200179
+Node: Location201396
+Node: M4exit204167
+Node: Frozen files206252
+Node: Using frozen files207034
+Node: Frozen file format210299
+Node: Compatibility213365
+Node: Extensions214430
+Node: Incompatibilities218297
+Node: Other Incompatibilities227211
+Node: Answers229857
+Node: Improved exch230639
+Node: Improved forloop231177
+Node: Improved foreach236507
+Node: Improved copy249467
+Node: Improved m4wrap253421
+Node: Improved cleardivert255845
+Node: Improved capitalize256826
+Node: Improved fatal_error261686
+Node: Copying This Package262746
+Node: GNU General Public License263225
+Node: Copying This Manual301580
+Node: GNU Free Documentation License302100
+Node: Indices327210
+Node: Macro index327490
+Node: Concept index333873
+
+End Tag Table
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info-1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info-1
new file mode 100644
index 0000000..db2ee65
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info-1
@@ -0,0 +1,7818 @@
+This is m4.info, produced by makeinfo version 5.1 from m4.texi.
+
+This manual (22 September 2013) is for GNU M4 (version 1.4.17), a
+package containing an implementation of the m4 macro language.
+
+   Copyright (C) 1989-1994, 2004-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, no Front-Cover Texts, and
+     no Back-Cover Texts.  A copy of the license is included in the
+     section entitled "GNU Free Documentation License."
+INFO-DIR-SECTION Text creation and manipulation
+START-INFO-DIR-ENTRY
+* M4: (m4).                     A powerful macro processor.
+END-INFO-DIR-ENTRY
+
+
+File: m4.info,  Node: Top,  Next: Preliminaries,  Up: (dir)
+
+GNU M4
+******
+
+This manual (22 September 2013) is for GNU M4 (version 1.4.17), a
+package containing an implementation of the m4 macro language.
+
+   Copyright (C) 1989-1994, 2004-2013 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.3 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, no Front-Cover Texts, and
+     no Back-Cover Texts.  A copy of the license is included in the
+     section entitled "GNU Free Documentation License."
+
+   GNU 'm4' is an implementation of the traditional UNIX macro
+processor.  It is mostly SVR4 compatible, although it has some
+extensions (for example, handling more than 9 positional parameters to
+macros).  'm4' also has builtin functions for including files, running
+shell commands, doing arithmetic, etc.  Autoconf needs GNU 'm4' for
+generating 'configure' scripts, but not for running them.
+
+   GNU 'm4' was originally written by Rene' Seindal, with subsequent
+changes by Franc,ois Pinard and other volunteers on the Internet.  All
+names and email addresses can be found in the files 'm4-1.4.17/AUTHORS'
+and 'm4-1.4.17/THANKS' from the GNU M4 distribution.
+
+   This is release 1.4.17.  It is now considered stable: future releases
+in the 1.4.x series are only meant to fix bugs, increase speed, or
+improve documentation.  However...
+
+   An experimental feature, which would improve 'm4' usefulness, allows
+for changing the syntax for what is a "word" in 'm4'.  You should use:
+     ./configure --enable-changeword
+if you want this feature compiled in.  The current implementation slows
+down 'm4' considerably and is hardly acceptable.  In the future, 'm4'
+2.0 will come with a different set of new features that provide similar
+capabilities, but without the inefficiencies, so changeword will go away
+and _you should not count on it_.
+
+* Menu:
+
+* Preliminaries::               Introduction and preliminaries
+* Invoking m4::                 Invoking 'm4'
+* Syntax::                      Lexical and syntactic conventions
+
+* Macros::                      How to invoke macros
+* Definitions::                 How to define new macros
+* Conditionals::                Conditionals, loops, and recursion
+
+* Debugging::                   How to debug macros and input
+
+* Input Control::               Input control
+* File Inclusion::              File inclusion
+* Diversions::                  Diverting and undiverting output
+
+* Text handling::               Macros for text handling
+* Arithmetic::                  Macros for doing arithmetic
+* Shell commands::              Macros for running shell commands
+* Miscellaneous::               Miscellaneous builtin macros
+* Frozen files::                Fast loading of frozen state
+
+* Compatibility::               Compatibility with other versions of 'm4'
+* Answers::                     Correct version of some examples
+
+* Copying This Package::        How to make copies of the overall M4 package
+* Copying This Manual::         How to make copies of this manual
+* Indices::                     Indices of concepts and macros
+
+ -- The Detailed Node Listing --
+
+Introduction and preliminaries
+
+* Intro::                       Introduction to 'm4'
+* History::                     Historical references
+* Bugs::                        Problems and bugs
+* Manual::                      Using this manual
+
+Invoking 'm4'
+
+* Operation modes::             Command line options for operation modes
+* Preprocessor features::       Command line options for preprocessor features
+* Limits control::              Command line options for limits control
+* Frozen state::                Command line options for frozen state
+* Debugging options::           Command line options for debugging
+* Command line files::          Specifying input files on the command line
+
+Lexical and syntactic conventions
+
+* Names::                       Macro names
+* Quoted strings::              Quoting input to 'm4'
+* Comments::                    Comments in 'm4' input
+* Other tokens::                Other kinds of input tokens
+* Input processing::            How 'm4' copies input to output
+
+How to invoke macros
+
+* Invocation::                  Macro invocation
+* Inhibiting Invocation::       Preventing macro invocation
+* Macro Arguments::             Macro arguments
+* Quoting Arguments::           On Quoting Arguments to macros
+* Macro expansion::             Expanding macros
+
+How to define new macros
+
+* Define::                      Defining a new macro
+* Arguments::                   Arguments to macros
+* Pseudo Arguments::            Special arguments to macros
+* Undefine::                    Deleting a macro
+* Defn::                        Renaming macros
+* Pushdef::                     Temporarily redefining macros
+
+* Indir::                       Indirect call of macros
+* Builtin::                     Indirect call of builtins
+
+Conditionals, loops, and recursion
+
+* Ifdef::                       Testing if a macro is defined
+* Ifelse::                      If-else construct, or multibranch
+* Shift::                       Recursion in 'm4'
+* Forloop::                     Iteration by counting
+* Foreach::                     Iteration by list contents
+* Stacks::                      Working with definition stacks
+* Composition::                 Building macros with macros
+
+How to debug macros and input
+
+* Dumpdef::                     Displaying macro definitions
+* Trace::                       Tracing macro calls
+* Debug Levels::                Controlling debugging output
+* Debug Output::                Saving debugging output
+
+Input control
+
+* Dnl::                         Deleting whitespace in input
+* Changequote::                 Changing the quote characters
+* Changecom::                   Changing the comment delimiters
+* Changeword::                  Changing the lexical structure of words
+* M4wrap::                      Saving text until end of input
+
+File inclusion
+
+* Include::                     Including named files
+* Search Path::                 Searching for include files
+
+Diverting and undiverting output
+
+* Divert::                      Diverting output
+* Undivert::                    Undiverting output
+* Divnum::                      Diversion numbers
+* Cleardivert::                 Discarding diverted text
+
+Macros for text handling
+
+* Len::                         Calculating length of strings
+* Index macro::                 Searching for substrings
+* Regexp::                      Searching for regular expressions
+* Substr::                      Extracting substrings
+* Translit::                    Translating characters
+* Patsubst::                    Substituting text by regular expression
+* Format::                      Formatting strings (printf-like)
+
+Macros for doing arithmetic
+
+* Incr::                        Decrement and increment operators
+* Eval::                        Evaluating integer expressions
+
+Macros for running shell commands
+
+* Platform macros::             Determining the platform
+* Syscmd::                      Executing simple commands
+* Esyscmd::                     Reading the output of commands
+* Sysval::                      Exit status
+* Mkstemp::                     Making temporary files
+
+Miscellaneous builtin macros
+
+* Errprint::                    Printing error messages
+* Location::                    Printing current location
+* M4exit::                      Exiting from 'm4'
+
+Fast loading of frozen state
+
+* Using frozen files::          Using frozen files
+* Frozen file format::          Frozen file format
+
+Compatibility with other versions of 'm4'
+
+* Extensions::                  Extensions in GNU M4
+* Incompatibilities::           Facilities in System V m4 not in GNU M4
+* Other Incompatibilities::     Other incompatibilities
+
+Correct version of some examples
+
+* Improved exch::               Solution for 'exch'
+* Improved forloop::            Solution for 'forloop'
+* Improved foreach::            Solution for 'foreach'
+* Improved copy::               Solution for 'copy'
+* Improved m4wrap::             Solution for 'm4wrap'
+* Improved cleardivert::        Solution for 'cleardivert'
+* Improved capitalize::         Solution for 'capitalize'
+* Improved fatal_error::        Solution for 'fatal_error'
+
+How to make copies of the overall M4 package
+
+* GNU General Public License::  License for copying the M4 package
+
+How to make copies of this manual
+
+* GNU Free Documentation License::  License for copying this manual
+
+Indices of concepts and macros
+
+* Macro index::                 Index for all 'm4' macros
+* Concept index::               Index for many concepts
+
+
+
+File: m4.info,  Node: Preliminaries,  Next: Invoking m4,  Prev: Top,  Up: Top
+
+1 Introduction and preliminaries
+********************************
+
+This first chapter explains what GNU 'm4' is, where 'm4' comes from, how
+to read and use this documentation, how to call the 'm4' program, and
+how to report bugs about it.  It concludes by giving tips for reading
+the remainder of the manual.
+
+   The following chapters then detail all the features of the 'm4'
+language.
+
+* Menu:
+
+* Intro::                       Introduction to 'm4'
+* History::                     Historical references
+* Bugs::                        Problems and bugs
+* Manual::                      Using this manual
+
+
+File: m4.info,  Node: Intro,  Next: History,  Up: Preliminaries
+
+1.1 Introduction to 'm4'
+========================
+
+'m4' is a macro processor, in the sense that it copies its input to the
+output, expanding macros as it goes.  Macros are either builtin or
+user-defined, and can take any number of arguments.  Besides just doing
+macro expansion, 'm4' has builtin functions for including named files,
+running shell commands, doing integer arithmetic, manipulating text in
+various ways, performing recursion, etc.... 'm4' can be used either as a
+front-end to a compiler, or as a macro processor in its own right.
+
+   The 'm4' macro processor is widely available on all UNIXes, and has
+been standardized by POSIX. Usually, only a small percentage of users
+are aware of its existence.  However, those who find it often become
+committed users.  The popularity of GNU Autoconf, which requires GNU
+'m4' for _generating_ 'configure' scripts, is an incentive for many to
+install it, while these people will not themselves program in 'm4'.  GNU
+'m4' is mostly compatible with the System V, Release 4 version, except
+for some minor differences.  *Note Compatibility::, for more details.
+
+   Some people find 'm4' to be fairly addictive.  They first use 'm4'
+for simple problems, then take bigger and bigger challenges, learning
+how to write complex sets of 'm4' macros along the way.  Once really
+addicted, users pursue writing of sophisticated 'm4' applications even
+to solve simple problems, devoting more time debugging their 'm4'
+scripts than doing real work.  Beware that 'm4' may be dangerous for the
+health of compulsive programmers.
+
+
+File: m4.info,  Node: History,  Next: Bugs,  Prev: Intro,  Up: Preliminaries
+
+1.2 Historical references
+=========================
+
+Macro languages were invented early in the history of computing.  In the
+1950s Alan Perlis suggested that the macro language be independent of
+the language being processed.  Techniques such as conditional and
+recursive macros, and using macros to define other macros, were
+described by Doug McIlroy of Bell Labs in "Macro Instruction Extensions
+of Compiler Languages", _Communications of the ACM_ 3, 4 (1960), 214-20,
+<http://dx.doi.org/10.1145/367177.367223>.
+
+   An important precursor of 'm4' was GPM; see C. Strachey, "A general
+purpose macrogenerator", _Computer Journal_ 8, 3 (1965), 225-41,
+<http://dx.doi.org/10.1093/comjnl/8.3.225>.  GPM is also succinctly
+described in David Gries's book _Compiler Construction for Digital
+Computers_, Wiley (1971).  Strachey was a brilliant programmer: GPM fit
+into 250 machine instructions!
+
+   Inspired by GPM while visiting Strachey's Lab in 1968, McIlroy wrote
+a model preprocessor that fit into a page of Snobol 3 code, and
+McIlroy and Robert Morris developed a series of further models at Bell
+Labs.  Andrew D. Hall followed up with M6, a general purpose macro
+processor used to port the Fortran source code of the Altran computer
+algebra system; see Hall's "The M6 Macro Processor", Computing Science
+Technical Report #2, Bell Labs (1972),
+<http://cm.bell-labs.com/cm/cs/cstr/2.pdf>.  M6's source code consisted
+of about 600 Fortran statements.  Its name was the first of the 'm4'
+line.
+
+   The Brian Kernighan and P.J. Plauger book _Software Tools_,
+Addison-Wesley (1976), describes and implements a Unix macro-processor
+language, which inspired Dennis Ritchie to write 'm3', a macro processor
+for the AP-3 minicomputer.
+
+   Kernighan and Ritchie then joined forces to develop the original
+'m4', described in "The M4 Macro Processor", Bell Laboratories (1977),
+<http://wolfram.schneider.org/bsd/7thEdManVol2/m4/m4.pdf>.  It had only
+21 builtin macros.
+
+   While 'GPM' was more _pure_, 'm4' is meant to deal with the true
+intricacies of real life: macros can be recognized without being
+pre-announced, skipping whitespace or end-of-lines is easier, more
+constructs are builtin instead of derived, etc.
+
+   Originally, the Kernighan and Plauger macro-processor, and then 'm3',
+formed the engine for the Rational FORTRAN preprocessor, that is, the
+'Ratfor' equivalent of 'cpp'.  Later, 'm4' was used as a front-end for
+'Ratfor', 'C' and 'Cobol'.
+
+   Rene' Seindal released his implementation of 'm4', GNU 'm4', in 1990,
+with the aim of removing the artificial limitations in many of the
+traditional 'm4' implementations, such as maximum line length, macro
+size, or number of macros.
+
+   The late Professor A. Dain Samples described and implemented a
+further evolution in the form of 'M5': "User's Guide to the M5 Macro
+Language: 2nd edition", Electronic Announcement on comp.compilers
+newsgroup (1992).
+
+   Franc,ois Pinard took over maintenance of GNU 'm4' in 1992, until
+1994 when he released GNU 'm4' 1.4, which was the stable release for 10
+years.  It was at this time that GNU Autoconf decided to require GNU
+'m4' as its underlying engine, since all other implementations of 'm4'
+had too many limitations.
+
+   More recently, in 2004, Paul Eggert released 1.4.1 and 1.4.2 which
+addressed some long standing bugs in the venerable 1.4 release.  Then in
+2005, Gary V. Vaughan collected together the many patches to GNU 'm4'
+1.4 that were floating around the net and released 1.4.3 and 1.4.4.  And
+in 2006, Eric Blake joined the team and prepared patches for the release
+of 1.4.5, 1.4.6, 1.4.7, and 1.4.8.  More bug fixes were incorporated in
+2007, with releases 1.4.9 and 1.4.10.  Eric continued with some
+portability fixes for 1.4.11 and 1.4.12 in 2008, 1.4.13 in 2009, 1.4.14
+and 1.4.15 in 2010, and 1.4.16 in 2011.
+
+   Meanwhile, development has continued on new features for 'm4', such
+as dynamic module loading and additional builtins.  When complete, GNU
+'m4' 2.0 will start a new series of releases.
+
+
+File: m4.info,  Node: Bugs,  Next: Manual,  Prev: History,  Up: Preliminaries
+
+1.3 Problems and bugs
+=====================
+
+If you have problems with GNU M4 or think you've found a bug, please
+report it.  Before reporting a bug, make sure you've actually found a
+real bug.  Carefully reread the documentation and see if it really says
+you can do what you're trying to do.  If it's not clear whether you
+should be able to do something or not, report that too; it's a bug in
+the documentation!
+
+   Before reporting a bug or trying to fix it yourself, try to isolate
+it to the smallest possible input file that reproduces the problem.
+Then send us the input file and the exact results 'm4' gave you.  Also
+say what you expected to occur; this will help us decide whether the
+problem was really in the documentation.
+
+   Once you've got a precise problem, send e-mail to <bug-m4@gnu.org>.
+Please include the version number of 'm4' you are using.  You can get
+this information with the command 'm4 --version'.  Also provide details
+about the platform you are executing on.
+
+   Non-bug suggestions are always welcome as well.  If you have
+questions about things that are unclear in the documentation or are just
+obscure features, please report them too.
+
+
+File: m4.info,  Node: Manual,  Prev: Bugs,  Up: Preliminaries
+
+1.4 Using this manual
+=====================
+
+This manual contains a number of examples of 'm4' input and output, and
+a simple notation is used to distinguish input, output and error
+messages from 'm4'.  Examples are set out from the normal text, and
+shown in a fixed width font, like this
+
+     This is an example of an example!
+
+   To distinguish input from output, all output from 'm4' is prefixed by
+the string '=>', and all error messages by the string 'error->'.  When
+showing how command line options affect matters, the command line is
+shown with a prompt '$ like this', otherwise, you can assume that a
+simple 'm4' invocation will work.  Thus:
+
+     $ command line to invoke m4
+     Example of input line
+     =>Output line from m4
+     error->and an error message
+
+   The sequence '^D' in an example indicates the end of the input file.
+The sequence '<NL>' refers to the newline character.  The majority of
+these examples are self-contained, and you can run them with similar
+results by invoking 'm4 -d'.  In fact, the testsuite that is bundled in
+the GNU M4 package consists of the examples in this document!  Some of
+the examples assume that your current directory is located where you
+unpacked the installation, so if you plan on following along, you may
+find it helpful to do this now:
+
+     $ cd m4-1.4.17
+
+   As each of the predefined macros in 'm4' is described, a prototype
+call of the macro will be shown, giving descriptive names to the
+arguments, e.g.,
+
+ -- Composite: example (STRING, [COUNT = '1'], [ARGUMENT]...)
+     This is a sample prototype.  There is not really a macro
+     named 'example', but this documents that if there were, it would be
+     a Composite macro, rather than a Builtin.  It requires at least one
+     argument, STRING.  Remember that in 'm4', there must not be a space
+     between the macro name and the opening parenthesis, unless it was
+     intended to call the macro without any arguments.  The brackets
+     around COUNT and ARGUMENT show that these arguments are optional.
+     If COUNT is omitted, the macro behaves as if COUNT were '1',
+     whereas if ARGUMENT is omitted, the macro behaves as if it were the
+     empty string.  A blank argument is not the same as an omitted
+     argument.  For example, 'example(`a')', 'example(`a',`1')', and
+     'example(`a',`1',)' would behave identically with COUNT set to '1';
+     while 'example(`a',)' and 'example(`a',`')' would explicitly pass
+     the empty string for COUNT.  The ellipses ('...') show that the
+     macro processes additional arguments after ARGUMENT, rather than
+     ignoring them.
+
+   All macro arguments in 'm4' are strings, but some are given special
+interpretation, e.g., as numbers, file names, regular expressions, etc.
+The documentation for each macro will state how the parameters are
+interpreted, and what happens if the argument cannot be parsed according
+to the desired interpretation.  Unless specified otherwise, a parameter
+specified to be a number is parsed as a decimal, even if the argument
+has leading zeros; and parsing the empty string as a number results in 0
+rather than an error, although a warning will be issued.
+
+   This document consistently writes and uses "builtin", without a
+hyphen, as if it were an English word.  This is how the 'builtin'
+primitive is spelled within 'm4'.
+
+
+File: m4.info,  Node: Invoking m4,  Next: Syntax,  Prev: Preliminaries,  Up: Top
+
+2 Invoking 'm4'
+***************
+
+The format of the 'm4' command is:
+
+     m4 [OPTION...] [FILE...]
+
+   All options begin with '-', or if long option names are used, with
+'--'.  A long option name need not be written completely, any
+unambiguous prefix is sufficient.  POSIX requires 'm4' to recognize
+arguments intermixed with files, even when 'POSIXLY_CORRECT' is set in
+the environment.  Most options take effect at startup regardless of
+their position, but some are documented below as taking effect after any
+files that occurred earlier in the command line.  The argument '--' is a
+marker to denote the end of options.
+
+   With short options, options that do not take arguments may be
+combined into a single command line argument with subsequent options,
+options with mandatory arguments may be provided either as a single
+command line argument or as two arguments, and options with optional
+arguments must be provided as a single argument.  In other words, 'm4
+-QPDfoo -d a -df' is equivalent to 'm4 -Q -P -D foo -d -df -- ./a',
+although the latter form is considered canonical.
+
+   With long options, options with mandatory arguments may be provided
+with an equal sign ('=') in a single argument, or as two arguments, and
+options with optional arguments must be provided as a single argument.
+In other words, 'm4 --def foo --debug a' is equivalent to 'm4
+--define=foo --debug= -- ./a', although the latter form is considered
+canonical (not to mention more robust, in case a future version of 'm4'
+introduces an option named '--default').
+
+   'm4' understands the following options, grouped by functionality.
+
+* Menu:
+
+* Operation modes::             Command line options for operation modes
+* Preprocessor features::       Command line options for preprocessor features
+* Limits control::              Command line options for limits control
+* Frozen state::                Command line options for frozen state
+* Debugging options::           Command line options for debugging
+* Command line files::          Specifying input files on the command line
+
+
+File: m4.info,  Node: Operation modes,  Next: Preprocessor features,  Up: Invoking m4
+
+2.1 Command line options for operation modes
+============================================
+
+Several options control the overall operation of 'm4':
+
+'--help'
+     Print a help summary on standard output, then immediately exit 'm4'
+     without reading any input files or performing any other actions.
+
+'--version'
+     Print the version number of the program on standard output, then
+     immediately exit 'm4' without reading any input files or performing
+     any other actions.
+
+'-E'
+'--fatal-warnings'
+     Controls the effect of warnings.  If unspecified, then execution
+     continues and exit status is unaffected when a warning is printed.
+     If specified exactly once, warnings become fatal; when one is
+     issued, execution continues, but the exit status will be non-zero.
+     If specified multiple times, then execution halts with non-zero
+     status the first time a warning is issued.  The introduction of
+     behavior levels is new to M4 1.4.9; for behavior consistent with
+     earlier versions, you should specify '-E' twice.
+
+'-i'
+'--interactive'
+'-e'
+     Makes this invocation of 'm4' interactive.  This means that all
+     output will be unbuffered, and interrupts will be ignored.  The
+     spelling '-e' exists for compatibility with other 'm4'
+     implementations, and issues a warning because it may be withdrawn
+     in a future version of GNU M4.
+
+'-P'
+'--prefix-builtins'
+     Internally modify _all_ builtin macro names so they all start with
+     the prefix 'm4_'.  For example, using this option, one should write
+     'm4_define' instead of 'define', and 'm4___file__' instead of
+     '__file__'.  This option has no effect if '-R' is also specified.
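+
+     As a hedged illustration (user-defined macros such as 'foo' keep
+     their unprefixed names; only the builtins are renamed):
+
+          $ m4 -P
+          m4_define(`foo', `bar')foo
+          =>bar
+          define(`baz', `quux')baz
+          =>define(baz, quux)baz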
+
+'-Q'
+'--quiet'
+'--silent'
+     Suppress warnings, such as missing or superfluous arguments in
+     macro calls, or treating the empty string as zero.
+
+'--warn-macro-sequence[=REGEXP]'
+     Issue a warning if the regular expression REGEXP has a non-empty
+     match in any macro definition (either by 'define' or 'pushdef').
+     Empty matches are ignored; therefore, supplying the empty string as
+     REGEXP disables any warning.  If the optional REGEXP is not
+     supplied, then the default regular expression is
+     '\$\({[^}]*}\|[0-9][0-9]+\)' (a literal '$' followed by multiple
+     digits or by an open brace), since these sequences will change
+     semantics in the default operation of GNU M4 2.0 (due to a change
+     in how more than 9 arguments in a macro definition will be handled,
+     *note Arguments::).  Providing an alternate regular expression can
+     provide a useful reverse lookup feature of finding where a macro is
+     defined to have a given definition.
+
+'-W REGEXP'
+'--word-regexp=REGEXP'
+     Use REGEXP as an alternative syntax for macro names.  This
+     experimental option will not be present in all GNU 'm4'
+     implementations (*note Changeword::).
+
+
+File: m4.info,  Node: Preprocessor features,  Next: Limits control,  Prev: Operation modes,  Up: Invoking m4
+
+2.2 Command line options for preprocessor features
+==================================================
+
+Several options allow 'm4' to behave more like a preprocessor.  Macro
+definitions and deletions can be made on the command line, the search
+path can be altered, and the output file can track where the input came
+from.  These features occur with the following options:
+
+'-D NAME[=VALUE]'
+'--define=NAME[=VALUE]'
+     This enters NAME into the symbol table.  If '=VALUE' is missing,
+     the value is taken to be the empty string.  The VALUE can be any
+     string, and the macro can be defined to take arguments, just as if
+     it was defined from within the input.  This option may be given
+     more than once; order with respect to file names is significant,
+     and redefining the same NAME loses the previous value.
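+
+     As a brief, hedged illustration (the macro name and value here are
+     arbitrary):
+
+          $ m4 -Dfoo=bar
+          foo
+          =>bar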
+
+'-I DIRECTORY'
+'--include=DIRECTORY'
+     Make 'm4' search DIRECTORY for included files that are not found in
+     the current working directory.  *Note Search Path::, for more
+     details.  This option may be given more than once.
+
+'-s'
+'--synclines'
+     Generate synchronization lines, for use by the C preprocessor or
+     other similar tools.  Order is significant with respect to file
+     names.  This option is useful, for example, when 'm4' is used as a
+     front end to a compiler.  Source file name and line number
+     information is conveyed by directives of the form '#line LINENUM
+     "FILE"', which are inserted as needed into the middle of the
+     output.  Such directives mean that the following line originated or
+     was expanded from the contents of input file FILE at line LINENUM.
+     The '"FILE"' part is often omitted when the file name did not
+     change from the previous directive.
+
+     Synchronization directives are always given on complete lines by
+     themselves.  When a synchronization discrepancy occurs in the
+     middle of an output line, the associated synchronization directive
+     is delayed until the next newline that does not occur in the middle
+     of a quoted string or comment.
+
+          define(`twoline', `1
+          2')
+          =>#line 2 "stdin"
+          =>
+          changecom(`/*', `*/')
+          =>
+          define(`comment', `/*1
+          2*/')
+          =>#line 5
+          =>
+          dnl no line
+          hello
+          =>#line 7
+          =>hello
+          twoline
+          =>1
+          =>#line 8
+          =>2
+          comment
+          =>/*1
+          =>2*/
+          one comment `two
+          three'
+          =>#line 10
+          =>one /*1
+          =>2*/ two
+          =>three
+          goodbye
+          =>#line 12
+          =>goodbye
+
+'-U NAME'
+'--undefine=NAME'
+     This deletes any predefined meaning NAME might have.  Obviously,
+     only predefined macros can be deleted in this way.  This option may
+     be given more than once; undefining a NAME that does not have a
+     definition is silently ignored.  Order is significant with respect
+     to file names.
+
+
+File: m4.info,  Node: Limits control,  Next: Frozen state,  Prev: Preprocessor features,  Up: Invoking m4
+
+2.3 Command line options for limits control
+===========================================
+
+There are some limits within 'm4' that can be tuned.  For compatibility,
+'m4' also accepts some options that control limits in other
+implementations, but which are automatically unbounded (limited only by
+your hardware and operating system constraints) in GNU 'm4'.
+
+'-g'
+'--gnu'
+     Enable all the extensions in this implementation.  In this release
+     of M4, this option is always on by default; it is currently only
+     useful when overriding a prior use of '--traditional'.  However,
+     having GNU behavior as default makes it impossible to write a
+     strictly POSIX-compliant client that avoids all incompatible GNU M4
+     extensions, since such a client would have to use the non-POSIX
+     command-line option to force full POSIX behavior.  Thus, a future
+     version of M4 will be changed to implicitly use the option
+     '--traditional' if the environment variable 'POSIXLY_CORRECT' is
+     set.  Projects that intentionally use GNU extensions should
+     consider using '--gnu' to state their intentions, so that the
+     project will not mysteriously break if the user upgrades to a newer
+     M4 and has 'POSIXLY_CORRECT' set in their environment.
+
+'-G'
+'--traditional'
+     Suppress all the extensions made in this implementation, compared
+     to the System V version.  *Note Compatibility::, for a list of
+     these.
+
+'-H NUM'
+'--hashsize=NUM'
+     Make the internal hash table for symbol lookup be NUM entries big.
+     For better performance, the number should be prime, but this is not
+     checked.  The default is 509 entries.  It should not be necessary
+     to increase this value, unless you define an excessive number of
+     macros.
+
+'-L NUM'
+'--nesting-limit=NUM'
+     Artificially limit the nesting of macro calls to NUM levels,
+     stopping program execution if this limit is ever exceeded.  When
+     not specified, nesting defaults to unlimited on platforms that can
+     detect stack overflow, and to 1024 levels otherwise.  A value of
+     zero means unlimited; but then heavily nested code could
+     potentially cause a stack overflow.
+
+     The precise effect of this option is more correctly associated with
+     textual nesting than dynamic recursion.  It has been useful when
+     some complex 'm4' input was generated by mechanical means, and also
+     in diagnosing recursive algorithms that do not scale well.  Most
+     users never need to change this option from its default.
+
+     This option does _not_ have the ability to break endless rescanning
+     loops, since these do not necessarily consume much memory or stack
+     space.  Through clever usage of rescanning loops, one can request
+     complex, time-consuming computations from 'm4' with useful results.
+     Putting limitations in this area would break 'm4''s power.  There are
+     many pathological cases: 'define(`a', `a')a' is only the simplest
+     example (but *note Compatibility::).  Expecting GNU 'm4' to detect
+     these would be a little like expecting a compiler system to detect
+     and diagnose endless loops: it is a quite _hard_ problem in
+     general, if not undecidable!
+
+'-B NUM'
+'-S NUM'
+'-T NUM'
+     These options are present for compatibility with System V 'm4', but
+     do nothing in this implementation.  They may disappear in future
+     releases, and issue a warning to that effect.
+
+'-N NUM'
+'--diversions=NUM'
+     These options are present only for compatibility with previous
+     versions of GNU 'm4', and used to control the number of possible
+     diversions which could be used at the same time.  They do nothing,
+     because there is no fixed limit anymore.  They may disappear in
+     future releases, and issue a warning to that effect.
+
+
+File: m4.info,  Node: Frozen state,  Next: Debugging options,  Prev: Limits control,  Up: Invoking m4
+
+2.4 Command line options for frozen state
+=========================================
+
+GNU 'm4' comes with a feature of freezing internal state (*note Frozen
+files::).  This can be used to speed up 'm4' execution when reusing a
+common initialization script.
+
+'-F FILE'
+'--freeze-state=FILE'
+     Once execution is finished, write out the frozen state on the
+     specified FILE.  It is conventional, but not required, for FILE to
+     end in '.m4f'.
+
+'-R FILE'
+'--reload-state=FILE'
+     Before execution starts, recover the internal state from the
+     specified frozen FILE.  The options '-D', '-U', and '-t' take
+     effect after state is reloaded, but before the input files are
+     read.
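+
+   As a rough sketch of the intended workflow (the file names are only
+placeholders), a common initialization file can be frozen once and then
+reloaded by later runs:
+
+     $ m4 -F defs.m4f defs.m4 > /dev/null
+     $ m4 -R defs.m4f input1.m4
+     $ m4 -R defs.m4f input2.m4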
+
+
+File: m4.info,  Node: Debugging options,  Next: Command line files,  Prev: Frozen state,  Up: Invoking m4
+
+2.5 Command line options for debugging
+======================================
+
+Finally, there are several options for aiding in debugging 'm4' scripts.
+
+'-d[FLAGS]'
+'--debug[=FLAGS]'
+     Set the debug-level according to the flags FLAGS.  The debug-level
+     controls the format and amount of information presented by the
+     debugging functions.  *Note Debug Levels::, for more details on the
+     format and meaning of FLAGS.  If omitted, FLAGS defaults to 'aeq'.
+
+'--debugfile[=FILE]'
+'-o FILE'
+'--error-output=FILE'
+     Redirect 'dumpdef' output, debug messages, and trace output to the
+     named FILE.  Warnings, error messages, and 'errprint' output are
+     still printed to standard error.  If these options are not used, or
+     if FILE is unspecified (only possible for '--debugfile'), debug
+     output goes to standard error; if FILE is the empty string, debug
+     output is discarded.  *Note Debug Output::, for more details.  The
+     option '--debugfile' may be given more than once, and order is
+     significant with respect to file names.  The spellings '-o' and
+     '--error-output' are misleading and inconsistent with other GNU
+     tools; for now they are silently accepted as synonyms of
+     '--debugfile' and only recognized once, but in a future version of
+     M4, using them will cause a warning to be issued.
+
+'-l NUM'
+'--arglength=NUM'
+     Restrict the size of the output generated by macro tracing to NUM
+     characters per trace line.  If unspecified or zero, output is
+     unlimited.  *Note Debug Levels::, for more details.
+
+'-t NAME'
+'--trace=NAME'
+     This enables tracing for the macro NAME, at any point where it is
+     defined.  NAME need not be defined when this option is given.  This
+     option may be given more than once, and order is significant with
+     respect to file names.  *Note Trace::, for more details.
+
+
+File: m4.info,  Node: Command line files,  Prev: Debugging options,  Up: Invoking m4
+
+2.6 Specifying input files on the command line
+==============================================
+
+The remaining arguments on the command line are taken to be input file
+names.  If no names are present, standard input is read.  A file name of
+'-' is taken to mean standard input.  It is conventional, but not
+required, for input files to end in '.m4'.
+
+   The input files are read in the sequence given.  Standard input can
+be read more than once, so the file name '-' may appear multiple times
+on the command line; this makes a difference when input is from a
+terminal or other special file type.  It is an error if an input file
+ends in the middle of argument collection, a comment, or a quoted
+string.
+
+   The options '--define' ('-D'), '--undefine' ('-U'), '--synclines'
+('-s'), and '--trace' ('-t') only take effect after processing input
+from any file names that occur earlier on the command line.  For
+example, assume the file 'foo' contains:
+
+     $ cat foo
+     bar
+
+   The text 'bar' can then be redefined over multiple uses of 'foo':
+
+     $ m4 -Dbar=hello foo -Dbar=world foo
+     =>hello
+     =>world
+
+   If none of the input files invoked 'm4exit' (*note M4exit::), the
+exit status of 'm4' will be 0 for success, 1 for general failure (such
+as problems with reading an input file), and 63 for version mismatch
+(*note Using frozen files::).
+
+   If you need to read a file whose name starts with a '-', you can
+specify it as './-file', or use '--' to mark the end of options.
+
+
+File: m4.info,  Node: Syntax,  Next: Macros,  Prev: Invoking m4,  Up: Top
+
+3 Lexical and syntactic conventions
+***********************************
+
+As 'm4' reads its input, it separates it into "tokens".  A token is
+either a name, a quoted string, or any single character that is not
+part of a name or a string.  Input to 'm4' can also contain
+comments.  GNU 'm4' does not yet understand multibyte locales; all
+operations are byte-oriented rather than character-oriented (although if
+your locale uses a single byte encoding, such as ISO-8859-1, you will
+not notice a difference).  However, 'm4' is eight-bit clean, so you can
+use non-ASCII characters in quoted strings (*note Changequote::),
+comments (*note Changecom::), and macro names (*note Indir::), with the
+exception of the NUL character (the zero byte ''\0'').
+
+* Menu:
+
+* Names::                       Macro names
+* Quoted strings::              Quoting input to 'm4'
+* Comments::                    Comments in 'm4' input
+* Other tokens::                Other kinds of input tokens
+* Input processing::            How 'm4' copies input to output
+
+
+File: m4.info,  Node: Names,  Next: Quoted strings,  Up: Syntax
+
+3.1 Macro names
+===============
+
+A name is any sequence of letters, digits, and the character '_'
+(underscore), where the first character is not a digit.  'm4' will use
+the longest such sequence found in the input.  If a name has a macro
+definition, it will be subject to macro expansion (*note Macros::).
+Names are case-sensitive.
+
+   Examples of legal names are: 'foo', '_tmp', and 'name01'.
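+
+   For instance (a small illustrative session; 'foo' is chosen
+arbitrarily), the longest-match rule and case sensitivity give:
+
+     define(`foo', `macro')
+     =>
+     foo foo2 Foo
+     =>macro foo2 Foo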
+
+
+File: m4.info,  Node: Quoted strings,  Next: Comments,  Prev: Names,  Up: Syntax
+
+3.2 Quoting input to 'm4'
+=========================
+
+A quoted string is a sequence of characters surrounded by quote strings,
+defaulting to '`' and ''', where the nested begin and end quotes within
+the string are balanced.  The value of a string token is the text, with
+one level of quotes stripped off.  Thus
+
+     `'
+     =>
+
+is the empty string, and double-quoting turns into single-quoting.
+
+     ``quoted''
+     =>`quoted'
+
+   The quote characters can be changed at any time, using the builtin
+macro 'changequote'.  *Note Changequote::, for more information.
+
+
+File: m4.info,  Node: Comments,  Next: Other tokens,  Prev: Quoted strings,  Up: Syntax
+
+3.3 Comments in 'm4' input
+==========================
+
+Comments in 'm4' are normally delimited by the characters '#' and
+newline.  All characters between the comment delimiters are ignored, but
+the entire comment (including the delimiters) is passed through to the
+output--comments are _not_ discarded by 'm4'.
+
+   Comments cannot be nested, so the first newline after a '#' ends the
+comment.  The commenting effect of the begin-comment string can be
+inhibited by quoting it.
+
+     $ m4
+     `quoted text' # `commented text'
+     =>quoted text # `commented text'
+     `quoting inhibits' `#' `comments'
+     =>quoting inhibits # comments
+
+   The comment delimiters can be changed to any string at any time,
+using the builtin macro 'changecom'.  *Note Changecom::, for more
+information.
+
+
+File: m4.info,  Node: Other tokens,  Next: Input processing,  Prev: Comments,  Up: Syntax
+
+3.4 Other kinds of input tokens
+===============================
+
+Any character that is not part of a name, a quoted string, or a
+comment is a token by itself.  When not in the context of macro
+expansion, all of these tokens are just copied to output.  However,
+during macro expansion, whitespace characters (space, tab, newline,
+formfeed, carriage return, vertical tab), parentheses ('(' and ')'),
+comma (','), and dollar ('$') have additional roles, explained later.
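+
+   For example (a minimal sketch; none of the names below are defined
+as macros), such characters are simply echoed when no macro call is
+involved:
+
+     42 + (size / 2), $x
+     =>42 + (size / 2), $x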
+
+
+File: m4.info,  Node: Input processing,  Prev: Other tokens,  Up: Syntax
+
+3.5 How 'm4' copies input to output
+===================================
+
+As 'm4' reads the input token by token, it copies each token to the
+output immediately.
+
+   The exception is when it finds a word with a macro definition.  In
+that case 'm4' will calculate the macro's expansion, possibly reading
+more input to get the arguments.  It then inserts the expansion in front
+of the remaining input.  In other words, the resulting text from a macro
+call will be read and parsed into tokens again.
+
+   'm4' expands a macro as soon as possible.  If it finds a macro call
+when collecting the arguments to another, it will expand the second call
+first.  This process continues until there are no more macro calls to
+expand and all the input has been consumed.
+
+   For a running example, examine how 'm4' handles this input:
+
+     format(`Result is %d', eval(`2**15'))
+
+First, 'm4' sees that the token 'format' is a macro name, so it collects
+the tokens '(', '`Result is %d'', ',', and ' ', before encountering
+another potential macro.  Sure enough, 'eval' is a macro name, so the
+nested argument collection picks up '(', '`2**15'', and ')', invoking
+the eval macro with the lone argument of '2**15'.  The expansion of
+'eval(2**15)' is '32768', which is then rescanned as the five tokens
+'3', '2', '7', '6', and '8'; and combined with the next ')', the format
+macro now has all its arguments, as if the user had typed:
+
+     format(`Result is %d', 32768)
+
+The format macro expands to 'Result is 32768', and we have another round
+of scanning for the tokens 'Result', ' ', 'is', ' ', '3', '2', '7', '6',
+and '8'.  None of these are macros, so the final output is
+
+     =>Result is 32768
+
+   As a more complicated example, we will contrast an actual code
+example from the Gnulib project(1), showing both a buggy approach and
+the desired results.  The user desires to output a shell assignment
+statement that takes its argument and turns it into a shell variable by
+converting it to uppercase and prepending a prefix.  The original
+attempt looks like this:
+
+     changequote([,])dnl
+     define([gl_STRING_MODULE_INDICATOR],
+       [
+         dnl comment
+         GNULIB_]translit([$1],[a-z],[A-Z])[=1
+       ])dnl
+       gl_STRING_MODULE_INDICATOR([strcase])
+     =>  
+     =>        GNULIB_strcase=1
+     =>  
+
+   Oops - the argument did not get capitalized.  And although the manual
+is not able to easily show it, both lines that appear empty actually
+contain two trailing spaces.  By stepping through the parse, it is easy
+to see what happened.  First, 'm4' sees the token 'changequote', which
+it recognizes as a macro, followed by '(', '[', ',', ']', and ')' to
+form the argument list.  The macro expands to the empty string, but
+changes the quoting characters to something more useful for generating
+shell code (unbalanced '`' and ''' appear all the time in shell scripts,
+but unbalanced '[]' tend to be rare).  Also in the first line, 'm4' sees
+the token 'dnl', which it recognizes as a builtin macro that consumes
+the rest of the line, resulting in no output for that line.
+
+   The second line starts a macro definition.  'm4' sees the token
+'define', which it recognizes as a macro, followed by a '(',
+'[gl_STRING_MODULE_INDICATOR]', and ','.  Because an unquoted comma was
+encountered, the first argument is known to be the expansion of the
+single-quoted string token, or 'gl_STRING_MODULE_INDICATOR'.  Next, 'm4'
+sees '<NL>', ' ', and ' ', but this whitespace is discarded as part of
+argument collection.  Then comes a rather lengthy single-quoted string
+token, '[<NL>    dnl comment<NL>    GNULIB_]'.  This is followed by the
+token 'translit', which 'm4' recognizes as a macro name, so a nested
+macro expansion has started.
+
+   The arguments to the 'translit' are found by the tokens '(', '[$1]',
+',', '[a-z]', ',', '[A-Z]', and finally ')'.  All three string arguments
+are expanded (or in other words, the quotes are stripped), and since
+neither '$' nor '1' need capitalization, the result of the macro is
+'$1'.  This expansion is rescanned, resulting in the two literal
+characters '$' and '1'.
+
+   Scanning of the outer macro resumes, and picks up with '[=1<NL>  ]',
+and finally ')'.  The collected pieces of expanded text are
+concatenated, with the end result that the macro
+'gl_STRING_MODULE_INDICATOR' is now defined to be the sequence
+'<NL>    dnl comment<NL>    GNULIB_$1=1<NL>  '.  Once again, 'dnl' is
+recognized and avoids a newline in the output.
+
+   The final line is then parsed, beginning with ' ' and ' ' that are
+output literally.  Then 'gl_STRING_MODULE_INDICATOR' is recognized as a
+macro name, with an argument list of '(', '[strcase]', and ')'.  Since
+the definition of the macro contains the sequence '$1', that sequence is
+replaced with the argument 'strcase' prior to starting the rescan.  The
+rescan sees '<NL>' and four spaces, which are output literally, then
+'dnl', which discards the text ' comment<NL>'.  Next come four more
+spaces, also output literally, and the token 'GNULIB_strcase', which
+resulted from the earlier parameter substitution.  Since that is not a
+macro name, it is output literally, followed by the literal tokens '=',
+'1', '<NL>', and two more spaces.  Finally, the original '<NL>' seen
+after the macro invocation is scanned and output literally.
+
+   Now for a corrected approach.  This rearranges the use of newlines
+and whitespace so that less whitespace is output (which, although
+harmless to shell scripts, can be visually unappealing), and fixes the
+quoting issues so that the capitalization occurs when the macro
+'gl_STRING_MODULE_INDICATOR' is invoked, rather than when it is defined.
+It also adds another layer of quoting to the first argument of
+'translit', to ensure that the output will be rescanned as a string
+rather than a potential uppercase macro name needing further expansion.
+
+     changequote([,])dnl
+     define([gl_STRING_MODULE_INDICATOR],
+       [dnl comment
+       GNULIB_[]translit([[$1]], [a-z], [A-Z])=1dnl
+     ])dnl
+       gl_STRING_MODULE_INDICATOR([strcase])
+     =>    GNULIB_STRCASE=1
+
+   The parsing of the first line is unchanged.  The second line sees the
+name of the macro to define, then sees the discarded '<NL>' and two
+spaces, as before.  But this time, the next token is '[dnl
+comment<NL>  GNULIB_[]translit([[$1]], [a-z], [A-Z])=1dnl<NL>]', which
+includes nested quotes, followed by ')' to end the macro definition and
+'dnl' to skip the newline.  No early expansion of 'translit' occurs, so
+the entire string becomes the definition of the macro.
+
+   The final line is then parsed, beginning with two spaces that are
+output literally, and an invocation of 'gl_STRING_MODULE_INDICATOR' with
+the argument 'strcase'.  Again, the '$1' in the macro definition is
+substituted prior to rescanning.  Rescanning first encounters 'dnl', and
+discards ' comment<NL>'.  Then two spaces are output literally.  Next
+comes the token 'GNULIB_', but that is not a macro, so it is output
+literally.  The token '[]' is an empty string, so it does not affect
+output.  Then the token 'translit' is encountered.
+
+   This time, the arguments to 'translit' are parsed as '(',
+'[[strcase]]', ',', ' ', '[a-z]', ',', ' ', '[A-Z]', and ')'.  The two
+spaces are discarded, and the translit results in the desired result
+'[STRCASE]'.  This is rescanned, but since it is a string, the quotes
+are stripped and the only output is a literal 'STRCASE'.  Then the
+scanner sees '=' and '1', which are output literally, followed by 'dnl'
+which discards the rest of the definition of
+'gl_STRING_MODULE_INDICATOR'.  The newline at the end of output is the
+literal '<NL>' that appeared after the invocation of the macro.
+
+   The order in which 'm4' expands the macros can be further explored
+using the trace facilities of GNU 'm4' (*note Trace::).
+
+   ---------- Footnotes ----------
+
+   (1) Derived from a patch in
+<http://lists.gnu.org/archive/html/bug-gnulib/2007-01/msg00389.html>,
+and a followup patch in
+<http://lists.gnu.org/archive/html/bug-gnulib/2007-02/msg00000.html>
+
+
+File: m4.info,  Node: Macros,  Next: Definitions,  Prev: Syntax,  Up: Top
+
+4 How to invoke macros
+**********************
+
+This chapter covers macro invocation, macro arguments and how macro
+expansion is treated.
+
+* Menu:
+
+* Invocation::                  Macro invocation
+* Inhibiting Invocation::       Preventing macro invocation
+* Macro Arguments::             Macro arguments
+* Quoting Arguments::           On Quoting Arguments to macros
+* Macro expansion::             Expanding macros
+
+
+File: m4.info,  Node: Invocation,  Next: Inhibiting Invocation,  Up: Macros
+
+4.1 Macro invocation
+====================
+
+A macro invocation has one of the forms
+
+     name
+
+which is a macro invocation without any arguments, or
+
+     name(arg1, arg2, ..., argN)
+
+which is a macro invocation with N arguments.  Macros can have any
+number of arguments.  All arguments are strings, but different macros
+might interpret the arguments in different ways.
+
+   The opening parenthesis _must_ follow the NAME directly, with no
+spaces in between.  If it does not, the macro is called with no
+arguments at all.
+
+   For a macro call to have no arguments, the parentheses _must_ be left
+out.  The macro call
+
+     name()
+
+is a macro call with one argument, which is the empty string, not a call
+with no arguments.
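+
+   As an illustrative sketch (the macro 'showargs' is defined here only
+for demonstration), the effect of the parentheses can be observed by
+counting arguments with '$#':
+
+     define(`showargs', ``$0' args: $#')
+     =>
+     showargs
+     =>showargs args: 0
+     showargs()
+     =>showargs args: 1
+     showargs ()
+     =>showargs args: 0 ()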
+
+
+File: m4.info,  Node: Inhibiting Invocation,  Next: Macro Arguments,  Prev: Invocation,  Up: Macros
+
+4.2 Preventing macro invocation
+===============================
+
+An innovation of the 'm4' language, compared to some of its predecessors
+(like Strachey's 'GPM', for example), is the ability to recognize macro
+calls without resorting to any special, prefixed invocation character.
+While generally useful, this feature might sometimes be the source of
+spurious, unwanted macro calls.  So, GNU 'm4' offers several mechanisms
+or techniques for inhibiting the recognition of names as macro calls.
+
+   First of all, many builtin macros cannot meaningfully be called
+without arguments.  As a GNU extension, for any of these macros,
+whenever an opening parenthesis does not immediately follow their name,
+the builtin macro call is not triggered.  This solves the most usual
+cases, like for 'include' or 'eval'.  Later in this document, the
+sentence "This macro is recognized only with parameters" refers to this
+specific provision of GNU M4, also known as a blind builtin macro.  For
+the builtins defined by POSIX that bear this disclaimer, POSIX
+specifically states that invoking those builtins without arguments is
+unspecified, because many other implementations simply invoke the
+builtin as though it were given one empty argument instead.
+
+     $ m4
+     eval
+     =>eval
+     eval(`1')
+     =>1
+
+   There is also a command line option ('--prefix-builtins', or '-P',
+*note Invoking m4: Operation modes.) that renames all builtin macros
+with a prefix of 'm4_' at startup.  The option has no effect whatsoever
+on user defined macros.  For example, with this option, one has to write
+'m4_dnl' and even 'm4_m4exit'.  It also has no effect on whether a macro
+requires parameters.
+
+     $ m4 -P
+     eval
+     =>eval
+     eval(`1')
+     =>eval(1)
+     m4_eval
+     =>m4_eval
+     m4_eval(`1')
+     =>1
+
+   Another alternative is to redefine problematic macros to a name less
+likely to cause conflicts, using *note Definitions::.
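+
+   For instance (a minimal sketch using builtins described in *note
+Definitions::; the name 'my_eval' is arbitrary), a troublesome builtin
+can be copied under a safer name and then removed:
+
+     define(`my_eval', defn(`eval'))
+     =>
+     undefine(`eval')
+     =>
+     eval(`2 + 2')
+     =>eval(2 + 2)
+     my_eval(`2 + 2')
+     =>4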
+
+   If your version of GNU 'm4' has the 'changeword' feature compiled in,
+it offers far more flexibility in specifying the syntax of macro names,
+both builtin and user-defined.  *Note Changeword::, for more information
+on this experimental feature.
+
+   Of course, the simplest way to prevent a name from being interpreted
+as a call to an existing macro is to quote it.  The remainder of this
+section studies a little more deeply how quoting affects macro
+invocation, and how quoting can be used to inhibit macro invocation.
+
+   Although quoting is usually done over the whole macro name, it can
+also be done over only a few characters of this name (provided, of
+course, that the unquoted portions are not also a macro).  It is also
+possible to quote the empty string, but this works only _inside_ the
+name.  For example:
+
+     `divert'
+     =>divert
+     `d'ivert
+     =>divert
+     di`ver't
+     =>divert
+     div`'ert
+     =>divert
+
+all yield the string 'divert'.  While in both:
+
+     `'divert
+     =>
+     divert`'
+     =>
+
+the 'divert' builtin macro will be called, which expands to the empty
+string.
+
+   The output of macro evaluations is always rescanned.  In the
+following example, the input 'x`'y' yields the string 'bCD', exactly as
+if 'm4' had been given 'substr(ab`'cde, `1', `3')' as input:
+
+     define(`cde', `CDE')
+     =>
+     define(`x', `substr(ab')
+     =>
+     define(`y', `cde, `1', `3')')
+     =>
+     x`'y
+     =>bCD
+
+   Unquoted strings on either side of a quoted string are subject to
+being recognized as macro names.  In the following example, quoting the
+empty string allows for the second 'macro' to be recognized as such:
+
+     define(`macro', `m')
+     =>
+     macro(`m')macro
+     =>mmacro
+     macro(`m')`'macro
+     =>mm
+
+   Quoting may prevent recognizing as a macro name the concatenation of
+a macro expansion with the surrounding characters.  In this example:
+
+     define(`macro', `di$1')
+     =>
+     macro(`v')`ert'
+     =>divert
+     macro(`v')ert
+     =>
+
+the input will produce the string 'divert'.  When the quotes were
+removed, the 'divert' builtin was called instead.
+
+
+File: m4.info,  Node: Macro Arguments,  Next: Quoting Arguments,  Prev: Inhibiting Invocation,  Up: Macros
+
+4.3 Macro arguments
+===================
+
+When a name is seen, and it has a macro definition, it will be expanded
+as a macro.
+
+   If the name is followed by an opening parenthesis, the arguments will
+be collected before the macro is called.  If too few arguments are
+supplied, the missing arguments are taken to be the empty string.
+However, some builtins are documented to behave differently for a
+missing optional argument than for an explicit empty string.  If there
+are too many arguments, the excess arguments are ignored.  Unquoted
+leading whitespace is stripped off all arguments, but whitespace
+generated by a macro expansion or occurring after a macro that expanded
+to an empty string remains intact.  Whitespace includes space, tab,
+newline, carriage return, vertical tab, and formfeed.
+
+     define(`macro', `$1')
+     =>
+     macro( unquoted leading space lost)
+     =>unquoted leading space lost
+     macro(` quoted leading space kept')
+     => quoted leading space kept
+     macro(
+      divert `unquoted space kept after expansion')
+     => unquoted space kept after expansion
+     macro(macro(`
+     ')`whitespace from expansion kept')
+     =>
+     =>whitespace from expansion kept
+     macro(`unquoted trailing whitespace kept'
+     )
+     =>unquoted trailing whitespace kept
+     =>
+
+   Normally 'm4' will issue warnings if a builtin macro is called with
+an inappropriate number of arguments, but it can be suppressed with the
+'--quiet' command line option (or '--silent', or '-Q', *note Invoking
+m4: Operation modes.).  For user defined macros, there is no check of
+the number of arguments given.
+
+     $ m4
+     index(`abc')
+     error->m4:stdin:1: Warning: too few arguments to builtin `index'
+     =>0
+     index(`abc',)
+     =>0
+     index(`abc', `b', `ignored')
+     error->m4:stdin:3: Warning: excess arguments to builtin `index' ignored
+     =>1
+
+     $ m4 -Q
+     index(`abc')
+     =>0
+     index(`abc',)
+     =>0
+     index(`abc', `b', `ignored')
+     =>1
+
+   Macros are expanded normally during argument collection, and whatever
+commas, quotes and parentheses might show up in the resulting expanded
+text will serve to delimit the arguments as well.  Thus, if FOO
+expands to ', b, c', the macro call
+
+     bar(a foo, d)
+
+is a macro call with four arguments, which are 'a ', 'b', 'c' and 'd'.
+To understand why the first argument contains whitespace, remember that
+unquoted leading whitespace is never part of an argument, but trailing
+whitespace always is.
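+
+   As a concrete sketch of this splitting (the helper macro 'bar' below
+is defined only to display its arguments), one might try:
+
+     define(`foo', `, b, c')
+     =>
+     define(`bar', `[$#] $1|$2|$3|$4')
+     =>
+     bar(a foo, d)
+     =>[4] a |b|c|d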
+
+   It is possible for a macro's definition to change during argument
+collection, in which case the expansion uses the definition that was in
+effect at the time the opening '(' was seen.
+
+     define(`f', `1')
+     =>
+     f(define(`f', `2'))
+     =>1
+     f
+     =>2
+
+   It is an error if the end of file occurs while collecting arguments.
+
+     hello world
+     =>hello world
+     define(
+     ^D
+     error->m4:stdin:2: ERROR: end of file in argument list
+
+
+File: m4.info,  Node: Quoting Arguments,  Next: Macro expansion,  Prev: Macro Arguments,  Up: Macros
+
+4.4 On Quoting Arguments to macros
+==================================
+
+Each argument has unquoted leading whitespace removed.  Within each
+argument, all unquoted parentheses must match.  For example, if FOO is a
+macro,
+
+     foo(() (`(') `(')
+
+is a macro call, with one argument, whose value is '() (() ('.  Commas
+separate arguments, except when they occur inside quotes, comments, or
+unquoted parentheses.  *Note Pseudo Arguments::, for examples.
+
+   It is common practice to quote all arguments to macros, unless you
+are sure you want the arguments expanded.  Thus, in the above example
+with the parentheses, the 'right' way to do it is like this:
+
+     foo(`() (() (')
+
+   It is, however, in certain cases necessary (because nested expansion
+must occur to create the arguments for the outer macro) or convenient
+(because it uses fewer characters) to leave out quotes for some
+arguments, and there is nothing wrong with doing so.  It just makes life
+a bit harder if you are not careful to follow a consistent quoting style.
+For consistency, this manual follows the rule of thumb that each layer
+of parentheses introduces another layer of single quoting, except when
+showing the consequences of quoting rules.  This is done even when the
+quoted string cannot be a macro, such as with integers when you have not
+changed the syntax via 'changeword' (*note Changeword::).
+
+   The quoting rule of thumb of one level of quoting per parentheses has
+a nice property: when a macro name appears inside parentheses, you can
+determine when it will be expanded.  If it is not quoted, it will be
+expanded prior to the outer macro, so that its expansion becomes the
+argument.  If it is single-quoted, it will be expanded after the outer
+macro.  And if it is double-quoted, it will be used as literal text
+instead of a macro name.
+
+     define(`active', `ACT, IVE')
+     =>
+     define(`show', `$1 $1')
+     =>
+     show(active)
+     =>ACT ACT
+     show(`active')
+     =>ACT, IVE ACT, IVE
+     show(``active'')
+     =>active active
+
+
+File: m4.info,  Node: Macro expansion,  Prev: Quoting Arguments,  Up: Macros
+
+4.5 Macro expansion
+===================
+
+When the arguments, if any, to a macro call have been collected, the
+macro is expanded, and the expansion text is pushed back onto the input
+(unquoted), and reread.  The expansion text from one macro call might
+therefore result in more macros being called, if the calls are included,
+completely or partially, in the first macro call's expansion.
+
+   Taking a very simple example, if FOO expands to 'bar', and BAR
+expands to 'Hello', the input
+
+     $ m4 -Dbar=Hello -Dfoo=bar
+     foo
+     =>Hello
+
+will expand first to 'bar', and when this is reread and expanded, into
+'Hello'.
+
+
+File: m4.info,  Node: Definitions,  Next: Conditionals,  Prev: Macros,  Up: Top
+
+5 How to define new macros
+**************************
+
+Macros can be defined, redefined and deleted in several different ways.
+Also, it is possible to redefine a macro without losing a previous
+value, and bring back the original value at a later time.
+
+* Menu:
+
+* Define::                      Defining a new macro
+* Arguments::                   Arguments to macros
+* Pseudo Arguments::            Special arguments to macros
+* Undefine::                    Deleting a macro
+* Defn::                        Renaming macros
+* Pushdef::                     Temporarily redefining macros
+
+* Indir::                       Indirect call of macros
+* Builtin::                     Indirect call of builtins
+
+
+File: m4.info,  Node: Define,  Next: Arguments,  Up: Definitions
+
+5.1 Defining a macro
+====================
+
+The normal way to define or redefine macros is to use the builtin
+'define':
+
+ -- Builtin: define (NAME, [EXPANSION])
+     Defines NAME to expand to EXPANSION.  If EXPANSION is not given, it
+     is taken to be empty.
+
+     The expansion of 'define' is void.  The macro 'define' is
+     recognized only with parameters.
+
+   The following example defines the macro FOO to expand to the text
+'Hello world.'.
+
+     define(`foo', `Hello world.')
+     =>
+     foo
+     =>Hello world.
+
+   The empty line in the output is there because the newline is not a
+part of the macro definition, and it is consequently copied to the
+output.  This can be avoided by use of the macro 'dnl'.  *Note Dnl::,
+for details.
+
+   The first argument to 'define' should be quoted; otherwise, if the
+macro is already defined, you will be defining a different macro.  This
+example shows the problems with underquoting, since we did not want to
+redefine 'one':
+
+     define(foo, one)
+     =>
+     define(foo, two)
+     =>
+     one
+     =>two
+
+   GNU 'm4' normally replaces only the _topmost_ definition of a macro
+if it has several definitions from 'pushdef' (*note Pushdef::).  Some
+other implementations of 'm4' replace all definitions of a macro with
+'define'.  *Note Incompatibilities::, for more details.
+
+   As a GNU extension, the first argument to 'define' does not have to
+be a simple word.  It can be any text string, even the empty string.  A
+macro with a non-standard name cannot be invoked in the normal way, as
+the name is not recognized.  It can only be referenced by the builtins
+'indir' (*note Indir::) and 'defn' (*note Defn::).
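+
+   For example (a tiny sketch; the name '1+1' is chosen arbitrarily),
+such a macro is invisible to normal scanning but reachable with
+'indir':
+
+     define(`1+1', `2')
+     =>
+     1+1
+     =>1+1
+     indir(`1+1')
+     =>2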
+
+   Arrays and associative arrays can be simulated by using non-standard
+macro names.
+
+ -- Composite: array (INDEX)
+ -- Composite: array_set (INDEX, [VALUE])
+     Provide access to entries within an array.  'array' reads the entry
+     at location INDEX, and 'array_set' assigns VALUE to location INDEX.
+
+     define(`array', `defn(format(``array[%d]'', `$1'))')
+     =>
+     define(`array_set', `define(format(``array[%d]'', `$1'), `$2')')
+     =>
+     array_set(`4', `array element no. 4')
+     =>
+     array_set(`17', `array element no. 17')
+     =>
+     array(`4')
+     =>array element no. 4
+     array(eval(`10 + 7'))
+     =>array element no. 17
+
+   Change the '%d' to '%s' and it is an associative array.
+
+
+File: m4.info,  Node: Arguments,  Next: Pseudo Arguments,  Prev: Define,  Up: Definitions
+
+5.2 Arguments to macros
+=======================
+
+Macros can have arguments.  The Nth argument is denoted by '$n' in the
+expansion text, and is replaced by the Nth actual argument, when the
+macro is expanded.  Replacement of arguments happens before rescanning,
+regardless of how many nesting levels of quoting appear in the
+expansion.  Here is an example of a macro with two arguments.
+
+ -- Composite: exch (ARG1, ARG2)
+     Expands to ARG2 followed by ARG1, effectively exchanging their
+     order.
+
+     define(`exch', `$2, $1')
+     =>
+     exch(`arg1', `arg2')
+     =>arg2, arg1
+
+   This can be used, for example, if you like the arguments to 'define'
+to be reversed.
+
+     define(`exch', `$2, $1')
+     =>
+     define(exch(``expansion text'', ``macro''))
+     =>
+     macro
+     =>expansion text
+
+   *Note Quoting Arguments::, for an explanation of the double quotes.
+(You should try to improve this example so that clients of 'exch' do
+not have to double quote; or *note Answers: Improved exch.).
+
+   As a special case, the zeroth argument, '$0', is always the name of
+the macro being expanded.
+
+     define(`test', ``Macro name: $0'')
+     =>
+     test
+     =>Macro name: test
+
+   If you want quoted text to appear as part of the expansion text,
+remember that quotes can be nested in quoted strings.  Thus, in
+
+     define(`foo', `This is macro `foo'.')
+     =>
+     foo
+     =>This is macro foo.
+
+The 'foo' in the expansion text is _not_ expanded, since it is a quoted
+string, and not a name.
+
+   GNU 'm4' allows the number following the '$' to consist of one or
+more digits, allowing macros to have any number of arguments.  The
+extension of accepting multiple digits is incompatible with POSIX, and
+is different from traditional implementations of 'm4', which only
+recognize one digit.  Therefore, future versions of GNU M4 will phase
+out this feature.  To portably access beyond the ninth argument, you can
+use the 'argn' macro documented later (*note Shift::).
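+
+   As a short sketch of the current multi-digit behavior (the macro
+name 'eleventh' is arbitrary):
+
+     define(`eleventh', `$11')
+     =>
+     eleventh(`a', `b', `c', `d', `e', `f', `g', `h', `i', `j', `k')
+     =>k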
+
+   POSIX also states that '$' followed immediately by '{' in a macro
+definition is implementation-defined.  This version of M4 passes the
+literal characters '${' through unchanged, but M4 2.0 will implement an
+optional feature similar to 'sh', where '${11}' expands to the eleventh
+argument, to replace the current recognition of '$11'.  Meanwhile, if
+you want to guarantee that you will get a literal '${' in output when
+expanding a macro, even when you upgrade to M4 2.0, you can use nested
+quoting to your advantage:
+
+     define(`foo', `single quoted $`'{1} output')
+     =>
+     define(`bar', ``double quoted $'`{2} output'')
+     =>
+     foo(`a', `b')
+     =>single quoted ${1} output
+     bar(`a', `b')
+     =>double quoted ${2} output
+
+   To help you detect places in your M4 input files that might change in
+behavior due to the changed behavior of M4 2.0, you can use the
+'--warn-macro-sequence' command-line option (*note Invoking m4:
+Operation modes.) with the default regular expression.  This will add a
+warning any time a macro definition includes '$' followed by multiple
+digits, or by '{'.  The warning is not enabled by default, because it
+triggers a number of warnings in Autoconf 2.61 (and Autoconf uses '-E'
+to treat warnings as errors), and because it will still be possible to
+restore older behavior in M4 2.0.
+
+     $ m4 --warn-macro-sequence
+     define(`foo', `$001 ${1} $1')
+     error->m4:stdin:1: Warning: definition of `foo' contains sequence `$001'
+     error->m4:stdin:1: Warning: definition of `foo' contains sequence `${1}'
+     =>
+     foo(`bar')
+     =>bar ${1} bar
+
+
+File: m4.info,  Node: Pseudo Arguments,  Next: Undefine,  Prev: Arguments,  Up: Definitions
+
+5.3 Special arguments to macros
+===============================
+
+There is a special notation for the number of actual arguments supplied,
+and for all the actual arguments.
+
+   The number of actual arguments in a macro call is denoted by '$#' in
+the expansion text.
+
+ -- Composite: nargs (...)
+     Expands to a count of the number of arguments supplied.
+
+     define(`nargs', `$#')
+     =>
+     nargs
+     =>0
+     nargs()
+     =>1
+     nargs(`arg1', `arg2', `arg3')
+     =>3
+     nargs(`commas can be quoted, like this')
+     =>1
+     nargs(arg1#inside comments, commas do not separate arguments
+     still arg1)
+     =>1
+     nargs((unquoted parentheses, like this, group arguments))
+     =>1
+
+   Remember that '#' defaults to the comment character; if you forget
+quotes to inhibit the comment behavior, your macro definition may not
+end where you expected.
+
+     dnl Attempt to define a macro to just `$#'
+     define(underquoted, $#)
+     oops)
+     =>
+     underquoted
+     =>0)
+     =>oops
+
+   The notation '$*' can be used in the expansion text to denote all the
+actual arguments, unquoted, with commas in between.  For example
+
+     define(`echo', `$*')
+     =>
+     echo(arg1,    arg2, arg3 , arg4)
+     =>arg1,arg2,arg3 ,arg4
+
+   Often each argument should be quoted, and the notation '$@' handles
+that.  It is just like '$*', except that it quotes each argument.  A
+simple example of that is:
+
+     define(`echo', `$@')
+     =>
+     echo(arg1,    arg2, arg3 , arg4)
+     =>arg1,arg2,arg3 ,arg4
+
+   Where did the quotes go?  Of course, they were eaten when the
+expanded text was reread by 'm4'.  To show the difference, try
+
+     define(`echo1', `$*')
+     =>
+     define(`echo2', `$@')
+     =>
+     define(`foo', `This is macro `foo'.')
+     =>
+     echo1(foo)
+     =>This is macro This is macro foo..
+     echo1(`foo')
+     =>This is macro foo.
+     echo2(foo)
+     =>This is macro foo.
+     echo2(`foo')
+     =>foo
+
+*Note Trace::, if you do not understand this.  As another example of the
+difference, remember that comments encountered in arguments are passed
+untouched to the macro, and that quoting disables comments.
+
+     define(`echo1', `$*')
+     =>
+     define(`echo2', `$@')
+     =>
+     define(`foo', `bar')
+     =>
+     echo1(#foo'foo
+     foo)
+     =>#foo'foo
+     =>bar
+     echo2(#foo'foo
+     foo)
+     =>#foobar
+     =>bar'
+
+   A '$' sign in the expansion text that is not followed by anything
+'m4' understands is simply copied to the macro expansion, like any
+other text.
+
+     define(`foo', `$$$ hello $$$')
+     =>
+     foo
+     =>$$$ hello $$$
+
+   If you want a macro to expand to something like '$12', the judicious
+use of nested quoting can put a safe character between the '$' and the
+next character, relying on the rescanning to remove the nested quote.
+This will prevent 'm4' from interpreting the '$' sign as a reference to
+an argument.
+
+     define(`foo', `no nested quote: $1')
+     =>
+     foo(`arg')
+     =>no nested quote: arg
+     define(`foo', `nested quote around $: `$'1')
+     =>
+     foo(`arg')
+     =>nested quote around $: $1
+     define(`foo', `nested empty quote after $: $`'1')
+     =>
+     foo(`arg')
+     =>nested empty quote after $: $1
+     define(`foo', `nested quote around next character: $`1'')
+     =>
+     foo(`arg')
+     =>nested quote around next character: $1
+     define(`foo', `nested quote around both: `$1'')
+     =>
+     foo(`arg')
+     =>nested quote around both: arg
+
+
+File: m4.info,  Node: Undefine,  Next: Defn,  Prev: Pseudo Arguments,  Up: Definitions
+
+5.4 Deleting a macro
+====================
+
+A macro definition can be removed with 'undefine':
+
+ -- Builtin: undefine (NAME...)
+     For each argument, remove the macro NAME.  The macro names must be
+     quoted, since they would otherwise be expanded.
+
+     The expansion of 'undefine' is void.  The macro 'undefine' is
+     recognized only with parameters.
+
+     foo bar blah
+     =>foo bar blah
+     define(`foo', `some')define(`bar', `other')define(`blah', `text')
+     =>
+     foo bar blah
+     =>some other text
+     undefine(`foo')
+     =>
+     foo bar blah
+     =>foo other text
+     undefine(`bar', `blah')
+     =>
+     foo bar blah
+     =>foo bar blah
+
+   Undefining a macro inside that macro's expansion is safe; the macro
+still expands to the definition that was in effect at the '('.
+
+     define(`f', ``$0':$1')
+     =>
+     f(f(f(undefine(`f')`hello world')))
+     =>f:f:f:hello world
+     f(`bye')
+     =>f(bye)
+
+   It is not an error for NAME to have no macro definition.  In that
+case, 'undefine' does nothing.
+
+
+File: m4.info,  Node: Defn,  Next: Pushdef,  Prev: Undefine,  Up: Definitions
+
+5.5 Renaming macros
+===================
+
+It is possible to rename an already defined macro.  To do this, you need
+the builtin 'defn':
+
+ -- Builtin: defn (NAME...)
+     Expands to the _quoted definition_ of each NAME.  If an argument is
+     not a defined macro, the expansion for that argument is empty.
+
+     If NAME is a user-defined macro, the quoted definition is simply
+     the quoted expansion text.  If, instead, there is only one NAME and
+     it is a builtin, the expansion is a special token, which points to
+     the builtin's internal definition.  This token is only meaningful
+     as the second argument to 'define' (and 'pushdef'), and is silently
+     converted to an empty string in most other contexts.  Combining a
+     builtin with anything else is not supported; a warning is issued
+     and the builtin is omitted from the final expansion.
+
+     The macro 'defn' is recognized only with parameters.
+
+   Its normal use is best understood through an example, which shows how
+to rename 'undefine' to 'zap':
+
+     define(`zap', defn(`undefine'))
+     =>
+     zap(`undefine')
+     =>
+     undefine(`zap')
+     =>undefine(zap)
+
+   In this way, 'defn' can be used to copy macro definitions, and also
+definitions of builtin macros.  Even if the original macro is removed,
+the other name can still be used to access the definition.
+
+   The fact that macro definitions can be transferred also explains why
+you should use '$0', rather than retyping a macro's name in its
+definition:
+
+     define(`foo', `This is `$0'')
+     =>
+     define(`bar', defn(`foo'))
+     =>
+     bar
+     =>This is bar
+
+   Macros used as string variables should be referenced through 'defn',
+to avoid unwanted expansion of the text:
+
+     define(`string', `The macro dnl is very useful
+     ')
+     =>
+     string
+     =>The macro 
+     defn(`string')
+     =>The macro dnl is very useful
+     =>
+
+   However, it is important to remember that 'm4' rescanning is purely
+textual.  If an unbalanced end-quote string occurs in a macro
+definition, the rescan will see that embedded quote as the termination
+of the quoted string, and the remainder of the macro's definition will
+be rescanned unquoted.  Thus it is a good idea to avoid unbalanced
+end-quotes in macro definitions or arguments to macros.
+
+     define(`foo', a'a)
+     =>
+     define(`a', `A')
+     =>
+     define(`echo', `$@')
+     =>
+     foo
+     =>A'A
+     defn(`foo')
+     =>aA'
+     echo(foo)
+     =>AA'
+
+   On the other hand, it is possible to exploit the fact that 'defn' can
+concatenate multiple macros prior to the rescanning phase, in order to
+join the definitions of macros that, in isolation, have unbalanced
+quotes.  This is particularly useful when one has used several macros to
+accumulate text that M4 should rescan as a whole.  In the example below,
+note how the use of 'defn' on 'l' in isolation opens a string, which is
+not closed until the next line; but used on 'l' and 'r' together results
+in nested quoting.
+
+     define(`l', `<[>')define(`r', `<]>')
+     =>
+     changequote(`[', `]')
+     =>
+     defn([l])defn([r])
+     ])
+     =><[>]defn([r])
+     =>)
+     defn([l], [r])
+     =><[>][<]>
+
+   Using 'defn' to generate special tokens for builtin macros outside of
+expected contexts can sometimes trigger warnings.  But most of the time,
+such tokens are silently converted to the empty string.
+
+     $ m4 -d
+     defn(`defn')
+     =>
+     define(defn(`divnum'), `cannot redefine a builtin token')
+     error->m4:stdin:2: Warning: define: invalid macro name ignored
+     =>
+     divnum
+     =>0
+     len(defn(`divnum'))
+     =>0
+
+   Also note that 'defn' with multiple arguments can only join text
+macros, not builtins, although a future version of GNU M4 may lift this
+restriction.
+
+     $ m4 -d
+     define(`a', `A')define(`AA', `b')
+     =>
+     traceon(`defn', `define')
+     =>
+     defn(`a', `divnum', `a')
+     error->m4:stdin:3: Warning: cannot concatenate builtin `divnum'
+     error->m4trace: -1- defn(`a', `divnum', `a') -> ``A'`A''
+     =>AA
+     define(`mydivnum', defn(`divnum', `divnum'))mydivnum
+     error->m4:stdin:4: Warning: cannot concatenate builtin `divnum'
+     error->m4:stdin:4: Warning: cannot concatenate builtin `divnum'
+     error->m4trace: -2- defn(`divnum', `divnum')
+     error->m4trace: -1- define(`mydivnum', `')
+     =>
+     traceoff(`defn', `define')
+     =>
+
+
+File: m4.info,  Node: Pushdef,  Next: Indir,  Prev: Defn,  Up: Definitions
+
+5.6 Temporarily redefining macros
+=================================
+
+It is possible to redefine a macro temporarily, reverting to the
+previous definition at a later time.  This is done with the builtins
+'pushdef' and 'popdef':
+
+ -- Builtin: pushdef (NAME, [EXPANSION])
+ -- Builtin: popdef (NAME...)
+     Analogous to 'define' and 'undefine'.
+
+     These macros work in a stack-like fashion.  A macro is temporarily
+     redefined with 'pushdef', which saves any existing definition of
+     NAME before the new one is installed.  If there is no previous
+     definition, 'pushdef' behaves exactly like 'define'.
+
+     If a macro has several definitions (of which only one is
+     accessible), the topmost definition can be removed with 'popdef'.
+     If there is no previous definition, 'popdef' behaves like
+     'undefine'.
+
+     The expansion of both 'pushdef' and 'popdef' is void.  The macros
+     'pushdef' and 'popdef' are recognized only with parameters.
+
+     define(`foo', `Expansion one.')
+     =>
+     foo
+     =>Expansion one.
+     pushdef(`foo', `Expansion two.')
+     =>
+     foo
+     =>Expansion two.
+     pushdef(`foo', `Expansion three.')
+     =>
+     pushdef(`foo', `Expansion four.')
+     =>
+     popdef(`foo')
+     =>
+     foo
+     =>Expansion three.
+     popdef(`foo', `foo')
+     =>
+     foo
+     =>Expansion one.
+     popdef(`foo')
+     =>
+     foo
+     =>foo
+
+   If a macro with several definitions is redefined with 'define', the
+topmost definition is _replaced_ with the new definition.  If it is
+removed with 'undefine', _all_ the definitions are removed, and not only
+the topmost one.  However, POSIX allows other implementations that treat
+'define' as replacing an entire stack of definitions with a single new
+definition, so to be portable to other implementations, it may be worth
+explicitly using 'popdef' and 'pushdef' rather than relying on the GNU
+behavior of 'define'.
+
+     define(`foo', `Expansion one.')
+     =>
+     foo
+     =>Expansion one.
+     pushdef(`foo', `Expansion two.')
+     =>
+     foo
+     =>Expansion two.
+     define(`foo', `Second expansion two.')
+     =>
+     foo
+     =>Second expansion two.
+     undefine(`foo')
+     =>
+     foo
+     =>foo
+
+   Local variables within macros are implemented with 'pushdef' and
+'popdef'.  At the start of the macro a new definition is pushed, within
+the macro it is manipulated, and at the end it is popped, revealing the
+former definition.
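+
+   As an illustrative sketch of this pattern (the names 'swap' and '_t'
+are arbitrary), a scratch definition can hold an argument for later use
+and then be popped:
+
+     define(`swap', `pushdef(`_t', `$1')`$2, '_t`'popdef(`_t')')
+     =>
+     swap(`one', `two')
+     =>two, one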
+
+   It is possible to temporarily redefine a builtin with 'pushdef' and
+'defn'.
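+
+   For instance (a minimal sketch; 'len' merely stands in for any other
+definition), a builtin can be shadowed and later restored:
+
+     pushdef(`eval', defn(`len'))
+     =>
+     eval(`abc')
+     =>3
+     popdef(`eval')
+     =>
+     eval(`10 / 2')
+     =>5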
+
+
+File: m4.info,  Node: Indir,  Next: Builtin,  Prev: Pushdef,  Up: Definitions
+
+5.7 Indirect call of macros
+===========================
+
+Any macro can be called indirectly with 'indir':
+
+ -- Builtin: indir (NAME, [ARGS...])
+     Results in a call to the macro NAME, which is passed the rest of
+     the arguments ARGS.  If NAME is not defined, an error message is
+     printed, and the expansion is void.
+
+     The macro 'indir' is recognized only with parameters.
+
+   This can be used to call macros with computed or "invalid" names
+('define' allows such names to be defined):
+
+     define(`$$internal$macro', `Internal macro (name `$0')')
+     =>
+     $$internal$macro
+     =>$$internal$macro
+     indir(`$$internal$macro')
+     =>Internal macro (name $$internal$macro)
+
+   The point here is that larger macro packages can have private macros
+defined that will not be called by accident.  They can _only_ be
+called through the builtin 'indir'.
+
+   One other point to observe is that argument collection occurs before
+'indir' invokes NAME, so if argument collection changes the value of
+NAME, that will be reflected in the final expansion.  This is different
+from the behavior when invoking macros directly, where the definition
+that was in effect before argument collection is used.
+
+     $ m4 -d
+     define(`f', `1')
+     =>
+     f(define(`f', `2'))
+     =>1
+     indir(`f', define(`f', `3'))
+     =>3
+     indir(`f', undefine(`f'))
+     error->m4:stdin:4: undefined macro `f'
+     =>
+
+   When handed the result of 'defn' (*note Defn::) as one of its
+arguments, 'indir' defers to the invoked NAME for whether a token
+representing a builtin is recognized or flattened to the empty string.
+
+     $ m4 -d
+     indir(defn(`defn'), `divnum')
+     error->m4:stdin:1: Warning: indir: invalid macro name ignored
+     =>
+     indir(`define', defn(`defn'), `divnum')
+     error->m4:stdin:2: Warning: define: invalid macro name ignored
+     =>
+     indir(`define', `foo', defn(`divnum'))
+     =>
+     foo
+     =>0
+     indir(`divert', defn(`foo'))
+     error->m4:stdin:5: empty string treated as 0 in builtin `divert'
+     =>
+
+
+File: m4.info,  Node: Builtin,  Prev: Indir,  Up: Definitions
+
+5.8 Indirect call of builtins
+=============================
+
+Builtin macros can be called indirectly with 'builtin':
+
+ -- Builtin: builtin (NAME, [ARGS...])
+     Results in a call to the builtin NAME, which is passed the rest of
+     the arguments ARGS.  If NAME does not name a builtin, an error
+     message is printed, and the expansion is void.
+
+     The macro 'builtin' is recognized only with parameters.
+
+   This can be used even if NAME has been given another definition that
+hides the original builtin, or has been undefined so that no macro maps
+to the builtin.
+
+     pushdef(`define', `hidden')
+     =>
+     undefine(`undefine')
+     =>
+     define(`foo', `bar')
+     =>hidden
+     foo
+     =>foo
+     builtin(`define', `foo', defn(`divnum'))
+     =>
+     foo
+     =>0
+     builtin(`define', `foo', `BAR')
+     =>
+     foo
+     =>BAR
+     undefine(`foo')
+     =>undefine(foo)
+     foo
+     =>BAR
+     builtin(`undefine', `foo')
+     =>
+     foo
+     =>foo
+
+   The NAME argument only matches the original name of the builtin, even
+when the '--prefix-builtins' option (or '-P', *note Invoking m4:
+Operation modes.) is in effect.  This is different from 'indir', which
+only tracks current macro names.
+
+     $ m4 -P
+     m4_builtin(`divnum')
+     =>0
+     m4_builtin(`m4_divnum')
+     error->m4:stdin:2: undefined builtin `m4_divnum'
+     =>
+     m4_indir(`divnum')
+     error->m4:stdin:3: undefined macro `divnum'
+     =>
+     m4_indir(`m4_divnum')
+     =>0
+
+   Note that 'indir' and 'builtin' can be used to invoke builtins
+without arguments, even when they normally require parameters to be
+recognized; but it will provoke a warning, and result in a void
+expansion.
+
+     builtin
+     =>builtin
+     builtin()
+     error->m4:stdin:2: undefined builtin `'
+     =>
+     builtin(`builtin')
+     error->m4:stdin:3: Warning: too few arguments to builtin `builtin'
+     =>
+     builtin(`builtin',)
+     error->m4:stdin:4: undefined builtin `'
+     =>
+     builtin(`builtin', ``'
+     ')
+     error->m4:stdin:5: undefined builtin ``'
+     error->'
+     =>
+     indir(`index')
+     error->m4:stdin:7: Warning: too few arguments to builtin `index'
+     =>
+
+
+File: m4.info,  Node: Conditionals,  Next: Debugging,  Prev: Definitions,  Up: Top
+
+6 Conditionals, loops, and recursion
+************************************
+
+Macros, expanding to plain text, perhaps with arguments, are not quite
+enough.  We would like to have macros expand to different things, based
+on decisions taken at run-time.  For that, we need some kind of
+conditionals.  Also, we would like to have some kind of loop construct,
+so we could do something a number of times, or while some condition is
+true.
+
+* Menu:
+
+* Ifdef::                       Testing if a macro is defined
+* Ifelse::                      If-else construct, or multibranch
+* Shift::                       Recursion in 'm4'
+* Forloop::                     Iteration by counting
+* Foreach::                     Iteration by list contents
+* Stacks::                      Working with definition stacks
+* Composition::                 Building macros with macros
+
+
+File: m4.info,  Node: Ifdef,  Next: Ifelse,  Up: Conditionals
+
+6.1 Testing if a macro is defined
+=================================
+
+There are two different builtin conditionals in 'm4'.  The first is
+'ifdef':
+
+ -- Builtin: ifdef (NAME, STRING-1, [STRING-2])
+     If NAME is defined as a macro, 'ifdef' expands to STRING-1,
+     otherwise to STRING-2.  If STRING-2 is omitted, it is taken to be
+     the empty string (according to the normal rules).
+
+     The macro 'ifdef' is recognized only with parameters.
+
+     ifdef(`foo', ``foo' is defined', ``foo' is not defined')
+     =>foo is not defined
+     define(`foo', `')
+     =>
+     ifdef(`foo', ``foo' is defined', ``foo' is not defined')
+     =>foo is defined
+     ifdef(`no_such_macro', `yes', `no', `extra argument')
+     error->m4:stdin:4: Warning: excess arguments to builtin `ifdef' ignored
+     =>no
+
+
+File: m4.info,  Node: Ifelse,  Next: Shift,  Prev: Ifdef,  Up: Conditionals
+
+6.2 If-else construct, or multibranch
+=====================================
+
+The other conditional, 'ifelse', is much more powerful.  It can be used
+as a way to introduce a long comment, as an if-else construct, or as a
+multibranch, depending on the number of arguments supplied:
+
+ -- Builtin: ifelse (COMMENT)
+ -- Builtin: ifelse (STRING-1, STRING-2, EQUAL, [NOT-EQUAL])
+ -- Builtin: ifelse (STRING-1, STRING-2, EQUAL-1, STRING-3, STRING-4,
+          EQUAL-2, ..., [NOT-EQUAL])
+     Used with only one argument, 'ifelse' simply discards it and
+     produces no output.
+
+     If called with three or four arguments, 'ifelse' expands into
+     EQUAL, if STRING-1 and STRING-2 are equal (character for
+     character), otherwise it expands to NOT-EQUAL.  A final fifth
+     argument is ignored, after triggering a warning.
+
+     If called with six or more arguments, and STRING-1 and STRING-2 are
+     equal, 'ifelse' expands into EQUAL-1, otherwise the first three
+     arguments are discarded and the processing starts again.
+
+     The macro 'ifelse' is recognized only with parameters.
+
+   Using only one argument is a common 'm4' idiom for introducing a
+block comment, as an alternative to repeatedly using 'dnl'.  This
+special usage is recognized by GNU 'm4', so that in this case, the
+warning about missing arguments is never triggered.
+
+     ifelse(`some comments')
+     =>
+     ifelse(`foo', `bar')
+     error->m4:stdin:2: Warning: too few arguments to builtin `ifelse'
+     =>
+
+   Using three or four arguments provides decision points.
+
+     ifelse(`foo', `bar', `true')
+     =>
+     ifelse(`foo', `foo', `true')
+     =>true
+     define(`foo', `bar')
+     =>
+     ifelse(foo, `bar', `true', `false')
+     =>true
+     ifelse(foo, `foo', `true', `false')
+     =>false
+
+   Notice how the first argument was used unquoted; it is common to
+compare the expansion of a macro with a string.  With this macro, you
+can now reproduce the behavior of blind builtins, where the macro is
+recognized only with arguments.
+
+     define(`foo', `ifelse(`$#', `0', ``$0'', `arguments:$#')')
+     =>
+     foo
+     =>foo
+     foo()
+     =>arguments:1
+     foo(`a', `b', `c')
+     =>arguments:3
+
+   For an example of a way to make defining blind macros easier, see
+*note Composition::.
+
+   The macro 'ifelse' can take more than four arguments.  If given more
+than four arguments, 'ifelse' works like a 'case' or 'switch' statement
+in traditional programming languages.  If STRING-1 and STRING-2 are
+equal, 'ifelse' expands into EQUAL-1, otherwise the procedure is
+repeated with the first three arguments discarded.  This calls for an
+example:
+
+     ifelse(`foo', `bar', `third', `gnu', `gnats')
+     error->m4:stdin:1: Warning: excess arguments to builtin `ifelse' ignored
+     =>gnu
+     ifelse(`foo', `bar', `third', `gnu', `gnats', `sixth')
+     =>
+     ifelse(`foo', `bar', `third', `gnu', `gnats', `sixth', `seventh')
+     =>seventh
+     ifelse(`foo', `bar', `3', `gnu', `gnats', `6', `7', `8')
+     error->m4:stdin:4: Warning: excess arguments to builtin `ifelse' ignored
+     =>7
+
+   Naturally, the normal case will be slightly more advanced than these
+examples.  A common use of 'ifelse' is in macros implementing loops of
+various kinds.
+
+
+File: m4.info,  Node: Shift,  Next: Forloop,  Prev: Ifelse,  Up: Conditionals
+
+6.3 Recursion in 'm4'
+=====================
+
+There is no direct support for loops in 'm4', but macros can be
+recursive.  There is no limit on the number of recursion levels, other
+than those enforced by your hardware and operating system.
+
+   Loops can be programmed using recursion and the conditionals
+described previously.
+
+   There is a builtin macro, 'shift', which can, among other things, be
+used for iterating through the actual arguments to a macro:
+
+ -- Builtin: shift (ARG1, ...)
+     Takes any number of arguments, and expands to all its arguments
+     except ARG1, separated by commas, with each argument quoted.
+
+     The macro 'shift' is recognized only with parameters.
+
+     shift
+     =>shift
+     shift(`bar')
+     =>
+     shift(`foo', `bar', `baz')
+     =>bar,baz
+
+   An example of the use of 'shift' is this macro:
+
+ -- Composite: reverse (...)
+     Takes any number of arguments, and reverses their order.
+
+   It is implemented as:
+
+     define(`reverse', `ifelse(`$#', `0', , `$#', `1', ``$1'',
+                               `reverse(shift($@)), `$1'')')
+     =>
+     reverse
+     =>
+     reverse(`foo')
+     =>foo
+     reverse(`foo', `bar', `gnats', `and gnus')
+     =>and gnus, gnats, bar, foo
+
+   While not a very interesting macro, it does show how simple loops can
+be made with 'shift', 'ifelse' and recursion.  It also shows that
+'shift' is usually used with '$@'.  Another example of this is an
+implementation of a short-circuiting conditional operator.
+
+ -- Composite: cond (TEST-1, STRING-1, EQUAL-1, [TEST-2], [STRING-2],
+          [EQUAL-2], ..., [NOT-EQUAL])
+     Similar to 'ifelse', where an equal comparison between the first
+     two strings results in the third, otherwise the first three
+     arguments are discarded and the process repeats.  The difference is
+     that each TEST-<N> is expanded only when it is encountered.  This
+     means that every third argument to 'cond' is normally given one
+     more level of quoting than the corresponding argument to 'ifelse'.
+
+   Here is the implementation of 'cond', along with a demonstration of
+how it can short-circuit the side effects in 'side'.  Notice how all the
+unquoted side effects happen regardless of how many comparisons are made
+with 'ifelse', compared with only the relevant effects with 'cond'.
+
+     define(`cond',
+     `ifelse(`$#', `1', `$1',
+             `ifelse($1, `$2', `$3',
+                     `$0(shift(shift(shift($@))))')')')dnl
+     define(`side', `define(`counter', incr(counter))$1')dnl
+     define(`example1',
+     `define(`counter', `0')dnl
+     ifelse(side(`$1'), `yes', `one comparison: ',
+            side(`$1'), `no', `two comparisons: ',
+            side(`$1'), `maybe', `three comparisons: ',
+            `side(`default answer: ')')counter')dnl
+     define(`example2',
+     `define(`counter', `0')dnl
+     cond(`side(`$1')', `yes', `one comparison: ',
+          `side(`$1')', `no', `two comparisons: ',
+          `side(`$1')', `maybe', `three comparisons: ',
+          `side(`default answer: ')')counter')dnl
+     example1(`yes')
+     =>one comparison: 3
+     example1(`no')
+     =>two comparisons: 3
+     example1(`maybe')
+     =>three comparisons: 3
+     example1(`feeling rather indecisive today')
+     =>default answer: 4
+     example2(`yes')
+     =>one comparison: 1
+     example2(`no')
+     =>two comparisons: 2
+     example2(`maybe')
+     =>three comparisons: 3
+     example2(`feeling rather indecisive today')
+     =>default answer: 4
+
+   Another common task that requires iteration is joining a list of
+arguments into a single string.
+
+ -- Composite: join ([SEPARATOR], [ARGS...])
+ -- Composite: joinall ([SEPARATOR], [ARGS...])
+     Generate a single-quoted string, consisting of each ARG separated
+     by SEPARATOR.  While 'joinall' always outputs a SEPARATOR between
+     arguments, 'join' avoids the SEPARATOR for an empty ARG.
+
+   Here are some examples of its usage, based on the implementation
+'m4-1.4.17/examples/join.m4' distributed in this package:
+
+     $ m4 -I examples
+     include(`join.m4')
+     =>
+     join,join(`-'),join(`-', `'),join(`-', `', `')
+     =>,,,
+     joinall,joinall(`-'),joinall(`-', `'),joinall(`-', `', `')
+     =>,,,-
+     join(`-', `1')
+     =>1
+     join(`-', `1', `2', `3')
+     =>1-2-3
+     join(`', `1', `2', `3')
+     =>123
+     join(`-', `', `1', `', `', `2', `')
+     =>1-2
+     joinall(`-', `', `1', `', `', `2', `')
+     =>-1---2-
+     join(`,', `1', `2', `3')
+     =>1,2,3
+     define(`nargs', `$#')dnl
+     nargs(join(`,', `1', `2', `3'))
+     =>1
+
+   Examining the implementation shows some interesting points about
+several m4 programming idioms.
+
+     $ m4 -I examples
+     undivert(`join.m4')dnl
+     =>divert(`-1')
+     =># join(sep, args) - join each non-empty ARG into a single
+     =># string, with each element separated by SEP
+     =>define(`join',
+     =>`ifelse(`$#', `2', ``$2'',
+     =>  `ifelse(`$2', `', `', ``$2'_')$0(`$1', shift(shift($@)))')')
+     =>define(`_join',
+     =>`ifelse(`$#$2', `2', `',
+     =>  `ifelse(`$2', `', `', ``$1$2'')$0(`$1', shift(shift($@)))')')
+     =># joinall(sep, args) - join each ARG, including empty ones,
+     =># into a single string, with each element separated by SEP
+     =>define(`joinall', ``$2'_$0(`$1', shift($@))')
+     =>define(`_joinall',
+     =>`ifelse(`$#', `2', `', ``$1$3'$0(`$1', shift(shift($@)))')')
+     =>divert`'dnl
+
+   First, notice that this implementation creates helper macros '_join'
+and '_joinall'.  This division of labor makes it easier to output the
+correct number of SEPARATOR instances: 'join' and 'joinall' are
+responsible for the first argument, without a separator, while '_join'
+and '_joinall' are responsible for all remaining arguments, always
+outputting a separator when outputting an argument.
+
+   Next, observe how 'join' decides whether to iterate to itself (when
+the first ARG was empty) or to output the argument and swap over to
+'_join'.
+If the argument is non-empty, then the nested 'ifelse' results in an
+unquoted '_', which is concatenated with the '$0' to form the next macro
+name to invoke.  The 'joinall' implementation is simpler since it does
+not have to suppress empty ARG; it always executes once then defers to
+'_joinall'.
+
+   Another important idiom is the idea that SEPARATOR is reused for each
+iteration.  Each iteration has one less argument, but rather than
+discarding '$1' by iterating with '$0(shift($@))', the macro discards
+'$2' by using '$0(`$1', shift(shift($@)))'.
+
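+   To see what the next iteration actually receives, a throwaway helper
+can echo the rebuilt argument list (the names 'echo' and 'step' below
+are illustrative only, not part of 'join.m4'); note how the separator
+survives while the current element is dropped:
+
+     define(`echo', `$@')
+     =>
+     define(`step', `echo(`$1', shift(shift($@)))')
+     =>
+     step(`-', `a', `b', `c')
+     =>-,b,c
+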
+   Next, notice that it is possible to compare more than one condition
+in a single 'ifelse' test.  The test of '$#$2' against '2' allows
+'_join' to iterate for two separate reasons--either there are still more
+than two arguments, or there are exactly two arguments but the last
+argument is not empty.
+
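+   A hypothetical 'probe' macro (not among the distributed examples)
+makes the combined test visible: only the case of exactly two arguments
+whose last argument is empty yields the bare string '2':
+
+     define(`probe', `$#$2')
+     =>
+     probe(`-', `x')
+     =>2x
+     probe(`-', `')
+     =>2
+     probe(`-')
+     =>1
+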
+   Finally, notice that these macros require exactly two arguments to
+terminate recursion, but that they still correctly result in empty
+output when given no ARGS (i.e., zero or one macro argument).  On the
+first pass when there are too few arguments, the 'shift' results in no
+output, but leaves an empty string to serve as the required second
+argument for the second pass.  Put another way, '`$1', shift($@)' is not
+the same as '$@', since only the former guarantees at least two
+arguments.
+
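+   The difference is easy to check with a small argument counter (the
+macro names below are illustrative only):
+
+     define(`nargs', `$#')
+     =>
+     define(`via_at', `nargs($@)')
+     =>
+     define(`via_shift', `nargs(`$1', shift($@))')
+     =>
+     via_at(`a')
+     =>1
+     via_shift(`a')
+     =>2
+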
+   Sometimes, a recursive algorithm requires adding quotes to each
+element, or treating multiple arguments as a single element:
+
+ -- Composite: quote (...)
+ -- Composite: dquote (...)
+ -- Composite: dquote_elt (...)
+     Takes any number of arguments, and adds quoting.  With 'quote',
+     only one level of quoting is added, effectively removing whitespace
+     after commas and turning multiple arguments into a single string.
+     With 'dquote', two levels of quoting are added, one around each
+     element, and one around the list.  And with 'dquote_elt', two
+     levels of quoting are added around each element.
+
+   An actual implementation of these three macros is distributed as
+'m4-1.4.17/examples/quote.m4' in this package.  First, let's examine
+their usage:
+
+     $ m4 -I examples
+     include(`quote.m4')
+     =>
+     -quote-dquote-dquote_elt-
+     =>----
+     -quote()-dquote()-dquote_elt()-
+     =>--`'-`'-
+     -quote(`1')-dquote(`1')-dquote_elt(`1')-
+     =>-1-`1'-`1'-
+     -quote(`1', `2')-dquote(`1', `2')-dquote_elt(`1', `2')-
+     =>-1,2-`1',`2'-`1',`2'-
+     define(`n', `$#')dnl
+     -n(quote(`1', `2'))-n(dquote(`1', `2'))-n(dquote_elt(`1', `2'))-
+     =>-1-1-2-
+     dquote(dquote_elt(`1', `2'))
+     =>``1'',``2''
+     dquote_elt(dquote(`1', `2'))
+     =>``1',`2''
+
+   The last two lines show that when given two arguments, 'dquote'
+results in one string, while 'dquote_elt' results in two.  Now, examine
+the implementation.  Note that 'quote' and 'dquote_elt' make decisions
+based on their number of arguments, so that when called without
+arguments, they result in nothing instead of a quoted empty string; this
+is so that it is possible to distinguish between no arguments and an
+empty first argument.  'dquote', on the other hand, results in a string
+no matter what, since it is still possible to tell whether it was
+invoked without arguments based on the resulting string.
+
+     $ m4 -I examples
+     undivert(`quote.m4')dnl
+     =>divert(`-1')
+     =># quote(args) - convert args to single-quoted string
+     =>define(`quote', `ifelse(`$#', `0', `', ``$*'')')
+     =># dquote(args) - convert args to quoted list of quoted strings
+     =>define(`dquote', ``$@'')
+     =># dquote_elt(args) - convert args to list of double-quoted strings
+     =>define(`dquote_elt', `ifelse(`$#', `0', `', `$#', `1', ```$1''',
+     =>                             ```$1'',$0(shift($@))')')
+     =>divert`'dnl
+
+   It is worth pointing out that 'quote(ARGS)' is more efficient than
+'joinall(`,', ARGS)' for producing the same output.
+
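+   As a quick check of that claim (assuming both example files are
+available via '-I examples'), the two spellings produce identical text:
+
+     $ m4 -I examples
+     include(`quote.m4')include(`join.m4')
+     =>
+     quote(`a', `b', `c')
+     =>a,b,c
+     joinall(`,', `a', `b', `c')
+     =>a,b,c
+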
+   One more useful macro based on 'shift' allows portably selecting an
+arbitrary argument (usually greater than the ninth argument), without
+relying on the GNU extension of multi-digit arguments (*note
+Arguments::).
+
+ -- Composite: argn (N, ...)
+     Expands to argument N out of the remaining arguments.  N must be a
+     positive number.  Usually invoked as 'argn(`N',$@)'.
+
+   It is implemented as:
+
+     define(`argn', `ifelse(`$1', 1, ``$2'',
+       `argn(decr(`$1'), shift(shift($@)))')')
+     =>
+     argn(`1', `a')
+     =>a
+     define(`foo', `argn(`11', $@)')
+     =>
+     foo(`a', `b', `c', `d', `e', `f', `g', `h', `i', `j', `k', `l')
+     =>k
+
+
+File: m4.info,  Node: Forloop,  Next: Foreach,  Prev: Shift,  Up: Conditionals
+
+6.4 Iteration by counting
+=========================
+
+Here is an example of a loop macro that implements a simple for loop.
+
+ -- Composite: forloop (ITERATOR, START, END, TEXT)
+     Takes the name in ITERATOR, which must be a valid macro name, and
+     successively assigns it each integer value from START to END,
+     inclusive.  For each assignment to ITERATOR, appends TEXT to the
+     expansion of the 'forloop'.  TEXT may refer to ITERATOR.  Any
+     definition of ITERATOR prior to this invocation is restored.
+
+   It can, for example, be used for simple counting:
+
+     $ m4 -I examples
+     include(`forloop.m4')
+     =>
+     forloop(`i', `1', `8', `i ')
+     =>1 2 3 4 5 6 7 8 
+
+   For-loops can be nested, like:
+
+     $ m4 -I examples
+     include(`forloop.m4')
+     =>
+     forloop(`i', `1', `4', `forloop(`j', `1', `8', ` (i, j)')
+     ')
+     => (1, 1) (1, 2) (1, 3) (1, 4) (1, 5) (1, 6) (1, 7) (1, 8)
+     => (2, 1) (2, 2) (2, 3) (2, 4) (2, 5) (2, 6) (2, 7) (2, 8)
+     => (3, 1) (3, 2) (3, 3) (3, 4) (3, 5) (3, 6) (3, 7) (3, 8)
+     => (4, 1) (4, 2) (4, 3) (4, 4) (4, 5) (4, 6) (4, 7) (4, 8)
+     =>
+
+   The implementation of the 'forloop' macro is fairly straightforward.
+The 'forloop' macro itself is simply a wrapper, which saves the previous
+definition of the first argument, calls the internal macro '_forloop',
+and re-establishes the saved definition of the first argument.
+
+   The macro '_forloop' expands the fourth argument once, and tests to
+see if the iterator has reached the final value.  If it has not
+finished, it increments the iterator (using the predefined macro 'incr',
+*note Incr::), and recurses.
+
+   Here is an actual implementation of 'forloop', distributed as
+'m4-1.4.17/examples/forloop.m4' in this package:
+
+     $ m4 -I examples
+     undivert(`forloop.m4')dnl
+     =>divert(`-1')
+     =># forloop(var, from, to, stmt) - simple version
+     =>define(`forloop', `pushdef(`$1', `$2')_forloop($@)popdef(`$1')')
+     =>define(`_forloop',
+     =>       `$4`'ifelse($1, `$3', `', `define(`$1', incr($1))$0($@)')')
+     =>divert`'dnl
+
+   Notice the careful use of quotes.  Certain macro arguments are left
+unquoted, each for its own reason.  Try to find out _why_ these
+arguments are left unquoted, and see what happens if they are quoted.
+(As presented, these two macros are useful but not very robust for
+general use.  They lack even basic error handling for cases like START
+greater than END, END not numeric, or ITERATOR not being a macro name.
+See if you can improve these macros; or *note Answers: Improved
+forloop.).
+
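+   As a hint toward such an improvement, one possible guard (the name
+'forloop_safe' is invented for this sketch, and it only addresses the
+case of START greater than END) simply refuses to iterate when the
+bounds are out of order:
+
+     $ m4 -I examples
+     include(`forloop.m4')
+     =>
+     define(`forloop_safe',
+       `ifelse(eval(`($2) <= ($3)'), `1', `forloop($@)')')
+     =>
+     forloop_safe(`i', `1', `3', `i ')
+     =>1 2 3 
+     forloop_safe(`i', `3', `1', `i ')
+     =>
+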
+
+File: m4.info,  Node: Foreach,  Next: Stacks,  Prev: Forloop,  Up: Conditionals
+
+6.5 Iteration by list contents
+==============================
+
+Here is an example of a loop macro that implements list iteration.
+
+ -- Composite: foreach (ITERATOR, PAREN-LIST, TEXT)
+ -- Composite: foreachq (ITERATOR, QUOTE-LIST, TEXT)
+     Takes the name in ITERATOR, which must be a valid macro name, and
+     successively assigns it each value from PAREN-LIST or QUOTE-LIST.
+     In 'foreach', PAREN-LIST is a comma-separated list of elements
+     contained in parentheses.  In 'foreachq', QUOTE-LIST is a
+     comma-separated list of elements contained in a quoted string.  For
+     each assignment to ITERATOR, appends TEXT to the overall expansion.
+     TEXT may refer to ITERATOR.  Any definition of ITERATOR prior to
+     this invocation is restored.
+
+   As an example, this displays each word in a list inside of a
+sentence, using an implementation of 'foreach' distributed as
+'m4-1.4.17/examples/foreach.m4', and 'foreachq' in
+'m4-1.4.17/examples/foreachq.m4'.
+
+     $ m4 -I examples
+     include(`foreach.m4')
+     =>
+     foreach(`x', (foo, bar, foobar), `Word was: x
+     ')dnl
+     =>Word was: foo
+     =>Word was: bar
+     =>Word was: foobar
+     include(`foreachq.m4')
+     =>
+     foreachq(`x', `foo, bar, foobar', `Word was: x
+     ')dnl
+     =>Word was: foo
+     =>Word was: bar
+     =>Word was: foobar
+
+   It is possible to be more complex; each element of the PAREN-LIST or
+QUOTE-LIST can itself be a list, to pass as further arguments to a
+helper macro.  This example generates a shell case statement:
+
+     $ m4 -I examples
+     include(`foreach.m4')
+     =>
+     define(`_case', `  $1)
+         $2=" $1";;
+     ')dnl
+     define(`_cat', `$1$2')dnl
+     case $`'1 in
+     =>case $1 in
+     foreach(`x', `(`(`a', `vara')', `(`b', `varb')', `(`c', `varc')')',
+             `_cat(`_case', x)')dnl
+     =>  a)
+     =>    vara=" a";;
+     =>  b)
+     =>    varb=" b";;
+     =>  c)
+     =>    varc=" c";;
+     esac
+     =>esac
+
+   The implementation of the 'foreach' macro is a bit more involved; it
+is a wrapper around two helper macros.  First, '_arg1' is needed to grab
+the first element of a list.  Second, '_foreach' implements the
+recursion, successively walking through the original list.  Here is a
+simple implementation of 'foreach':
+
+     $ m4 -I examples
+     undivert(`foreach.m4')dnl
+     =>divert(`-1')
+     =># foreach(x, (item_1, item_2, ..., item_n), stmt)
+     =>#   parenthesized list, simple version
+     =>define(`foreach', `pushdef(`$1')_foreach($@)popdef(`$1')')
+     =>define(`_arg1', `$1')
+     =>define(`_foreach', `ifelse(`$2', `()', `',
+     =>  `define(`$1', _arg1$2)$3`'$0(`$1', (shift$2), `$3')')')
+     =>divert`'dnl
+
+   Unfortunately, that implementation is not robust to macro names as
+list elements.  Each iteration of '_foreach' is stripping another layer
+of quotes, leading to erratic results if list elements are not already
+fully expanded.  The first cut at implementing 'foreachq' takes this
+into account.  Also, when using quoted elements in a PAREN-LIST, the
+overall list must be quoted.  A QUOTE-LIST has the nice property of
+requiring fewer characters to create a list containing the same quoted
+elements.  To see the difference between the two macros, we attempt to
+pass double-quoted macro names in a list, expecting the macro name on
+output after one layer of quotes is removed during list iteration and
+the final layer removed during the final rescan:
+
+     $ m4 -I examples
+     define(`a', `1')define(`b', `2')define(`c', `3')
+     =>
+     include(`foreach.m4')
+     =>
+     include(`foreachq.m4')
+     =>
+     foreach(`x', `(``a'', ``(b'', ``c)'')', `x
+     ')
+     =>1
+     =>(2)1
+     =>
+     =>, x
+     =>)
+     foreachq(`x', ```a'', ``(b'', ``c)''', `x
+     ')dnl
+     =>a
+     =>(b
+     =>c)
+
+   Obviously, 'foreachq' did a better job; here is its implementation:
+
+     $ m4 -I examples
+     undivert(`foreachq.m4')dnl
+     =>include(`quote.m4')dnl
+     =>divert(`-1')
+     =># foreachq(x, `item_1, item_2, ..., item_n', stmt)
+     =>#   quoted list, simple version
+     =>define(`foreachq', `pushdef(`$1')_foreachq($@)popdef(`$1')')
+     =>define(`_arg1', `$1')
+     =>define(`_foreachq', `ifelse(quote($2), `', `',
+     =>  `define(`$1', `_arg1($2)')$3`'$0(`$1', `shift($2)', `$3')')')
+     =>divert`'dnl
+
+   Notice that '_foreachq' had to use the helper macro 'quote' defined
+earlier (*note Shift::), to ensure that the embedded 'ifelse' call does
+not go haywire if a list element contains a comma.  Unfortunately, this
+implementation of 'foreachq' has its own severe flaw.  Whereas the
+'foreach' implementation was linear, this macro is quadratic in the
+number of list elements, and is much more likely to trip up the limit
+set by the command line option '--nesting-limit' (or '-L', *note
+Invoking m4: Limits control.).  Additionally, this implementation does
+not expand 'defn(`ITERATOR')' very well, when compared with 'foreach'.
+
+     $ m4 -I examples
+     include(`foreach.m4')include(`foreachq.m4')
+     =>
+     foreach(`name', `(`a', `b')', ` defn(`name')')
+     => a b
+     foreachq(`name', ``a', `b'', ` defn(`name')')
+     => _arg1(`a', `b') _arg1(shift(`a', `b'))
+
+   It is possible to have robust iteration with linear behavior and sane
+ITERATOR contents for either list style.  See if you can learn from the
+best elements of both of these implementations to create robust macros
+(or *note Answers: Improved foreach.).
+
+
+File: m4.info,  Node: Stacks,  Next: Composition,  Prev: Foreach,  Up: Conditionals
+
+6.6 Working with definition stacks
+==================================
+
+Thanks to 'pushdef', manipulation of a stack is an intrinsic operation
+in 'm4'.  Normally, only the topmost definition in a stack is important,
+but sometimes, it is desirable to manipulate the entire definition
+stack.
+
+ -- Composite: stack_foreach (MACRO, ACTION)
+ -- Composite: stack_foreach_lifo (MACRO, ACTION)
+     For each of the 'pushdef' definitions associated with MACRO, invoke
+     the macro ACTION with a single argument of that definition.
+     'stack_foreach' visits the oldest definition first, while
+     'stack_foreach_lifo' visits the current definition first.  ACTION
+     should not modify or dereference MACRO.  There are a few special
+     macros, such as 'defn', which cannot be used as the MACRO
+     parameter.
+
+   A sample implementation of these macros is distributed in the file
+'m4-1.4.17/examples/stack.m4'.
+
+     $ m4 -I examples
+     include(`stack.m4')
+     =>
+     pushdef(`a', `1')pushdef(`a', `2')pushdef(`a', `3')
+     =>
+     define(`show', ``$1'
+     ')
+     =>
+     stack_foreach(`a', `show')dnl
+     =>1
+     =>2
+     =>3
+     stack_foreach_lifo(`a', `show')dnl
+     =>3
+     =>2
+     =>1
+
+   Now for the implementation.  Note the definition of a helper macro,
+'_stack_reverse', which destructively swaps the contents of one stack of
+definitions into the reverse order in the temporary macro 'tmp-$1'.  By
+calling the helper twice, the original order is restored back into the
+macro '$1'; since the operation is destructive, this explains why '$1'
+must not be modified or dereferenced during the traversal.  The caller
+can then inject additional code to pass the definition currently being
+visited to '$2'.  The choice of helper names is intentional; since '-'
+is not valid as part of a macro name, there is no risk of conflict with
+a valid macro name, and the code is guaranteed to use 'defn' where
+necessary.  Finally, note that any macro used in the traversal of a
+'pushdef' stack, such as 'pushdef' or 'defn', cannot be handled by
+'stack_foreach', since the macro would temporarily be undefined during
+the algorithm.
+
+     $ m4 -I examples
+     undivert(`stack.m4')dnl
+     =>divert(`-1')
+     =># stack_foreach(macro, action)
+     =># Invoke ACTION with a single argument of each definition
+     =># from the definition stack of MACRO, starting with the oldest.
+     =>define(`stack_foreach',
+     =>`_stack_reverse(`$1', `tmp-$1')'dnl
+     =>`_stack_reverse(`tmp-$1', `$1', `$2(defn(`$1'))')')
+     =># stack_foreach_lifo(macro, action)
+     =># Invoke ACTION with a single argument of each definition
+     =># from the definition stack of MACRO, starting with the newest.
+     =>define(`stack_foreach_lifo',
+     =>`_stack_reverse(`$1', `tmp-$1', `$2(defn(`$1'))')'dnl
+     =>`_stack_reverse(`tmp-$1', `$1')')
+     =>define(`_stack_reverse',
+     =>`ifdef(`$1', `pushdef(`$2', defn(`$1'))$3`'popdef(`$1')$0($@)')')
+     =>divert`'dnl
+
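+   As a small demonstration of traversing a stack without modifying it,
+a hypothetical 'stack_depth' macro (not part of 'stack.m4') can count
+the definitions by bumping a counter from within the ACTION:
+
+     $ m4 -I examples
+     include(`stack.m4')
+     =>
+     define(`_count', `define(`depth', incr(depth))')dnl
+     define(`stack_depth',
+       `define(`depth', `0')stack_foreach(`$1', `_count')depth')dnl
+     pushdef(`x', `1')pushdef(`x', `2')pushdef(`x', `3')
+     =>
+     stack_depth(`x')
+     =>3
+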
+
+File: m4.info,  Node: Composition,  Prev: Stacks,  Up: Conditionals
+
+6.7 Building macros with macros
+===============================
+
+Since m4 is a macro language, it is possible to write macros that can
+build other macros.  First on the list is a way to automate the creation
+of blind macros.
+
+ -- Composite: define_blind (NAME, [VALUE])
+     Defines NAME as a blind macro, such that NAME will expand to VALUE
+     only when given explicit arguments.  VALUE should not be the result
+     of 'defn' (*note Defn::).  This macro is only recognized with
+     parameters, and results in an empty string.
+
+   Defining a macro to define another macro can be a bit tricky.  We
+want to use a literal '$#' in the argument to the nested 'define'.
+However, if '$' and '#' are adjacent in the definition of
+'define_blind', then it would be expanded as the number of arguments to
+'define_blind' rather than the intended number of arguments to NAME.
+The solution is to pass the difficult characters through extra arguments
+to a helper macro '_define_blind'.  When composing macros, it is a
+common idiom to need a helper macro to concatenate text that forms
+parameters in the composed macro, rather than interpreting the text as a
+parameter of the composing macro.
+
+   As for the limitation against using 'defn', there are two reasons.
+If a macro was previously defined with 'define_blind', then it can
+safely be renamed to a new blind macro using plain 'define'; using
+'define_blind' to rename it just adds another layer of 'ifelse',
+occupying memory and slowing down execution.  And if a macro is a
+builtin, then it would result in an attempt to define a macro consisting
+of both text and a builtin token; this is not supported, and the builtin
+token is flattened to an empty string.
+
+   With that explanation, here's the definition, and some sample usage.
+Notice that 'define_blind' is itself a blind macro.
+
+     $ m4 -d
+     define(`define_blind', `ifelse(`$#', `0', ``$0'',
+     `_$0(`$1', `$2', `$'`#', `$'`0')')')
+     =>
+     define(`_define_blind', `define(`$1',
+     `ifelse(`$3', `0', ``$4'', `$2')')')
+     =>
+     define_blind
+     =>define_blind
+     define_blind(`foo', `arguments were $*')
+     =>
+     foo
+     =>foo
+     foo(`bar')
+     =>arguments were bar
+     define(`blah', defn(`foo'))
+     =>
+     blah
+     =>blah
+     blah(`a', `b')
+     =>arguments were a,b
+     defn(`blah')
+     =>ifelse(`$#', `0', ``$0'', `arguments were $*')
+
+   Another interesting composition tactic is argument "currying", or
+factoring a macro that takes multiple arguments for use in a context
+that provides exactly one argument.
+
+ -- Composite: curry (MACRO, ...)
+     Expand to a macro call that takes exactly one argument, then
+     appends that argument to the original arguments and invokes MACRO
+     with the resulting list of arguments.
+
+   A demonstration of currying makes the intent of this macro a little
+more obvious.  The macro 'stack_foreach' mentioned earlier is an example
+of a context that provides exactly one argument to a macro name.  But
+coupled with currying, we can invoke 'reverse' with two arguments for
+each definition of a macro stack.  This example uses the file
+'m4-1.4.17/examples/curry.m4' included in the distribution.
+
+     $ m4 -I examples
+     include(`curry.m4')include(`stack.m4')
+     =>
+     define(`reverse', `ifelse(`$#', `0', , `$#', `1', ``$1'',
+                               `reverse(shift($@)), `$1'')')
+     =>
+     pushdef(`a', `1')pushdef(`a', `2')pushdef(`a', `3')
+     =>
+     stack_foreach(`a', `:curry(`reverse', `4')')
+     =>:1, 4:2, 4:3, 4
+     curry(`curry', `reverse', `1')(`2')(`3')
+     =>3, 2, 1
+
+   Now for the implementation.  Notice how 'curry' leaves off with a
+macro name but no open parenthesis, while still in the middle of
+collecting arguments for '$1'.  The macro '_curry' is the helper macro
+that takes one argument, then adds it to the list and finally supplies
+the closing parenthesis.  The use of a comma inside the 'shift' call
+allows currying to also work for a macro that takes one argument,
+although it often makes more sense to invoke that macro directly rather
+than going through 'curry'.
+
+     $ m4 -I examples
+     undivert(`curry.m4')dnl
+     =>divert(`-1')
+     =># curry(macro, args)
+     =># Expand to a macro call that takes one argument, then invoke
+     =># macro(args, extra).
+     =>define(`curry', `$1(shift($@,)_$0')
+     =>define(`_curry', ``$1')')
+     =>divert`'dnl
+
+   Unfortunately, with M4 1.4.x, 'curry' is unable to handle builtin
+tokens, which are silently flattened to the empty string when passed
+through another text macro.  This limitation will be lifted in a future
+release of M4.
+
+   Putting the last few concepts together, it is possible to copy or
+rename an entire stack of macro definitions.
+
+ -- Composite: copy (SOURCE, DEST)
+ -- Composite: rename (SOURCE, DEST)
+     Ensure that DEST is undefined, then define it to the same stack of
+     definitions currently in SOURCE.  'copy' leaves SOURCE unchanged,
+     while 'rename' undefines SOURCE.  There are only a few macros, such
+     as 'copy' or 'defn', which cannot be copied via this macro.
+
+   The implementation is relatively straightforward (although since it
+uses 'curry', it is unable to copy builtin macros, such as the second
+definition of 'a' as a synonym for 'divnum'.  See if you can design a
+version that works around this limitation, or *note Answers: Improved
+copy.).
+
+     $ m4 -I examples
+     include(`curry.m4')include(`stack.m4')
+     =>
+     define(`rename', `copy($@)undefine(`$1')')dnl
+     define(`copy', `ifdef(`$2', `errprint(`$2 already defined
+     ')m4exit(`1')',
+        `stack_foreach(`$1', `curry(`pushdef', `$2')')')')dnl
+     pushdef(`a', `1')pushdef(`a', defn(`divnum'))pushdef(`a', `2')
+     =>
+     copy(`a', `b')
+     =>
+     rename(`b', `c')
+     =>
+     a b c
+     =>2 b 2
+     popdef(`a', `c')c a
+     => 0
+     popdef(`a', `c')a c
+     =>1 1
+
+
+File: m4.info,  Node: Debugging,  Next: Input Control,  Prev: Conditionals,  Up: Top
+
+7 How to debug macros and input
+*******************************
+
+Macros written for 'm4' often do not work as intended on the first try
+(as is the case with most programming languages).
+Fortunately, there is support for macro debugging in 'm4'.
+
+* Menu:
+
+* Dumpdef::                     Displaying macro definitions
+* Trace::                       Tracing macro calls
+* Debug Levels::                Controlling debugging output
+* Debug Output::                Saving debugging output
+
+
+File: m4.info,  Node: Dumpdef,  Next: Trace,  Up: Debugging
+
+7.1 Displaying macro definitions
+================================
+
+If you want to see what a name expands into, you can use the builtin
+'dumpdef':
+
+ -- Builtin: dumpdef ([NAMES...])
+     Accepts any number of arguments.  If called without any arguments,
+     it displays the definitions of all known names, otherwise it
+     displays the definitions of the NAMES given.  The output is printed
+     to the current debug file (usually standard error), and is sorted
+     by name.  If an unknown name is encountered, a warning is printed.
+
+     The expansion of 'dumpdef' is void.
+
+     $ m4 -d
+     define(`foo', `Hello world.')
+     =>
+     dumpdef(`foo')
+     error->foo:	`Hello world.'
+     =>
+     dumpdef(`define')
+     error->define:	<define>
+     =>
+
+   The last example shows how builtin macro definitions are displayed.
+The definition that is dumped corresponds to what would occur if the
+macro were to be called at that point, even if other definitions are
+still live due to redefining a macro during argument collection.
+
+     $ m4 -d
+     pushdef(`f', ``$0'1')pushdef(`f', ``$0'2')
+     =>
+     f(popdef(`f')dumpdef(`f'))
+     error->f:	``$0'1'
+     =>f2
+     f(popdef(`f')dumpdef(`f'))
+     error->m4:stdin:3: undefined macro `f'
+     =>f1
+
+   *Note Debug Levels::, for information on controlling the details of
+the display.
+
+
+File: m4.info,  Node: Trace,  Next: Debug Levels,  Prev: Dumpdef,  Up: Debugging
+
+7.2 Tracing macro calls
+=======================
+
+It is possible to trace macro calls and expansions through the builtins
+'traceon' and 'traceoff':
+
+ -- Builtin: traceon ([NAMES...])
+ -- Builtin: traceoff ([NAMES...])
+     When called without any arguments, 'traceon' and 'traceoff' will
+     turn tracing on and off, respectively, for all currently defined
+     macros.
+
+     When called with arguments, only the macros listed in NAMES are
+     affected, whether or not they are currently defined.
+
+     The expansion of 'traceon' and 'traceoff' is void.
+
+   Whenever a traced macro is called and the arguments have been
+collected, the call is displayed.  If the expansion of the macro call is
+not void, the expansion can be displayed after the call.  The output is
+printed to the current debug file (defaulting to standard error, *note
+Debug Output::).
+
+     $ m4 -d
+     define(`foo', `Hello World.')
+     =>
+     define(`echo', `$@')
+     =>
+     traceon(`foo', `echo')
+     =>
+     foo
+     error->m4trace: -1- foo -> `Hello World.'
+     =>Hello World.
+     echo(`gnus', `and gnats')
+     error->m4trace: -1- echo(`gnus', `and gnats') -> ``gnus',`and gnats''
+     =>gnus,and gnats
+
+   The number between dashes is the depth of the expansion.  It is one
+most of the time, signifying an expansion at the outermost level, but it
+increases when macro arguments contain unquoted macro calls.  The
+maximum number that will appear between dashes is controlled by the
+option '--nesting-limit' (or '-L', *note Invoking m4: Limits control.).
+Additionally, the option '--trace' (or '-t') can be used to invoke
+'traceon(NAME)' before parsing input.
+
+     $ m4 -L 3 -t ifelse
+     ifelse(`one level')
+     error->m4trace: -1- ifelse
+     =>
+     ifelse(ifelse(ifelse(`three levels')))
+     error->m4trace: -3- ifelse
+     error->m4trace: -2- ifelse
+     error->m4trace: -1- ifelse
+     =>
+     ifelse(ifelse(ifelse(ifelse(`four levels'))))
+     error->m4:stdin:3: recursion limit of 3 exceeded, use -L<N> to change it
+
+   Tracing by name is an attribute that is preserved whether the macro
+is defined or not.  This allows the selection of macros to trace before
+those macros are defined.
+
+     $ m4 -d
+     traceoff(`foo')
+     =>
+     traceon(`foo')
+     =>
+     foo
+     =>foo
+     defn(`foo')
+     =>
+     define(`foo', `bar')
+     =>
+     foo
+     error->m4trace: -1- foo -> `bar'
+     =>bar
+     undefine(`foo')
+     =>
+     ifdef(`foo', `yes', `no')
+     =>no
+     indir(`foo')
+     error->m4:stdin:9: undefined macro `foo'
+     =>
+     define(`foo', `blah')
+     =>
+     foo
+     error->m4trace: -1- foo -> `blah'
+     =>blah
+     traceoff
+     =>
+     foo
+     =>blah
+
+   Tracing even works on builtins.  However, 'defn' (*note Defn::) does
+not transfer tracing status.
+
+     $ m4 -d
+     traceon(`traceon')
+     =>
+     traceon(`traceoff')
+     error->m4trace: -1- traceon(`traceoff')
+     =>
+     traceoff(`traceoff')
+     error->m4trace: -1- traceoff(`traceoff')
+     =>
+     traceoff(`traceon')
+     =>
+     traceon(`eval', `m4_divnum')
+     =>
+     define(`m4_eval', defn(`eval'))
+     =>
+     define(`m4_divnum', defn(`divnum'))
+     =>
+     eval(divnum)
+     error->m4trace: -1- eval(`0') -> `0'
+     =>0
+     m4_eval(m4_divnum)
+     error->m4trace: -2- m4_divnum -> `0'
+     =>0
+
+   *Note Debug Levels::, for information on controlling the details of
+the display.  The format of the trace output is not specified by POSIX,
+and varies between implementations of 'm4'.
+
+
+File: m4.info,  Node: Debug Levels,  Next: Debug Output,  Prev: Trace,  Up: Debugging
+
+7.3 Controlling debugging output
+================================
+
+The '-d' option to 'm4' (or '--debug', *note Invoking m4: Debugging
+options.) controls the amount of details presented in three categories
+of output.  Trace output is requested by 'traceon' (*note Trace::), and
+each line is prefixed by 'm4trace:' in relation to a macro invocation.
+Debug output tracks useful events not associated with a macro
+invocation, and each line is prefixed by 'm4debug:'.  Finally, 'dumpdef'
+(*note Dumpdef::) output is affected, with no prefix added to the output
+lines.
+
+   The FLAGS following the option can be one or more of the following:
+
+'a'
+     In trace output, show the actual arguments that were collected
+     before invoking the macro.  This applies to all macro calls if the
+     't' flag is used, otherwise only the macros covered by calls of
+     'traceon'.  Arguments are subject to length truncation specified by
+     the command line option '--arglength' (or '-l').
+
+'c'
+     In trace output, show several trace lines for each macro call.  A
+     line is shown when the macro is seen, but before the arguments are
+     collected; a second line when the arguments have been collected and
+     a third line after the call has completed.
+
+'e'
+     In trace output, show the expansion of each macro call, if it is
+     not void.  This applies to all macro calls if the 't' flag is used,
+     otherwise only the macros covered by calls of 'traceon'.  The
+     expansion is subject to length truncation specified by the command
+     line option '--arglength' (or '-l').
+
+'f'
+     In debug and trace output, include the name of the current input
+     file in the output line.
+
+'i'
+     In debug output, print a message each time the current input file
+     is changed.
+
+'l'
+     In debug and trace output, include the current input line number in
+     the output line.
+
+'p'
+     In debug output, print a message when a named file is found through
+     the path search mechanism (*note Search Path::), giving the actual
+     file name used.
+
+'q'
+     In trace and dumpdef output, quote actual arguments and macro
+     expansions in the display with the current quotes.  This is useful
+     in connection with the 'a' and 'e' flags above.
+
+'t'
+     In trace output, trace all macro calls made in this invocation of
+     'm4', regardless of the settings of 'traceon'.
+
+'x'
+     In trace output, add a unique 'macro call id' to each line of the
+     trace output.  This is useful in connection with the 'c' flag
+     above.
+
+'V'
+     A shorthand for all of the above flags.
+
+   If no flags are specified with the '-d' option, the default is 'aeq'.
+The examples throughout this manual assume the default flags.
+
+   There is a builtin macro 'debugmode', which allows on-the-fly control
+of the debugging output format:
+
+ -- Builtin: debugmode ([FLAGS])
+     The argument FLAGS should be a subset of the letters listed above.
+     As special cases, if the argument starts with a '+', the flags are
+     added to the current debug flags, and if it starts with a '-', they
+     are removed.  If no argument is present, all debugging flags are
+     cleared (as if no '-d' was given), and with an empty argument the
+     flags are reset to the default of 'aeq'.
+
+     The expansion of 'debugmode' is void.
+
+     $ m4
+     define(`foo', `FOO')
+     =>
+     traceon(`foo')
+     =>
+     debugmode()
+     =>
+     foo
+     error->m4trace: -1- foo -> `FOO'
+     =>FOO
+     debugmode
+     =>
+     foo
+     error->m4trace: -1- foo
+     =>FOO
+     debugmode(`+l')
+     =>
+     foo
+     error->m4trace:8: -1- foo
+     =>FOO
+
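+   Individual flags can be removed the same way.  For instance (a sketch
+following the same conventions as above), dropping the 'e' flag keeps
+the traced call but suppresses the expansion part of the trace line:
+
+     $ m4 -d
+     define(`foo', `FOO')
+     =>
+     traceon(`foo')
+     =>
+     debugmode(`-e')
+     =>
+     foo
+     error->m4trace: -1- foo
+     =>FOO
+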
+   The following example demonstrates the behavior of length truncation,
+when specified on the command line.  Note that each argument and the
+final result are individually truncated.  Also, the special tokens for
+builtin functions are not truncated.
+
+     $ m4 -d -l 6
+     define(`echo', `$@')debugmode(`+t')
+     =>
+     echo(`1', `long string')
+     error->m4trace: -1- echo(`1', `long s...') -> ``1',`l...'
+     =>1,long string
+     indir(`echo', defn(`changequote'))
+     error->m4trace: -2- defn(`change...')
+     error->m4trace: -1- indir(`echo', <changequote>) -> ``''
+     =>
+
+   This example shows the effects of the debug flags that are not
+related to macro tracing.
+
+     $ m4 -dip -I examples
+     error->m4debug: input read from stdin
+     include(`foo')dnl
+     error->m4debug: path search for `foo' found `examples/foo'
+     error->m4debug: input read from examples/foo
+     =>bar
+     error->m4debug: input reverted to stdin, line 1
+     ^D
+     error->m4debug: input exhausted
+
+
+File: m4.info,  Node: Debug Output,  Prev: Debug Levels,  Up: Debugging
+
+7.4 Saving debugging output
+===========================
+
+Debug and tracing output can be redirected to files using either the
+'--debugfile' option to 'm4' (*note Invoking m4: Debugging options.), or
+with the builtin macro 'debugfile':
+
+ -- Builtin: debugfile ([FILE])
+     Sends all further debug and trace output to FILE, opened in append
+     mode.  If FILE is the empty string, debug and trace output are
+     discarded.  If 'debugfile' is called without any arguments, debug
+     and trace output are sent to standard error.  This does not affect
+     warnings, error messages, or 'errprint' output, which are always
+     sent to standard error.  If FILE cannot be opened, the current
+     debug file is unchanged, and an error is issued.
+
+     The expansion of 'debugfile' is void.
+
+     $ m4 -d
+     traceon(`divnum')
+     =>
+     divnum(`extra')
+     error->m4:stdin:2: Warning: excess arguments to builtin `divnum' ignored
+     error->m4trace: -1- divnum(`extra') -> `0'
+     =>0
+     debugfile()
+     =>
+     divnum(`extra')
+     error->m4:stdin:4: Warning: excess arguments to builtin `divnum' ignored
+     =>0
+     debugfile
+     =>
+     divnum
+     error->m4trace: -1- divnum -> `0'
+     =>0
+
+
+File: m4.info,  Node: Input Control,  Next: File Inclusion,  Prev: Debugging,  Up: Top
+
+8 Input control
+***************
+
+This chapter describes various builtin macros for controlling the input
+to 'm4'.
+
+* Menu:
+
+* Dnl::                         Deleting whitespace in input
+* Changequote::                 Changing the quote characters
+* Changecom::                   Changing the comment delimiters
+* Changeword::                  Changing the lexical structure of words
+* M4wrap::                      Saving text until end of input
+
+
+File: m4.info,  Node: Dnl,  Next: Changequote,  Up: Input Control
+
+8.1 Deleting whitespace in input
+================================
+
+The builtin 'dnl' stands for "Discard to Next Line":
+
+ -- Builtin: dnl
+     All characters, up to and including the next newline, are discarded
+     without performing any macro expansion.  A warning is issued if the
+     end of the file is encountered without a newline.
+
+     The expansion of 'dnl' is void.
+
+   It is often used in connection with 'define', to remove the newline
+that follows the call to 'define'.  Thus
+
+     define(`foo', `Macro `foo'.')dnl A very simple macro, indeed.
+     foo
+     =>Macro foo.
+
+   The input up to and including the next newline is discarded, as
+opposed to the way comments are treated (*note Comments::).
+
+   Usually, 'dnl' is immediately followed by an end of line or some
+other whitespace.  GNU 'm4' will produce a warning diagnostic if 'dnl'
+is followed by an open parenthesis.  In this case, 'dnl' will collect
+and process all arguments, looking for a matching close parenthesis.
+All predictable side effects resulting from this collection will take
+place.  'dnl' will return no output.  The input following the matching
+close parenthesis up to and including the next newline, on whatever line
+containing it, will still be discarded.
+
+     dnl(`args are ignored, but side effects occur',
+     define(`foo', `like this')) while this text is ignored: undefine(`foo')
+     error->m4:stdin:1: Warning: excess arguments to builtin `dnl' ignored
+     See how `foo' was defined, foo?
+     =>See how foo was defined, like this?
+
+   If the end of file is encountered without a newline character, a
+warning is issued and dnl stops consuming input.
+
+     m4wrap(`m4wrap(`2 hi
+     ')0 hi dnl 1 hi')
+     =>
+     define(`hi', `HI')
+     =>
+     ^D
+     error->m4:stdin:1: Warning: end of file treated as newline
+     =>0 HI 2 HI
+
+
+File: m4.info,  Node: Changequote,  Next: Changecom,  Prev: Dnl,  Up: Input Control
+
+8.2 Changing the quote characters
+=================================
+
+The default quote delimiters can be changed with the builtin
+'changequote':
+
+ -- Builtin: changequote ([START = '`'], [END = '''])
+     This sets START as the new begin-quote delimiter and END as the new
+     end-quote delimiter.  If both arguments are missing, the default
+     quotes ('`' and ''') are used.  If START is void, then quoting is
+     disabled.  Otherwise, if END is missing or void, the default
+     end-quote delimiter (''') is used.  The quote delimiters can be of
+     any length.
+
+     The expansion of 'changequote' is void.
+
+     changequote(`[', `]')
+     =>
+     define([foo], [Macro [foo].])
+     =>
+     foo
+     =>Macro foo.
+
+   The quotation strings can safely contain eight-bit characters.  If no
+single character is appropriate, START and END can be of any length.
+Other implementations cap the delimiter length to five characters, but
+GNU has no inherent limit.
+
+     changequote(`[[[', `]]]')
+     =>
+     define([[[foo]]], [[[Macro [[[[[foo]]]]].]]])
+     =>
+     foo
+     =>Macro [[foo]].
+
+   Calling 'changequote' with START as the empty string will effectively
+disable the quoting mechanism, leaving no way to quote text.  However,
+using an empty string is not portable, as some other implementations of
+'m4' revert to the default quoting, while others preserve the prior
+non-empty delimiter.  If START is not empty, then an empty END will use
+the default end-quote delimiter of ''', as otherwise, it would be
+impossible to end a quoted string.  Again, this is not portable, as some
+other 'm4' implementations reuse START as the end-quote delimiter, while
+others preserve the previous non-empty value.  Omitting both arguments
+restores the default begin-quote and end-quote delimiters; fortunately
+this behavior is portable to all implementations of 'm4'.
+
+     define(`foo', `Macro `FOO'.')
+     =>
+     changequote(`', `')
+     =>
+     foo
+     =>Macro `FOO'.
+     `foo'
+     =>`Macro `FOO'.'
+     changequote(`,)
+     =>
+     foo
+     =>Macro FOO.
+
+   There is no way in 'm4' to quote a string containing an unmatched
+begin-quote, except using 'changequote' to change the current quotes.
+
+   If the quotes should be changed from, say, '[' to '[[', temporary
+quote characters have to be defined.  To achieve this, two calls of
+'changequote' must be made, one for the temporary quotes and one for the
+new quotes.
+
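+   For instance, one possible sequence (a sketch, not one of the
+distributed examples) goes through a temporary pair such as '<<<' and
+'>>>' on the way from '[' to '[[':
+
+     changequote(`[', `]')
+     =>
+     define([foo], [bar])
+     =>
+     changequote([<<<], [>>>])
+     =>
+     changequote(<<<[[>>>, <<<]]>>>)
+     =>
+     [[foo]]
+     =>foo
+     foo
+     =>bar
+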
+   Macros are recognized in preference to the begin-quote string, so if
+a prefix of START can be recognized as part of a potential macro name,
+the quoting mechanism is effectively disabled.  Unless you use
+'changeword' (*note Changeword::), this means that START should not
+begin with a letter, digit, or '_' (underscore).  However, even though
+quoted strings are not recognized, the quote characters can still be
+discerned in macro expansion and in trace output.
+
+     define(`echo', `$@')
+     =>
+     define(`hi', `HI')
+     =>
+     changequote(`q', `Q')
+     =>
+     q hi Q hi
+     =>q HI Q HI
+     echo(hi)
+     =>qHIQ
+     changequote
+     =>
+     changequote(`-', `EOF')
+     =>
+     - hi EOF hi
+     => hi  HI
+     changequote
+     =>
+     changequote(`1', `2')
+     =>
+     hi1hi2
+     =>hi1hi2
+     hi 1hi2
+     =>HI hi
+
+   Quotes are recognized in preference to argument collection.  In
+particular, if START is a single '(', then argument collection is
+effectively disabled.  For portability with other implementations, it is
+a good idea to avoid '(', ',', and ')' as the first character in START.
+
+     define(`echo', `$#:$@:')
+     =>
+     define(`hi', `HI')
+     =>
+     changequote(`(',`)')
+     =>
+     echo(hi)
+     =>0::hi
+     changequote
+     =>
+     changequote(`((', `))')
+     =>
+     echo(hi)
+     =>1:HI:
+     echo((hi))
+     =>0::hi
+     changequote
+     =>
+     changequote(`,', `)')
+     =>
+     echo(hi,hi)bye)
+     =>1:HIhibye:
+
+   However, if you are not worried about portability, using '(' and ')'
+as quoting characters has an interesting property--you can use it to
+compute a quoted string containing the expansion of any quoted text, as
+long as the expansion results in both balanced quotes and balanced
+parentheses.  The trick is realizing 'expand' uses '$1' unquoted, to
+trigger its expansion using the normal quoting characters, but uses
+extra parentheses to group unquoted commas that occur in the expansion
+without consuming whitespace following those commas.  Then '_expand'
+uses 'changequote' to convert the extra parentheses back into quoting
+characters.  Note that it takes two more 'changequote' invocations to
+restore the original quotes.  Contrast the behavior on whitespace when
+using '$*', via 'quote', to attempt the same task.
+
+     changequote(`[', `]')dnl
+     define([a], [1, (b)])dnl
+     define([b], [2])dnl
+     define([quote], [[$*]])dnl
+     define([expand], [_$0(($1))])dnl
+     define([_expand],
+       [changequote([(], [)])$1changequote`'changequote(`[', `]')])dnl
+     expand([a, a, [a, a], [[a, a]]])
+     =>1, (2), 1, (2), a, a, [a, a]
+     quote(a, a, [a, a], [[a, a]])
+     =>1,(2),1,(2),a, a,[a, a]
+
+   If END is a prefix of START, the end-quote will be recognized in
+preference to a nested begin-quote.  In particular, changing the quotes
+to have the same string for START and END disables nesting of quotes.
+When quote nesting is disabled, it is impossible to double-quote strings
+across macro expansions, so using the same string is not done very
+often.
+
+     define(`hi', `HI')
+     =>
+     changequote(`""', `"')
+     =>
+     ""hi"""hi"
+     =>hihi
+     ""hi" ""hi"
+     =>hi hi
+     ""hi"" "hi"
+     =>hi" "HI"
+     changequote
+     =>
+     `hi`hi'hi'
+     =>hi`hi'hi
+     changequote(`"', `"')
+     =>
+     "hi"hi"hi"
+     =>hiHIhi
+
+   It is an error if the end of file occurs within a quoted string.
+
+     `hello world'
+     =>hello world
+     `dangling quote
+     ^D
+     error->m4:stdin:2: ERROR: end of file in string
+
+     ifelse(`dangling quote
+     ^D
+     error->m4:stdin:1: ERROR: end of file in string
+
+
+File: m4.info,  Node: Changecom,  Next: Changeword,  Prev: Changequote,  Up: Input Control
+
+8.3 Changing the comment delimiters
+===================================
+
+The default comment delimiters can be changed with the builtin macro
+'changecom':
+
+ -- Builtin: changecom ([START], [END = <NL>])
+     This sets START as the new begin-comment delimiter and END as the
+     new end-comment delimiter.  If both arguments are missing, or START
+     is void, then comments are disabled.  Otherwise, if END is missing
+     or void, the default end-comment delimiter of newline is used.  The
+     comment delimiters can be of any length.
+
+     The expansion of 'changecom' is void.
+
+     define(`comment', `COMMENT')
+     =>
+     # A normal comment
+     =># A normal comment
+     changecom(`/*', `*/')
+     =>
+     # Not a comment anymore
+     =># Not a COMMENT anymore
+     But: /* this is a comment now */ while this is not a comment
+     =>But: /* this is a comment now */ while this is not a COMMENT
+
+   Note how comments are copied to the output, much as if they were
+quoted strings.  If you want the text inside a comment expanded, quote
+the begin-comment delimiter.
+
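+   For example (an illustrative session, reusing a 'comment' macro to
+make the expansion visible):
+
+     define(`comment', `COMMENT')
+     =>
+     changecom(`/*', `*/')
+     =>
+     `/*' this comment is expanded */
+     =>/* this COMMENT is expanded */
+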
+   Calling 'changecom' without any arguments, or with START as the empty
+string, will effectively disable the commenting mechanism.  To restore
+the original comment start of '#', you must explicitly ask for it.  If
+START is not empty, then an empty END will use the default end-comment
+delimiter of newline, as otherwise, it would be impossible to end a
+comment.  However, this is not portable, as some other 'm4'
+implementations preserve the previous non-empty delimiters instead.
+
+     define(`comment', `COMMENT')
+     =>
+     changecom
+     =>
+     # Not a comment anymore
+     =># Not a COMMENT anymore
+     changecom(`#', `')
+     =>
+     # comment again
+     =># comment again
+
+   The comment strings can safely contain eight-bit characters.  If no
+single character is appropriate, START and END can be of any length.
+Other implementations cap the delimiter length to five characters, but
+GNU has no inherent limit.
+
+   Comments are recognized in preference to macros.  However, this is
+not compatible with other implementations, where macros and even quoting
+take precedence over comments, so it may change in a future release.
+For portability, this means that START should not begin with a letter,
+digit, or '_' (underscore), and that neither the start-quote nor the
+start-comment string should be a prefix of the other.
+
+     define(`hi', `HI')
+     =>
+     define(`hi1hi2', `hello')
+     =>
+     changecom(`q', `Q')
+     =>
+     q hi Q hi
+     =>q hi Q HI
+     changecom(`1', `2')
+     =>
+     hi1hi2
+     =>hello
+     hi 1hi2
+     =>HI 1hi2
+
+   Comments are recognized in preference to argument collection.  In
+particular, if START is a single '(', then argument collection is
+effectively disabled.  For portability with other implementations, it is
+a good idea to avoid '(', ',', and ')' as the first character in START.
+
+     define(`echo', `$#:$*:$@:')
+     =>
+     define(`hi', `HI')
+     =>
+     changecom(`(',`)')
+     =>
+     echo(hi)
+     =>0:::(hi)
+     changecom
+     =>
+     changecom(`((', `))')
+     =>
+     echo(hi)
+     =>1:HI:HI:
+     echo((hi))
+     =>0:::((hi))
+     changecom(`,', `)')
+     =>
+     echo(hi,hi)bye)
+     =>1:HI,hi)bye:HI,hi)bye:
+     changecom
+     =>
+     echo(hi,`,`'hi',hi)
+     =>3:HI,,HI,HI:HI,,`'hi,HI:
+     echo(hi,`,`'hi',hi`'changecom(`,,', `hi'))
+     =>3:HI,,`'hi,HI:HI,,`'hi,HI:
+
+   It is an error if the end of file occurs within a comment.
+
+     changecom(`/*', `*/')
+     =>
+     /*dangling comment
+     ^D
+     error->m4:stdin:2: ERROR: end of file in comment
+
+
+File: m4.info,  Node: Changeword,  Next: M4wrap,  Prev: Changecom,  Up: Input Control
+
+8.4 Changing the lexical structure of words
+===========================================
+
+     The macro 'changeword' and all associated functionality is
+     experimental.  It is only available if the '--enable-changeword'
+     option was given to 'configure', at GNU 'm4' installation time.
+     The functionality will go away in the future, to be replaced by
+     other new features that are more efficient at providing the same
+     capabilities.  _Do not rely on it_.  Please direct your comments
+     about it the same way you would do for bugs.
+
+   A file being processed by 'm4' is split into quoted strings, words
+(potential macro names) and simple tokens (any other single character).
+Initially a word is defined by the following regular expression:
+
+     [_a-zA-Z][_a-zA-Z0-9]*
+
+   Using 'changeword', you can change this regular expression:
+
+ -- Optional builtin: changeword (REGEX)
+     Changes the regular expression for recognizing macro names to be
+     REGEX.  If REGEX is empty, use '[_a-zA-Z][_a-zA-Z0-9]*'.  REGEX
+     must obey the constraint that every prefix of the desired final
+     pattern is also accepted by the regular expression.  If REGEX
+     contains grouping parentheses, the macro invoked is the portion
+     that matched the first group, rather than the entire matching
+     string.
+
+     The expansion of 'changeword' is void.  The macro 'changeword' is
+     recognized only with parameters.
+
+   Relaxing the lexical rules of 'm4' might be useful (for example) if
+you wanted to apply translations to a file of numbers:
+
+     ifdef(`changeword', `', `errprint(` skipping: no changeword support
+     ')m4exit(`77')')dnl
+     changeword(`[_a-zA-Z0-9]+')
+     =>
+     define(`1', `0')1
+     =>0
+
+   Tightening the lexical rules is less useful, because it will
+generally make some of the builtins unavailable.  You could use it to
+prevent accidental call of builtins, for example:
+
+     ifdef(`changeword', `', `errprint(` skipping: no changeword support
+     ')m4exit(`77')')dnl
+     define(`_indir', defn(`indir'))
+     =>
+     changeword(`_[_a-zA-Z0-9]*')
+     =>
+     esyscmd(`foo')
+     =>esyscmd(foo)
+     _indir(`esyscmd', `echo hi')
+     =>hi
+     =>
+
+   Because 'm4' constructs its words a character at a time, there is a
+restriction on the regular expressions that may be passed to
+'changeword'.  This is that if your regular expression accepts 'foo', it
+must also accept 'f' and 'fo'.
+
+     ifdef(`changeword', `', `errprint(` skipping: no changeword support
+     ')m4exit(`77')')dnl
+     define(`foo
+     ', `bar
+     ')
+     =>
+     dnl This example wants to recognize changeword, dnl, and `foo\n'.
+     dnl First, we check that our regexp will match.
+     regexp(`changeword', `[cd][a-z]*\|foo[
+     ]')
+     =>0
+     regexp(`foo
+     ', `[cd][a-z]*\|foo[
+     ]')
+     =>0
+     regexp(`f', `[cd][a-z]*\|foo[
+     ]')
+     =>-1
+     foo
+     =>foo
+     changeword(`[cd][a-z]*\|foo[
+     ]')
+     =>
+     dnl Even though `foo\n' matches, we forgot to allow `f'.
+     foo
+     =>foo
+     changeword(`[cd][a-z]*\|fo*[
+     ]?')
+     =>
+     dnl Now we can call `foo\n'.
+     foo
+     =>bar
+
+   'changeword' has another function.  If the regular expression
+supplied contains any grouped subexpressions, then text outside the
+first of these is discarded before symbol lookup.  So:
+
+     ifdef(`changeword', `', `errprint(` skipping: no changeword support
+     ')m4exit(`77')')dnl
+     ifdef(`__unix__', ,
+           `errprint(` skipping: syscmd does not have unix semantics
+     ')m4exit(`77')')dnl
+     changecom(`/*', `*/')dnl
+     define(`foo', `bar')dnl
+     changeword(`#\([_a-zA-Z0-9]*\)')
+     =>
+     #esyscmd(`echo foo \#foo')
+     =>foo bar
+     =>
+
+   'm4' now requires a '#' mark at the beginning of every macro
+invocation, so one can use 'm4' to preprocess plain text without losing
+various words like 'divert'.
+
+   In 'm4', macro substitution is based on text, while in TeX, it is
+based on tokens.  'changeword' can throw this difference into relief.
+For example, here is the same idea represented in TeX and 'm4'.  First,
+the TeX version:
+
+     \def\a{\message{Hello}}
+     \catcode`\@=0
+     \catcode`\\=12
+     @a
+     @bye
+     =>Hello
+
+Then, the 'm4' version:
+
+     ifdef(`changeword', `', `errprint(` skipping: no changeword support
+     ')m4exit(`77')')dnl
+     define(`a', `errprint(`Hello')')dnl
+     changeword(`@\([_a-zA-Z0-9]*\)')
+     =>
+     @a
+     =>errprint(Hello)
+
+   In the TeX example, the first line defines a macro 'a' to print the
+message 'Hello'.  The second line defines <@> to be usable instead of
+<\> as an escape character.  The third line defines <\> to be a normal
+printing character, not an escape.  The fourth line invokes the macro
+'a'.  So, when TeX is run on this file, it displays the message 'Hello'.
+
+   When the 'm4' example is passed through 'm4', it outputs
+'errprint(Hello)'.  The reason for this is that TeX does lexical
+analysis of macro definition when the macro is _defined_.  'm4' just
+stores the text, postponing the lexical analysis until the macro is
+_used_.
+
+   You should note that using 'changeword' will slow 'm4' down by a
+factor of about seven, once it is changed to something other than the
+default regular expression.  You can invoke 'changeword' with the empty
+string to restore the default word definition, and regain the parsing
+speed.
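+
+   As a further illustration (a sketch, not a verbatim manual example),
+after relaxing the rules so that a digit may start a word, passing the
+empty string restores the default definition, and the macro named '1'
+can no longer be invoked by name:
+
+     ifdef(`changeword', `', `errprint(` skipping: no changeword support
+     ')m4exit(`77')')dnl
+     dnl Sketch: an empty regular expression restores the default rules.
+     changeword(`[_a-zA-Z0-9]+')
+     =>
+     define(`1', `0')1
+     =>0
+     changeword(`')
+     =>
+     1
+     =>1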
+
+
+File: m4.info,  Node: M4wrap,  Prev: Changeword,  Up: Input Control
+
+8.5 Saving text until end of input
+==================================
+
+It is possible to 'save' some text until the end of the normal input has
+been seen.  Text can be saved, to be read again by 'm4' when the normal
+input has been exhausted.  This feature is normally used to initiate
+cleanup actions before normal exit, e.g., deleting temporary files.
+
+   To save input text, use the builtin 'm4wrap':
+
+ -- Builtin: m4wrap (STRING, ...)
+     Stores STRING in a safe place, to be reread when end of input is
+     reached.  As a GNU extension, additional arguments are concatenated
+     with a space to the STRING.
+
+     The expansion of 'm4wrap' is void.  The macro 'm4wrap' is
+     recognized only with parameters.
+
+     define(`cleanup', `This is the `cleanup' action.
+     ')
+     =>
+     m4wrap(`cleanup')
+     =>
+     This is the first and last normal input line.
+     =>This is the first and last normal input line.
+     ^D
+     =>This is the cleanup action.
+
+   The saved input is only reread when the end of normal input is seen,
+and not if 'm4exit' is used to exit 'm4'.
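+
+   For instance (a minimal sketch, not a verbatim manual example), text
+saved with 'm4wrap' is silently dropped when 'm4exit' ends the run:
+
+     dnl Sketch: m4exit bypasses the text saved by m4wrap.
+     m4wrap(`This text is never reread.
+     ')
+     =>
+     m4exit(`0')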
+
+   It is safe to call 'm4wrap' from saved text, but then the order in
+which the saved text is reread is undefined.  If 'm4wrap' is not used
+recursively, the saved pieces of text are reread in the opposite order
+in which they were saved (LIFO--last in, first out).  However, this
+behavior is likely to change in a future release, to match POSIX, so you
+should not depend on this order.
+
+   It is possible to emulate POSIX behavior even with older versions of
+GNU M4 by including the file 'm4-1.4.17/examples/wrapfifo.m4' from the
+distribution:
+
+     $ m4 -I examples
+     undivert(`wrapfifo.m4')dnl
+     =>dnl Redefine m4wrap to have FIFO semantics.
+     =>define(`_m4wrap_level', `0')dnl
+     =>define(`m4wrap',
+     =>`ifdef(`m4wrap'_m4wrap_level,
+     =>       `define(`m4wrap'_m4wrap_level,
+     =>               defn(`m4wrap'_m4wrap_level)`$1')',
+     =>       `builtin(`m4wrap', `define(`_m4wrap_level',
+     =>                                  incr(_m4wrap_level))dnl
+     =>m4wrap'_m4wrap_level)dnl
+     =>define(`m4wrap'_m4wrap_level, `$1')')')dnl
+     include(`wrapfifo.m4')
+     =>
+     m4wrap(`a`'m4wrap(`c
+     ', `d')')m4wrap(`b')
+     =>
+     ^D
+     =>abc
+
+   It is likewise possible to emulate LIFO behavior without resorting to
+the GNU M4 extension of 'builtin', by including the file
+'m4-1.4.17/examples/wraplifo.m4' from the distribution.  (Unfortunately,
+both examples shown here share some subtle bugs.  See if you can find
+and correct them; or *note Answers: Improved m4wrap.).
+
+     $ m4 -I examples
+     undivert(`wraplifo.m4')dnl
+     =>dnl Redefine m4wrap to have LIFO semantics.
+     =>define(`_m4wrap_level', `0')dnl
+     =>define(`_m4wrap', defn(`m4wrap'))dnl
+     =>define(`m4wrap',
+     =>`ifdef(`m4wrap'_m4wrap_level,
+     =>       `define(`m4wrap'_m4wrap_level,
+     =>               `$1'defn(`m4wrap'_m4wrap_level))',
+     =>       `_m4wrap(`define(`_m4wrap_level', incr(_m4wrap_level))dnl
+     =>m4wrap'_m4wrap_level)dnl
+     =>define(`m4wrap'_m4wrap_level, `$1')')')dnl
+     include(`wraplifo.m4')
+     =>
+     m4wrap(`a`'m4wrap(`c
+     ', `d')')m4wrap(`b')
+     =>
+     ^D
+     =>bac
+
+   Here is an example of implementing a factorial function using
+'m4wrap':
+
+     define(`f', `ifelse(`$1', `0', `Answer: 0!=1
+     ', eval(`$1>1'), `0', `Answer: $2$1=eval(`$2$1')
+     ', `m4wrap(`f(decr(`$1'), `$2$1*')')')')
+     =>
+     f(`10')
+     =>
+     ^D
+     =>Answer: 10*9*8*7*6*5*4*3*2*1=3628800
+
+   Invocations of 'm4wrap' at the same recursion level are concatenated
+and rescanned as usual:
+
+     define(`aa', `AA
+     ')
+     =>
+     m4wrap(`a')m4wrap(`a')
+     =>
+     ^D
+     =>AA
+
+however, the transition between recursion levels behaves like an end of
+file condition between two input files.
+
+     m4wrap(`m4wrap(`)')len(abc')
+     =>
+     ^D
+     error->m4:stdin:1: ERROR: end of file in argument list
+
+
+File: m4.info,  Node: File Inclusion,  Next: Diversions,  Prev: Input Control,  Up: Top
+
+9 File inclusion
+****************
+
+'m4' allows you to include named files at any point in the input.
+
+* Menu:
+
+* Include::                     Including named files
+* Search Path::                 Searching for include files
+
+
+File: m4.info,  Node: Include,  Next: Search Path,  Up: File Inclusion
+
+9.1 Including named files
+=========================
+
+There are two builtin macros in 'm4' for including files:
+
+ -- Builtin: include (FILE)
+ -- Builtin: sinclude (FILE)
+     Both macros cause the file named FILE to be read by 'm4'.  When the
+     end of the file is reached, input is resumed from the previous
+     input file.
+
+     The expansion of 'include' and 'sinclude' is therefore the contents
+     of FILE.
+
+     If FILE does not exist, is a directory, or cannot otherwise be
+     read, the expansion is void, and 'include' will fail with an error
+     while 'sinclude' is silent.  The empty string counts as a file that
+     does not exist.
+
+     The macros 'include' and 'sinclude' are recognized only with
+     parameters.
+
+     include(`none')
+     error->m4:stdin:1: cannot open `none': No such file or directory
+     =>
+     include()
+     error->m4:stdin:2: cannot open `': No such file or directory
+     =>
+     sinclude(`none')
+     =>
+     sinclude()
+     =>
+
+   The rest of this section assumes that 'm4' is invoked with the '-I'
+option (*note Invoking m4: Preprocessor features.) pointing to the
+'m4-1.4.17/examples' directory shipped as part of the GNU 'm4' package.
+The file 'm4-1.4.17/examples/incl.m4' in the distribution contains the
+lines:
+
+     $ cat examples/incl.m4
+     =>Include file start
+     =>foo
+     =>Include file end
+
+   Normally file inclusion is used to insert the contents of a file into
+the input stream.  The contents of the file will be read by 'm4' and
+macro calls in the file will be expanded:
+
+     $ m4 -I examples
+     define(`foo', `FOO')
+     =>
+     include(`incl.m4')
+     =>Include file start
+     =>FOO
+     =>Include file end
+     =>
+
+   The fact that 'include' and 'sinclude' expand to the contents of the
+file can be used to define macros that operate on entire files.  Here is
+an example, which defines 'bar' to expand to the contents of 'incl.m4':
+
+     $ m4 -I examples
+     define(`bar', include(`incl.m4'))
+     =>
+     This is `bar':  >>bar<<
+     =>This is bar:  >>Include file start
+     =>foo
+     =>Include file end
+     =><<
+
+   This use of 'include' is not trivial, though, as files can contain
+quotes, commas, and parentheses, which can interfere with the way the
+'m4' parser works.  GNU 'm4' seamlessly concatenates the file contents
+with the next character, even if the included file ended in the middle
+of a comment, string, or macro call.  These conditions are only treated
+as end of file errors if specified as input files on the command line.
+
+   In GNU 'm4', an alternative method of reading files is using
+'undivert' (*note Undivert::) on a named file.
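+
+   For instance (a sketch reusing 'incl.m4' from above, not a verbatim
+manual example), 'undivert' copies the file without expanding the macro
+'foo', in contrast to the 'include' example earlier in this section:
+
+     $ m4 -I examples
+     define(`foo', `FOO')
+     =>
+     undivert(`incl.m4')
+     =>Include file start
+     =>foo
+     =>Include file end
+     =>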
+
+
+File: m4.info,  Node: Search Path,  Prev: Include,  Up: File Inclusion
+
+9.2 Searching for include files
+===============================
+
+GNU 'm4' allows included files to be found in directories other than the
+current working directory.
+
+   If the '--prepend-include' or '-B' command-line option was provided
+(*note Invoking m4: Preprocessor features.), those directories are
+searched first, in the reverse of the order in which those options were
+listed on the command line.  Then 'm4' looks in the current working
+directory.  Next come the directories specified with the '--include' or
+'-I' option, in the order found on the command line.  Finally, if the
+'M4PATH' environment variable is set, it is expected to contain a
+colon-separated list of directories, which will be searched in order.
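+
+   As an illustration with a purely hypothetical layout (the
+directories and files below are not shipped with the distribution),
+assume the current working directory contains no 'who.m4', while the
+directories 'dirA' and 'dirB' each hold a one-line file 'who.m4'
+containing the text 'dirA' or 'dirB' respectively.  The '-I'
+directories are then tried in command-line order:
+
+     $ m4 -I dirA -I dirB
+     dnl Hypothetical layout; dirA is listed first, so its who.m4 wins.
+     include(`who.m4')
+     =>dirA
+     =>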
+
+   If the automatic search for include-files causes trouble, the 'p'
+debug flag (*note Debug Levels::) can help isolate the problem.
+
+
+File: m4.info,  Node: Diversions,  Next: Text handling,  Prev: File Inclusion,  Up: Top
+
+10 Diverting and undiverting output
+***********************************
+
+Diversions are a way of temporarily saving output.  The output of 'm4'
+can at any time be diverted to a temporary file, and be reinserted into
+the output stream, "undiverted", again at a later time.
+
+   Numbered diversions are counted from 0 upwards, diversion number 0
+being the normal output stream.  GNU 'm4' tries to keep diversions in
+memory.  However, there is a limit to the overall memory usable by all
+diversions taken together (512K, currently).  When this maximum is about
+to be exceeded, a temporary file is opened to receive the contents of
+the biggest diversion still in memory, freeing this memory for other
+diversions.  When creating the temporary file, 'm4' honors the value of
+the environment variable 'TMPDIR', and falls back to '/tmp'.  Thus, the
+amount of available disk space provides the only real limit on the
+number and aggregate size of diversions.
+
+   Diversions make it possible to generate output in a different order
+than the input was read.  It is possible to implement topological
+sorting dependencies.  For example, GNU Autoconf makes use of diversions
+under the hood to ensure that the expansion of a prerequisite macro
+appears in the output prior to the expansion of a dependent macro,
+regardless of which order the two macros were invoked in the user's
+input file.
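+
+   For instance (a minimal sketch, not one of Autoconf's actual
+macros), diversion 1 is always emitted before diversion 2, even when
+the text for diversion 2 was generated first:
+
+     dnl Sketch: output order follows diversion numbers, not input order.
+     divert(`2')The dependent text comes second.
+     divert(`1')The prerequisite text comes first.
+     divert
+     =>
+     ^D
+     =>The prerequisite text comes first.
+     =>The dependent text comes second.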
+
+* Menu:
+
+* Divert::                      Diverting output
+* Undivert::                    Undiverting output
+* Divnum::                      Diversion numbers
+* Cleardivert::                 Discarding diverted text
+
+
+File: m4.info,  Node: Divert,  Next: Undivert,  Up: Diversions
+
+10.1 Diverting output
+=====================
+
+Output is diverted using 'divert':
+
+ -- Builtin: divert ([NUMBER = '0'])
+     The current diversion is changed to NUMBER.  If NUMBER is left out
+     or empty, it is assumed to be zero.  If NUMBER cannot be parsed,
+     the diversion is unchanged.
+
+     The expansion of 'divert' is void.
+
+   When all of the 'm4' input has been processed, all existing
+diversions are automatically undiverted, in numerical order.
+
+     divert(`1')
+     This text is diverted.
+     divert
+     =>
+     This text is not diverted.
+     =>This text is not diverted.
+     ^D
+     =>
+     =>This text is diverted.
+
+   Several calls of 'divert' with the same argument do not overwrite the
+previous diverted text, but append to it.  Diversions are printed after
+any wrapped text is expanded.
+
+     define(`text', `TEXT')
+     =>
+     divert(`1')`diverted text.'
+     divert
+     =>
+     m4wrap(`Wrapped text precedes ')
+     =>
+     ^D
+     =>Wrapped TEXT precedes diverted text.
+
+   If output is diverted to a negative diversion, it is simply
+discarded.  This can be used to suppress unwanted output.  A common
+example of unwanted output is the trailing newlines after macro
+definitions.  Here is a common programming idiom in 'm4' for avoiding
+them.
+
+     divert(`-1')
+     define(`foo', `Macro `foo'.')
+     define(`bar', `Macro `bar'.')
+     divert
+     =>
+
+   Traditional implementations only supported ten diversions.  But as a
+GNU extension, diversion numbers can be as large as positive integers
+will allow, rather than treating a multi-digit diversion number as a
+request to discard text.
+
+     divert(eval(`1<<28'))world
+     divert(`2')hello
+     ^D
+     =>hello
+     =>world
+
+   Note that 'divert' is an English word, but also an active macro
+without arguments.  When processing plain text, the word might appear in
+normal text and be unintentionally swallowed as a macro invocation.  One
+way to avoid this is to use the '-P' option to rename all builtins
+(*note Invoking m4: Operation modes.).  Another is to write a wrapper
+that requires a parameter to be recognized.
+
+     We decided to divert the stream for irrigation.
+     =>We decided to  the stream for irrigation.
+     define(`divert', `ifelse(`$#', `0', ``$0'', `builtin(`$0', $@)')')
+     =>
+     divert(`-1')
+     Ignored text.
+     divert(`0')
+     =>
+     We decided to divert the stream for irrigation.
+     =>We decided to divert the stream for irrigation.
+
+
+File: m4.info,  Node: Undivert,  Next: Divnum,  Prev: Divert,  Up: Diversions
+
+10.2 Undiverting output
+=======================
+
+Diverted text can be undiverted explicitly using the builtin 'undivert':
+
+ -- Builtin: undivert ([DIVERSIONS...])
+     Undiverts the numeric DIVERSIONS given by the arguments, in the
+     order given.  If no arguments are supplied, all diversions are
+     undiverted, in numerical order.
+
+     As a GNU extension, DIVERSIONS may contain non-numeric strings,
+     which are treated as the names of files to copy into the output
+     without expansion.  A warning is issued if a file could not be
+     opened.
+
+     The expansion of 'undivert' is void.
+
+     divert(`1')
+     This text is diverted.
+     divert
+     =>
+     This text is not diverted.
+     =>This text is not diverted.
+     undivert(`1')
+     =>
+     =>This text is diverted.
+     =>
+
+   Notice the last two blank lines.  One of them comes from the newline
+following 'undivert', the other from the newline that followed the
+'divert'!  A diversion often starts with a blank line like this.
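+
+   A common way to suppress these stray newlines (a sketch, not a
+verbatim manual example) is to follow each 'divert' and 'undivert' call
+with 'dnl':
+
+     dnl Sketch: dnl swallows the newline that follows each call.
+     divert(`1')dnl
+     This text is diverted.
+     divert`'dnl
+     This text is not diverted.
+     =>This text is not diverted.
+     undivert(`1')dnl
+     =>This text is diverted.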
+
+   When diverted text is undiverted, it is _not_ reread by 'm4', but
+rather copied directly to the current output, and it is therefore not an
+error to undivert into a diversion.  Undiverting the empty string is the
+same as specifying diversion 0; in either case nothing happens since the
+output has already been flushed.
+
+     divert(`1')diverted text
+     divert
+     =>
+     undivert()
+     =>
+     undivert(`0')
+     =>
+     undivert
+     =>diverted text
+     =>
+     divert(`1')more
+     divert(`2')undivert(`1')diverted text`'divert
+     =>
+     undivert(`1')
+     =>
+     undivert(`2')
+     =>more
+     =>diverted text
+
+   When a diversion has been undiverted, the diverted text is discarded,
+and it is not possible to bring back diverted text more than once.
+
+     divert(`1')
+     This text is diverted first.
+     divert(`0')undivert(`1')dnl
+     =>
+     =>This text is diverted first.
+     undivert(`1')
+     =>
+     divert(`1')
+     This text is also diverted but not appended.
+     divert(`0')undivert(`1')dnl
+     =>
+     =>This text is also diverted but not appended.
+
+   Attempts to undivert the current diversion are silently ignored.
+Thus, when the current diversion is not 0, the current diversion does
+not get rearranged among the other diversions.
+
+     divert(`1')one
+     divert(`2')two
+     divert(`3')three
+     divert(`2')undivert`'dnl
+     divert`'undivert`'dnl
+     =>two
+     =>one
+     =>three
+
+   GNU 'm4' allows named files to be undiverted.  Given a non-numeric
+argument, the contents of the file named will be copied, uninterpreted,
+to the current output.  This complements the builtin 'include' (*note
+Include::).  To illustrate the difference, assume the file 'foo'
+contains:
+
+     $ cat foo
+     bar
+
+then
+
+     define(`bar', `BAR')
+     =>
+     undivert(`foo')
+     =>bar
+     =>
+     include(`foo')
+     =>BAR
+     =>
+
+   If the file is not found (or cannot be read), an error message is
+issued, and the expansion is void.  It is possible to intermix files and
+diversion numbers.
+
+     divert(`1')diversion one
+     divert(`2')undivert(`foo')dnl
+     divert(`3')diversion three
+     divert`'dnl
+     undivert(`1', `2', `foo', `3')dnl
+     =>diversion one
+     =>bar
+     =>bar
+     =>diversion three
+
+
+File: m4.info,  Node: Divnum,  Next: Cleardivert,  Prev: Undivert,  Up: Diversions
+
+10.3 Diversion numbers
+======================
+
+The current diversion is tracked by the builtin 'divnum':
+
+ -- Builtin: divnum
+     Expands to the number of the current diversion.
+
+     Initial divnum
+     =>Initial 0
+     divert(`1')
+     Diversion one: divnum
+     divert(`2')
+     Diversion two: divnum
+     ^D
+     =>
+     =>Diversion one: 1
+     =>
+     =>Diversion two: 2
+
+
+File: m4.info,  Node: Cleardivert,  Prev: Divnum,  Up: Diversions
+
+10.4 Discarding diverted text
+=============================
+
+Often it is not known, when output is diverted, whether the diverted
+text is actually needed.  Since all non-empty diversions are brought back
+on the main output stream when the end of input is seen, a method of
+discarding a diversion is needed.  If all diversions should be
+discarded, the easiest is to end the input to 'm4' with 'divert(`-1')'
+followed by an explicit 'undivert':
+
+     divert(`1')
+     Diversion one: divnum
+     divert(`2')
+     Diversion two: divnum
+     divert(`-1')
+     undivert
+     ^D
+
+No output is produced at all.
+
+   Clearing selected diversions can be done with the following macro:
+
+ -- Composite: cleardivert ([DIVERSIONS...])
+     Discard the contents of each of the listed numeric DIVERSIONS.
+
+     define(`cleardivert',
+     `pushdef(`_n', divnum)divert(`-1')undivert($@)divert(_n)popdef(`_n')')
+     =>
+
+   It is called just like 'undivert', but the effect is to clear the
+diversions, given by the arguments.  (This macro has a nasty bug!  You
+should try to see if you can find it and correct it; or *note Answers:
+Improved cleardivert.).
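+
+   For instance (an illustrative sketch, not a verbatim manual example,
+repeating the definition above so that it is self-contained), clearing
+diversion 1 leaves diversion 2 intact:
+
+     define(`cleardivert',
+     `pushdef(`_n', divnum)divert(`-1')undivert($@)divert(_n)popdef(`_n')')dnl
+     dnl Sketch: only the listed diversion is discarded.
+     divert(`1')one
+     divert(`2')two
+     divert`'dnl
+     cleardivert(`1')dnl
+     ^D
+     =>two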
+
+
+File: m4.info,  Node: Text handling,  Next: Arithmetic,  Prev: Diversions,  Up: Top
+
+11 Macros for text handling
+***************************
+
+There are a number of builtins in 'm4' for manipulating text in various
+ways, extracting substrings, searching, substituting, and so on.
+
+* Menu:
+
+* Len::                         Calculating length of strings
+* Index macro::                 Searching for substrings
+* Regexp::                      Searching for regular expressions
+* Substr::                      Extracting substrings
+* Translit::                    Translating characters
+* Patsubst::                    Substituting text by regular expression
+* Format::                      Formatting strings (printf-like)
+
+
+File: m4.info,  Node: Len,  Next: Index macro,  Up: Text handling
+
+11.1 Calculating length of strings
+==================================
+
+The length of a string can be calculated by 'len':
+
+ -- Builtin: len (STRING)
+     Expands to the length of STRING, as a decimal number.
+
+     The macro 'len' is recognized only with parameters.
+
+     len()
+     =>0
+     len(`abcdef')
+     =>6
+
+
+File: m4.info,  Node: Index macro,  Next: Regexp,  Prev: Len,  Up: Text handling
+
+11.2 Searching for substrings
+=============================
+
+Searching for substrings is done with 'index':
+
+ -- Builtin: index (STRING, SUBSTRING)
+     Expands to the index of the first occurrence of SUBSTRING in
+     STRING.  The first character in STRING has index 0.  If SUBSTRING
+     does not occur in STRING, 'index' expands to '-1'.
+
+     The macro 'index' is recognized only with parameters.
+
+     index(`gnus, gnats, and armadillos', `nat')
+     =>7
+     index(`gnus, gnats, and armadillos', `dag')
+     =>-1
+
+   Omitting SUBSTRING evokes a warning, but still produces output;
+contrast this with an empty SUBSTRING.
+
+     index(`abc')
+     error->m4:stdin:1: Warning: too few arguments to builtin `index'
+     =>0
+     index(`abc', `')
+     =>0
+     index(`abc', `b')
+     =>1
+
+
+File: m4.info,  Node: Regexp,  Next: Substr,  Prev: Index macro,  Up: Text handling
+
+11.3 Searching for regular expressions
+======================================
+
+Searching for regular expressions is done with the builtin 'regexp':
+
+ -- Builtin: regexp (STRING, REGEXP, [REPLACEMENT])
+     Searches for REGEXP in STRING.  The syntax for regular expressions
+     is the same as in GNU Emacs, which is similar to BRE, Basic Regular
+     Expressions in POSIX. *Note Syntax of Regular Expressions:
+     (emacs)Regexps.  Support for ERE, Extended Regular Expressions is
+     not available, but will be added in GNU M4 2.0.
+
+     If REPLACEMENT is omitted, 'regexp' expands to the index of the
+     first match of REGEXP in STRING.  If REGEXP does not match anywhere
+     in STRING, it expands to -1.
+
+     If REPLACEMENT is supplied, and there was a match, 'regexp' changes
+     the expansion to this argument, with '\N' substituted by the text
+     matched by the Nth parenthesized sub-expression of REGEXP, up to
+     nine sub-expressions.  The escape '\&' is replaced by the text of
+     the entire regular expression matched.  For all other characters,
+     '\' treats the next character literally.  A warning is issued if
+     there were fewer sub-expressions than the '\N' requested, or if
+     there is a trailing '\'.  If there was no match, 'regexp' expands
+     to the empty string.
+
+     The macro 'regexp' is recognized only with parameters.
+
+     regexp(`GNUs not Unix', `\<[a-z]\w+')
+     =>5
+     regexp(`GNUs not Unix', `\<Q\w*')
+     =>-1
+     regexp(`GNUs not Unix', `\w\(\w+\)$', `*** \& *** \1 ***')
+     =>*** Unix *** nix ***
+     regexp(`GNUs not Unix', `\<Q\w*', `*** \& *** \1 ***')
+     =>
+
+   Here are some more examples on the handling of backslash:
+
+     regexp(`abc', `\(b\)', `\\\10\a')
+     =>\b0a
+     regexp(`abc', `b', `\1\')
+     error->m4:stdin:2: Warning: sub-expression 1 not present
+     error->m4:stdin:2: Warning: trailing \ ignored in replacement
+     =>
+     regexp(`abc', `\(\(d\)?\)\(c\)', `\1\2\3\4\5\6')
+     error->m4:stdin:3: Warning: sub-expression 4 not present
+     error->m4:stdin:3: Warning: sub-expression 5 not present
+     error->m4:stdin:3: Warning: sub-expression 6 not present
+     =>c
+
+   Omitting REGEXP evokes a warning, but still produces output; contrast
+this with an empty REGEXP argument.
+
+     regexp(`abc')
+     error->m4:stdin:1: Warning: too few arguments to builtin `regexp'
+     =>0
+     regexp(`abc', `')
+     =>0
+     regexp(`abc', `', `\\def')
+     =>\def
+
+
+File: m4.info,  Node: Substr,  Next: Translit,  Prev: Regexp,  Up: Text handling
+
+11.4 Extracting substrings
+==========================
+
+Substrings are extracted with 'substr':
+
+ -- Builtin: substr (STRING, FROM, [LENGTH])
+     Expands to the substring of STRING, which starts at index FROM, and
+     extends for LENGTH characters, or to the end of STRING, if LENGTH
+     is omitted.  The starting index of a string is always 0.  The
+     expansion is empty if there is an error parsing FROM or LENGTH, if
+     FROM is beyond the end of STRING, or if LENGTH is negative.
+
+     The macro 'substr' is recognized only with parameters.
+
+     substr(`gnus, gnats, and armadillos', `6')
+     =>gnats, and armadillos
+     substr(`gnus, gnats, and armadillos', `6', `5')
+     =>gnats
+
+   Omitting FROM evokes a warning, but still produces output.
+
+     substr(`abc')
+     error->m4:stdin:1: Warning: too few arguments to builtin `substr'
+     =>abc
+     substr(`abc',)
+     error->m4:stdin:2: empty string treated as 0 in builtin `substr'
+     =>abc
+
+
+File: m4.info,  Node: Translit,  Next: Patsubst,  Prev: Substr,  Up: Text handling
+
+11.5 Translating characters
+===========================
+
+Character translation is done with 'translit':
+
+ -- Builtin: translit (STRING, CHARS, [REPLACEMENT])
+     Expands to STRING, with each character that occurs in CHARS
+     translated into the character from REPLACEMENT with the same index.
+
+     If REPLACEMENT is shorter than CHARS, the excess characters of
+     CHARS are deleted from the expansion; if CHARS is shorter, the
+     excess characters in REPLACEMENT are silently ignored.  If
+     REPLACEMENT is omitted, all characters in STRING that are present
+     in CHARS are deleted from the expansion.  If a character appears
+     more than once in CHARS, only the first instance is used in making
+     the translation.  Only a single translation pass is made, even if
+     characters in REPLACEMENT also appear in CHARS.
+
+     As a GNU extension, both CHARS and REPLACEMENT can contain
+     character-ranges, e.g., 'a-z' (meaning all lowercase letters) or
+     '0-9' (meaning all digits).  To include a dash '-' in CHARS or
+     REPLACEMENT, place it first or last in the entire string, or as the
+     last character of a range.  Back-to-back ranges can share a common
+     endpoint.  It is not an error for the last character in the range
+     to be 'larger' than the first.  In that case, the range runs
+     backwards, i.e., '9-0' means the string '9876543210'.  The
+     expansion of a range is dependent on the underlying encoding of
+     characters, so using ranges is not always portable between
+     machines.
+
+     The macro 'translit' is recognized only with parameters.
+
+     translit(`GNUs not Unix', `A-Z')
+     =>s not nix
+     translit(`GNUs not Unix', `a-z', `A-Z')
+     =>GNUS NOT UNIX
+     translit(`GNUs not Unix', `A-Z', `z-a')
+     =>tmfs not fnix
+     translit(`+,-12345', `+--1-5', `<;>a-c-a')
+     =><;>abcba
+     translit(`abcdef', `aabdef', `bcged')
+     =>bgced
+
+   In the ASCII encoding, the first example deletes all uppercase
+letters, the second converts lowercase to uppercase, and the third
+'mirrors' all uppercase letters, while converting them to lowercase.
+The first two cases are by far the most common, even though they are not
+portable to EBCDIC or other encodings.  The fourth example shows a range
+ending in '-', as well as back-to-back ranges.  The final example shows
+that 'a' is mapped to 'b', not 'c'; the resulting 'b' is not further
+remapped to 'g'; the 'd' and 'e' are swapped, and the 'f' is discarded.
+
+   Omitting CHARS evokes a warning, but still produces output.
+
+     translit(`abc')
+     error->m4:stdin:1: Warning: too few arguments to builtin `translit'
+     =>abc
+
+
+File: m4.info,  Node: Patsubst,  Next: Format,  Prev: Translit,  Up: Text handling
+
+11.6 Substituting text by regular expression
+============================================
+
+Global substitution in a string is done by 'patsubst':
+
+ -- Builtin: patsubst (STRING, REGEXP, [REPLACEMENT])
+     Searches STRING for matches of REGEXP, and substitutes REPLACEMENT
+     for each match.  The syntax for regular expressions is the same as
+     in GNU Emacs (*note Regexp::).
+
+     The parts of STRING that are not covered by any match of REGEXP are
+     copied to the expansion.  Whenever a match is found, the search
+     proceeds from the end of the match, so a character from STRING will
+     never be substituted twice.  If REGEXP matches a string of zero
+     length, the start position for the search is incremented, to avoid
+     infinite loops.
+
+     When a replacement is to be made, REPLACEMENT is inserted into the
+     expansion, with '\N' substituted by the text matched by the Nth
+     parenthesized sub-expression of REGEXP, for up to nine
+     sub-expressions.  The escape '\&' is replaced by the text of the
+     entire regular expression matched.  For all other characters, '\'
+     treats the next character literally.  A warning is issued if there
+     were fewer sub-expressions than the '\N' requested, or if there is
+     a trailing '\'.
+
+     The REPLACEMENT argument can be omitted, in which case the text
+     matched by REGEXP is deleted.
+
+     The macro 'patsubst' is recognized only with parameters.
+
+     patsubst(`GNUs not Unix', `^', `OBS: ')
+     =>OBS: GNUs not Unix
+     patsubst(`GNUs not Unix', `\<', `OBS: ')
+     =>OBS: GNUs OBS: not OBS: Unix
+     patsubst(`GNUs not Unix', `\w*', `(\&)')
+     =>(GNUs)() (not)() (Unix)()
+     patsubst(`GNUs not Unix', `\w+', `(\&)')
+     =>(GNUs) (not) (Unix)
+     patsubst(`GNUs not Unix', `[A-Z][a-z]+')
+     =>GN not 
+     patsubst(`GNUs not Unix', `not', `NOT\')
+     error->m4:stdin:6: Warning: trailing \ ignored in replacement
+     =>GNUs NOT Unix
+
+   Here is a slightly more realistic example, which capitalizes
+individual words or whole sentences, by substituting calls of the macros
+'upcase' and 'downcase' into the strings.
+
+ -- Composite: upcase (TEXT)
+ -- Composite: downcase (TEXT)
+ -- Composite: capitalize (TEXT)
+     Expand to TEXT, but with capitalization changed: 'upcase' changes
+     all letters to upper case, 'downcase' changes all letters to lower
+     case, and 'capitalize' changes the first character of each word to
+     upper case and the remaining characters to lower case.
+
+   First, an example of their usage, using implementations distributed
+in 'm4-1.4.17/examples/capitalize.m4'.
+
+     $ m4 -I examples
+     include(`capitalize.m4')
+     =>
+     upcase(`GNUs not Unix')
+     =>GNUS NOT UNIX
+     downcase(`GNUs not Unix')
+     =>gnus not unix
+     capitalize(`GNUs not Unix')
+     =>Gnus Not Unix
+
+   Now for the implementation.  There is a helper macro '_capitalize'
+which puts only its first word in mixed case.  Then 'capitalize' merely
+parses out the words, and replaces them with an invocation of
+'_capitalize'.  (As presented here, the 'capitalize' macro has some
+subtle flaws.  You should try to see if you can find and correct them;
+or *note Answers: Improved capitalize.).
+
+     $ m4 -I examples
+     undivert(`capitalize.m4')dnl
+     =>divert(`-1')
+     =># upcase(text)
+     =># downcase(text)
+     =># capitalize(text)
+     =>#   change case of text, simple version
+     =>define(`upcase', `translit(`$*', `a-z', `A-Z')')
+     =>define(`downcase', `translit(`$*', `A-Z', `a-z')')
+     =>define(`_capitalize',
+     =>       `regexp(`$1', `^\(\w\)\(\w*\)',
+     =>               `upcase(`\1')`'downcase(`\2')')')
+     =>define(`capitalize', `patsubst(`$1', `\w+', `_$0(`\&')')')
+     =>divert`'dnl
+
+   While 'regexp' replaces the whole input with the replacement as soon
+as there is a match, 'patsubst' replaces each _occurrence_ of a match
+and preserves non-matching pieces:
+
+     define(`patreg',
+     `patsubst($@)
+     regexp($@)')dnl
+     patreg(`bar foo baz Foo', `foo\|Foo', `FOO')
+     =>bar FOO baz FOO
+     =>FOO
+     patreg(`aba abb 121', `\(.\)\(.\)\1', `\2\1\2')
+     =>bab abb 212
+     =>bab
+
+   Omitting REGEXP evokes a warning, but still produces output; contrast
+this with an empty REGEXP argument.
+
+     patsubst(`abc')
+     error->m4:stdin:1: Warning: too few arguments to builtin `patsubst'
+     =>abc
+     patsubst(`abc', `')
+     =>abc
+     patsubst(`abc', `', `\\-')
+     =>\-a\-b\-c\-
+
+
+File: m4.info,  Node: Format,  Prev: Patsubst,  Up: Text handling
+
+11.7 Formatting strings (printf-like)
+=====================================
+
+Formatted output can be made with 'format':
+
+ -- Builtin: format (FORMAT-STRING, ...)
+     Works much like the C function 'printf'.  The first argument
+     FORMAT-STRING can contain '%' specifications which are satisfied by
+     additional arguments, and the expansion of 'format' is the
+     formatted string.
+
+     The macro 'format' is recognized only with parameters.
+
+   Its use is best described by a few examples:
+
+     define(`foo', `The brown fox jumped over the lazy dog')
+     =>
+     format(`The string "%s" uses %d characters', foo, len(foo))
+     =>The string "The brown fox jumped over the lazy dog" uses 38 characters
+     format(`%*.*d', `-1', `-1', `1')
+     =>1
+     format(`%.0f', `56789.9876')
+     =>56790
+     len(format(`%-*X', `5000', `1'))
+     =>5000
+     ifelse(format(`%010F', `infinity'), `       INF', `success',
+            format(`%010F', `infinity'), `  INFINITY', `success',
+            format(`%010F', `infinity'))
+     =>success
+     ifelse(format(`%.1A', `1.999'), `0X1.0P+1', `success',
+            format(`%.1A', `1.999'), `0X2.0P+0', `success',
+            format(`%.1A', `1.999'))
+     =>success
+     format(`%g', `0xa.P+1')
+     =>20
+
+   Using the 'forloop' macro defined earlier (*note Forloop::), this
+example shows how 'format' can be used to produce tabular output.
+
+     $ m4 -I examples
+     include(`forloop.m4')
+     =>
+     forloop(`i', `1', `10', `format(`%6d squared is %10d
+     ', i, eval(i**2))')
+     =>     1 squared is          1
+     =>     2 squared is          4
+     =>     3 squared is          9
+     =>     4 squared is         16
+     =>     5 squared is         25
+     =>     6 squared is         36
+     =>     7 squared is         49
+     =>     8 squared is         64
+     =>     9 squared is         81
+     =>    10 squared is        100
+     =>
+
+   The builtin 'format' is modeled after the ANSI C 'printf' function,
+and supports these '%' specifiers: 'c', 's', 'd', 'o', 'x', 'X', 'u',
+'a', 'A', 'e', 'E', 'f', 'F', 'g', 'G', and '%'; it supports field
+widths and precisions, and the flags '+', '-', ' ', '0', '#', and '''.
+For integer specifiers, the width modifiers 'hh', 'h', and 'l' are
+recognized, and for floating point specifiers, the width modifier 'l' is
+recognized.  Items not yet supported include positional arguments, the
+'n', 'p', 'S', and 'C' specifiers, the 'z', 't', 'j', 'L' and 'll'
+modifiers, and any platform extensions available in the native 'printf'.
+For more details on the functioning of 'printf', see the C Library
+Manual, or the POSIX specification (for example, '%a' is supported even
+on platforms that haven't yet implemented C99 hexadecimal floating point
+output natively).
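+
+   For instance (an illustrative sketch, not part of the original set
+of examples), field width, precision, and the '0' and '-' flags combine
+just as they do in C 'printf':
+
+     dnl Sketch: zero padding and left justification.
+     format(`%06.3f', `3.14159')
+     =>03.142
+     format(`%-6s|', `left')
+     =>left  |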
+
+   Unrecognized specifiers result in a warning.  It is anticipated that
+a future release of GNU 'm4' will support more specifiers, and give
+better warnings when various problems such as overflow are encountered.
+Likewise, escape sequences are not yet recognized.
+
+     format(`%p', `0')
+     error->m4:stdin:1: Warning: unrecognized specifier in `%p'
+     =>
+
+
+File: m4.info,  Node: Arithmetic,  Next: Shell commands,  Prev: Text handling,  Up: Top
+
+12 Macros for doing arithmetic
+******************************
+
+Integer arithmetic is included in 'm4', with a C-like syntax.  As
+convenient shorthands, there are builtins for simple increment and
+decrement operations.
+
+* Menu:
+
+* Incr::                        Decrement and increment operators
+* Eval::                        Evaluating integer expressions
+
+
+File: m4.info,  Node: Incr,  Next: Eval,  Up: Arithmetic
+
+12.1 Decrement and increment operators
+======================================
+
+Increment and decrement of integers are supported using the builtins
+'incr' and 'decr':
+
+ -- Builtin: incr (NUMBER)
+ -- Builtin: decr (NUMBER)
+     Expand to the numerical value of NUMBER, incremented or
+     decremented, respectively, by one.  Except for the empty string,
+     the expansion is empty if NUMBER could not be parsed.
+
+     The macros 'incr' and 'decr' are recognized only with parameters.
+
+     incr(`4')
+     =>5
+     decr(`7')
+     =>6
+     incr()
+     error->m4:stdin:3: empty string treated as 0 in builtin `incr'
+     =>1
+     decr()
+     error->m4:stdin:4: empty string treated as 0 in builtin `decr'
+     =>-1
+
+
+File: m4.info,  Node: Eval,  Prev: Incr,  Up: Arithmetic
+
+12.2 Evaluating integer expressions
+===================================
+
+Integer expressions are evaluated with 'eval':
+
+ -- Builtin: eval (EXPRESSION, [RADIX = '10'], [WIDTH])
+     Expands to the value of EXPRESSION.  The expansion is empty if a
+     problem is encountered while parsing the arguments.  If specified,
+     RADIX and WIDTH control the format of the output.
+
+     Calculations are done with 32-bit signed numbers.  Overflow
+     silently results in wraparound.  A warning is issued if division by
+     zero is attempted, or if EXPRESSION could not be parsed.
+
+     Expressions can contain the following operators, listed in order of
+     decreasing precedence.
+
+     '()'
+          Parentheses
+     '+ - ~ !'
+          Unary plus and minus, and bitwise and logical negation
+     '**'
+          Exponentiation
+     '* / %'
+          Multiplication, division, and modulo
+     '+ -'
+          Addition and subtraction
+     '<< >>'
+          Shift left or right
+     '> >= < <='
+          Relational operators
+     '== !='
+          Equality operators
+     '&'
+          Bitwise and
+     '^'
+          Bitwise exclusive-or
+     '|'
+          Bitwise or
+     '&&'
+          Logical and
+     '||'
+          Logical or
+
+     The macro 'eval' is recognized only with parameters.
+
+   All binary operators, except exponentiation, are left associative.  C
+operators that perform variable assignment, such as '+=' or '--', are
+not implemented, since 'eval' only operates on constants, not variables.
+Attempting to use them results in an error.  However, since traditional
+implementations treated '=' as an undocumented alias for '==' as opposed
+to an assignment operator, this usage is supported as a special case.
+Be aware that a future version of GNU M4 may support assignment
+semantics as an extension when POSIX mode is not requested, and that
+using '=' to check equality is not portable.
+
+     eval(`2 = 2')
+     error->m4:stdin:1: Warning: recommend ==, not =, for equality operator
+     =>1
+     eval(`++0')
+     error->m4:stdin:2: invalid operator in eval: ++0
+     =>
+     eval(`0 |= 1')
+     error->m4:stdin:3: invalid operator in eval: 0 |= 1
+     =>
+
+   Note that some older 'm4' implementations use '^' as an alternate
+operator for the exponentiation, although POSIX requires the C behavior
+of bitwise exclusive-or.  The precedence of the negation operators, '~'
+and '!', was traditionally lower than equality.  The unary operators
+could not be used reliably more than once on the same term without
+intervening parentheses.  The traditional precedence of the equality
+operators '==' and '!=' was identical instead of lower than the
+relational operators such as '<', even through GNU M4 1.4.8.  Starting
+with version 1.4.9, GNU M4 correctly follows POSIX precedence rules.  M4
+scripts designed to be portable between releases must be aware that
+parentheses may be required to enforce C precedence rules.  Likewise,
+division by zero, even in the unused branch of a short-circuiting
+operator, is not always well-defined in other implementations.
+
+   Following are some examples where the current version of M4 follows C
+precedence rules, but where older versions and some other
+implementations of 'm4' require explicit parentheses to get the correct
+result:
+
+     eval(`1 == 2 > 0')
+     =>1
+     eval(`(1 == 2) > 0')
+     =>0
+     eval(`! 0 * 2')
+     =>2
+     eval(`! (0 * 2)')
+     =>1
+     eval(`1 | 1 ^ 1')
+     =>1
+     eval(`(1 | 1) ^ 1')
+     =>0
+     eval(`+ + - ~ ! ~ 0')
+     =>1
+     eval(`2 || 1 / 0')
+     =>1
+     eval(`0 || 1 / 0')
+     error->m4:stdin:9: divide by zero in eval: 0 || 1 / 0
+     =>
+     eval(`0 && 1 % 0')
+     =>0
+     eval(`2 && 1 % 0')
+     error->m4:stdin:11: modulo by zero in eval: 2 && 1 % 0
+     =>
+
+   As a GNU extension, the operator '**' performs integral
+exponentiation.  The operator is right-associative, and if evaluated,
+the exponent must be non-negative, and at least one of the arguments
+must be non-zero, or a warning is issued.
+
+     eval(`2 ** 3 ** 2')
+     =>512
+     eval(`(2 ** 3) ** 2')
+     =>64
+     eval(`0 ** 1')
+     =>0
+     eval(`2 ** 0')
+     =>1
+     eval(`0 ** 0')
+     =>
+     error->m4:stdin:5: divide by zero in eval: 0 ** 0
+     eval(`4 ** -2')
+     error->m4:stdin:6: negative exponent in eval: 4 ** -2
+     =>
+
+   Within EXPRESSION, (but not RADIX or WIDTH), numbers without a
+special prefix are decimal.  A simple '0' prefix introduces an octal
+number.  '0x' introduces a hexadecimal number.  As GNU extensions, '0b'
+introduces a binary number.  '0r' introduces a number expressed in any
+radix between 1 and 36: the prefix should be immediately followed by the
+decimal expression of the radix, a colon, then the digits making the
+number.  For radix 1, leading zeros are ignored, and all remaining
+digits must be '1'; for all other radices, the digits are '0', '1', '2',
+....  Beyond '9', the digits are 'a', 'b' ... up to 'z'.  Lower and
+upper case letters can be used interchangeably in number prefixes and
+as number digits.
+
+   Parentheses may be used to group subexpressions whenever needed.  For
+the relational operators, a true relation returns '1', and a false
+relation returns '0'.
+
+   Here are a few examples of use of 'eval'.
+
+     eval(`-3 * 5')
+     =>-15
+     eval(`-99 / 10')
+     =>-9
+     eval(`-99 % 10')
+     =>-9
+     eval(`99 % -10')
+     =>9
+     eval(index(`Hello world', `llo') >= 0)
+     =>1
+     eval(`0r1:0111 + 0b100 + 0r3:12')
+     =>12
+     define(`square', `eval(`($1) ** 2')')
+     =>
+     square(`9')
+     =>81
+     square(square(`5')` + 1')
+     =>676
+     define(`foo', `666')
+     =>
+     eval(`foo / 6')
+     error->m4:stdin:11: bad expression in eval: foo / 6
+     =>
+     eval(foo / 6)
+     =>111
+
+   As the last two lines show, 'eval' does not handle macro names, even
+if they expand to a valid expression (or part of a valid expression).
+Therefore all macros must be expanded before they are passed to 'eval'.
+
+   Some calculations are not portable to other implementations, since
+they have undefined semantics in C, but GNU 'm4' has well-defined
+behavior on overflow.  When shifting, an out-of-range shift amount is
+implicitly brought into the range 0 through 31 by a bit-wise and with
+0x1f.
+
+     define(`max_int', eval(`0x7fffffff'))
+     =>
+     define(`min_int', incr(max_int))
+     =>
+     eval(min_int` < 0')
+     =>1
+     eval(max_int` > 0')
+     =>1
+     ifelse(eval(min_int` / -1'), min_int, `overflow occurred')
+     =>overflow occurred
+     min_int
+     =>-2147483648
+     eval(`0x80000000 % -1')
+     =>0
+     eval(`-4 >> 1')
+     =>-2
+     eval(`-4 >> 33')
+     =>-2
+
+   If RADIX is specified, it specifies the radix to be used in the
+expansion.  The default radix is 10; this is also the case if RADIX is
+the empty string.  A warning results if the radix is outside the range
+of 1 through 36, inclusive.  The result of 'eval' is always taken to be
+signed.  No radix prefix is output, and for radices greater than 10, the
+digits are lower case.  The WIDTH argument specifies the minimum output
+width, excluding any negative sign.  The result is zero-padded to extend
+the expansion to the requested width.  A warning results if the width is
+negative.  If RADIX or WIDTH is out of bounds, the expansion of 'eval'
+is empty.
+
+     eval(`666', `10')
+     =>666
+     eval(`666', `11')
+     =>556
+     eval(`666', `6')
+     =>3030
+     eval(`666', `6', `10')
+     =>0000003030
+     eval(`-666', `6', `10')
+     =>-0000003030
+     eval(`10', `', `0')
+     =>10
+     `0r1:'eval(`10', `1', `11')
+     =>0r1:01111111111
+     eval(`10', `16')
+     =>a
+     eval(`1', `37')
+     error->m4:stdin:9: radix 37 in builtin `eval' out of range
+     =>
+     eval(`1', , `-1')
+     error->m4:stdin:10: negative width to builtin `eval'
+     =>
+     eval()
+     error->m4:stdin:11: empty string treated as 0 in builtin `eval'
+     =>0
+
+
+File: m4.info,  Node: Shell commands,  Next: Miscellaneous,  Prev: Arithmetic,  Up: Top
+
+13 Macros for running shell commands
+************************************
+
+There are a few builtin macros in 'm4' that allow you to run shell
+commands from within 'm4'.
+
+   Note that the definition of a valid shell command is system
+dependent.  On UNIX systems, this is the typical '/bin/sh'.  But on
+other systems, such as native Windows, the shell has a different syntax
+of commands that it understands.  Some examples in this chapter assume
+'/bin/sh', and also demonstrate how to quit early with a known exit
+value if this is not the case.
+
+* Menu:
+
+* Platform macros::             Determining the platform
+* Syscmd::                      Executing simple commands
+* Esyscmd::                     Reading the output of commands
+* Sysval::                      Exit status
+* Mkstemp::                     Making temporary files
+
+
+File: m4.info,  Node: Platform macros,  Next: Syscmd,  Up: Shell commands
+
+13.1 Determining the platform
+=============================
+
+Sometimes it is desirable for an input file to know which platform 'm4'
+is running on.  GNU 'm4' provides several macros that are predefined to
+expand to the empty string; checking for their existence will confirm
+platform details.
+
+ -- Optional builtin: __gnu__
+ -- Optional builtin: __os2__
+ -- Optional builtin: os2
+ -- Optional builtin: __unix__
+ -- Optional builtin: unix
+ -- Optional builtin: __windows__
+ -- Optional builtin: windows
+     Each of these macros is conditionally defined as needed to describe
+     the environment of 'm4'.  If defined, each macro expands to the
+     empty string.  For now, these macros silently ignore all arguments,
+     but in a future release of M4, they might warn if arguments are
+     present.
+
+   When GNU extensions are in effect (that is, when you did not use the
+'-G' option, *note Invoking m4: Limits control.), GNU 'm4' will define
+the macro '__gnu__' to expand to the empty string.
+
+     $ m4
+     __gnu__
+     =>
+     __gnu__(`ignored')
+     =>
+     Extensions are ifdef(`__gnu__', `active', `inactive')
+     =>Extensions are active
+
+     $ m4 -G
+     __gnu__
+     =>__gnu__
+     __gnu__(`ignored')
+     =>__gnu__(ignored)
+     Extensions are ifdef(`__gnu__', `active', `inactive')
+     =>Extensions are inactive
+
+   On UNIX systems, GNU 'm4' will define '__unix__' by default, or
+'unix' when the '-G' option is specified.
+
+   On native Windows systems, GNU 'm4' will define '__windows__' by
+default, or 'windows' when the '-G' option is specified.
+
+   On OS/2 systems, GNU 'm4' will define '__os2__' by default, or 'os2'
+when the '-G' option is specified.
+
+   If GNU 'm4' does not provide a platform macro for your system, please
+report that as a bug.
+
+     define(`provided', `0')
+     =>
+     ifdef(`__unix__', `define(`provided', incr(provided))')
+     =>
+     ifdef(`__windows__', `define(`provided', incr(provided))')
+     =>
+     ifdef(`__os2__', `define(`provided', incr(provided))')
+     =>
+     provided
+     =>1
+
+
+File: m4.info,  Node: Syscmd,  Next: Esyscmd,  Prev: Platform macros,  Up: Shell commands
+
+13.2 Executing simple commands
+==============================
+
+Any shell command can be executed, using 'syscmd':
+
+ -- Builtin: syscmd (SHELL-COMMAND)
+     Executes SHELL-COMMAND as a shell command.
+
+     The expansion of 'syscmd' is void, _not_ the output from
+     SHELL-COMMAND!  Output or error messages from SHELL-COMMAND are not
+     read by 'm4'.  *Note Esyscmd::, if you need to process the command
+     output.
+
+     Prior to executing the command, 'm4' flushes its buffers.  The
+     default standard input, output and error of SHELL-COMMAND are the
+     same as those of 'm4'.
+
+     By default, the SHELL-COMMAND will be used as the argument to the
+     '-c' option of the '/bin/sh' shell (or the version of 'sh'
+     specified by 'command -p getconf PATH', if your system supports
+     that).  If you prefer a different shell, the 'configure' script can
+     be given the option '--with-syscmd-shell=LOCATION' to set the
+     location of an alternative shell at GNU 'm4' installation; the
+     alternative shell must still support '-c'.
+
+     The macro 'syscmd' is recognized only with parameters.
+
+     define(`foo', `FOO')
+     =>
+     syscmd(`echo foo')
+     =>foo
+     =>
+
+   Note how the expansion of 'syscmd' keeps the trailing newline of the
+command, as well as using the newline that appeared after the macro.
+
+   The following is an example of SHELL-COMMAND using the same standard
+input as 'm4':
+
+     $ echo "m4wrap(\`syscmd(\`cat')')" | m4
+     =>
+
+   It tells 'm4' to read all of its input before executing the wrapped
+text, then hand a valid (albeit emptied) pipe as standard input for the
+'cat' subcommand.  Therefore, you should be careful when using standard
+input (either by specifying no files, or by passing '-' as a file name
+on the command line, *note Invoking m4: Command line files.), and also
+invoking subcommands via 'syscmd' or 'esyscmd' that consume data from
+standard input.  When standard input is a seekable file, the subprocess
+will pick up with the next character not yet processed by 'm4'; when it
+is a pipe or other non-seekable file, there is no guarantee how much
+data will already be buffered by 'm4' and thus unavailable to the child.
+
+
+File: m4.info,  Node: Esyscmd,  Next: Sysval,  Prev: Syscmd,  Up: Shell commands
+
+13.3 Reading the output of commands
+===================================
+
+If you want 'm4' to read the output of a shell command, use 'esyscmd':
+
+ -- Builtin: esyscmd (SHELL-COMMAND)
+     Expands to the standard output of the shell command SHELL-COMMAND.
+
+     Prior to executing the command, 'm4' flushes its buffers.  The
+     default standard input and standard error of SHELL-COMMAND are the
+     same as those of 'm4'.  The error output of SHELL-COMMAND is not a
+     part of the expansion: it will appear along with the error output
+     of 'm4'.
+
+     By default, the SHELL-COMMAND will be used as the argument to the
+     '-c' option of the '/bin/sh' shell (or the version of 'sh'
+     specified by 'command -p getconf PATH', if your system supports
+     that).  If you prefer a different shell, the 'configure' script can
+     be given the option '--with-syscmd-shell=LOCATION' to set the
+     location of an alternative shell at GNU 'm4' installation; the
+     alternative shell must still support '-c'.
+
+     The macro 'esyscmd' is recognized only with parameters.
+
+     define(`foo', `FOO')
+     =>
+     esyscmd(`echo foo')
+     =>FOO
+     =>
+
+   Note how the expansion of 'esyscmd' keeps the trailing newline of the
+command, as well as using the newline that appeared after the macro.
+
+   Just as with 'syscmd', care must be exercised when sharing standard
+input between 'm4' and the child process of 'esyscmd'.
+
+
+File: m4.info,  Node: Sysval,  Next: Mkstemp,  Prev: Esyscmd,  Up: Shell commands
+
+13.4 Exit status
+================
+
+To see whether a shell command succeeded, use 'sysval':
+
+ -- Builtin: sysval
+     Expands to the exit status of the last shell command run with
+     'syscmd' or 'esyscmd'.  Expands to 0 if no command has been run
+     yet.
+
+     sysval
+     =>0
+     syscmd(`false')
+     =>
+     ifelse(sysval, `0', `zero', `non-zero')
+     =>non-zero
+     syscmd(`exit 2')
+     =>
+     sysval
+     =>2
+     syscmd(`true')
+     =>
+     sysval
+     =>0
+     esyscmd(`false')
+     =>
+     ifelse(sysval, `0', `zero', `non-zero')
+     =>non-zero
+     esyscmd(`echo dnl && exit 127')
+     =>
+     sysval
+     =>127
+     esyscmd(`true')
+     =>
+     sysval
+     =>0
+
+   'sysval' results in 127 if there was a problem executing the command,
+for example, if the system-imposed argument length limit is exceeded,
+or if there were not enough resources to fork.  It is not possible to
+distinguish between failed execution and successful execution that had
+an exit status of 127, unless there was output from the child process.
+
+   On UNIX platforms, where it is possible to detect when command
+execution is terminated by a signal, rather than a normal exit, the
+result is the signal number shifted left by eight bits.
+
+     dnl This test assumes kill is a shell builtin, and that signals are
+     dnl recognizable.
+     ifdef(`__unix__', ,
+           `errprint(` skipping: syscmd does not have unix semantics
+     ')m4exit(`77')')dnl
+     syscmd(`kill -9 $$')
+     =>
+     sysval
+     =>2304
+     syscmd()
+     =>
+     sysval
+     =>0
+     esyscmd(`kill -9 $$')
+     =>
+     sysval
+     =>2304
+
+
+File: m4.info,  Node: Mkstemp,  Prev: Sysval,  Up: Shell commands
+
+13.5 Making temporary files
+===========================
+
+Commands specified to 'syscmd' or 'esyscmd' might need a temporary file,
+for output or for some other purpose.  There is a builtin macro,
+'mkstemp', for making a temporary file:
+
+ -- Builtin: mkstemp (TEMPLATE)
+ -- Builtin: maketemp (TEMPLATE)
+     Expands to the quoted name of a new, empty file, made from the
+     string TEMPLATE, which should end with the string 'XXXXXX'.  The
+     six 'X' characters are then replaced with random characters
+     matching the regular expression '[a-zA-Z0-9._-]', in order to make
+     the file name unique.  If fewer than six 'X' characters are found
+     at the end of 'template', the result will be longer than the
+     template.  The created file will have access permissions as if by
+     'chmod =rw,go=', meaning that the current umask of the 'm4' process
+     is taken into account, and at most only the current user can read
+     and write the file.
+
+     The traditional behavior, standardized by POSIX, is that 'maketemp'
+     merely replaces the trailing 'X' with the process id, without
+     creating a file or quoting the expansion, and without ensuring that
+     the resulting string is a unique file name.  In part, this means
+     that using the same TEMPLATE twice in the same input file will
+     result in the same expansion.  This behavior is a security hole, as
+     it is very easy for another process to guess the name that will be
+     generated, and thus interfere with a subsequent use of 'syscmd'
+     trying to manipulate that file name.  Hence, POSIX has recommended
+     that all new implementations of 'm4' provide the secure 'mkstemp'
+     builtin, and that users of 'm4' check for its existence.
+
+     The expansion is void and an error is issued if a temporary file
+     could not be created.
+
+     The macros 'mkstemp' and 'maketemp' are recognized only with
+     parameters.
+
+   If you try this next example, you will most likely get different
+output for the two file names, since the replacement characters are
+randomly chosen:
+
+     $ m4
+     define(`tmp', `oops')
+     =>
+     maketemp(`/tmp/fooXXXXXX')
+     =>/tmp/fooa07346
+     ifdef(`mkstemp', `define(`maketemp', defn(`mkstemp'))',
+           `define(`mkstemp', defn(`maketemp'))dnl
+     errprint(`warning: potentially insecure maketemp implementation
+     ')')
+     =>
+     mkstemp(`doc')
+     =>docQv83Uw
+
+   Unless you use the '--traditional' command line option (or '-G',
+*note Invoking m4: Limits control.), the GNU version of 'maketemp' is
+secure.  This means that using the same template in multiple calls will
+generate multiple files.  However, we recommend that you use the new
+'mkstemp' macro, introduced in GNU M4 1.4.8, which is secure even in
+traditional mode.  Also, as of M4 1.4.11, the secure implementation
+quotes the resulting file name, so that you are guaranteed to know what
+file was created even if the random file name happens to match an
+existing macro.  Notice that this example is careful to use 'defn' to
+avoid unintended expansion of 'foo'.
+
+     $ m4
+     define(`foo', `errprint(`oops')')
+     =>
+     syscmd(`rm -f foo-??????')sysval
+     =>0
+     define(`file1', maketemp(`foo-XXXXXX'))dnl
+     ifelse(esyscmd(`echo \` foo-?????? \''), ` foo-?????? ',
+            `no file', `created')
+     =>created
+     define(`file2', maketemp(`foo-XX'))dnl
+     define(`file3', mkstemp(`foo-XXXXXX'))dnl
+     ifelse(len(defn(`file1')), len(defn(`file2')),
+            `same length', `different')
+     =>same length
+     ifelse(defn(`file1'), defn(`file2'), `same', `different file')
+     =>different file
+     ifelse(defn(`file2'), defn(`file3'), `same', `different file')
+     =>different file
+     ifelse(defn(`file1'), defn(`file3'), `same', `different file')
+     =>different file
+     syscmd(`rm 'defn(`file1') defn(`file2') defn(`file3'))
+     =>
+     sysval
+     =>0
+
+
+File: m4.info,  Node: Miscellaneous,  Next: Frozen files,  Prev: Shell commands,  Up: Top
+
+14 Miscellaneous builtin macros
+*******************************
+
+This chapter describes various builtins that do not really belong in
+any of the previous chapters.
+
+* Menu:
+
+* Errprint::                    Printing error messages
+* Location::                    Printing current location
+* M4exit::                      Exiting from 'm4'
+
+
+File: m4.info,  Node: Errprint,  Next: Location,  Up: Miscellaneous
+
+14.1 Printing error messages
+============================
+
+You can print error messages using 'errprint':
+
+ -- Builtin: errprint (MESSAGE, ...)
+     Prints MESSAGE and the rest of the arguments to standard error,
+     separated by spaces.  Standard error is used, regardless of the
+     '--debugfile' option (*note Invoking m4: Debugging options.).
+
+     The expansion of 'errprint' is void.  The macro 'errprint' is
+     recognized only with parameters.
+
+     errprint(`Invalid arguments to forloop
+     ')
+     error->Invalid arguments to forloop
+     =>
+     errprint(`1')errprint(`2',`3
+     ')
+     error->12 3
+     =>
+
+   A trailing newline is _not_ printed automatically, so it should be
+supplied as part of the argument, as in the example.  Unfortunately, the
+exact output of 'errprint' is not very portable to other 'm4'
+implementations: POSIX requires that all arguments be printed, but some
+implementations of 'm4' only print the first.  Furthermore, some BSD
+implementations always append a newline for each 'errprint' call,
+regardless of whether the last argument already had one, and POSIX is
+silent on whether this is acceptable.
+
+
+File: m4.info,  Node: Location,  Next: M4exit,  Prev: Errprint,  Up: Miscellaneous
+
+14.2 Printing current location
+==============================
+
+To make it possible to specify the location of an error, three utility
+builtins exist:
+
+ -- Builtin: __file__
+ -- Builtin: __line__
+ -- Builtin: __program__
+     Expand to the quoted name of the current input file, the current
+     input line number in that file, and the quoted name of the current
+     invocation of 'm4'.
+
+     errprint(__program__:__file__:__line__: `input error
+     ')
+     error->m4:stdin:1: input error
+     =>
+
+   Line numbers start at 1 for each file.  If the file was found due to
+the '-I' option or 'M4PATH' environment variable, that is reflected in
+the file name.  The syncline option ('-s', *note Invoking m4:
+Preprocessor features.), and the 'f' and 'l' flags of 'debugmode' (*note
+Debug Levels::), also use this notion of current file and line.
+Redefining the three location macros has no effect on syncline, debug,
+warning, or error message output.
+
+   This example reuses the file 'incl.m4' mentioned earlier (*note
+Include::):
+
+     $ m4 -I examples
+     define(`foo', ``$0' called at __file__:__line__')
+     =>
+     foo
+     =>foo called at stdin:2
+     include(`incl.m4')
+     =>Include file start
+     =>foo called at examples/incl.m4:2
+     =>Include file end
+     =>
+
+   The location of macros invoked during the rescanning of macro
+expansion text corresponds to the location in the file where the
+expansion was triggered, regardless of how many newline characters the
+expansion text contains.  As of GNU M4 1.4.8, the location of text
+wrapped with 'm4wrap' (*note M4wrap::) is the point at which the
+'m4wrap' was invoked.  Previous versions, however, behaved as though
+wrapped text came from line 0 of the file "".
+
+     define(`echo', `$@')
+     =>
+     define(`foo', `echo(__line__
+     __line__)')
+     =>
+     echo(__line__
+     __line__)
+     =>4
+     =>5
+     m4wrap(`foo
+     ')
+     =>
+     foo(errprint(__line__
+     __line__
+     ))
+     error->8
+     error->9
+     =>8
+     =>8
+     __line__
+     =>11
+     m4wrap(`__line__
+     ')
+     =>
+     ^D
+     =>12
+     =>6
+     =>6
+
+   The '__program__' macro behaves like '$0' in shell terminology.  If
+you invoke 'm4' through an absolute path or a link with a different
+spelling, rather than by relying on a 'PATH' search for plain 'm4', it
+will affect how '__program__' expands.  The intent is that you can use
+it to produce error messages with the same formatting that 'm4' produces
+internally.  It can also be used within 'syscmd' (*note Syscmd::) to
+pick the same version of 'm4' that is currently running, rather than
+whatever version of 'm4' happens to be first in 'PATH'.  It was first
+introduced in GNU M4 1.4.6.
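+
+   As a minimal sketch (it assumes that the running copy of 'm4' is GNU
+'m4', which accepts the '--version' option, and that the name
+'__program__' expands to can still be resolved), the following re-runs
+the current interpreter and checks that the command succeeded:
+
+     syscmd(__program__` --version >/dev/null')sysval
+     =>0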
+
+
+File: m4.info,  Node: M4exit,  Prev: Location,  Up: Miscellaneous
+
+14.3 Exiting from 'm4'
+======================
+
+If you need to exit from 'm4' before the entire input has been read, you
+can use 'm4exit':
+
+ -- Builtin: m4exit ([CODE = '0'])
+     Causes 'm4' to exit, with exit status CODE.  If CODE is left out,
+     the exit status is zero.  If CODE cannot be parsed, or is outside
+     the range of 0 to 255, the exit status is one.  No further input is
+     read, and all wrapped and diverted text is discarded.
+
+     m4wrap(`This text is lost due to `m4exit'.')
+     =>
+     divert(`1') So is this.
+     divert
+     =>
+     m4exit And this is never read.
+
+   A common use of this is to abort processing:
+
+ -- Composite: fatal_error (MESSAGE)
+     Abort processing with an error message and non-zero status.  Prefix
+     MESSAGE with details about where the error occurred, and print the
+     resulting string to standard error.
+
+     define(`fatal_error',
+            `errprint(__program__:__file__:__line__`: fatal error: $*
+     ')m4exit(`1')')
+     =>
+     fatal_error(`this is a BAD one, buster')
+     error->m4:stdin:4: fatal error: this is a BAD one, buster
+
+   After this macro call, 'm4' will exit with exit status 1.  This macro
+is only intended for error exits, since the normal exit procedures are
+not followed, i.e., diverted text is not undiverted, and saved text
+(*note M4wrap::) is not reread.  (This macro could be made more robust
+to earlier versions of 'm4'.  You should try to see if you can find
+weaknesses and correct them; or *note Answers: Improved fatal_error.).
+
+   Note that it is still possible for the exit status to be different
+than what was requested by 'm4exit'.  If 'm4' detects some other error,
+such as a write error on standard output, the exit status will be
+non-zero even if 'm4exit' requested zero.
+
+   If standard input is seekable, then the file will be positioned at
+the next unread character.  If it is a pipe or other non-seekable file,
+then there are no guarantees how much data 'm4' might have read into
+buffers, and thus discarded.
+
+
+File: m4.info,  Node: Frozen files,  Next: Compatibility,  Prev: Miscellaneous,  Up: Top
+
+15 Fast loading of frozen state
+*******************************
+
+Some bigger 'm4' applications may be built over a common base containing
+hundreds of definitions and other costly initializations.  Usually, the
+common base is kept in one or more declarative files, which are
+listed on each 'm4' invocation prior to the user's input file, or else
+each input file uses 'include'.
+
+   Reading the common base of a big application, over and over again,
+may be time consuming.  GNU 'm4' offers some machinery to speed up the
+start of an application using lengthy common bases.
+
+* Menu:
+
+* Using frozen files::          Using frozen files
+* Frozen file format::          Frozen file format
+
+
+File: m4.info,  Node: Using frozen files,  Next: Frozen file format,  Up: Frozen files
+
+15.1 Using frozen files
+=======================
+
+Suppose a user has a library of 'm4' initializations in 'base.m4', which
+is then used with multiple input files:
+
+     $ m4 base.m4 input1.m4
+     $ m4 base.m4 input2.m4
+     $ m4 base.m4 input3.m4
+
+   Rather than spending time parsing the fixed contents of 'base.m4'
+every time, the user might rather execute:
+
+     $ m4 -F base.m4f base.m4
+
+once, and further execute, as often as needed:
+
+     $ m4 -R base.m4f input1.m4
+     $ m4 -R base.m4f input2.m4
+     $ m4 -R base.m4f input3.m4
+
+with the varying input.  The first call, containing the '-F' option,
+only reads and executes file 'base.m4', defining various application
+macros and computing other initializations.  Once the input file
+'base.m4' has been completely processed, GNU 'm4' produces in 'base.m4f'
+a "frozen" file, that is, a file which contains a kind of snapshot of
+the 'm4' internal state.
+
+   Later calls, containing the '-R' option, are able to reload the
+internal state of 'm4', from 'base.m4f', _prior_ to reading any other
+input files.  This means instead of starting with a virgin copy of 'm4',
+input will be read after having effectively recovered the effect of a
+prior run.  In our example, the effect is the same as if file 'base.m4'
+had been read anew.  However, this effect is achieved a lot faster.
+
+   Only one frozen file may be created or read in any one 'm4'
+invocation.  It is not possible to recover two frozen files at once.
+However, frozen files may be updated incrementally, through using '-R'
+and '-F' options simultaneously.  For example, if some care is taken,
+the command:
+
+     $ m4 file1.m4 file2.m4 file3.m4 file4.m4
+
+could be broken down in the following sequence, accumulating the same
+output:
+
+     $ m4 -F file1.m4f file1.m4
+     $ m4 -R file1.m4f -F file2.m4f file2.m4
+     $ m4 -R file2.m4f -F file3.m4f file3.m4
+     $ m4 -R file3.m4f file4.m4
+
+   Some care is necessary because not every effort has been made for
+this to work in all cases.  In particular, the trace attribute of macros
+is not handled, nor the current setting of 'changeword'.  Currently,
+'m4wrap' and 'sysval' also have problems.  Also, interactions for some
+options of 'm4', being used in one call and not in the next, have not
+been fully analyzed yet.  On the other hand, you may be confident that
+stacks of 'pushdef' definitions are handled correctly, as well as
+undefined or renamed builtins, and changed strings for quotes or
+comments.  And future releases of GNU M4 will improve on the utility of
+frozen files.
+
+   When an 'm4' run is to be frozen, the automatic undiversion which
+takes place at end of execution is inhibited.  Instead, all positively
+numbered diversions are saved into the frozen file.  The active
+diversion number is also transmitted.
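+
+   As a small illustration (the file names are arbitrary, and the
+'divert' argument is left unquoted only to keep the shell quoting
+simple), text diverted during a frozen run reappears only when the
+frozen file is reloaded and that later run ends normally:
+
+     $ echo 'divert(1)hello' > div.m4
+     $ m4 -F div.m4f div.m4
+     $ m4 -R div.m4f /dev/null
+     =>hello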
+
+   A frozen file to be reloaded need not reside in the current
+directory.  It is looked up the same way as an 'include' file (*note
+Search Path::).
+
+   If the frozen file was generated with a newer version of 'm4', and
+contains directives that an older 'm4' cannot parse, attempting to load
+the frozen file with option '-R' will cause 'm4' to exit with status 63
+to indicate version mismatch.
+
+
+File: m4.info,  Node: Frozen file format,  Prev: Using frozen files,  Up: Frozen files
+
+15.2 Frozen file format
+=======================
+
+Frozen files are sharable across architectures.  It is safe to write a
+frozen file on one machine and read it on another, given that the second
+machine uses the same or newer version of GNU 'm4'.  It is conventional,
+but not required, to give a frozen file the suffix of '.m4f'.
+
+   These are simple (editable) text files, made up of directives, each
+starting with a capital letter and ending with a newline (<NL>).
+Wherever a directive is expected, the character '#' introduces a comment
+line; empty lines are also ignored if they are not part of an embedded
+string.  In the following descriptions, each LEN refers to the length of
+the corresponding strings STR in the next line of input.  Numbers are
+always expressed in decimal.  There are no escape characters.  The
+directives are:
+
+'C LEN1 , LEN2 <NL> STR1 STR2 <NL>'
+     Uses STR1 and STR2 as the begin-comment and end-comment strings.
+     If omitted, then '#' and <NL> are the comment delimiters.
+
+'D NUMBER, LEN <NL> STR <NL>'
+     Selects diversion NUMBER, making it current, then copies STR into
+     the current diversion.  NUMBER may be a negative number for a
+     non-existing diversion.  To merely specify an active selection, use
+     this command with an empty STR.  With 0 as the diversion NUMBER,
+     STR will be issued on standard output at reload time.  GNU 'm4'
+     will not produce the 'D' directive with non-zero length for
+     diversion 0, but this can be done with manual edits.  This
+     directive may appear more than once for the same diversion, in
+     which case the diversion is the concatenation of the various uses.
+     If omitted, then diversion 0 is current.
+
+'F LEN1 , LEN2 <NL> STR1 STR2 <NL>'
+     Defines, through 'pushdef', a definition for STR1 expanding to the
+     function whose builtin name is STR2.  If the builtin does not exist
+     (for example, if the frozen file was produced by a copy of 'm4'
+     compiled with changeword support, but the version of 'm4' reloading
+     was compiled without it), the reload is silent, but any subsequent
+     use of the definition of STR1 will result in a warning.  This
+     directive may appear more than once for the same name, and its
+     order, along with 'T', is important.  If omitted, you will have no
+     access to any builtins.
+
+'Q LEN1 , LEN2 <NL> STR1 STR2 <NL>'
+     Uses STR1 and STR2 as the begin-quote and end-quote strings.  If
+     omitted, then '`' and ''' are the quote delimiters.
+
+'T LEN1 , LEN2 <NL> STR1 STR2 <NL>'
+     Defines, through 'pushdef', a definition for STR1 expanding to the
+     text given by STR2.  This directive may appear more than once for
+     the same name, and its order, along with 'F', is important.
+
+'V NUMBER <NL>'
+     Confirms the format of the file.  'm4' 1.4.17 only creates and
+     understands frozen files where NUMBER is 1.  This directive must be
+     the first non-comment in the file, and may not appear more than
+     once.
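+
+   Putting a few of these directives together, here is a small
+hand-written frozen file (purely illustrative; it uses the compact
+spacing that GNU 'm4' itself emits, such as 'V1' and 'T3,5'):
+
+     # A hand-written frozen file
+     V1
+     T3,5
+     foohello
+
+   Reloading this file with 'm4 -R' behaves as though
+'pushdef(`foo', `hello')' had been executed before any input files are
+read.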
+
+
+File: m4.info,  Node: Compatibility,  Next: Answers,  Prev: Frozen files,  Up: Top
+
+16 Compatibility with other versions of 'm4'
+********************************************
+
+This chapter describes many of the differences between this
+implementation of 'm4' and other implementations found under UNIX,
+such as System V Release 4, Solaris, and BSD flavors.  In particular, it
+lists the known differences and extensions to POSIX. However, the list
+is not necessarily comprehensive.
+
+   At the time of this writing, POSIX 2001 (also known as IEEE Std
+1003.1-2001) is the latest standard, although a new version of POSIX is
+under development and includes several proposals for modifying what 'm4'
+is required to do.  The requirements for 'm4' are shared between SUSv3
+and POSIX, and can be viewed at
+<http://www.opengroup.org/onlinepubs/000095399/utilities/m4.html>.
+
+* Menu:
+
+* Extensions::                  Extensions in GNU M4
+* Incompatibilities::           Facilities in System V m4 not in GNU M4
+* Other Incompatibilities::     Other incompatibilities
+
+
+File: m4.info,  Node: Extensions,  Next: Incompatibilities,  Up: Compatibility
+
+16.1 Extensions in GNU M4
+=========================
+
+This version of 'm4' contains a few facilities that do not exist in
+System V 'm4'.  These extra facilities are all suppressed by using the
+'-G' command line option (*note Invoking m4: Limits control.), unless
+overridden by other command line options.
+
+   * In the '$N' notation for macro arguments, N can contain several
+     digits, while the System V 'm4' only accepts one digit.  This
+     allows macros in GNU 'm4' to take any number of arguments, and not
+     only nine (*note Arguments::).
+
+     This means that 'define(`foo', `$11')' is ambiguous between
+     implementations.  To portably choose between grabbing the first
+     parameter and appending 1 to the expansion, or grabbing the
+     eleventh parameter, you can do the following:
+
+          define(`a1', `A1')
+          =>
+          dnl First argument, concatenated with 1
+          define(`_1', `$1')define(`first1', `_1($@)1')
+          =>
+          dnl Eleventh argument, portable
+          define(`_9', `$9')define(`eleventh', `_9(shift(shift($@)))')
+          =>
+          dnl Eleventh argument, GNU style
+          define(`Eleventh', `$11')
+          =>
+          first1(`a', `b', `c', `d', `e', `f', `g', `h', `i', `j', `k')
+          =>A1
+          eleventh(`a', `b', `c', `d', `e', `f', `g', `h', `i', `j', `k')
+          =>k
+          Eleventh(`a', `b', `c', `d', `e', `f', `g', `h', `i', `j', `k')
+          =>k
+
+     Also see the 'argn' macro (*note Shift::).
+
+   * The 'divert' (*note Divert::) macro can manage more than 9
+     diversions.  GNU 'm4' treats all positive numbers as valid
+     diversions, rather than discarding diversions greater than 9.
+
+   * Files included with 'include' and 'sinclude' are sought in a user
+     specified search path, if they are not found in the working
+     directory.  The search path is specified by the '-I' option and the
+     'M4PATH' environment variable (*note Search Path::).
+
+   * Arguments to 'undivert' can be non-numeric, in which case the named
+     file will be included uninterpreted in the output (*note
+     Undivert::).
+
+   * Formatted output is supported through the 'format' builtin, which
+     is modeled after the C library function 'printf' (*note Format::).
+
+   * Searches and text substitution through basic regular expressions
+     are supported by the 'regexp' (*note Regexp::) and 'patsubst'
+     (*note Patsubst::) builtins.  Some BSD implementations use extended
+     regular expressions instead.
+
+   * The output of shell commands can be read into 'm4' with 'esyscmd'
+     (*note Esyscmd::).
+
+   * There is indirect access to any builtin macro with 'builtin' (*note
+     Builtin::).
+
+   * Macros can be called indirectly through 'indir' (*note Indir::).
+
+   * The name of the program, the current input file, and the current
+     input line number are accessible through the builtins
+     '__program__', '__file__', and '__line__' (*note Location::).
+
+   * The format of the output from 'dumpdef' and macro tracing can be
+     controlled with 'debugmode' (*note Debug Levels::).
+
+   * The destination of trace and debug output can be controlled with
+     'debugfile' (*note Debug Output::).
+
+   * The 'maketemp' (*note Mkstemp::) macro behaves like 'mkstemp',
+     creating a new file with a unique name on every invocation, rather
+     than following the insecure behavior of replacing the trailing 'X'
+     characters with the 'm4' process id.
+
+   * POSIX only requires support for the command line options '-s',
+     '-D', and '-U', so all other options accepted by GNU M4 are
+     extensions.  *Note Invoking m4::, for a description of these
+     options.
+
+     The debugging and tracing facilities in GNU 'm4' are much more
+     extensive than in most other versions of 'm4'.
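+
+   As a brief, illustrative taste of two of these extensions (each is
+documented in full in the chapters referenced above), 'format' behaves
+much like 'printf', and 'patsubst' performs regular expression
+substitution:
+
+     format(`%.3d', `7')
+     =>007
+     patsubst(`GNUs not Unix', `^', `OBS: ')
+     =>OBS: GNUs not Unix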
+
+
+File: m4.info,  Node: Incompatibilities,  Next: Other Incompatibilities,  Prev: Extensions,  Up: Compatibility
+
+16.2 Facilities in System V 'm4' not in GNU 'm4'
+================================================
+
+The version of 'm4' from System V contains a few facilities that have
+not been implemented in GNU 'm4' yet.  Additionally, POSIX requires some
+behaviors that GNU 'm4' has not implemented yet.  Relying on these
+behaviors is non-portable, as a future release of GNU 'm4' may change them.
+
+   * POSIX requires support for multiple arguments to 'defn', without
+     any clarification on how 'defn' behaves when one of the multiple
+     arguments names a builtin.  System V 'm4' and some other
+     implementations allow mixing builtins and text macros into a single
+     macro.  GNU 'm4' only supports joining multiple text arguments,
+     although a future implementation may lift this restriction to
+     behave more like System V.  The only portable way to join text
+     macros with builtins is via helper macros and implicit
+     concatenation of macro results.
+
+   * POSIX requires an application to exit with non-zero status if it
+     wrote an error message to stderr.  This has not yet been
+     consistently implemented for the various builtins that are required
+     to issue an error (such as 'eval' (*note Eval::) when an argument
+     cannot be parsed).
+
+   * Some traditional implementations only allow reading standard input
+     once, but GNU 'm4' correctly handles multiple instances of '-' on
+     the command line.
+
+   * POSIX requires 'm4wrap' (*note M4wrap::) to act in FIFO (first-in,
+     first-out) order, but GNU 'm4' currently uses LIFO order.
+     Furthermore, POSIX states that only the first argument to 'm4wrap'
+     is saved for later evaluation, but GNU 'm4' saves and processes all
+     arguments, with output separated by spaces.
+
+   * POSIX states that builtins that require arguments, but are called
+     without arguments, have undefined behavior.  Traditional
+     implementations simply behave as though empty strings had been
+     passed.  For example, 'a`'define`'b' would expand to 'ab'.  But GNU
+     'm4' ignores certain builtins if they have missing arguments,
+     giving 'adefineb' for the above example.
+
+   * Traditional implementations handle 'define(`f',`1')' (*note
+     Define::) by undefining the entire stack of previous definitions,
+     as if doing 'undefine(`f')' first.  GNU 'm4' replaces just the top
+     definition on the stack, as if doing 'popdef(`f')' followed by
+     'pushdef(`f',`1')'; see the example after this list.  POSIX allows
+     either behavior.
+
+   * POSIX 2001 requires 'syscmd' (*note Syscmd::) to evaluate command
+     output for macro expansion, but this was a mistake that is
+     anticipated to be corrected in the next version of POSIX. GNU 'm4'
+     follows traditional behavior in 'syscmd' where output is not
+     rescanned, and provides the extension 'esyscmd' that does scan the
+     output.
+
+   * At one point, POSIX required 'changequote(ARG)' (*note
+     Changequote::) to use newline as the close quote, but this was a
+     bug, and the next version of POSIX is anticipated to state that
+     using empty strings or just one argument is unspecified.
+     Meanwhile, the GNU 'm4' behavior of treating an empty end-quote
+     delimiter as ''' is not portable, as Solaris treats it as repeating
+     the start-quote delimiter, and BSD treats it as leaving the
+     previous end-quote delimiter unchanged.  For predictable results,
+     never call changequote with just one argument, or with empty
+     strings for arguments.
+
+   * At one point, POSIX required 'changecom(ARG,)' (*note Changecom::)
+     to make it impossible to end a comment, but this is a bug, and the
+     next version of POSIX is anticipated to state that using empty
+     strings is unspecified.  Meanwhile, the GNU 'm4' behavior of
+     treating an empty end-comment delimiter as newline is not portable,
+     as BSD treats it as leaving the previous end-comment delimiter
+     unchanged.  It is also impossible in BSD implementations to disable
+     comments, even though that is required by POSIX. For predictable
+     results, never call changecom with empty strings for arguments.
+
+   * Most implementations of 'm4' give macros a higher precedence than
+     comments when parsing, meaning that if the start delimiter given to
+     'changecom' (*note Changecom::) starts with a macro name, comments
+     are effectively disabled.  POSIX does not specify what the
+     precedence is, so this version of the GNU 'm4' parser recognizes
+     comments, then macros, then quoted strings.
+
+   * Traditional implementations allow argument collection, but not
+     string and comment processing, to span file boundaries.  Thus, if
+     'a.m4' contains 'len(', and 'b.m4' contains 'abc)', 'm4 a.m4 b.m4'
+     outputs '3' with traditional 'm4', but gives an error message that
+     the end of file was encountered inside a macro with GNU 'm4'.  On
+     the other hand, traditional implementations do end of file
+     processing for files included with 'include' or 'sinclude' (*note
+     Include::), while GNU 'm4' seamlessly integrates the content of
+     those files.  Thus 'include(`a.m4')include(`b.m4')' will output '3'
+     instead of giving an error.
+
+   * Traditional 'm4' treats 'traceon' (*note Trace::) without arguments
+     as a global variable, independent of named macro tracing.  Also,
+     once a macro is undefined, named tracing of that macro is lost.  On
+     the other hand, when GNU 'm4' encounters 'traceon' without
+     arguments, it turns tracing on for all existing definitions at the
+     time, but does not trace future definitions; 'traceoff' without
+     arguments turns tracing off for all definitions regardless of
+     whether they were also traced by name; and tracing by name, such as
+     with '-tfoo' at the command line or 'traceon(`foo')' in the input,
+     is an attribute that is preserved even if the macro is currently
+     undefined.
+
+     Additionally, while POSIX requires trace output, it makes no
+     demands on the formatting of that output.  Parsing trace output is
+     not guaranteed to be reliable, even between different releases of
+     GNU M4; however, the intent is that any future changes in trace
+     output will only occur under the direction of additional
+     'debugmode' flags (*note Debug Levels::).
+
+   * POSIX requires 'eval' (*note Eval::) to treat all operators with
+     the same precedence as C.  However, earlier versions of GNU 'm4'
+     followed the traditional behavior of other 'm4' implementations,
+     where bitwise and logical negation ('~' and '!') have lower
+     precedence than equality operators; and where equality operators
+     ('==' and '!=') had the same precedence as relational operators
+     (such as '<').  Use explicit parentheses to ensure proper
+     precedence.  As extensions to POSIX, GNU 'm4' gives well-defined
+     semantics to operations that C leaves undefined, such as when
+     overflow occurs, when shifting negative numbers, or when performing
+     division by zero.  POSIX also requires '=' to cause an error, but
+     many traditional implementations allowed it as an alias for '=='.
+
+   * POSIX 2001 requires 'translit' (*note Translit::) to treat each
+     character of the second and third arguments literally.  However, it
+     is anticipated that the next version of POSIX will allow the GNU
+     'm4' behavior of treating '-' as a range operator.
+
+   * POSIX requires 'm4' to honor the locale environment variables of
+     'LANG', 'LC_ALL', 'LC_CTYPE', 'LC_MESSAGES', and 'NLSPATH', but
+     this has not yet been implemented in GNU 'm4'.
+
+   * POSIX states that only unquoted leading newlines and blanks (that
+     is, space and tab) are ignored when collecting macro arguments.
+     However, this appears to be a bug in POSIX, since most traditional
+     implementations also ignore all whitespace (formfeed, carriage
+     return, and vertical tab).  GNU 'm4' follows tradition and ignores
+     all leading unquoted whitespace.
+
+   * A strictly-compliant POSIX client is not allowed to use
+     command-line arguments not specified by POSIX. However, since this
+     version of M4 ignores 'POSIXLY_CORRECT' and enables the option
+     '--gnu' by default (*note Invoking m4: Limits control.), a client
+     desiring to be strictly compliant has no way to disable GNU
+     extensions that conflict with POSIX when directly invoking the
+     compiled 'm4'.  A future version of GNU M4 will honor the
+     environment variable 'POSIXLY_CORRECT', implicitly enabling
+     '--traditional' if it is set, in order to allow a
+     strictly-compliant client.  In the meantime, a client needing
+     strict POSIX compliance can use the workaround of invoking a shell
+     script wrapper, where the wrapper then adds '--traditional' to the
+     arguments passed to the compiled 'm4'.
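+
+   For instance, here is the 'define' behavior mentioned in the list
+above, as produced by GNU 'm4'; with a traditional implementation the
+final 'f' would be undefined and would rescan as the literal text 'f'
+rather than expanding to '1':
+
+     pushdef(`f', `1')pushdef(`f', `2')
+     =>
+     define(`f', `3')f
+     =>3
+     popdef(`f')f
+     =>1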
+
+
+File: m4.info,  Node: Other Incompatibilities,  Prev: Incompatibilities,  Up: Compatibility
+
+16.3 Other incompatibilities
+============================
+
+There are a few other incompatibilities between this implementation of
+'m4', and the System V version.
+
+   * GNU 'm4' implements sync lines differently from System V 'm4', when
+     text is being diverted.  GNU 'm4' outputs the sync lines when the
+     text is being diverted, and System V 'm4' when the diverted text is
+     being brought back.
+
+     The problem is which lines and file names should be attached to
+     text that is being, or has been, diverted.  System V 'm4' regards
+     all the diverted text as being generated by the source line
+     containing the 'undivert' call, whereas GNU 'm4' regards the
+     diverted text as being generated at the time it is diverted.
+
+     The sync line option is used mostly when using 'm4' as a front end
+     to a compiler.  If a diverted line causes a compiler error, the
+     error messages should most probably refer to the place where the
+     diversion was made, and not where it was inserted again.
+
+          divert(2)2
+          divert(1)1
+          divert`'0
+          =>#line 3 "stdin"
+          =>0
+          ^D
+          =>#line 2 "stdin"
+          =>1
+          =>#line 1 "stdin"
+          =>2
+
+     The current 'm4' implementation has a limitation that the syncline
+     output at the start of each diversion occurs no matter what, even
+     if the previous diversion did not end with a newline.  This goes
+     contrary to the claim that synclines appear on a line by
+     themselves, so this limitation may be corrected in a future version
+     of 'm4'.  In the meantime, when using '-s', it is wisest to make
+     sure all diversions end with newline.
+
+   * GNU 'm4' makes no attempt at prohibiting self-referential
+     definitions like:
+
+          define(`x', `x')
+          =>
+          define(`x', `x ')
+          =>
+
+     There is nothing inherently wrong with defining 'x' to return 'x'.
+     The wrong thing is to expand 'x' unquoted, because that would cause
+     an infinite rescan loop.  In 'm4', one might use macros to hold
+     strings, as we do for variables in other programming languages,
+     further checking them with:
+
+          ifelse(defn(`HOLDER'), `VALUE', ...)
+
+     In cases like this one, forbidding a macro from holding its own
+     name would be a useless limitation.  Of course, this leaves more
+     rope for the GNU 'm4' user to hang himself!  Rescanning hangs may
+     be avoided through careful programming, a little like for endless
+     loops in traditional programming languages.
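+
+   As a minimal sketch of that checking idiom, a macro can safely hold
+its own name as data, as long as 'defn' is used to compare it without
+ever expanding it unquoted:
+
+     define(`x', `x')
+     =>
+     ifelse(defn(`x'), `x', `self-referential', `something else')
+     =>self-referential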
+
+
+File: m4.info,  Node: Answers,  Next: Copying This Package,  Prev: Compatibility,  Up: Top
+
+17 Correct version of some examples
+***********************************
+
+Some of the examples in this manual are buggy or not very robust, for
+demonstration purposes.  Improved versions of these composite macros are
+presented here.
+
+* Menu:
+
+* Improved exch::               Solution for 'exch'
+* Improved forloop::            Solution for 'forloop'
+* Improved foreach::            Solution for 'foreach'
+* Improved copy::               Solution for 'copy'
+* Improved m4wrap::             Solution for 'm4wrap'
+* Improved cleardivert::        Solution for 'cleardivert'
+* Improved capitalize::         Solution for 'capitalize'
+* Improved fatal_error::        Solution for 'fatal_error'
+
+
+File: m4.info,  Node: Improved exch,  Next: Improved forloop,  Up: Answers
+
+17.1 Solution for 'exch'
+========================
+
+The 'exch' macro (*note Arguments::) as presented requires clients to
+double quote their arguments.  A nicer definition, which lets clients
+follow the rule of thumb of one level of quoting per level of
+parentheses, involves adding quotes in the definition of 'exch', as
+follows:
+
+     define(`exch', ``$2', `$1'')
+     =>
+     define(exch(`expansion text', `macro'))
+     =>
+     macro
+     =>expansion text
+
+
+File: m4.info,  Node: Improved forloop,  Next: Improved foreach,  Prev: Improved exch,  Up: Answers
+
+17.2 Solution for 'forloop'
+===========================
+
+The 'forloop' macro (*note Forloop::) as presented earlier can go into
+an infinite loop if given an iterator that is not parsed as a macro
+name.  It does not do any sanity checking on its numeric bounds, and
+only permits decimal numbers for bounds.  Here is an improved version,
+shipped as 'm4-1.4.17/examples/forloop2.m4'; this version also optimizes
+overhead by calling four macros instead of six per iteration (excluding
+those in TEXT), by not dereferencing the ITERATOR in the helper
+'_forloop'.
+
+     $ m4 -d -I examples
+     undivert(`forloop2.m4')dnl
+     =>divert(`-1')
+     =># forloop(var, from, to, stmt) - improved version:
+     =>#   works even if VAR is not a strict macro name
+     =>#   performs sanity check that FROM is larger than TO
+     =>#   allows complex numerical expressions in TO and FROM
+     =>define(`forloop', `ifelse(eval(`($2) <= ($3)'), `1',
+     =>  `pushdef(`$1')_$0(`$1', eval(`$2'),
+     =>    eval(`$3'), `$4')popdef(`$1')')')
+     =>define(`_forloop',
+     =>  `define(`$1', `$2')$4`'ifelse(`$2', `$3', `',
+     =>    `$0(`$1', incr(`$2'), `$3', `$4')')')
+     =>divert`'dnl
+     include(`forloop2.m4')
+     =>
+     forloop(`i', `2', `1', `no iteration occurs')
+     =>
+     forloop(`', `1', `2', ` odd iterator name')
+     => odd iterator name odd iterator name
+     forloop(`i', `5 + 5', `0xc', ` 0x`'eval(i, `16')')
+     => 0xa 0xb 0xc
+     forloop(`i', `a', `b', `non-numeric bounds')
+     error->m4:stdin:6: bad expression in eval (bad input): (a) <= (b)
+     =>
+
+   One other change to notice is that the improved version used '_$0'
+rather than '_forloop' to invoke the helper routine.  In general, this
+is a good practice to follow, because then the set of macros can be
+uniformly transformed.  The following example shows a transformation
+that doubles the current quoting and appends a suffix '2' to each
+transformed macro.  If 'forloop' refers to the literal '_forloop', then
+'forloop2' invokes '_forloop' instead of the intended '_forloop2', and
+the mixing of quoting paradigms leads to an infinite recursion loop in
+this example.
+
+     $ m4 -d -L 9 -I examples
+     define(`arg1', `$1')include(`forloop2.m4')include(`quote.m4')
+     =>
+     define(`double', `define(`$1'`2',
+       arg1(patsubst(dquote(defn(`$1')), `[`']', `\&\&')))')
+     =>
+     double(`forloop')double(`_forloop')defn(`forloop2')
+     =>ifelse(eval(``($2) <= ($3)''), ``1'',
+     =>  ``pushdef(``$1'')_$0(``$1'', eval(``$2''),
+     =>    eval(``$3''), ``$4'')popdef(``$1'')'')
+     forloop(i, 1, 5, `ifelse(')forloop(i, 1, 5, `)')
+     =>
+     changequote(`[', `]')changequote([``], [''])
+     =>
+     forloop2(i, 1, 5, ``ifelse('')forloop2(i, 1, 5, ``)'')
+     =>
+     changequote`'include(`forloop.m4')
+     =>
+     double(`forloop')double(`_forloop')defn(`forloop2')
+     =>pushdef(``$1'', ``$2'')_forloop($@)popdef(``$1'')
+     forloop(i, 1, 5, `ifelse(')forloop(i, 1, 5, `)')
+     =>
+     changequote(`[', `]')changequote([``], [''])
+     =>
+     forloop2(i, 1, 5, ``ifelse('')forloop2(i, 1, 5, ``)'')
+     error->m4:stdin:12: recursion limit of 9 exceeded, use -L<N> to change it
+
+   One more optimization is still possible.  Instead of repeatedly
+assigning a variable then invoking or dereferencing it, it is possible
+to pass the current iterator value as a single argument.  Coupled with
+'curry' if other arguments are needed (*note Composition::), or with
+helper macros if the argument is needed in more than one place in the
+expansion, the output can be generated with three, rather than four,
+macros of overhead per iteration.  Notice how the file
+'m4-1.4.17/examples/forloop3.m4' rearranges the arguments of the helper
+'_forloop' to take two arguments that are placed around the current
+value.  By splitting a balanced set of parentheses across multiple
+arguments, the helper macro can now be shared by 'forloop' and the new
+'forloop_arg'.
+
+     $ m4 -I examples
+     include(`forloop3.m4')
+     =>
+     undivert(`forloop3.m4')dnl
+     =>divert(`-1')
+     =># forloop_arg(from, to, macro) - invoke MACRO(value) for
+     =>#   each value between FROM and TO, without define overhead
+     =>define(`forloop_arg', `ifelse(eval(`($1) <= ($2)'), `1',
+     =>  `_forloop(`$1', eval(`$2'), `$3(', `)')')')
+     =># forloop(var, from, to, stmt) - refactored to share code
+     =>define(`forloop', `ifelse(eval(`($2) <= ($3)'), `1',
+     =>  `pushdef(`$1')_forloop(eval(`$2'), eval(`$3'),
+     =>    `define(`$1',', `)$4')popdef(`$1')')')
+     =>define(`_forloop',
+     =>  `$3`$1'$4`'ifelse(`$1', `$2', `',
+     =>    `$0(incr(`$1'), `$2', `$3', `$4')')')
+     =>divert`'dnl
+     forloop(`i', `1', `3', ` i')
+     => 1 2 3
+     define(`echo', `$@')
+     =>
+     forloop_arg(`1', `3', ` echo')
+     => 1 2 3
+     include(`curry.m4')
+     =>
+     forloop_arg(`1', `3', `curry(`pushdef', `a')')
+     =>
+     a
+     =>3
+     popdef(`a')a
+     =>2
+     popdef(`a')a
+     =>1
+     popdef(`a')a
+     =>a
+
+   Of course, it is possible to make even more improvements, such as
+adding an optional step argument, or allowing iteration through
+descending sequences.  GNU Autoconf provides some of these additional
+bells and whistles in its 'm4_for' macro.
+
+
+File: m4.info,  Node: Improved foreach,  Next: Improved copy,  Prev: Improved forloop,  Up: Answers
+
+17.3 Solution for 'foreach'
+===========================
+
+The 'foreach' and 'foreachq' macros (*note Foreach::) as presented
+earlier each have flaws.  First, we will examine and fix the quadratic
+behavior of 'foreachq':
+
+     $ m4 -I examples
+     include(`foreachq.m4')
+     =>
+     traceon(`shift')debugmode(`aq')
+     =>
+     foreachq(`x', ``1', `2', `3', `4'', `x
+     ')dnl
+     =>1
+     error->m4trace: -3- shift(`1', `2', `3', `4')
+     error->m4trace: -2- shift(`1', `2', `3', `4')
+     =>2
+     error->m4trace: -4- shift(`1', `2', `3', `4')
+     error->m4trace: -3- shift(`2', `3', `4')
+     error->m4trace: -3- shift(`1', `2', `3', `4')
+     error->m4trace: -2- shift(`2', `3', `4')
+     =>3
+     error->m4trace: -5- shift(`1', `2', `3', `4')
+     error->m4trace: -4- shift(`2', `3', `4')
+     error->m4trace: -3- shift(`3', `4')
+     error->m4trace: -4- shift(`1', `2', `3', `4')
+     error->m4trace: -3- shift(`2', `3', `4')
+     error->m4trace: -2- shift(`3', `4')
+     =>4
+     error->m4trace: -6- shift(`1', `2', `3', `4')
+     error->m4trace: -5- shift(`2', `3', `4')
+     error->m4trace: -4- shift(`3', `4')
+     error->m4trace: -3- shift(`4')
+
+   Each successive iteration was adding more quoted 'shift' invocations,
+and the entire list contents were passing through every iteration.  In
+general, when recursing, it is a good idea to make the recursion use
+fewer arguments, rather than adding additional quoted uses of 'shift'.
+By doing so, 'm4' uses less memory, invokes fewer macros, is less likely
+to run into machine limits, and most importantly, performs faster.  The
+fixed version of 'foreachq' can be found in
+'m4-1.4.17/examples/foreachq2.m4':
+
+     $ m4 -I examples
+     include(`foreachq2.m4')
+     =>
+     undivert(`foreachq2.m4')dnl
+     =>include(`quote.m4')dnl
+     =>divert(`-1')
+     =># foreachq(x, `item_1, item_2, ..., item_n', stmt)
+     =>#   quoted list, improved version
+     =>define(`foreachq', `pushdef(`$1')_$0($@)popdef(`$1')')
+     =>define(`_arg1q', ``$1'')
+     =>define(`_rest', `ifelse(`$#', `1', `', `dquote(shift($@))')')
+     =>define(`_foreachq', `ifelse(`$2', `', `',
+     =>  `define(`$1', _arg1q($2))$3`'$0(`$1', _rest($2), `$3')')')
+     =>divert`'dnl
+     traceon(`shift')debugmode(`aq')
+     =>
+     foreachq(`x', ``1', `2', `3', `4'', `x
+     ')dnl
+     =>1
+     error->m4trace: -3- shift(`1', `2', `3', `4')
+     =>2
+     error->m4trace: -3- shift(`2', `3', `4')
+     =>3
+     error->m4trace: -3- shift(`3', `4')
+     =>4
+
+   Note that the fixed version calls unquoted helper macros in
+'_foreachq' to trim elements immediately; those helper macros in turn
+must re-supply the layer of quotes lost in the macro invocation.
+Contrast the use of '_arg1q', which quotes the first list element, with
+'_arg1' of the earlier implementation that returned the first list
+element directly.  Additionally, by calling the helper method
+immediately, the 'defn(`ITERATOR')' no longer contains unexpanded
+macros.
+
+   The astute m4 programmer might notice that the solution above still
+uses more memory and macro invocations, and thus more time, than
+strictly necessary.  Note that '$2', which contains an arbitrarily long
+quoted list, is expanded and rescanned three times per iteration of
+'_foreachq'.  Furthermore, every iteration of the algorithm effectively
+unboxes then reboxes the list, which costs a couple of macro
+invocations.  It is possible to rewrite the algorithm for a bit more
+speed by swapping the order of the arguments to '_foreachq' in order to
+operate on an unboxed list in the first place, and by using the
+fixed-length '$#' instead of an arbitrary length list as the key to end
+recursion.  The result is an overhead of six macro invocations per loop
+(excluding any macros in TEXT), instead of eight.  This alternative
+approach is available as 'm4-1.4.17/examples/foreachq3.m4':
+
+     $ m4 -I examples
+     include(`foreachq3.m4')
+     =>
+     undivert(`foreachq3.m4')dnl
+     =>divert(`-1')
+     =># foreachq(x, `item_1, item_2, ..., item_n', stmt)
+     =>#   quoted list, alternate improved version
+     =>define(`foreachq', `ifelse(`$2', `', `',
+     =>  `pushdef(`$1')_$0(`$1', `$3', `', $2)popdef(`$1')')')
+     =>define(`_foreachq', `ifelse(`$#', `3', `',
+     =>  `define(`$1', `$4')$2`'$0(`$1', `$2',
+     =>    shift(shift(shift($@))))')')
+     =>divert`'dnl
+     traceon(`shift')debugmode(`aq')
+     =>
+     foreachq(`x', ``1', `2', `3', `4'', `x
+     ')dnl
+     =>1
+     error->m4trace: -4- shift(`x', `x
+     error->', `', `1', `2', `3', `4')
+     error->m4trace: -3- shift(`x
+     error->', `', `1', `2', `3', `4')
+     error->m4trace: -2- shift(`', `1', `2', `3', `4')
+     =>2
+     error->m4trace: -4- shift(`x', `x
+     error->', `1', `2', `3', `4')
+     error->m4trace: -3- shift(`x
+     error->', `1', `2', `3', `4')
+     error->m4trace: -2- shift(`1', `2', `3', `4')
+     =>3
+     error->m4trace: -4- shift(`x', `x
+     error->', `2', `3', `4')
+     error->m4trace: -3- shift(`x
+     error->', `2', `3', `4')
+     error->m4trace: -2- shift(`2', `3', `4')
+     =>4
+     error->m4trace: -4- shift(`x', `x
+     error->', `3', `4')
+     error->m4trace: -3- shift(`x
+     error->', `3', `4')
+     error->m4trace: -2- shift(`3', `4')
+
+   In the current version of M4, every instance of '$@' is rescanned as
+it is encountered.  Thus, the 'foreachq3.m4' alternative uses much less
+memory than 'foreachq2.m4', and executes as much as 10% faster, since
+each iteration encounters fewer '$@'.  However, the implementation of
+rescanning every byte in '$@' is quadratic in the number of bytes
+scanned (for example, making the broken version in 'foreachq.m4' cubic,
+rather than quadratic, in behavior).  A future release of M4 will
+improve the underlying implementation by reusing results of previous
+scans, so that both styles of 'foreachq' can become linear in the number
+of bytes scanned.  Notice how the implementation injects an empty
+argument prior to expanding '$2' within 'foreachq'; the helper macro
+'_foreachq' then ignores the third argument altogether, and ends
+recursion when there are three arguments left because there was nothing
+left to pass through 'shift'.  Thus, each iteration only needs one
+'ifelse', rather than the two conditionals used in the version from
+'foreachq2.m4'.
+
+   So far, all of the implementations of 'foreachq' presented have been
+quadratic with M4 1.4.x.  But 'forloop' is linear, because each
+iteration parses a constant amount of arguments.  So, it is possible to
+design a variant that uses 'forloop' to do the iteration, then uses '$@'
+only once at the end, giving a linear result even with older M4
+implementations.  This implementation relies on the GNU extension that
+'$10' expands to the tenth argument rather than the first argument
+concatenated with '0'.  The trick is to define an intermediate macro
+that repeats the text 'define(`$1', `$N')$2`'', with N set to
+successive integers corresponding to each argument.  The helper macro
+'_foreachq_' is needed in order to generate the literal sequences such
+as '$1' into the intermediate macro, rather than expanding them as the
+arguments of '_foreachq'.  With this approach, no 'shift' calls are even
+needed!  Even though there are seven macros of overhead per iteration
+instead of six in 'foreachq3.m4', the linear scaling is apparent at
+relatively small list sizes.  However, this approach will need
+adjustment when a future version of M4 follows POSIX by no longer
+treating '$10' as the tenth argument; the anticipation is that '${10}'
+can be used instead, although that alternative syntax is not yet
+supported.
+
+     $ m4 -I examples
+     include(`foreachq4.m4')
+     =>
+     undivert(`foreachq4.m4')dnl
+     =>include(`forloop2.m4')dnl
+     =>divert(`-1')
+     =># foreachq(x, `item_1, item_2, ..., item_n', stmt)
+     =>#   quoted list, version based on forloop
+     =>define(`foreachq',
+     =>`ifelse(`$2', `', `', `_$0(`$1', `$3', $2)')')
+     =>define(`_foreachq',
+     =>`pushdef(`$1', forloop(`$1', `3', `$#',
+     =>  `$0_(`1', `2', indir(`$1'))')`popdef(
+     =>    `$1')')indir(`$1', $@)')
+     =>define(`_foreachq_',
+     =>``define(`$$1', `$$3')$$2`''')
+     =>divert`'dnl
+     traceon(`shift')debugmode(`aq')
+     =>
+     foreachq(`x', ``1', `2', `3', `4'', `x
+     ')dnl
+     =>1
+     =>2
+     =>3
+     =>4
+
+   For yet another approach, the improved version of 'foreach',
+available in 'm4-1.4.17/examples/foreach2.m4', simply overquotes the
+arguments to '_foreach' to begin with, using 'dquote_elt'.  Then
+'_foreach' can just use '_arg1' to remove the extra layer of quoting
+that was added up front:
+
+     $ m4 -I examples
+     include(`foreach2.m4')
+     =>
+     undivert(`foreach2.m4')dnl
+     =>include(`quote.m4')dnl
+     =>divert(`-1')
+     =># foreach(x, (item_1, item_2, ..., item_n), stmt)
+     =>#   parenthesized list, improved version
+     =>define(`foreach', `pushdef(`$1')_$0(`$1',
+     =>  (dquote(dquote_elt$2)), `$3')popdef(`$1')')
+     =>define(`_arg1', `$1')
+     =>define(`_foreach', `ifelse(`$2', `(`')', `',
+     =>  `define(`$1', _arg1$2)$3`'$0(`$1', (dquote(shift$2)), `$3')')')
+     =>divert`'dnl
+     traceon(`shift')debugmode(`aq')
+     =>
+     foreach(`x', `(`1', `2', `3', `4')', `x
+     ')dnl
+     error->m4trace: -4- shift(`1', `2', `3', `4')
+     error->m4trace: -4- shift(`2', `3', `4')
+     error->m4trace: -4- shift(`3', `4')
+     =>1
+     error->m4trace: -3- shift(``1'', ``2'', ``3'', ``4'')
+     =>2
+     error->m4trace: -3- shift(``2'', ``3'', ``4'')
+     =>3
+     error->m4trace: -3- shift(``3'', ``4'')
+     =>4
+     error->m4trace: -3- shift(``4'')
+
+   It is likewise possible to write a variant of 'foreach' that performs
+in linear time on M4 1.4.x; the easiest method is probably writing a
+version of 'foreach' that unboxes its list, then invokes '_foreachq' as
+previously defined in 'foreachq4.m4'.
+
+   In summary, recursion over list elements is trickier than it appeared
+at first glance, but provides a powerful idiom within 'm4' processing.
+As a final demonstration, both list styles are now able to handle
+several scenarios that would wreak havoc on one or both of the original
+implementations.  This points out one other difference between the list
+styles.  'foreach' evaluates unquoted list elements only once, in
+preparation for calling '_foreach', similarly for 'foreachq' as provided
+by 'foreachq3.m4' or 'foreachq4.m4'.  But 'foreachq', as provided by
+'foreachq2.m4', evaluates unquoted list elements twice while visiting
+the first list element, once in '_arg1q' and once in '_rest'.  When
+deciding which list style to use, one must take into account whether
+repeating the side effects of unquoted list elements will have any
+detrimental effects.
+
+     $ m4 -I examples
+     include(`foreach2.m4')
+     =>
+     include(`foreachq2.m4')
+     =>
+     dnl 0-element list:
+     foreach(`x', `', `<x>') / foreachq(`x', `', `<x>')
+     => / 
+     dnl 1-element list of empty element
+     foreach(`x', `()', `<x>') / foreachq(`x', ``'', `<x>')
+     =><> / <>
+     dnl 2-element list of empty elements
+     foreach(`x', `(`',`')', `<x>') / foreachq(`x', ``',`'', `<x>')
+     =><><> / <><>
+     dnl 1-element list of a comma
+     foreach(`x', `(`,')', `<x>') / foreachq(`x', ``,'', `<x>')
+     =><,> / <,>
+     dnl 2-element list of unbalanced parentheses
+     foreach(`x', `(`(', `)')', `<x>') / foreachq(`x', ``(', `)'', `<x>')
+     =><(><)> / <(><)>
+     define(`ab', `oops')dnl using defn(`iterator')
+     foreach(`x', `(`a', `b')', `defn(`x')') /dnl
+      foreachq(`x', ``a', `b'', `defn(`x')')
+     =>ab / ab
+     define(`active', `ACT, IVE')
+     =>
+     traceon(`active')
+     =>
+     dnl list of unquoted macros; expansion occurs before recursion
+     foreach(`x', `(active, active)', `<x>
+     ')dnl
+     error->m4trace: -4- active -> `ACT, IVE'
+     error->m4trace: -4- active -> `ACT, IVE'
+     =><ACT>
+     =><IVE>
+     =><ACT>
+     =><IVE>
+     foreachq(`x', `active, active', `<x>
+     ')dnl
+     error->m4trace: -3- active -> `ACT, IVE'
+     error->m4trace: -3- active -> `ACT, IVE'
+     =><ACT>
+     error->m4trace: -3- active -> `ACT, IVE'
+     error->m4trace: -3- active -> `ACT, IVE'
+     =><IVE>
+     =><ACT>
+     =><IVE>
+     dnl list of quoted macros; expansion occurs during recursion
+     foreach(`x', `(`active', `active')', `<x>
+     ')dnl
+     error->m4trace: -1- active -> `ACT, IVE'
+     =><ACT, IVE>
+     error->m4trace: -1- active -> `ACT, IVE'
+     =><ACT, IVE>
+     foreachq(`x', ``active', `active'', `<x>
+     ')dnl
+     error->m4trace: -1- active -> `ACT, IVE'
+     =><ACT, IVE>
+     error->m4trace: -1- active -> `ACT, IVE'
+     =><ACT, IVE>
+     dnl list of double-quoted macro names; no expansion
+     foreach(`x', `(``active'', ``active'')', `<x>
+     ')dnl
+     =><active>
+     =><active>
+     foreachq(`x', ```active'', ``active''', `<x>
+     ')dnl
+     =><active>
+     =><active>
+
+
+File: m4.info,  Node: Improved copy,  Next: Improved m4wrap,  Prev: Improved foreach,  Up: Answers
+
+17.4 Solution for 'copy'
+========================
+
+The macro 'copy' presented above is unable to handle builtin tokens with
+M4 1.4.x, because it tries to pass the builtin token through the macro
+'curry', where it is silently flattened to an empty string (*note
+Composition::).  Rather than using the problematic 'curry' to work
+around the limitation that 'stack_foreach' expects to invoke a macro
+that takes exactly one argument, we can write a new macro that lets us
+form the exact two-argument 'pushdef' call sequence needed, so that we
+are no longer passing a builtin token through a text macro.
+
+ -- Composite: stack_foreach_sep (MACRO, PRE, POST, SEP)
+ -- Composite: stack_foreach_sep_lifo (MACRO, PRE, POST, SEP)
+     For each of the 'pushdef' definitions associated with MACRO, expand
+     the sequence 'PRE`'definition`'POST'.  Additionally, expand SEP
+     between definitions.  'stack_foreach_sep' visits the oldest
+     definition first, while 'stack_foreach_sep_lifo' visits the current
+     definition first.  The expansion may dereference MACRO, but should
+     not modify it.  There are a few special macros, such as 'defn',
+     which cannot be used as the MACRO parameter.
+
+   Note that 'stack_foreach(`MACRO', `ACTION')' is equivalent to
+'stack_foreach_sep(`MACRO', `ACTION(', `)')'.  By supplying explicit
+parentheses, split among the PRE and POST arguments to
+'stack_foreach_sep', it is now possible to construct macro calls with
+more than one argument, without passing builtin tokens through a macro
+call.  It is likewise possible to directly reference the stack
+definitions without a macro call, by leaving PRE and POST empty.  Thus,
+in addition to fixing 'copy' on builtin tokens, it also executes with
+fewer macro invocations.
+
+   The new macro also adds a separator that is only output after the
+first iteration of the helper '_stack_reverse_sep', implemented by
+prepending the original SEP to PRE and omitting a SEP argument in
+subsequent iterations.  Note that the empty string that separates SEP
+from PRE is provided as part of the fourth argument when originally
+calling '_stack_reverse_sep', and not by writing '$4`'$3' as the third
+argument in the recursive call; while the other approach would give the
+same output, it does so at the expense of increasing the argument size
+on each iteration of '_stack_reverse_sep', which results in quadratic
+instead of linear execution time.  The improved stack walking macros are
+available in 'm4-1.4.17/examples/stack_sep.m4':
+
+     $ m4 -I examples
+     include(`stack_sep.m4')
+     =>
+     define(`copy', `ifdef(`$2', `errprint(`$2 already defined
+     ')m4exit(`1')',
+        `stack_foreach_sep(`$1', `pushdef(`$2',', `)')')')dnl
+     pushdef(`a', `1')pushdef(`a', defn(`divnum'))
+     =>
+     copy(`a', `b')
+     =>
+     b
+     =>0
+     popdef(`b')
+     =>
+     b
+     =>1
+     pushdef(`c', `1')pushdef(`c', `2')
+     =>
+     stack_foreach_sep_lifo(`c', `', `', `, ')
+     =>2, 1
+     undivert(`stack_sep.m4')dnl
+     =>divert(`-1')
+     =># stack_foreach_sep(macro, pre, post, sep)
+     =># Invoke PRE`'defn`'POST with a single argument of each definition
+     =># from the definition stack of MACRO, starting with the oldest, and
+     =># separated by SEP between definitions.
+     =>define(`stack_foreach_sep',
+     =>`_stack_reverse_sep(`$1', `tmp-$1')'dnl
+     =>`_stack_reverse_sep(`tmp-$1', `$1', `$2`'defn(`$1')$3', `$4`'')')
+     =># stack_foreach_sep_lifo(macro, pre, post, sep)
+     =># Like stack_foreach_sep, but starting with the newest definition.
+     =>define(`stack_foreach_sep_lifo',
+     =>`_stack_reverse_sep(`$1', `tmp-$1', `$2`'defn(`$1')$3', `$4`'')'dnl
+     =>`_stack_reverse_sep(`tmp-$1', `$1')')
+     =>define(`_stack_reverse_sep',
+     =>`ifdef(`$1', `pushdef(`$2', defn(`$1'))$3`'popdef(`$1')$0(
+     =>  `$1', `$2', `$4$3')')')
+     =>divert`'dnl
+
+
+File: m4.info,  Node: Improved m4wrap,  Next: Improved cleardivert,  Prev: Improved copy,  Up: Answers
+
+17.5 Solution for 'm4wrap'
+==========================
+
+The replacement 'm4wrap' versions presented above, designed to guarantee
+FIFO or LIFO order regardless of the underlying M4 implementation, share
+a bug when dealing with wrapped text that looks like parameter
+expansion.  Note how the invocation of 'm4wrapN' interprets these
+parameters, while using the builtin preserves them for their intended
+use.
+
+     $ m4 -I examples
+     include(`wraplifo.m4')
+     =>
+     m4wrap(`define(`foo', ``$0:'-$1-$*-$#-')foo(`a', `b')
+     ')
+     =>
+     builtin(`m4wrap', ``'define(`bar', ``$0:'-$1-$*-$#-')bar(`a', `b')
+     ')
+     =>
+     ^D
+     =>bar:-a-a,b-2-
+     =>m4wrap0:---0-
+
+   Additionally, the computation of '_m4wrap_level' and the creation of
+multiple 'm4wrapN' placeholders in the original examples are more
+expensive in time and memory than strictly necessary.  Notice how the
+improved version grabs the wrapped text via 'defn' to avoid parameter
+expansion, then undefines '_m4wrap_text', before stripping a level of
+quotes with '_arg1' to expand the text.  That way, each level of
+wrapping reuses the single placeholder, which starts each nesting level
+in an undefined state.
+
+   Finally, it is worth emulating the GNU M4 extension of saving all
+arguments to 'm4wrap', separated by a space, rather than saving just the
+first argument.  This is done with the 'join' macro documented
+previously (*note Shift::).  The improved LIFO example is shipped as
+'m4-1.4.17/examples/wraplifo2.m4', and can easily be converted to a FIFO
+solution by swapping the adjacent invocations of 'joinall' and 'defn'.
+
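+   For reference, the converted FIFO definition is sketched below; it
+is simply the 'wraplifo2.m4' definition of 'm4wrap' with the adjacent
+'defn' and 'joinall' calls exchanged in the first 'ifdef' branch (this
+sketch is not shipped as an example file):
+
+     dnl FIFO sketch: append new text after the saved text, instead of
+     dnl prepending it as in wraplifo2.m4.
+     define(`m4wrap',
+     `ifdef(`_$0_text',
+            `define(`_$0_text', defn(`_$0_text')joinall(` ', $@))',
+            `_$0(`_arg1(defn(`_$0_text')undefine(`_$0_text'))')dnl
+     define(`_$0_text', joinall(` ', $@))')')
+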
+     $ m4 -I examples
+     include(`wraplifo2.m4')
+     =>
+     undivert(`wraplifo2.m4')dnl
+     =>dnl Redefine m4wrap to have LIFO semantics, improved example.
+     =>include(`join.m4')dnl
+     =>define(`_m4wrap', defn(`m4wrap'))dnl
+     =>define(`_arg1', `$1')dnl
+     =>define(`m4wrap',
+     =>`ifdef(`_$0_text',
+     =>       `define(`_$0_text', joinall(` ', $@)defn(`_$0_text'))',
+     =>       `_$0(`_arg1(defn(`_$0_text')undefine(`_$0_text'))')dnl
+     =>define(`_$0_text', joinall(` ', $@))')')dnl
+     m4wrap(`define(`foo', ``$0:'-$1-$*-$#-')foo(`a', `b')
+     ')
+     =>
+     m4wrap(`lifo text
+     m4wrap(`nested', `', `$@
+     ')')
+     =>
+     ^D
+     =>lifo text
+     =>foo:-a-a,b-2-
+     =>nested  $@
+
+
+File: m4.info,  Node: Improved cleardivert,  Next: Improved capitalize,  Prev: Improved m4wrap,  Up: Answers
+
+17.6 Solution for 'cleardivert'
+===============================
+
+The 'cleardivert' macro (*note Cleardivert::) cannot, as it stands, be
+called without arguments to clear all pending diversions.  That is
+because using undivert with an empty string for an argument is different
+than using it with no arguments at all.  Compare the earlier definition
+with one that takes the number of arguments into account:
+
+     define(`cleardivert',
+       `pushdef(`_n', divnum)divert(`-1')undivert($@)divert(_n)popdef(`_n')')
+     =>
+     divert(`1')one
+     divert
+     =>
+     cleardivert
+     =>
+     undivert
+     =>one
+     =>
+     define(`cleardivert',
+       `pushdef(`_num', divnum)divert(`-1')ifelse(`$#', `0',
+         `undivert`'', `undivert($@)')divert(_num)popdef(`_num')')
+     =>
+     divert(`2')two
+     divert
+     =>
+     cleardivert
+     =>
+     undivert
+     =>
+
+
+File: m4.info,  Node: Improved capitalize,  Next: Improved fatal_error,  Prev: Improved cleardivert,  Up: Answers
+
+17.7 Solution for 'capitalize'
+==============================
+
+The 'capitalize' macro (*note Patsubst::) as presented earlier does not
+allow clients to follow the quoting rule of thumb.  Consider the three
+macros 'active', 'Active', and 'ACTIVE', and the difference between
+calling 'capitalize' with the expansion of a macro, expanding the result
+of a case change, and changing the case of a double-quoted string:
+
+     $ m4 -I examples
+     include(`capitalize.m4')dnl
+     define(`active', `act1, ive')dnl
+     define(`Active', `Act2, Ive')dnl
+     define(`ACTIVE', `ACT3, IVE')dnl
+     upcase(active)
+     =>ACT1,IVE
+     upcase(`active')
+     =>ACT3, IVE
+     upcase(``active'')
+     =>ACTIVE
+     downcase(ACTIVE)
+     =>act3,ive
+     downcase(`ACTIVE')
+     =>act1, ive
+     downcase(``ACTIVE'')
+     =>active
+     capitalize(active)
+     =>Act1
+     capitalize(`active')
+     =>Active
+     capitalize(``active'')
+     =>_capitalize(`active')
+     define(`A', `OOPS')
+     =>
+     capitalize(active)
+     =>OOPSct1
+     capitalize(`active')
+     =>OOPSctive
+
+   First, when 'capitalize' is called with more than one argument, it
+throws away later arguments, whereas 'upcase' and 'downcase' use '$*'
+to collect them all.  The fix is simple: use '$*' consistently.
+
+   Next, with single-quoting, 'capitalize' outputs a single character, a
+set of quotes, then the rest of the characters, making it impossible to
+invoke 'Active' after the fact, and allowing the alternate macro 'A' to
+interfere.  Here, the solution is to use additional quoting in the
+helper macros, then pass the final over-quoted output string through
+'_arg1' to remove the extra quoting and finally invoke the concatenated
+portions as a single string.
+
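+   As a small illustration of that last step, '_arg1' (defined simply
+as '$1' in the fixed version below) strips one level of quoting: an
+over-quoted name comes out literally, while a single-quoted one is
+expanded.  The sketch reuses the 'Active' macro from the session above:
+
+     dnl Assumes `Active' is still defined as in the session above.
+     define(`_arg1', `$1')dnl
+     _arg1(``Active'')
+     =>Active
+     _arg1(`Active')
+     =>Act2, Ive
+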
+   Finally, when passed a double-quoted string, the nested macro
+'_capitalize' is never invoked because it ends up nested inside quotes.
+This one is the toughest to fix.  In short, we have no idea how many
+levels of quotes are in effect on the substring being altered by
+'patsubst'.  If the replacement string cannot be expressed entirely in
+terms of literal text and backslash substitutions, then we need a
+mechanism to guarantee that the helper macros are invoked outside of
+quotes.  In other words, this sounds like a job for 'changequote' (*note
+Changequote::).  By changing the active quoting characters, we can
+guarantee that replacement text injected by 'patsubst' always occurs in
+the middle of a string that has exactly one level of over-quoting using
+alternate quotes; so the replacement text closes the quoted string,
+invokes the helper macros, then reopens the quoted string.  In turn,
+that means the replacement text has unbalanced quotes, necessitating
+another round of 'changequote'.
+
+   In the fixed version below (also shipped as
+'m4-1.4.17/examples/capitalize2.m4'), 'capitalize' uses the alternate
+quotes of '<<[' and ']>>' (the longer strings are chosen so as to be
+less likely to appear in the text being converted).  The helpers
+'_to_alt' and '_from_alt' merely reduce the number of characters
+required to perform a 'changequote', since the definition changes twice.
+The outermost pair means that 'patsubst' and '_capitalize_alt' are
+invoked with alternate quoting; the innermost pair is used so that the
+third argument to 'patsubst' can contain an unbalanced ']>>'/'<<[' pair.
+Note that 'upcase' and 'downcase' must be redefined as '_upcase_alt' and
+'_downcase_alt', since they contain nested quotes but are invoked with
+the alternate quoting scheme in effect.
+
+     $ m4 -I examples
+     include(`capitalize2.m4')dnl
+     define(`active', `act1, ive')dnl
+     define(`Active', `Act2, Ive')dnl
+     define(`ACTIVE', `ACT3, IVE')dnl
+     define(`A', `OOPS')dnl
+     capitalize(active; `active'; ``active''; ```actIVE''')
+     =>Act1,Ive; Act2, Ive; Active; `Active'
+     undivert(`capitalize2.m4')dnl
+     =>divert(`-1')
+     =># upcase(text)
+     =># downcase(text)
+     =># capitalize(text)
+     =>#   change case of text, improved version
+     =>define(`upcase', `translit(`$*', `a-z', `A-Z')')
+     =>define(`downcase', `translit(`$*', `A-Z', `a-z')')
+     =>define(`_arg1', `$1')
+     =>define(`_to_alt', `changequote(`<<[', `]>>')')
+     =>define(`_from_alt', `changequote(<<[`]>>, <<[']>>)')
+     =>define(`_upcase_alt', `translit(<<[$*]>>, <<[a-z]>>, <<[A-Z]>>)')
+     =>define(`_downcase_alt', `translit(<<[$*]>>, <<[A-Z]>>, <<[a-z]>>)')
+     =>define(`_capitalize_alt',
+     =>  `regexp(<<[$1]>>, <<[^\(\w\)\(\w*\)]>>,
+     =>    <<[_upcase_alt(<<[<<[\1]>>]>>)_downcase_alt(<<[<<[\2]>>]>>)]>>)')
+     =>define(`capitalize',
+     =>  `_arg1(_to_alt()patsubst(<<[<<[$*]>>]>>, <<[\w+]>>,
+     =>    _from_alt()`]>>_$0_alt(<<[\&]>>)<<['_to_alt())_from_alt())')
+     =>divert`'dnl
+
+
+File: m4.info,  Node: Improved fatal_error,  Prev: Improved capitalize,  Up: Answers
+
+17.8 Solution for 'fatal_error'
+===============================
+
+The 'fatal_error' macro (*note M4exit::) is not robust to versions of
+GNU M4 earlier than 1.4.8, where invoking '__file__' (*note Location::)
+inside 'm4wrap' resulted in an empty string, and '__line__' resulted
+in '0' even though all files start at line 1.  Furthermore, versions
+earlier than 1.4.6 did not support the '__program__' macro.  If you want
+'fatal_error' to work across the entire 1.4.x release series, a better
+implementation would be:
+
+     define(`fatal_error',
+       `errprint(ifdef(`__program__', `__program__', ``m4'')'dnl
+     `:ifelse(__line__, `0', `',
+         `__file__:__line__:')` fatal error: $*
+     ')m4exit(`1')')
+     =>
+     m4wrap(`divnum(`demo of internal message')
+     fatal_error(`inside wrapped text')')
+     =>
+     ^D
+     error->m4:stdin:6: Warning: excess arguments to builtin `divnum' ignored
+     =>0
+     error->m4:stdin:6: fatal error: inside wrapped text
+
+
+File: m4.info,  Node: Copying This Package,  Next: Copying This Manual,  Prev: Answers,  Up: Top
+
+Appendix A How to make copies of the overall M4 package
+*******************************************************
+
+This appendix covers the license for copying the source code of the
+overall M4 package.  This manual is under a different set of
+restrictions, covered later (*note Copying This Manual::).
+
+* Menu:
+
+* GNU General Public License::  License for copying the M4 package
+
+
+File: m4.info,  Node: GNU General Public License,  Up: Copying This Package
+
+A.1 License for copying the M4 package
+======================================
+
+                        Version 3, 29 June 2007
+
+     Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+
+     Everyone is permitted to copy and distribute verbatim copies of this
+     license document, but changing it is not allowed.
+
+Preamble
+========
+
+The GNU General Public License is a free, copyleft license for software
+and other kinds of works.
+
+   The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+   When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+   To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+   For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+   Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+   For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+   Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+   Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+   The precise terms and conditions for copying, distribution and
+modification follow.
+
+TERMS AND CONDITIONS
+====================
+
+  0. Definitions.
+
+     "This License" refers to version 3 of the GNU General Public
+     License.
+
+     "Copyright" also means copyright-like laws that apply to other
+     kinds of works, such as semiconductor masks.
+
+     "The Program" refers to any copyrightable work licensed under this
+     License.  Each licensee is addressed as "you".  "Licensees" and
+     "recipients" may be individuals or organizations.
+
+     To "modify" a work means to copy from or adapt all or part of the
+     work in a fashion requiring copyright permission, other than the
+     making of an exact copy.  The resulting work is called a "modified
+     version" of the earlier work or a work "based on" the earlier work.
+
+     A "covered work" means either the unmodified Program or a work
+     based on the Program.
+
+     To "propagate" a work means to do anything with it that, without
+     permission, would make you directly or secondarily liable for
+     infringement under applicable copyright law, except executing it on
+     a computer or modifying a private copy.  Propagation includes
+     copying, distribution (with or without modification), making
+     available to the public, and in some countries other activities as
+     well.
+
+     To "convey" a work means any kind of propagation that enables other
+     parties to make or receive copies.  Mere interaction with a user
+     through a computer network, with no transfer of a copy, is not
+     conveying.
+
+     An interactive user interface displays "Appropriate Legal Notices"
+     to the extent that it includes a convenient and prominently visible
+     feature that (1) displays an appropriate copyright notice, and (2)
+     tells the user that there is no warranty for the work (except to
+     the extent that warranties are provided), that licensees may convey
+     the work under this License, and how to view a copy of this
+     License.  If the interface presents a list of user commands or
+     options, such as a menu, a prominent item in the list meets this
+     criterion.
+
+  1. Source Code.
+
+     The "source code" for a work means the preferred form of the work
+     for making modifications to it.  "Object code" means any non-source
+     form of a work.
+
+     A "Standard Interface" means an interface that either is an
+     official standard defined by a recognized standards body, or, in
+     the case of interfaces specified for a particular programming
+     language, one that is widely used among developers working in that
+     language.
+
+     The "System Libraries" of an executable work include anything,
+     other than the work as a whole, that (a) is included in the normal
+     form of packaging a Major Component, but which is not part of that
+     Major Component, and (b) serves only to enable use of the work with
+     that Major Component, or to implement a Standard Interface for
+     which an implementation is available to the public in source code
+     form.  A "Major Component", in this context, means a major
+     essential component (kernel, window system, and so on) of the
+     specific operating system (if any) on which the executable work
+     runs, or a compiler used to produce the work, or an object code
+     interpreter used to run it.
+
+     The "Corresponding Source" for a work in object code form means all
+     the source code needed to generate, install, and (for an executable
+     work) run the object code and to modify the work, including scripts
+     to control those activities.  However, it does not include the
+     work's System Libraries, or general-purpose tools or generally
+     available free programs which are used unmodified in performing
+     those activities but which are not part of the work.  For example,
+     Corresponding Source includes interface definition files associated
+     with source files for the work, and the source code for shared
+     libraries and dynamically linked subprograms that the work is
+     specifically designed to require, such as by intimate data
+     communication or control flow between those subprograms and other
+     parts of the work.
+
+     The Corresponding Source need not include anything that users can
+     regenerate automatically from other parts of the Corresponding
+     Source.
+
+     The Corresponding Source for a work in source code form is that
+     same work.
+
+  2. Basic Permissions.
+
+     All rights granted under this License are granted for the term of
+     copyright on the Program, and are irrevocable provided the stated
+     conditions are met.  This License explicitly affirms your unlimited
+     permission to run the unmodified Program.  The output from running
+     a covered work is covered by this License only if the output, given
+     its content, constitutes a covered work.  This License acknowledges
+     your rights of fair use or other equivalent, as provided by
+     copyright law.
+
+     You may make, run and propagate covered works that you do not
+     convey, without conditions so long as your license otherwise
+     remains in force.  You may convey covered works to others for the
+     sole purpose of having them make modifications exclusively for you,
+     or provide you with facilities for running those works, provided
+     that you comply with the terms of this License in conveying all
+     material for which you do not control copyright.  Those thus making
+     or running the covered works for you must do so exclusively on your
+     behalf, under your direction and control, on terms that prohibit
+     them from making any copies of your copyrighted material outside
+     their relationship with you.
+
+     Conveying under any other circumstances is permitted solely under
+     the conditions stated below.  Sublicensing is not allowed; section
+     10 makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+     No covered work shall be deemed part of an effective technological
+     measure under any applicable law fulfilling obligations under
+     article 11 of the WIPO copyright treaty adopted on 20 December
+     1996, or similar laws prohibiting or restricting circumvention of
+     such measures.
+
+     When you convey a covered work, you waive any legal power to forbid
+     circumvention of technological measures to the extent such
+     circumvention is effected by exercising rights under this License
+     with respect to the covered work, and you disclaim any intention to
+     limit operation or modification of the work as a means of
+     enforcing, against the work's users, your or third parties' legal
+     rights to forbid circumvention of technological measures.
+
+  4. Conveying Verbatim Copies.
+
+     You may convey verbatim copies of the Program's source code as you
+     receive it, in any medium, provided that you conspicuously and
+     appropriately publish on each copy an appropriate copyright notice;
+     keep intact all notices stating that this License and any
+     non-permissive terms added in accord with section 7 apply to the
+     code; keep intact all notices of the absence of any warranty; and
+     give all recipients a copy of this License along with the Program.
+
+     You may charge any price or no price for each copy that you convey,
+     and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+     You may convey a work based on the Program, or the modifications to
+     produce it from the Program, in the form of source code under the
+     terms of section 4, provided that you also meet all of these
+     conditions:
+
+       a. The work must carry prominent notices stating that you
+          modified it, and giving a relevant date.
+
+       b. The work must carry prominent notices stating that it is
+          released under this License and any conditions added under
+          section 7.  This requirement modifies the requirement in
+          section 4 to "keep intact all notices".
+
+       c. You must license the entire work, as a whole, under this
+          License to anyone who comes into possession of a copy.  This
+          License will therefore apply, along with any applicable
+          section 7 additional terms, to the whole of the work, and all
+          its parts, regardless of how they are packaged.  This License
+          gives no permission to license the work in any other way, but
+          it does not invalidate such permission if you have separately
+          received it.
+
+       d. If the work has interactive user interfaces, each must display
+          Appropriate Legal Notices; however, if the Program has
+          interactive interfaces that do not display Appropriate Legal
+          Notices, your work need not make them do so.
+
+     A compilation of a covered work with other separate and independent
+     works, which are not by their nature extensions of the covered
+     work, and which are not combined with it such as to form a larger
+     program, in or on a volume of a storage or distribution medium, is
+     called an "aggregate" if the compilation and its resulting
+     copyright are not used to limit the access or legal rights of the
+     compilation's users beyond what the individual works permit.
+     Inclusion of a covered work in an aggregate does not cause this
+     License to apply to the other parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+     You may convey a covered work in object code form under the terms
+     of sections 4 and 5, provided that you also convey the
+     machine-readable Corresponding Source under the terms of this
+     License, in one of these ways:
+
+       a. Convey the object code in, or embodied in, a physical product
+          (including a physical distribution medium), accompanied by the
+          Corresponding Source fixed on a durable physical medium
+          customarily used for software interchange.
+
+       b. Convey the object code in, or embodied in, a physical product
+          (including a physical distribution medium), accompanied by a
+          written offer, valid for at least three years and valid for as
+          long as you offer spare parts or customer support for that
+          product model, to give anyone who possesses the object code
+          either (1) a copy of the Corresponding Source for all the
+          software in the product that is covered by this License, on a
+          durable physical medium customarily used for software
+          interchange, for a price no more than your reasonable cost of
+          physically performing this conveying of source, or (2) access
+          to copy the Corresponding Source from a network server at no
+          charge.
+
+       c. Convey individual copies of the object code with a copy of the
+          written offer to provide the Corresponding Source.  This
+          alternative is allowed only occasionally and noncommercially,
+          and only if you received the object code with such an offer,
+          in accord with subsection 6b.
+
+       d. Convey the object code by offering access from a designated
+          place (gratis or for a charge), and offer equivalent access to
+          the Corresponding Source in the same way through the same
+          place at no further charge.  You need not require recipients
+          to copy the Corresponding Source along with the object code.
+          If the place to copy the object code is a network server, the
+          Corresponding Source may be on a different server (operated by
+          you or a third party) that supports equivalent copying
+          facilities, provided you maintain clear directions next to the
+          object code saying where to find the Corresponding Source.
+          Regardless of what server hosts the Corresponding Source, you
+          remain obligated to ensure that it is available for as long as
+          needed to satisfy these requirements.
+
+       e. Convey the object code using peer-to-peer transmission,
+          provided you inform other peers where the object code and
+          Corresponding Source of the work are being offered to the
+          general public at no charge under subsection 6d.
+
+     A separable portion of the object code, whose source code is
+     excluded from the Corresponding Source as a System Library, need
+     not be included in conveying the object code work.
+
+     A "User Product" is either (1) a "consumer product", which means
+     any tangible personal property which is normally used for personal,
+     family, or household purposes, or (2) anything designed or sold for
+     incorporation into a dwelling.  In determining whether a product is
+     a consumer product, doubtful cases shall be resolved in favor of
+     coverage.  For a particular product received by a particular user,
+     "normally used" refers to a typical or common use of that class of
+     product, regardless of the status of the particular user or of the
+     way in which the particular user actually uses, or expects or is
+     expected to use, the product.  A product is a consumer product
+     regardless of whether the product has substantial commercial,
+     industrial or non-consumer uses, unless such uses represent the
+     only significant mode of use of the product.
+
+     "Installation Information" for a User Product means any methods,
+     procedures, authorization keys, or other information required to
+     install and execute modified versions of a covered work in that
+     User Product from a modified version of its Corresponding Source.
+     The information must suffice to ensure that the continued
+     functioning of the modified object code is in no case prevented or
+     interfered with solely because modification has been made.
+
+     If you convey an object code work under this section in, or with,
+     or specifically for use in, a User Product, and the conveying
+     occurs as part of a transaction in which the right of possession
+     and use of the User Product is transferred to the recipient in
+     perpetuity or for a fixed term (regardless of how the transaction
+     is characterized), the Corresponding Source conveyed under this
+     section must be accompanied by the Installation Information.  But
+     this requirement does not apply if neither you nor any third party
+     retains the ability to install modified object code on the User
+     Product (for example, the work has been installed in ROM).
+
+     The requirement to provide Installation Information does not
+     include a requirement to continue to provide support service,
+     warranty, or updates for a work that has been modified or installed
+     by the recipient, or for the User Product in which it has been
+     modified or installed.  Access to a network may be denied when the
+     modification itself materially and adversely affects the operation
+     of the network or violates the rules and protocols for
+     communication across the network.
+
+     Corresponding Source conveyed, and Installation Information
+     provided, in accord with this section must be in a format that is
+     publicly documented (and with an implementation available to the
+     public in source code form), and must require no special password
+     or key for unpacking, reading or copying.
+
+  7. Additional Terms.
+
+     "Additional permissions" are terms that supplement the terms of
+     this License by making exceptions from one or more of its
+     conditions.  Additional permissions that are applicable to the
+     entire Program shall be treated as though they were included in
+     this License, to the extent that they are valid under applicable
+     law.  If additional permissions apply only to part of the Program,
+     that part may be used separately under those permissions, but the
+     entire Program remains governed by this License without regard to
+     the additional permissions.
+
+     When you convey a copy of a covered work, you may at your option
+     remove any additional permissions from that copy, or from any part
+     of it.  (Additional permissions may be written to require their own
+     removal in certain cases when you modify the work.)  You may place
+     additional permissions on material, added by you to a covered work,
+     for which you have or can give appropriate copyright permission.
+
+     Notwithstanding any other provision of this License, for material
+     you add to a covered work, you may (if authorized by the copyright
+     holders of that material) supplement the terms of this License with
+     terms:
+
+       a. Disclaiming warranty or limiting liability differently from
+          the terms of sections 15 and 16 of this License; or
+
+       b. Requiring preservation of specified reasonable legal notices
+          or author attributions in that material or in the Appropriate
+          Legal Notices displayed by works containing it; or
+
+       c. Prohibiting misrepresentation of the origin of that material,
+          or requiring that modified versions of such material be marked
+          in reasonable ways as different from the original version; or
+
+       d. Limiting the use for publicity purposes of names of licensors
+          or authors of the material; or
+
+       e. Declining to grant rights under trademark law for use of some
+          trade names, trademarks, or service marks; or
+
+       f. Requiring indemnification of licensors and authors of that
+          material by anyone who conveys the material (or modified
+          versions of it) with contractual assumptions of liability to
+          the recipient, for any liability that these contractual
+          assumptions directly impose on those licensors and authors.
+
+     All other non-permissive additional terms are considered "further
+     restrictions" within the meaning of section 10.  If the Program as
+     you received it, or any part of it, contains a notice stating that
+     it is governed by this License along with a term that is a further
+     restriction, you may remove that term.  If a license document
+     contains a further restriction but permits relicensing or conveying
+     under this License, you may add to a covered work material governed
+     by the terms of that license document, provided that the further
+     restriction does not survive such relicensing or conveying.
+
+     If you add terms to a covered work in accord with this section, you
+     must place, in the relevant source files, a statement of the
+     additional terms that apply to those files, or a notice indicating
+     where to find the applicable terms.
+
+     Additional terms, permissive or non-permissive, may be stated in
+     the form of a separately written license, or stated as exceptions;
+     the above requirements apply either way.
+
+  8. Termination.
+
+     You may not propagate or modify a covered work except as expressly
+     provided under this License.  Any attempt otherwise to propagate or
+     modify it is void, and will automatically terminate your rights
+     under this License (including any patent licenses granted under the
+     third paragraph of section 11).
+
+     However, if you cease all violation of this License, then your
+     license from a particular copyright holder is reinstated (a)
+     provisionally, unless and until the copyright holder explicitly and
+     finally terminates your license, and (b) permanently, if the
+     copyright holder fails to notify you of the violation by some
+     reasonable means prior to 60 days after the cessation.
+
+     Moreover, your license from a particular copyright holder is
+     reinstated permanently if the copyright holder notifies you of the
+     violation by some reasonable means, this is the first time you have
+     received notice of violation of this License (for any work) from
+     that copyright holder, and you cure the violation prior to 30 days
+     after your receipt of the notice.
+
+     Termination of your rights under this section does not terminate
+     the licenses of parties who have received copies or rights from you
+     under this License.  If your rights have been terminated and not
+     permanently reinstated, you do not qualify to receive new licenses
+     for the same material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+     You are not required to accept this License in order to receive or
+     run a copy of the Program.  Ancillary propagation of a covered work
+     occurring solely as a consequence of using peer-to-peer
+     transmission to receive a copy likewise does not require
+     acceptance.  However, nothing other than this License grants you
+     permission to propagate or modify any covered work.  These actions
+     infringe copyright if you do not accept this License.  Therefore,
+     by modifying or propagating a covered work, you indicate your
+     acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+     Each time you convey a covered work, the recipient automatically
+     receives a license from the original licensors, to run, modify and
+     propagate that work, subject to this License.  You are not
+     responsible for enforcing compliance by third parties with this
+     License.
+
+     An "entity transaction" is a transaction transferring control of an
+     organization, or substantially all assets of one, or subdividing an
+     organization, or merging organizations.  If propagation of a
+     covered work results from an entity transaction, each party to that
+     transaction who receives a copy of the work also receives whatever
+     licenses to the work the party's predecessor in interest had or
+     could give under the previous paragraph, plus a right to possession
+     of the Corresponding Source of the work from the predecessor in
+     interest, if the predecessor has it or can get it with reasonable
+     efforts.
+
+     You may not impose any further restrictions on the exercise of the
+     rights granted or affirmed under this License.  For example, you
+     may not impose a license fee, royalty, or other charge for exercise
+     of rights granted under this License, and you may not initiate
+     litigation (including a cross-claim or counterclaim in a lawsuit)
+     alleging that any patent claim is infringed by making, using,
+     selling, offering for sale, or importing the Program or any portion
+     of it.
+
+  11. Patents.
+
+     A "contributor" is a copyright holder who authorizes use under this
+     License of the Program or a work on which the Program is based.
+     The work thus licensed is called the contributor's "contributor
+     version".
+
+     A contributor's "essential patent claims" are all patent claims
+     owned or controlled by the contributor, whether already acquired or
+     hereafter acquired, that would be infringed by some manner,
+     permitted by this License, of making, using, or selling its
+     contributor version, but do not include claims that would be
+     infringed only as a consequence of further modification of the
+     contributor version.  For purposes of this definition, "control"
+     includes the right to grant patent sublicenses in a manner
+     consistent with the requirements of this License.
+
+     Each contributor grants you a non-exclusive, worldwide,
+     royalty-free patent license under the contributor's essential
+     patent claims, to make, use, sell, offer for sale, import and
+     otherwise run, modify and propagate the contents of its contributor
+     version.
+
+     In the following three paragraphs, a "patent license" is any
+     express agreement or commitment, however denominated, not to
+     enforce a patent (such as an express permission to practice a
+     patent or covenant not to sue for patent infringement).  To "grant"
+     such a patent license to a party means to make such an agreement or
+     commitment not to enforce a patent against the party.
+
+     If you convey a covered work, knowingly relying on a patent
+     license, and the Corresponding Source of the work is not available
+     for anyone to copy, free of charge and under the terms of this
+     License, through a publicly available network server or other
+     readily accessible means, then you must either (1) cause the
+     Corresponding Source to be so available, or (2) arrange to deprive
+     yourself of the benefit of the patent license for this particular
+     work, or (3) arrange, in a manner consistent with the requirements
+     of this License, to extend the patent license to downstream
+     recipients.  "Knowingly relying" means you have actual knowledge
+     that, but for the patent license, your conveying the covered work
+     in a country, or your recipient's use of the covered work in a
+     country, would infringe one or more identifiable patents in that
+     country that you have reason to believe are valid.
+
+     If, pursuant to or in connection with a single transaction or
+     arrangement, you convey, or propagate by procuring conveyance of, a
+     covered work, and grant a patent license to some of the parties
+     receiving the covered work authorizing them to use, propagate,
+     modify or convey a specific copy of the covered work, then the
+     patent license you grant is automatically extended to all
+     recipients of the covered work and works based on it.
+
+     A patent license is "discriminatory" if it does not include within
+     the scope of its coverage, prohibits the exercise of, or is
+     conditioned on the non-exercise of one or more of the rights that
+     are specifically granted under this License.  You may not convey a
+     covered work if you are a party to an arrangement with a third
+     party that is in the business of distributing software, under which
+     you make payment to the third party based on the extent of your
+     activity of conveying the work, and under which the third party
+     grants, to any of the parties who would receive the covered work
+     from you, a discriminatory patent license (a) in connection with
+     copies of the covered work conveyed by you (or copies made from
+     those copies), or (b) primarily for and in connection with specific
+     products or compilations that contain the covered work, unless you
+     entered into that arrangement, or that patent license was granted,
+     prior to 28 March 2007.
+
+     Nothing in this License shall be construed as excluding or limiting
+     any implied license or other defenses to infringement that may
+     otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+     If conditions are imposed on you (whether by court order, agreement
+     or otherwise) that contradict the conditions of this License, they
+     do not excuse you from the conditions of this License.  If you
+     cannot convey a covered work so as to satisfy simultaneously your
+     obligations under this License and any other pertinent obligations,
+     then as a consequence you may not convey it at all.  For example,
+     if you agree to terms that obligate you to collect a royalty for
+     further conveying from those to whom you convey the Program, the
+     only way you could satisfy both those terms and this License would
+     be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+     Notwithstanding any other provision of this License, you have
+     permission to link or combine any covered work with a work licensed
+     under version 3 of the GNU Affero General Public License into a
+     single combined work, and to convey the resulting work.  The terms
+     of this License will continue to apply to the part which is the
+     covered work, but the special requirements of the GNU Affero
+     General Public License, section 13, concerning interaction through
+     a network will apply to the combination as such.
+
+  14. Revised Versions of this License.
+
+     The Free Software Foundation may publish revised and/or new
+     versions of the GNU General Public License from time to time.  Such
+     new versions will be similar in spirit to the present version, but
+     may differ in detail to address new problems or concerns.
+
+     Each version is given a distinguishing version number.  If the
+     Program specifies that a certain numbered version of the GNU
+     General Public License "or any later version" applies to it, you
+     have the option of following the terms and conditions either of
+     that numbered version or of any later version published by the Free
+     Software Foundation.  If the Program does not specify a version
+     number of the GNU General Public License, you may choose any
+     version ever published by the Free Software Foundation.
+
+     If the Program specifies that a proxy can decide which future
+     versions of the GNU General Public License can be used, that
+     proxy's public statement of acceptance of a version permanently
+     authorizes you to choose that version for the Program.
+
+     Later license versions may give you additional or different
+     permissions.  However, no additional obligations are imposed on any
+     author or copyright holder as a result of your choosing to follow a
+     later version.
+
+  15. Disclaimer of Warranty.
+
+     THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+     APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE
+     COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS"
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED,
+     INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+     MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE
+     RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.
+     SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL
+     NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+     IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+     WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES
+     AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR
+     DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+     CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE
+     THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA
+     BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+     PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+     PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF
+     THE POSSIBILITY OF SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+     If the disclaimer of warranty and limitation of liability provided
+     above cannot be given local legal effect according to their terms,
+     reviewing courts shall apply local law that most closely
+     approximates an absolute waiver of all civil liability in
+     connection with the Program, unless a warranty or assumption of
+     liability accompanies a copy of the Program in return for a fee.
+
+END OF TERMS AND CONDITIONS
+===========================
+
+How to Apply These Terms to Your New Programs
+=============================================
+
+If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these
+terms.
+
+   To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+     ONE LINE TO GIVE THE PROGRAM'S NAME AND A BRIEF IDEA OF WHAT IT DOES.
+     Copyright (C) YEAR NAME OF AUTHOR
+
+     This program is free software: you can redistribute it and/or modify
+     it under the terms of the GNU General Public License as published by
+     the Free Software Foundation, either version 3 of the License, or (at
+     your option) any later version.
+
+     This program is distributed in the hope that it will be useful, but
+     WITHOUT ANY WARRANTY; without even the implied warranty of
+     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+     General Public License for more details.
+
+     You should have received a copy of the GNU General Public License
+     along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+   Also add information on how to contact you by electronic and paper
+mail.
+
+   If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+     PROGRAM Copyright (C) YEAR NAME OF AUTHOR
+     This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'.
+     This is free software, and you are welcome to redistribute it
+     under certain conditions; type 'show c' for details.
+
+   The hypothetical commands 'show w' and 'show c' should show the
+appropriate parts of the General Public License.  Of course, your
+program's commands might be different; for a GUI interface, you would
+use an "about box".
+
+   You should also get your employer (if you work as a programmer) or
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  For more information on this, and how to apply and follow
+the GNU GPL, see <http://www.gnu.org/licenses/>.
+
+   The GNU General Public License does not permit incorporating your
+program into proprietary programs.  If your program is a subroutine
+library, you may consider it more useful to permit linking proprietary
+applications with the library.  If this is what you want to do, use the
+GNU Lesser General Public License instead of this License.  But first,
+please read <http://www.gnu.org/philosophy/why-not-lgpl.html>.
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info-2 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info-2
new file mode 100644
index 0000000..84921f0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/m4.info-2
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info
new file mode 100644
index 0000000..bd28f2b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info
@@ -0,0 +1,187 @@
+This is make.info, produced by makeinfo version 4.13 from make.texi.
+
+This file documents the GNU `make' utility, which determines
+automatically which pieces of a large program need to be recompiled,
+and issues the commands to recompile them.
+
+   This is Edition 0.71, last updated 19 July 2010, of `The GNU Make
+Manual', for GNU `make' version 3.82.
+
+   Copyright (C) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996,
+1997, 1998, 1999, 2000, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+2010 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.2 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with the Front-Cover Texts
+     being "A GNU Manual," and with the Back-Cover Texts as in (a)
+     below.  A copy of the license is included in the section entitled
+     "GNU Free Documentation License."
+
+     (a) The FSF's Back-Cover Text is: "You have the freedom to copy and
+     modify this GNU manual.  Buying copies from the FSF supports it in
+     developing GNU and promoting software freedom."
+
+INFO-DIR-SECTION Software development
+START-INFO-DIR-ENTRY
+* Make: (make).            Remake files automatically.
+END-INFO-DIR-ENTRY
+
+
+Indirect:
+make.info-1: 1319
+make.info-2: 293656
+
+Tag Table:
+(Indirect)
+Node: Top1319
+Node: Overview14709
+Node: Preparing15719
+Node: Reading16690
+Node: Bugs17617
+Node: Introduction19446
+Node: Rule Introduction21037
+Node: Simple Makefile22970
+Node: How Make Works26666
+Node: Variables Simplify29321
+Node: make Deduces31527
+Node: Combine By Prerequisite33260
+Node: Cleanup34289
+Node: Makefiles35707
+Node: Makefile Contents36543
+Node: Makefile Names39649
+Node: Include41260
+Ref: Include-Footnote-145008
+Node: MAKEFILES Variable45142
+Node: Remaking Makefiles46683
+Node: Overriding Makefiles50907
+Node: Reading Makefiles52935
+Node: Secondary Expansion55947
+Node: Rules63388
+Node: Rule Example66060
+Node: Rule Syntax66914
+Node: Prerequisite Types69507
+Node: Wildcards72374
+Node: Wildcard Examples74092
+Node: Wildcard Pitfall75441
+Node: Wildcard Function77230
+Node: Directory Search79014
+Node: General Search80148
+Node: Selective Search81855
+Node: Search Algorithm84843
+Node: Recipes/Search87361
+Node: Implicit/Search88684
+Node: Libraries/Search89626
+Node: Phony Targets91596
+Node: Force Targets96665
+Node: Empty Targets97702
+Node: Special Targets99007
+Node: Multiple Targets106862
+Node: Multiple Rules108727
+Node: Static Pattern110945
+Node: Static Usage111597
+Node: Static versus Implicit115316
+Node: Double-Colon117057
+Node: Automatic Prerequisites118817
+Node: Recipes123095
+Node: Recipe Syntax124268
+Node: Splitting Lines126383
+Node: Variables in Recipes129483
+Node: Echoing130802
+Node: Execution132077
+Ref: Execution-Footnote-1133488
+Node: One Shell133633
+Node: Choosing the Shell136948
+Node: Parallel141094
+Node: Errors144815
+Node: Interrupts148483
+Node: Recursion150066
+Node: MAKE Variable152164
+Node: Variables/Recursion154411
+Node: Options/Recursion159861
+Node: -w Option165021
+Node: Canned Recipes166016
+Node: Empty Recipes169000
+Node: Using Variables170148
+Node: Reference173523
+Node: Flavors175081
+Node: Advanced180821
+Node: Substitution Refs181326
+Node: Computed Names182879
+Node: Values187425
+Node: Setting188342
+Node: Appending190378
+Node: Override Directive194304
+Node: Multi-Line195931
+Node: Undefine Directive198754
+Node: Environment199840
+Node: Target-specific202091
+Node: Pattern-specific205108
+Node: Suppressing Inheritance206954
+Node: Special Variables208408
+Node: Conditionals213121
+Node: Conditional Example213834
+Node: Conditional Syntax216397
+Node: Testing Flags222127
+Node: Functions223228
+Node: Syntax of Functions224659
+Node: Text Functions226858
+Node: File Name Functions235429
+Node: Conditional Functions240651
+Node: Foreach Function243025
+Node: Call Function246237
+Node: Value Function249122
+Node: Eval Function250559
+Node: Origin Function252835
+Node: Flavor Function256051
+Node: Shell Function257117
+Node: Make Control Functions258751
+Node: Running260413
+Node: Makefile Arguments262396
+Node: Goals263112
+Node: Instead of Execution267851
+Node: Avoiding Compilation271433
+Node: Overriding273407
+Node: Testing275710
+Node: Options Summary277594
+Node: Implicit Rules287992
+Node: Using Implicit290137
+Node: Catalogue of Rules293656
+Node: Implicit Variables303004
+Node: Chained Rules307761
+Node: Pattern Rules311772
+Node: Pattern Intro313307
+Node: Pattern Examples315895
+Node: Automatic Variables317701
+Node: Pattern Match325059
+Node: Match-Anything Rules328382
+Node: Canceling Rules332256
+Node: Last Resort332970
+Node: Suffix Rules334799
+Node: Implicit Rule Search338524
+Node: Archives342023
+Node: Archive Members342721
+Node: Archive Update344331
+Node: Archive Symbols346242
+Node: Archive Pitfalls347476
+Node: Archive Suffix Rules348198
+Node: Features349745
+Node: Missing358290
+Node: Makefile Conventions362017
+Node: Makefile Basics362996
+Node: Utilities in Makefiles366163
+Node: Command Variables368661
+Node: DESTDIR371900
+Node: Directory Variables374067
+Node: Standard Targets388682
+Ref: Standard Targets-Footnote-1402458
+Node: Install Command Categories402558
+Node: Quick Reference407084
+Node: Error Messages418767
+Node: Complex Makefile426463
+Node: GNU Free Documentation License434974
+Node: Concept Index460136
+Node: Name Index526637
+
+End Tag Table
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info-1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info-1
new file mode 100644
index 0000000..509ce05
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info-1
@@ -0,0 +1,7066 @@
+This is make.info, produced by makeinfo version 4.13 from make.texi.
+
+This file documents the GNU `make' utility, which determines
+automatically which pieces of a large program need to be recompiled,
+and issues the commands to recompile them.
+
+   This is Edition 0.71, last updated 19 July 2010, of `The GNU Make
+Manual', for GNU `make' version 3.82.
+
+   Copyright (C) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996,
+1997, 1998, 1999, 2000, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+2010 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.2 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with the Front-Cover Texts
+     being "A GNU Manual," and with the Back-Cover Texts as in (a)
+     below.  A copy of the license is included in the section entitled
+     "GNU Free Documentation License."
+
+     (a) The FSF's Back-Cover Text is: "You have the freedom to copy and
+     modify this GNU manual.  Buying copies from the FSF supports it in
+     developing GNU and promoting software freedom."
+
+INFO-DIR-SECTION Software development
+START-INFO-DIR-ENTRY
+* Make: (make).            Remake files automatically.
+END-INFO-DIR-ENTRY
+
+
+File: make.info,  Node: Top,  Next: Overview,  Prev: (dir),  Up: (dir)
+
+GNU `make'
+**********
+
+This file documents the GNU `make' utility, which determines
+automatically which pieces of a large program need to be recompiled,
+and issues the commands to recompile them.
+
+   This is Edition 0.71, last updated 19 July 2010, of `The GNU Make
+Manual', for GNU `make' version 3.82.
+
+   Copyright (C) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996,
+1997, 1998, 1999, 2000, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+2010 Free Software Foundation, Inc.
+
+     Permission is granted to copy, distribute and/or modify this
+     document under the terms of the GNU Free Documentation License,
+     Version 1.2 or any later version published by the Free Software
+     Foundation; with no Invariant Sections, with the Front-Cover Texts
+     being "A GNU Manual," and with the Back-Cover Texts as in (a)
+     below.  A copy of the license is included in the section entitled
+     "GNU Free Documentation License."
+
+     (a) The FSF's Back-Cover Text is: "You have the freedom to copy and
+     modify this GNU manual.  Buying copies from the FSF supports it in
+     developing GNU and promoting software freedom."
+
+* Menu:
+
+* Overview::                    Overview of `make'.
+* Introduction::                An introduction to `make'.
+* Makefiles::                   Makefiles tell `make' what to do.
+* Rules::                       Rules describe when a file must be remade.
+* Recipes::                     Recipes say how to remake a file.
+* Using Variables::             You can use variables to avoid repetition.
+* Conditionals::                Use or ignore parts of the makefile based
+                                  on the values of variables.
+* Functions::                   Many powerful ways to manipulate text.
+* Invoking make: Running.       How to invoke `make' on the command line.
+* Implicit Rules::              Use implicit rules to treat many files alike,
+                                  based on their file names.
+* Archives::                    How `make' can update library archives.
+* Features::                    Features GNU `make' has over other `make's.
+* Missing::                     What GNU `make' lacks from other `make's.
+* Makefile Conventions::        Conventions for writing makefiles for
+                                  GNU programs.
+* Quick Reference::             A quick reference for experienced users.
+* Error Messages::              A list of common errors generated by `make'.
+* Complex Makefile::            A real example of a straightforward,
+                                  but nontrivial, makefile.
+
+* GNU Free Documentation License::  License for copying this manual
+* Concept Index::               Index of Concepts
+* Name Index::                  Index of Functions, Variables, & Directives
+
+ --- The Detailed Node Listing ---
+
+Overview of `make'
+
+* Preparing::                   Preparing and running make
+* Reading::                     On reading this text
+* Bugs::                        Problems and bugs
+
+An Introduction to Makefiles
+
+* Rule Introduction::           What a rule looks like.
+* Simple Makefile::             A simple makefile
+* How Make Works::              How `make' processes this makefile
+* Variables Simplify::          Variables make makefiles simpler
+* make Deduces::                Letting `make' deduce the recipe
+* Combine By Prerequisite::     Another style of makefile
+* Cleanup::                     Rules for cleaning the directory
+
+Writing Makefiles
+
+* Makefile Contents::           What makefiles contain.
+* Makefile Names::              How to name your makefile.
+* Include::                     How one makefile can use another makefile.
+* MAKEFILES Variable::          The environment can specify extra makefiles.
+* Remaking Makefiles::          How makefiles get remade.
+* Overriding Makefiles::        How to override part of one makefile
+                                  with another makefile.
+* Reading Makefiles::           How makefiles are parsed.
+* Secondary Expansion::         How and when secondary expansion is performed.
+
+Writing Rules
+
+* Rule Example::                An example explained.
+* Rule Syntax::                 General syntax explained.
+* Prerequisite Types::          There are two types of prerequisites.
+* Wildcards::                   Using wildcard characters such as `*'.
+* Directory Search::            Searching other directories for source files.
+* Phony Targets::               Using a target that is not a real file's name.
+* Force Targets::               You can use a target without a recipe
+                                  or prerequisites to mark other targets
+                                  as phony.
+* Empty Targets::               When only the date matters and the
+                                  files are empty.
+* Special Targets::             Targets with special built-in meanings.
+* Multiple Targets::            When to make use of several targets in a rule.
+* Multiple Rules::              How to use several rules with the same target.
+* Static Pattern::              Static pattern rules apply to multiple targets
+                                  and can vary the prerequisites according to
+                                  the target name.
+* Double-Colon::                How to use a special kind of rule to allow
+                                  several independent rules for one target.
+* Automatic Prerequisites::     How to automatically generate rules giving
+                                  prerequisites from source files themselves.
+
+Using Wildcard Characters in File Names
+
+* Wildcard Examples::           Several examples
+* Wildcard Pitfall::            Problems to avoid.
+* Wildcard Function::           How to cause wildcard expansion where
+                                  it does not normally take place.
+
+Searching Directories for Prerequisites
+
+* General Search::              Specifying a search path that applies
+                                  to every prerequisite.
+* Selective Search::            Specifying a search path
+                                  for a specified class of names.
+* Search Algorithm::            When and how search paths are applied.
+* Recipes/Search::              How to write recipes that work together
+                                  with search paths.
+* Implicit/Search::             How search paths affect implicit rules.
+* Libraries/Search::            Directory search for link libraries.
+
+Static Pattern Rules
+
+* Static Usage::                The syntax of static pattern rules.
+* Static versus Implicit::      When are they better than implicit rules?
+
+Writing Recipes in Rules
+
+* Recipe Syntax::               Recipe syntax features and pitfalls.
+* Echoing::                     How to control when recipes are echoed.
+* Execution::                   How recipes are executed.
+* Parallel::                    How recipes can be executed in parallel.
+* Errors::                      What happens after a recipe execution error.
+* Interrupts::                  What happens when a recipe is interrupted.
+* Recursion::                   Invoking `make' from makefiles.
+* Canned Recipes::              Defining canned recipes.
+* Empty Recipes::               Defining useful, do-nothing recipes.
+
+Recipe Syntax
+
+* Splitting Lines::             Breaking long recipe lines for readability.
+* Variables in Recipes::        Using `make' variables in recipes.
+
+Recipe Execution
+
+* Choosing the Shell::          How `make' chooses the shell used
+                                  to run recipes.
+
+Recursive Use of `make'
+
+* MAKE Variable::               The special effects of using `$(MAKE)'.
+* Variables/Recursion::         How to communicate variables to a sub-`make'.
+* Options/Recursion::           How to communicate options to a sub-`make'.
+* -w Option::                   How the `-w' or `--print-directory' option
+                                  helps debug use of recursive `make' commands.
+
+How to Use Variables
+
+* Reference::                   How to use the value of a variable.
+* Flavors::                     Variables come in two flavors.
+* Advanced::                    Advanced features for referencing a variable.
+* Values::                      All the ways variables get their values.
+* Setting::                     How to set a variable in the makefile.
+* Appending::                   How to append more text to the old value
+                                  of a variable.
+* Override Directive::          How to set a variable in the makefile even if
+                                  the user has set it with a command argument.
+* Multi-Line::                  An alternate way to set a variable
+                                  to a multi-line string.
+* Environment::                 Variable values can come from the environment.
+* Target-specific::             Variable values can be defined on a per-target
+                                  basis.
+* Pattern-specific::            Target-specific variable values can be applied
+                                  to a group of targets that match a pattern.
+* Suppressing Inheritance::     Suppress inheritance of variables.
+* Special Variables::           Variables with special meaning or behavior.
+
+Advanced Features for Reference to Variables
+
+* Substitution Refs::           Referencing a variable with
+                                  substitutions on the value.
+* Computed Names::              Computing the name of the variable to refer to.
+
+Conditional Parts of Makefiles
+
+* Conditional Example::         Example of a conditional
+* Conditional Syntax::          The syntax of conditionals.
+* Testing Flags::               Conditionals that test flags.
+
+Functions for Transforming Text
+
+* Syntax of Functions::         How to write a function call.
+* Text Functions::              General-purpose text manipulation functions.
+* File Name Functions::         Functions for manipulating file names.
+* Conditional Functions::       Functions that implement conditions.
+* Foreach Function::            Repeat some text with controlled variation.
+* Call Function::               Expand a user-defined function.
+* Value Function::              Return the un-expanded value of a variable.
+* Eval Function::               Evaluate the arguments as makefile syntax.
+* Origin Function::             Find where a variable got its value.
+* Flavor Function::             Find out the flavor of a variable.
+* Shell Function::              Substitute the output of a shell command.
+* Make Control Functions::      Functions that control how make runs.
+
+How to Run `make'
+
+* Makefile Arguments::          How to specify which makefile to use.
+* Goals::                       How to use goal arguments to specify which
+                                  parts of the makefile to use.
+* Instead of Execution::        How to use mode flags to specify what
+                                  kind of thing to do with the recipes
+                                  in the makefile other than simply
+                                  execute them.
+* Avoiding Compilation::        How to avoid recompiling certain files.
+* Overriding::                  How to override a variable to specify
+                                  an alternate compiler and other things.
+* Testing::                     How to proceed past some errors, to
+                                  test compilation.
+* Options Summary::             Summary of Options
+
+Using Implicit Rules
+
+* Using Implicit::              How to use an existing implicit rule
+                                  to get the recipe for updating a file.
+* Catalogue of Rules::          A list of built-in implicit rules.
+* Implicit Variables::          How to change what predefined rules do.
+* Chained Rules::               How to use a chain of implicit rules.
+* Pattern Rules::               How to define new implicit rules.
+* Last Resort::                 How to define a recipe for rules which
+                                  cannot find any.
+* Suffix Rules::                The old-fashioned style of implicit rule.
+* Implicit Rule Search::        The precise algorithm for applying
+                                  implicit rules.
+
+Defining and Redefining Pattern Rules
+
+* Pattern Intro::               An introduction to pattern rules.
+* Pattern Examples::            Examples of pattern rules.
+* Automatic Variables::         How to use automatic variables in the
+                                  recipe of implicit rules.
+* Pattern Match::               How patterns match.
+* Match-Anything Rules::        Precautions you should take prior to
+                                  defining rules that can match any
+                                  target file whatever.
+* Canceling Rules::             How to override or cancel built-in rules.
+
+Using `make' to Update Archive Files
+
+* Archive Members::             Archive members as targets.
+* Archive Update::              The implicit rule for archive member targets.
+* Archive Pitfalls::            Dangers to watch out for when using archives.
+* Archive Suffix Rules::        You can write a special kind of suffix rule
+                                  for updating archives.
+
+Implicit Rule for Archive Member Targets
+
+* Archive Symbols::             How to update archive symbol directories.
+
+
+File: make.info,  Node: Overview,  Next: Introduction,  Prev: Top,  Up: Top
+
+1 Overview of `make'
+********************
+
+The `make' utility automatically determines which pieces of a large
+program need to be recompiled, and issues commands to recompile them.
+This manual describes GNU `make', which was implemented by Richard
+Stallman and Roland McGrath.  Development since Version 3.76 has been
+handled by Paul D. Smith.
+
+   GNU `make' conforms to section 6.2 of `IEEE Standard 1003.2-1992'
+(POSIX.2).  
+
+   Our examples show C programs, since they are most common, but you
+can use `make' with any programming language whose compiler can be run
+with a shell command.  Indeed, `make' is not limited to programs.  You
+can use it to describe any task where some files must be updated
+automatically from others whenever the others change.
+
+* Menu:
+
+* Preparing::                   Preparing and Running Make
+* Reading::                     On Reading this Text
+* Bugs::                        Problems and Bugs
+
+
+File: make.info,  Node: Preparing,  Next: Reading,  Prev: Overview,  Up: Overview
+
+Preparing and Running Make
+==========================
+
+   To prepare to use `make', you must write a file called the
+"makefile" that describes the relationships among files in your program
+and provides commands for updating each file.  In a program, typically,
+the executable file is updated from object files, which are in turn
+made by compiling source files.
+
+   Once a suitable makefile exists, each time you change some source
+files, this simple shell command:
+
+     make
+
+suffices to perform all necessary recompilations.  The `make' program
+uses the makefile data base and the last-modification times of the
+files to decide which of the files need to be updated.  For each of
+those files, it issues the recipes recorded in the data base.
+
+   You can provide command line arguments to `make' to control which
+files should be recompiled, or how.  *Note How to Run `make': Running.
+
+
+File: make.info,  Node: Reading,  Next: Bugs,  Prev: Preparing,  Up: Overview
+
+1.1 How to Read This Manual
+===========================
+
+If you are new to `make', or are looking for a general introduction,
+read the first few sections of each chapter, skipping the later
+sections.  In each chapter, the first few sections contain introductory
+or general information and the later sections contain specialized or
+technical information.  The exception is the second chapter, *note An
+Introduction to Makefiles: Introduction, all of which is introductory.
+
+   If you are familiar with other `make' programs, see *note Features
+of GNU `make': Features, which lists the enhancements GNU `make' has,
+and *note Incompatibilities and Missing Features: Missing, which
+explains the few things GNU `make' lacks that others have.
+
+   For a quick summary, see *note Options Summary::, *note Quick
+Reference::, and *note Special Targets::.
+
+
+File: make.info,  Node: Bugs,  Prev: Reading,  Up: Overview
+
+1.2 Problems and Bugs
+=====================
+
+If you have problems with GNU `make' or think you've found a bug,
+please report it to the developers; we cannot promise to do anything but
+we might well want to fix it.
+
+   Before reporting a bug, make sure you've actually found a real bug.
+Carefully reread the documentation and see if it really says you can do
+what you're trying to do.  If it's not clear whether you should be able
+to do something or not, report that too; it's a bug in the
+documentation!
+
+   Before reporting a bug or trying to fix it yourself, try to isolate
+it to the smallest possible makefile that reproduces the problem.  Then
+send us the makefile and the exact results `make' gave you, including
+any error or warning messages.  Please don't paraphrase these messages:
+it's best to cut and paste them into your report.  When generating this
+small makefile, be sure to not use any non-free or unusual tools in
+your recipes: you can almost always emulate what such a tool would do
+with simple shell commands.  Finally, be sure to explain what you
+expected to occur; this will help us decide whether the problem was
+really in the documentation.
+
+   Once you have a precise problem you can report it in one of two ways.
+Either send electronic mail to:
+
+         bug-make@gnu.org
+
+or use our Web-based project management tool, at:
+
+         http://savannah.gnu.org/projects/make/
+
+In addition to the information above, please be careful to include the
+version number of `make' you are using.  You can get this information
+with the command `make --version'.  Be sure also to include the type of
+machine and operating system you are using.  One way to obtain this
+information is by looking at the final lines of output from the command
+`make --help'.
+
+
+File: make.info,  Node: Introduction,  Next: Makefiles,  Prev: Overview,  Up: Top
+
+2 An Introduction to Makefiles
+******************************
+
+You need a file called a "makefile" to tell `make' what to do.  Most
+often, the makefile tells `make' how to compile and link a program.  
+
+   In this chapter, we will discuss a simple makefile that describes
+how to compile and link a text editor which consists of eight C source
+files and three header files.  The makefile can also tell `make' how to
+run miscellaneous commands when explicitly asked (for example, to remove
+certain files as a clean-up operation).  To see a more complex example
+of a makefile, see *note Complex Makefile::.
+
+   When `make' recompiles the editor, each changed C source file must
+be recompiled.  If a header file has changed, each C source file that
+includes the header file must be recompiled to be safe.  Each
+compilation produces an object file corresponding to the source file.
+Finally, if any source file has been recompiled, all the object files,
+whether newly made or saved from previous compilations, must be linked
+together to produce the new executable editor.  
+
+* Menu:
+
+* Rule Introduction::           What a rule looks like.
+* Simple Makefile::             A Simple Makefile
+* How Make Works::              How `make' Processes This Makefile
+* Variables Simplify::          Variables Make Makefiles Simpler
+* make Deduces::                Letting `make' Deduce the Recipes
+* Combine By Prerequisite::     Another Style of Makefile
+* Cleanup::                     Rules for Cleaning the Directory
+
+
+File: make.info,  Node: Rule Introduction,  Next: Simple Makefile,  Prev: Introduction,  Up: Introduction
+
+2.1 What a Rule Looks Like
+==========================
+
+A simple makefile consists of "rules" with the following shape:
+
+     TARGET ... : PREREQUISITES ...
+             RECIPE
+             ...
+             ...
+
+   A "target" is usually the name of a file that is generated by a
+program; examples of targets are executable or object files.  A target
+can also be the name of an action to carry out, such as `clean' (*note
+Phony Targets::).
+
+   A "prerequisite" is a file that is used as input to create the
+target.  A target often depends on several files.
+
+   A "recipe" is an action that `make' carries out.  A recipe may have
+more than one command, either on the same line or each on its own line.
+*Please note:* you need to put a tab character at the beginning of
+every recipe line!  This is an obscurity that catches the unwary.  If
+you prefer to prefix your recipes with a character other than tab, you
+can set the `.RECIPEPREFIX' variable to an alternate character (*note
+Special Variables::).
+
+   Usually a recipe is in a rule with prerequisites and serves to
+create a target file if any of the prerequisites change.  However, the
+rule that specifies a recipe for the target need not have
+prerequisites.  For example, the rule containing the delete command
+associated with the target `clean' does not have prerequisites.
+
+   A "rule", then, explains how and when to remake certain files which
+are the targets of the particular rule.  `make' carries out the recipe
+on the prerequisites to create or update the target.  A rule can also
+explain how and when to carry out an action.  *Note Writing Rules:
+Rules.
+
+   A makefile may contain other text besides rules, but a simple
+makefile need only contain rules.  Rules may look somewhat more
+complicated than shown in this template, but all fit the pattern more
+or less.
+
+
+File: make.info,  Node: Simple Makefile,  Next: How Make Works,  Prev: Rule Introduction,  Up: Introduction
+
+2.2 A Simple Makefile
+=====================
+
+Here is a straightforward makefile that describes the way an executable
+file called `edit' depends on eight object files which, in turn, depend
+on eight C source and three header files.
+
+   In this example, all the C files include `defs.h', but only those
+defining editing commands include `command.h', and only low level files
+that change the editor buffer include `buffer.h'.
+
+     edit : main.o kbd.o command.o display.o \
+            insert.o search.o files.o utils.o
+             cc -o edit main.o kbd.o command.o display.o \
+                        insert.o search.o files.o utils.o
+
+     main.o : main.c defs.h
+             cc -c main.c
+     kbd.o : kbd.c defs.h command.h
+             cc -c kbd.c
+     command.o : command.c defs.h command.h
+             cc -c command.c
+     display.o : display.c defs.h buffer.h
+             cc -c display.c
+     insert.o : insert.c defs.h buffer.h
+             cc -c insert.c
+     search.o : search.c defs.h buffer.h
+             cc -c search.c
+     files.o : files.c defs.h buffer.h command.h
+             cc -c files.c
+     utils.o : utils.c defs.h
+             cc -c utils.c
+     clean :
+             rm edit main.o kbd.o command.o display.o \
+                insert.o search.o files.o utils.o
+
+We split each long line into two lines using backslash-newline; this is
+like using one long line, but is easier to read.  
+
+   To use this makefile to create the executable file called `edit',
+type:
+
+     make
+
+   To use this makefile to delete the executable file and all the object
+files from the directory, type:
+
+     make clean
+
+   In the example makefile, the targets include the executable file
+`edit', and the object files `main.o' and `kbd.o'.  The prerequisites
+are files such as `main.c' and `defs.h'.  In fact, each `.o' file is
+both a target and a prerequisite.  Recipes include `cc -c main.c' and
+`cc -c kbd.c'.
+
+   When a target is a file, it needs to be recompiled or relinked if any
+of its prerequisites change.  In addition, any prerequisites that are
+themselves automatically generated should be updated first.  In this
+example, `edit' depends on each of the eight object files; the object
+file `main.o' depends on the source file `main.c' and on the header
+file `defs.h'.
+
+   A recipe may follow each line that contains a target and
+prerequisites.  These recipes say how to update the target file.  A tab
+character (or whatever character is specified by the `.RECIPEPREFIX'
+variable; *note Special Variables::) must come at the beginning of
+every line in the recipe to distinguish recipes from other lines in the
+makefile.  (Bear in mind that `make' does not know anything about how
+the recipes work.  It is up to you to supply recipes that will update
+the target file properly.  All `make' does is execute the recipe you
+have specified when the target file needs to be updated.)  
+
+   The target `clean' is not a file, but merely the name of an action.
+Since you normally do not want to carry out the actions in this rule,
+`clean' is not a prerequisite of any other rule.  Consequently, `make'
+never does anything with it unless you tell it specifically.  Note that
+this rule not only is not a prerequisite, it also does not have any
+prerequisites, so the only purpose of the rule is to run the specified
+recipe.  Targets that do not refer to files but are just actions are
+called "phony targets".  *Note Phony Targets::, for information about
+this kind of target.  *Note Errors in Recipes: Errors, to see how to
+cause `make' to ignore errors from `rm' or any other command.  
+
+
+File: make.info,  Node: How Make Works,  Next: Variables Simplify,  Prev: Simple Makefile,  Up: Introduction
+
+2.3 How `make' Processes a Makefile
+===================================
+
+By default, `make' starts with the first target (not targets whose
+names start with `.').  This is called the "default goal".  ("Goals"
+are the targets that `make' strives ultimately to update.  You can
+override this behavior using the command line (*note Arguments to
+Specify the Goals: Goals.) or with the `.DEFAULT_GOAL' special variable
+(*note Other Special Variables: Special Variables.).)
+
+   In the simple example of the previous section, the default goal is to
+update the executable program `edit'; therefore, we put that rule first.
+
+   Thus, when you give the command:
+
+     make
+
+`make' reads the makefile in the current directory and begins by
+processing the first rule.  In the example, this rule is for relinking
+`edit'; but before `make' can fully process this rule, it must process
+the rules for the files that `edit' depends on, which in this case are
+the object files.  Each of these files is processed according to its
+own rule.  These rules say to update each `.o' file by compiling its
+source file.  The recompilation must be done if the source file, or any
+of the header files named as prerequisites, is more recent than the
+object file, or if the object file does not exist.
+
+   The other rules are processed because their targets appear as
+prerequisites of the goal.  If some other rule is not depended on by the
+goal (or anything it depends on, etc.), that rule is not processed,
+unless you tell `make' to do so (with a command such as `make clean').
+
+   Before recompiling an object file, `make' considers updating its
+prerequisites, the source file and header files.  This makefile does not
+specify anything to be done for them--the `.c' and `.h' files are not
+the targets of any rules--so `make' does nothing for these files.  But
+`make' would update automatically generated C programs, such as those
+made by Bison or Yacc, by their own rules at this time.
+
+   After recompiling whichever object files need it, `make' decides
+whether to relink `edit'.  This must be done if the file `edit' does
+not exist, or if any of the object files are newer than it.  If an
+object file was just recompiled, it is now newer than `edit', so `edit'
+is relinked.  
+
+   Thus, if we change the file `insert.c' and run `make', `make' will
+compile that file to update `insert.o', and then link `edit'.  If we
+change the file `command.h' and run `make', `make' will recompile the
+object files `kbd.o', `command.o' and `files.o' and then link the file
+`edit'.
+
+
+File: make.info,  Node: Variables Simplify,  Next: make Deduces,  Prev: How Make Works,  Up: Introduction
+
+2.4 Variables Make Makefiles Simpler
+====================================
+
+In our example, we had to list all the object files twice in the rule
+for `edit' (repeated here):
+
+     edit : main.o kbd.o command.o display.o \
+                   insert.o search.o files.o utils.o
+             cc -o edit main.o kbd.o command.o display.o \
+                        insert.o search.o files.o utils.o
+
+   Such duplication is error-prone; if a new object file is added to the
+system, we might add it to one list and forget the other.  We can
+eliminate the risk and simplify the makefile by using a variable.
+"Variables" allow a text string to be defined once and substituted in
+multiple places later (*note How to Use Variables: Using Variables.).
+
+   It is standard practice for every makefile to have a variable named
+`objects', `OBJECTS', `objs', `OBJS', `obj', or `OBJ' which is a list
+of all object file names.  We would define such a variable `objects'
+with a line like this in the makefile:
+
+     objects = main.o kbd.o command.o display.o \
+               insert.o search.o files.o utils.o
+
+Then, each place we want to put a list of the object file names, we can
+substitute the variable's value by writing `$(objects)' (*note How to
+Use Variables: Using Variables.).
+
+   Here is how the complete simple makefile looks when you use a
+variable for the object files:
+
+     objects = main.o kbd.o command.o display.o \
+               insert.o search.o files.o utils.o
+
+     edit : $(objects)
+             cc -o edit $(objects)
+     main.o : main.c defs.h
+             cc -c main.c
+     kbd.o : kbd.c defs.h command.h
+             cc -c kbd.c
+     command.o : command.c defs.h command.h
+             cc -c command.c
+     display.o : display.c defs.h buffer.h
+             cc -c display.c
+     insert.o : insert.c defs.h buffer.h
+             cc -c insert.c
+     search.o : search.c defs.h buffer.h
+             cc -c search.c
+     files.o : files.c defs.h buffer.h command.h
+             cc -c files.c
+     utils.o : utils.c defs.h
+             cc -c utils.c
+     clean :
+             rm edit $(objects)
+
+
+File: make.info,  Node: make Deduces,  Next: Combine By Prerequisite,  Prev: Variables Simplify,  Up: Introduction
+
+2.5 Letting `make' Deduce the Recipes
+=====================================
+
+It is not necessary to spell out the recipes for compiling the
+individual C source files, because `make' can figure them out: it has an
+"implicit rule" for updating a `.o' file from a correspondingly named
+`.c' file using a `cc -c' command.  For example, it will use the recipe
+`cc -c main.c -o main.o' to compile `main.c' into `main.o'.  We can
+therefore omit the recipes from the rules for the object files.  *Note
+Using Implicit Rules: Implicit Rules.
+
+   When a `.c' file is used automatically in this way, it is also
+automatically added to the list of prerequisites.  We can therefore omit
+the `.c' files from the prerequisites, provided we omit the recipe.
+
+   Here is the entire example, with both of these changes, and a
+variable `objects' as suggested above:
+
+     objects = main.o kbd.o command.o display.o \
+               insert.o search.o files.o utils.o
+
+     edit : $(objects)
+             cc -o edit $(objects)
+
+     main.o : defs.h
+     kbd.o : defs.h command.h
+     command.o : defs.h command.h
+     display.o : defs.h buffer.h
+     insert.o : defs.h buffer.h
+     search.o : defs.h buffer.h
+     files.o : defs.h buffer.h command.h
+     utils.o : defs.h
+
+     .PHONY : clean
+     clean :
+             rm edit $(objects)
+
+This is how we would write the makefile in actual practice.  (The
+complications associated with `clean' are described elsewhere.  See
+*note Phony Targets::, and *note Errors in Recipes: Errors.)
+
+   Because implicit rules are so convenient, they are important.  You
+will see them used frequently.
+
+
+File: make.info,  Node: Combine By Prerequisite,  Next: Cleanup,  Prev: make Deduces,  Up: Introduction
+
+2.6 Another Style of Makefile
+=============================
+
+When the objects of a makefile are created only by implicit rules, an
+alternative style of makefile is possible.  In this style of makefile,
+you group entries by their prerequisites instead of by their targets.
+Here is what one looks like:
+
+     objects = main.o kbd.o command.o display.o \
+               insert.o search.o files.o utils.o
+
+     edit : $(objects)
+             cc -o edit $(objects)
+
+     $(objects) : defs.h
+     kbd.o command.o files.o : command.h
+     display.o insert.o search.o files.o : buffer.h
+
+Here `defs.h' is given as a prerequisite of all the object files;
+`command.h' and `buffer.h' are prerequisites of the specific object
+files listed for them.
+
+   Whether this is better is a matter of taste: it is more compact, but
+some people dislike it because they find it clearer to put all the
+information about each target in one place.
+
+
+File: make.info,  Node: Cleanup,  Prev: Combine By Prerequisite,  Up: Introduction
+
+2.7 Rules for Cleaning the Directory
+====================================
+
+Compiling a program is not the only thing you might want to write rules
+for.  Makefiles commonly tell how to do a few other things besides
+compiling a program: for example, how to delete all the object files
+and executables so that the directory is `clean'.
+
+   Here is how we could write a `make' rule for cleaning our example
+editor:
+
+     clean:
+             rm edit $(objects)
+
+   In practice, we might want to write the rule in a somewhat more
+complicated manner to handle unanticipated situations.  We would do
+this:
+
+     .PHONY : clean
+     clean :
+             -rm edit $(objects)
+
+This prevents `make' from getting confused by an actual file called
+`clean' and causes it to continue in spite of errors from `rm'.  (See
+*note Phony Targets::, and *note Errors in Recipes: Errors.)
+
+A rule such as this should not be placed at the beginning of the
+makefile, because we do not want it to run by default!  Thus, in the
+example makefile, we want the rule for `edit', which recompiles the
+editor, to remain the default goal.
+
+   Since `clean' is not a prerequisite of `edit', this rule will not
+run at all if we give the command `make' with no arguments.  In order
+to make the rule run, we have to type `make clean'.  *Note How to Run
+`make': Running.
+
+
+File: make.info,  Node: Makefiles,  Next: Rules,  Prev: Introduction,  Up: Top
+
+3 Writing Makefiles
+*******************
+
+The information that tells `make' how to recompile a system comes from
+reading a data base called the "makefile".
+
+* Menu:
+
+* Makefile Contents::           What makefiles contain.
+* Makefile Names::              How to name your makefile.
+* Include::                     How one makefile can use another makefile.
+* MAKEFILES Variable::          The environment can specify extra makefiles.
+* Remaking Makefiles::          How makefiles get remade.
+* Overriding Makefiles::        How to override part of one makefile
+                                  with another makefile.
+* Reading Makefiles::           How makefiles are parsed.
+* Secondary Expansion::         How and when secondary expansion is performed.
+
+
+File: make.info,  Node: Makefile Contents,  Next: Makefile Names,  Prev: Makefiles,  Up: Makefiles
+
+3.1 What Makefiles Contain
+==========================
+
+Makefiles contain five kinds of things: "explicit rules", "implicit
+rules", "variable definitions", "directives", and "comments".  Rules,
+variables, and directives are described at length in later chapters.
+
+   * An "explicit rule" says when and how to remake one or more files,
+     called the rule's "targets".  It lists the other files that the
+     targets depend on, called the "prerequisites" of the target, and
+     may also give a recipe to use to create or update the targets.
+     *Note Writing Rules: Rules.
+
+   * An "implicit rule" says when and how to remake a class of files
+     based on their names.  It describes how a target may depend on a
+     file with a name similar to the target and gives a recipe to
+     create or update such a target.  *Note Using Implicit Rules:
+     Implicit Rules.
+
+   * A "variable definition" is a line that specifies a text string
+     value for a variable that can be substituted into the text later.
+     The simple makefile example shows a variable definition for
+     `objects' as a list of all object files (*note Variables Make
+     Makefiles Simpler: Variables Simplify.).
+
+   * A "directive" is an instruction for `make' to do something special
+     while reading the makefile.  These include:
+
+        * Reading another makefile (*note Including Other Makefiles:
+          Include.).
+
+        * Deciding (based on the values of variables) whether to use or
+          ignore a part of the makefile (*note Conditional Parts of
+          Makefiles: Conditionals.).
+
+        * Defining a variable from a verbatim string containing
+          multiple lines (*note Defining Multi-Line Variables:
+          Multi-Line.).
+
+   * `#' in a line of a makefile starts a "comment".  It and the rest
+     of the line are ignored, except that a trailing backslash not
+     escaped by another backslash will continue the comment across
+     multiple lines.  A line containing just a comment (with perhaps
+     spaces before it) is effectively blank, and is ignored.  If you
+     want a literal `#', escape it with a backslash (e.g., `\#').
+     Comments may appear on any line in the makefile, although they are
+     treated specially in certain situations.
+
+     You cannot use comments within variable references or function
+     calls: any instance of `#' will be treated literally (rather than
+     as the start of a comment) inside a variable reference or function
+     call.
+
+     Comments within a recipe are passed to the shell, just as with any
+     other recipe text.  The shell decides how to interpret them:
+     whether or not a `#' in a recipe starts a comment is up to the
+     shell.
+
+     Within a `define' directive, comments are not ignored during the
+     definition of the variable, but rather kept intact in the value of
+     the variable.  When the variable is expanded they will either be
+     treated as `make' comments or as recipe text, depending on the
+     context in which the variable is evaluated.
+
+
+File: make.info,  Node: Makefile Names,  Next: Include,  Prev: Makefile Contents,  Up: Makefiles
+
+3.2 What Name to Give Your Makefile
+===================================
+
+By default, when `make' looks for the makefile, it tries the following
+names, in order: `GNUmakefile', `makefile' and `Makefile'.  
+
+   Normally you should call your makefile either `makefile' or
+`Makefile'.  (We recommend `Makefile' because it appears prominently
+near the beginning of a directory listing, right near other important
+files such as `README'.)  The first name checked, `GNUmakefile', is not
+recommended for most makefiles.  You should use this name if you have a
+makefile that is specific to GNU `make', and will not be understood by
+other versions of `make'.  Other `make' programs look for `makefile' and
+`Makefile', but not `GNUmakefile'.
+
+   If `make' finds none of these names, it does not use any makefile.
+Then you must specify a goal with a command argument, and `make' will
+attempt to figure out how to remake it using only its built-in implicit
+rules.  *Note Using Implicit Rules: Implicit Rules.
+
+   If you want to use a nonstandard name for your makefile, you can
+specify the makefile name with the `-f' or `--file' option.  The
+arguments `-f NAME' or `--file=NAME' tell `make' to read the file NAME
+as the makefile.  If you use more than one `-f' or `--file' option, you
+can specify several makefiles.  All the makefiles are effectively
+concatenated in the order specified.  The default makefile names
+`GNUmakefile', `makefile' and `Makefile' are not checked automatically
+if you specify `-f' or `--file'.  
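+
+   For instance (the file names here are purely illustrative), the
+following invocation reads `build.mk' and then `local.mk' as if they
+were a single concatenated makefile:
+
+     make -f build.mk -f local.mk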
+
+
+File: make.info,  Node: Include,  Next: MAKEFILES Variable,  Prev: Makefile Names,  Up: Makefiles
+
+3.3 Including Other Makefiles
+=============================
+
+The `include' directive tells `make' to suspend reading the current
+makefile and read one or more other makefiles before continuing.  The
+directive is a line in the makefile that looks like this:
+
+     include FILENAMES...
+
+FILENAMES can contain shell file name patterns.  If FILENAMES is empty,
+nothing is included and no error is printed.  
+
+   Extra spaces are allowed and ignored at the beginning of the line,
+but the first character must not be a tab (or the value of
+`.RECIPEPREFIX')--if the line begins with a tab, it will be considered
+a recipe line.  Whitespace is required between `include' and the file
+names, and between file names; extra whitespace is ignored there and at
+the end of the directive.  A comment starting with `#' is allowed at
+the end of the line.  If the file names contain any variable or
+function references, they are expanded.  *Note How to Use Variables:
+Using Variables.
+
+   For example, if you have three `.mk' files, `a.mk', `b.mk', and
+`c.mk', and `$(bar)' expands to `bish bash', then the following
+expression
+
+     include foo *.mk $(bar)
+
+   is equivalent to
+
+     include foo a.mk b.mk c.mk bish bash
+
+   When `make' processes an `include' directive, it suspends reading of
+the containing makefile and reads from each listed file in turn.  When
+that is finished, `make' resumes reading the makefile in which the
+directive appears.
+
+   One occasion for using `include' directives is when several programs,
+handled by individual makefiles in various directories, need to use a
+common set of variable definitions (*note Setting Variables: Setting.)
+or pattern rules (*note Defining and Redefining Pattern Rules: Pattern
+Rules.).
+
+   Another such occasion is when you want to generate prerequisites from
+source files automatically; the prerequisites can be put in a file that
+is included by the main makefile.  This practice is generally cleaner
+than that of somehow appending the prerequisites to the end of the main
+makefile as has been traditionally done with other versions of `make'.
+*Note Automatic Prerequisites::.  
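+
+   A rough sketch of that arrangement (assuming dependency files with a
+`.d' suffix are generated elsewhere, one per object file) might use the
+`-include' form described below, so that a missing `.d' file is not an
+error:
+
+     objects = main.o kbd.o
+
+     -include $(objects:.o=.d)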
+
+   If the specified name does not start with a slash, and the file is
+not found in the current directory, several other directories are
+searched.  First, any directories you have specified with the `-I' or
+`--include-dir' option are searched (*note Summary of Options: Options
+Summary.).  Then the following directories (if they exist) are
+searched, in this order: `PREFIX/include' (normally `/usr/local/include'
+(1)), `/usr/gnu/include', `/usr/local/include', `/usr/include'.
+
+   If an included makefile cannot be found in any of these directories,
+a warning message is generated, but it is not an immediately fatal
+error; processing of the makefile containing the `include' continues.
+Once it has finished reading makefiles, `make' will try to remake any
+that are out of date or don't exist.  *Note How Makefiles Are Remade:
+Remaking Makefiles.  Only after it has tried to find a way to remake a
+makefile and failed, will `make' diagnose the missing makefile as a
+fatal error.
+
+   If you want `make' to simply ignore a makefile which does not exist
+or cannot be remade, with no error message, use the `-include'
+directive instead of `include', like this:
+
+     -include FILENAMES...
+
+   This acts like `include' in every way except that there is no error
+(not even a warning) if any of the FILENAMES (or any prerequisites of
+any of the FILENAMES) do not exist or cannot be remade.
+
+   For compatibility with some other `make' implementations, `sinclude'
+is another name for `-include'.
+
+   ---------- Footnotes ----------
+
+   (1) GNU Make compiled for MS-DOS and MS-Windows behaves as if PREFIX
+has been defined to be the root of the DJGPP tree hierarchy.
+
+
+File: make.info,  Node: MAKEFILES Variable,  Next: Remaking Makefiles,  Prev: Include,  Up: Makefiles
+
+3.4 The Variable `MAKEFILES'
+============================
+
+If the environment variable `MAKEFILES' is defined, `make' considers
+its value as a list of names (separated by whitespace) of additional
+makefiles to be read before the others.  This works much like the
+`include' directive: various directories are searched for those files
+(*note Including Other Makefiles: Include.).  In addition, the default
+goal is never taken from one of these makefiles (or any makefile
+included by them) and it is not an error if the files listed in
+`MAKEFILES' are not found.
+
+   The main use of `MAKEFILES' is in communication between recursive
+invocations of `make' (*note Recursive Use of `make': Recursion.).  It
+usually is not desirable to set the environment variable before a
+top-level invocation of `make', because it is usually better not to
+mess with a makefile from outside.  However, if you are running `make'
+without a specific makefile, a makefile in `MAKEFILES' can do useful
+things to help the built-in implicit rules work better, such as
+defining search paths (*note Directory Search::).
+
+   Some users are tempted to set `MAKEFILES' in the environment
+automatically on login, and program makefiles to expect this to be done.
+This is a very bad idea, because such makefiles will fail to work if
+run by anyone else.  It is much better to write explicit `include'
+directives in the makefiles.  *Note Including Other Makefiles: Include.
+
+
+File: make.info,  Node: Remaking Makefiles,  Next: Overriding Makefiles,  Prev: MAKEFILES Variable,  Up: Makefiles
+
+3.5 How Makefiles Are Remade
+============================
+
+Sometimes makefiles can be remade from other files, such as RCS or SCCS
+files.  If a makefile can be remade from other files, you probably want
+`make' to get an up-to-date version of the makefile to read in.
+
+   To this end, after reading in all makefiles, `make' will consider
+each as a goal target and attempt to update it.  If a makefile has a
+rule which says how to update it (found either in that very makefile or
+in another one) or if an implicit rule applies to it (*note Using
+Implicit Rules: Implicit Rules.), it will be updated if necessary.
+After all makefiles have been checked, if any have actually been
+changed, `make' starts with a clean slate and reads all the makefiles
+over again.  (It will also attempt to update each of them over again,
+but normally this will not change them again, since they are already up
+to date.)
+
+   If you know that one or more of your makefiles cannot be remade and
+you want to keep `make' from performing an implicit rule search on
+them, perhaps for efficiency reasons, you can use any normal method of
+preventing implicit rule lookup to do so.  For example, you can write
+an explicit rule with the makefile as the target, and an empty recipe
+(*note Using Empty Recipes: Empty Recipes.).
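+
+   As a minimal sketch of that technique, a single line such as the
+following keeps `make' from searching for a way to remake the makefile
+itself:
+
+     Makefile: ;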
+
+   If the makefiles specify a double-colon rule to remake a file with a
+recipe but no prerequisites, that file will always be remade (*note
+Double-Colon::).  In the case of makefiles, a makefile that has a
+double-colon rule with a recipe but no prerequisites will be remade
+every time `make' is run, and then again after `make' starts over and
+reads the makefiles in again.  This would cause an infinite loop:
+`make' would constantly remake the makefile, and never do anything
+else.  So, to avoid this, `make' will *not* attempt to remake makefiles
+which are specified as targets of a double-colon rule with a recipe but
+no prerequisites.
+
+   If you do not specify any makefiles to be read with `-f' or `--file'
+options, `make' will try the default makefile names; *note What Name to
+Give Your Makefile: Makefile Names.  Unlike makefiles explicitly
+requested with `-f' or `--file' options, `make' is not certain that
+these makefiles should exist.  However, if a default makefile does not
+exist but can be created by running `make' rules, you probably want the
+rules to be run so that the makefile can be used.
+
+   Therefore, if none of the default makefiles exists, `make' will try
+to make each of them in the same order in which they are searched for
+(*note What Name to Give Your Makefile: Makefile Names.)  until it
+succeeds in making one, or it runs out of names to try.  Note that it
+is not an error if `make' cannot find or make any makefile; a makefile
+is not always necessary.
+
+   When you use the `-t' or `--touch' option (*note Instead of
+Executing Recipes: Instead of Execution.), you would not want to use an
+out-of-date makefile to decide which targets to touch.  So the `-t'
+option has no effect on updating makefiles; they are really updated
+even if `-t' is specified.  Likewise, `-q' (or `--question') and `-n'
+(or `--just-print') do not prevent updating of makefiles, because an
+out-of-date makefile would result in the wrong output for other targets.
+Thus, `make -f mfile -n foo' will update `mfile', read it in, and then
+print the recipe to update `foo' and its prerequisites without running
+it.  The recipe printed for `foo' will be the one specified in the
+updated contents of `mfile'.
+
+   However, on occasion you might actually wish to prevent updating of
+even the makefiles.  You can do this by specifying the makefiles as
+goals in the command line as well as specifying them as makefiles.
+When the makefile name is specified explicitly as a goal, the options
+`-t' and so on do apply to them.
+
+   Thus, `make -f mfile -n mfile foo' would read the makefile `mfile',
+print the recipe needed to update it without actually running it, and
+then print the recipe needed to update `foo' without running that.  The
+recipe for `foo' will be the one specified by the existing contents of
+`mfile'.
+
+
+File: make.info,  Node: Overriding Makefiles,  Next: Reading Makefiles,  Prev: Remaking Makefiles,  Up: Makefiles
+
+3.6 Overriding Part of Another Makefile
+=======================================
+
+Sometimes it is useful to have a makefile that is mostly just like
+another makefile.  You can often use the `include' directive to include
+one in the other, and add more targets or variable definitions.
+However, it is illegal for two makefiles to give different recipes for
+the same target.  But there is another way.
+
+   In the containing makefile (the one that wants to include the other),
+you can use a match-anything pattern rule to say that to remake any
+target that cannot be made from the information in the containing
+makefile, `make' should look in another makefile.  *Note Pattern
+Rules::, for more information on pattern rules.
+
+   For example, if you have a makefile called `Makefile' that says how
+to make the target `foo' (and other targets), you can write a makefile
+called `GNUmakefile' that contains:
+
+     foo:
+             frobnicate > foo
+
+     %: force
+             @$(MAKE) -f Makefile $@
+     force: ;
+
+   If you say `make foo', `make' will find `GNUmakefile', read it, and
+see that to make `foo', it needs to run the recipe `frobnicate > foo'.
+If you say `make bar', `make' will find no way to make `bar' in
+`GNUmakefile', so it will use the recipe from the pattern rule: `make
+-f Makefile bar'.  If `Makefile' provides a rule for updating `bar',
+`make' will apply the rule.  And likewise for any other target that
+`GNUmakefile' does not say how to make.
+
+   The way this works is that the pattern rule has a pattern of just
+`%', so it matches any target whatever.  The rule specifies a
+prerequisite `force', to guarantee that the recipe will be run even if
+the target file already exists.  We give the `force' target an empty
+recipe to prevent `make' from searching for an implicit rule to build
+it--otherwise it would apply the same match-anything rule to `force'
+itself and create a prerequisite loop!
+
+
+File: make.info,  Node: Reading Makefiles,  Next: Secondary Expansion,  Prev: Overriding Makefiles,  Up: Makefiles
+
+3.7 How `make' Reads a Makefile
+===============================
+
+GNU `make' does its work in two distinct phases.  During the first
+phase it reads all the makefiles, included makefiles, etc. and
+internalizes all the variables and their values, implicit and explicit
+rules, and constructs a dependency graph of all the targets and their
+prerequisites.  During the second phase, `make' uses these internal
+structures to determine what targets will need to be rebuilt and to
+invoke the rules necessary to do so.
+
+   It's important to understand this two-phase approach because it has a
+direct impact on how variable and function expansion happens; this is
+often a source of some confusion when writing makefiles.  Here we will
+present a summary of the phases in which expansion happens for different
+constructs within the makefile.  We say that expansion is "immediate"
+if it happens during the first phase: in this case `make' will expand
+any variables or functions in that section of a construct as the
+makefile is parsed.  We say that expansion is "deferred" if expansion
+is not performed immediately.  Expansion of a deferred construct is not
+performed until either the construct appears later in an immediate
+context, or until the second phase.
+
+   You may not be familiar with some of these constructs yet.  You can
+reference this section as you become familiar with them, in later
+chapters.
+
+Variable Assignment
+-------------------
+
+Variable definitions are parsed as follows:
+
+     IMMEDIATE = DEFERRED
+     IMMEDIATE ?= DEFERRED
+     IMMEDIATE := IMMEDIATE
+     IMMEDIATE += DEFERRED or IMMEDIATE
+
+     define IMMEDIATE
+       DEFERRED
+     endef
+
+     define IMMEDIATE =
+       DEFERRED
+     endef
+
+     define IMMEDIATE ?=
+       DEFERRED
+     endef
+
+     define IMMEDIATE :=
+       IMMEDIATE
+     endef
+
+     define IMMEDIATE +=
+       DEFERRED or IMMEDIATE
+     endef
+
+   For the append operator, `+=', the right-hand side is considered
+immediate if the variable was previously set as a simple variable
+(`:='), and deferred otherwise.
+
+Conditional Directives
+----------------------
+
+Conditional directives are parsed immediately.  This means, for
+example, that automatic variables cannot be used in conditional
+directives, as automatic variables are not set until the recipe for
+that rule is invoked.  If you need to use automatic variables in a
+conditional directive you _must_ move the condition into the recipe and
+use shell conditional syntax instead.
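+
+   As a rough sketch of that workaround (the target names below are
+purely illustrative), the test on `$@' is left to the shell when the
+recipe runs, rather than to `make' when the makefile is read:
+
+     all debug:
+             @if [ "$@" = debug ]; then \
+               echo "building the debug variant"; \
+             else \
+               echo "building the normal variant"; \
+             fi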
+
+Rule Definition
+---------------
+
+A rule is always expanded the same way, regardless of the form:
+
+     IMMEDIATE : IMMEDIATE ; DEFERRED
+     	DEFERRED
+
+   That is, the target and prerequisite sections are expanded
+immediately, and the recipe used to construct the target is always
+deferred.  This general rule is true for explicit rules, pattern rules,
+suffix rules, static pattern rules, and simple prerequisite definitions.
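+
+   As a rough sketch of the consequence (the names here are purely
+illustrative), the prerequisite below is fixed when the rule is read,
+while the recipe sees the variable's final value:
+
+     VAR = one
+     result: $(VAR)
+             echo $(VAR)
+     VAR = two
+
+     one two: ;
+
+The prerequisite of `result' is `one' (the empty rule for `one' and
+`two' merely keeps the example self-contained), while the recipe,
+expanded only when it is run, echoes `two'.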
+
+
+File: make.info,  Node: Secondary Expansion,  Prev: Reading Makefiles,  Up: Makefiles
+
+3.8 Secondary Expansion
+=======================
+
+In the previous section we learned that GNU `make' works in two
+distinct phases: a read-in phase and a target-update phase (*note How
+`make' Reads a Makefile: Reading Makefiles.).  GNU make also has the
+ability to enable a _second expansion_ of the prerequisites (only) for
+some or all targets defined in the makefile.  In order for this second
+expansion to occur, the special target `.SECONDEXPANSION' must be
+defined before the first prerequisite list that makes use of this
+feature.
+
+   If that special target is defined then in between the two phases
+mentioned above, right at the end of the read-in phase, all the
+prerequisites of the targets defined after the special target are
+expanded a _second time_.  In most circumstances this secondary
+expansion will have no effect, since all variable and function
+references will have been expanded during the initial parsing of the
+makefiles.  In order to take advantage of the secondary expansion phase
+of the parser, then, it's necessary to _escape_ the variable or
+function reference in the makefile.  In this case the first expansion
+merely un-escapes the reference but doesn't expand it, and expansion is
+left to the secondary expansion phase.  For example, consider this
+makefile:
+
+     .SECONDEXPANSION:
+     ONEVAR = onefile
+     TWOVAR = twofile
+     myfile: $(ONEVAR) $$(TWOVAR)
+
+   After the first expansion phase the prerequisites list of the
+`myfile' target will be `onefile' and `$(TWOVAR)'; the first
+(unescaped) variable reference to ONEVAR is expanded, while the second
+(escaped) variable reference is simply unescaped, without being
+recognized as a variable reference.  Now during the secondary expansion
+the first word is expanded again but since it contains no variable or
+function references it remains the static value `onefile', while the
+second word is now a normal reference to the variable TWOVAR, which is
+expanded to the value `twofile'.  The final result is that there are
+two prerequisites, `onefile' and `twofile'.
+
+   Obviously, this is not a very interesting case since the same result
+could more easily have been achieved simply by having both variables
+appear, unescaped, in the prerequisites list.  One difference becomes
+apparent if the variables are reset; consider this example:
+
+     .SECONDEXPANSION:
+     AVAR = top
+     onefile: $(AVAR)
+     twofile: $$(AVAR)
+     AVAR = bottom
+
+   Here the prerequisite of `onefile' will be expanded immediately, and
+resolve to the value `top', while the prerequisite of `twofile' will
+not be fully expanded until the secondary expansion, and will yield a
+value of `bottom'.
+
+   This is marginally more exciting, but the true power of this feature
+only becomes apparent when you discover that secondary expansions
+always take place within the scope of the automatic variables for that
+target.  This means that you can use variables such as `$@', `$*', etc.
+during the second expansion and they will have their expected values,
+just as in the recipe.  All you have to do is defer the expansion by
+escaping the `$'.  Also, secondary expansion occurs for both explicit
+and implicit (pattern) rules.  Knowing this, the possible uses for this
+feature increase dramatically.  For example:
+
+     .SECONDEXPANSION:
+     main_OBJS := main.o try.o test.o
+     lib_OBJS := lib.o api.o
+
+     main lib: $$($$@_OBJS)
+
+   Here, after the initial expansion the prerequisites of both the
+`main' and `lib' targets will be `$($@_OBJS)'.  During the secondary
+expansion, the `$@' variable is set to the name of the target and so
+the expansion for the `main' target will yield `$(main_OBJS)', or
+`main.o try.o test.o', while the secondary expansion for the `lib'
+target will yield `$(lib_OBJS)', or `lib.o api.o'.
+
+   You can also mix in functions here, as long as they are properly
+escaped:
+
+     main_SRCS := main.c try.c test.c
+     lib_SRCS := lib.c api.c
+
+     .SECONDEXPANSION:
+     main lib: $$(patsubst %.c,%.o,$$($$@_SRCS))
+
+   This version allows users to specify source files rather than object
+files, but gives the same resulting prerequisites list as the previous
+example.
+
+   Evaluation of automatic variables during the secondary expansion
+phase, especially of the target name variable `$$@', behaves similarly
+to evaluation within recipes.  However, there are some subtle
+differences and "corner cases" which come into play for the different
+types of rule definitions that `make' understands.  The subtleties of
+using the different automatic variables are described below.
+
+Secondary Expansion of Explicit Rules
+-------------------------------------
+
+During the secondary expansion of explicit rules, `$$@' and `$$%'
+evaluate, respectively, to the file name of the target and, when the
+target is an archive member, the target member name.  The `$$<'
+variable evaluates to the first prerequisite in the first rule for this
+target.  `$$^' and `$$+' evaluate to the list of all prerequisites of
+rules _that have already appeared_ for the same target (`$$+' with
+repetitions and `$$^' without).  The following example will help
+illustrate these behaviors:
+
+     .SECONDEXPANSION:
+
+     foo: foo.1 bar.1 $$< $$^ $$+    # line #1
+
+     foo: foo.2 bar.2 $$< $$^ $$+    # line #2
+
+     foo: foo.3 bar.3 $$< $$^ $$+    # line #3
+
+   In the first prerequisite list, all three variables (`$$<', `$$^',
+and `$$+') expand to the empty string.  In the second, they will have
+values `foo.1', `foo.1 bar.1', and `foo.1 bar.1' respectively.  In the
+third they will have values `foo.1', `foo.1 bar.1 foo.2 bar.2', and
+`foo.1 bar.1 foo.2 bar.2 foo.1 foo.1 bar.1 foo.1 bar.1' respectively.
+
+   Rules undergo secondary expansion in makefile order, except that the
+rule with the recipe is always evaluated last.
+
+   The variables `$$?' and `$$*' are not available and expand to the
+empty string.
+
+Secondary Expansion of Static Pattern Rules
+-------------------------------------------
+
+Rules for secondary expansion of static pattern rules are identical to
+those for explicit rules, above, with one exception: for static pattern
+rules the `$$*' variable is set to the pattern stem.  As with explicit
+rules, `$$?' is not available and expands to the empty string.
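+
+   As a brief, hypothetical sketch (added here; the file names are made
+up), the stem can be used during the second expansion to pull in extra
+per-target prerequisites:
+
+     .SECONDEXPANSION:
+     objects := foo.o bar.o
+
+     # Each object depends on its source file plus any headers named
+     # after the stem, e.g. foo_impl.h for foo.o.
+     $(objects): %.o: %.c $$(wildcard $$*_*.h)
+             $(CC) -c $(CFLAGS) $< -o $@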
+
+Secondary Expansion of Implicit Rules
+-------------------------------------
+
+As `make' searches for an implicit rule, it substitutes the stem and
+then performs secondary expansion for every rule with a matching target
+pattern.  The value of the automatic variables is derived in the same
+fashion as for static pattern rules.  As an example:
+
+     .SECONDEXPANSION:
+
+     foo: bar
+
+     foo foz: fo%: bo%
+
+     %oo: $$< $$^ $$+ $$*
+
+   When the implicit rule is tried for target `foo', `$$<' expands to
+`bar', `$$^' expands to `bar boo', `$$+' also expands to `bar boo', and
+`$$*' expands to `f'.
+
+   Note that the directory prefix (D), as described in *note Implicit
+Rule Search Algorithm: Implicit Rule Search, is prepended (after
+expansion) to all the patterns in the prerequisites list.  As an
+example:
+
+     .SECONDEXPANSION:
+
+     /tmp/foo.o:
+
+     %.o: $$(addsuffix /%.c,foo bar) foo.h
+
+   The prerequisite list after the secondary expansion and directory
+prefix reconstruction will be `/tmp/foo/foo.c /tmp/bar/foo.c
+foo.h'.  If you are not interested in this reconstruction, you can use
+`$$*' instead of `%' in the prerequisites list.
+
+
+File: make.info,  Node: Rules,  Next: Recipes,  Prev: Makefiles,  Up: Top
+
+4 Writing Rules
+***************
+
+A "rule" appears in the makefile and says when and how to remake
+certain files, called the rule's "targets" (most often only one per
+rule).  It lists the other files that are the "prerequisites" of the
+target, and the "recipe" to use to create or update the target.
+
+   The order of rules is not significant, except for determining the
+"default goal": the target for `make' to consider, if you do not
+otherwise specify one.  The default goal is the target of the first
+rule in the first makefile.  If the first rule has multiple targets,
+only the first target is taken as the default.  There are two
+exceptions: a target starting with a period is not a default unless it
+contains one or more slashes, `/', as well; and, a target that defines
+a pattern rule has no effect on the default goal.  (*Note Defining and
+Redefining Pattern Rules: Pattern Rules.)
+
+   Therefore, we usually write the makefile so that the first rule is
+the one for compiling the entire program or all the programs described
+by the makefile (often with a target called `all').  *Note Arguments to
+Specify the Goals: Goals.
+
+* Menu:
+
+* Rule Example::                An example explained.
+* Rule Syntax::                 General syntax explained.
+* Prerequisite Types::          There are two types of prerequisites.
+* Wildcards::                   Using wildcard characters such as `*'.
+* Directory Search::            Searching other directories for source files.
+* Phony Targets::               Using a target that is not a real file's name.
+* Force Targets::               You can use a target without recipes
+                                  or prerequisites to mark other targets
+                                  as phony.
+* Empty Targets::               When only the date matters and the
+                                  files are empty.
+* Special Targets::             Targets with special built-in meanings.
+* Multiple Targets::            When to make use of several targets in a rule.
+* Multiple Rules::              How to use several rules with the same target.
+* Static Pattern::              Static pattern rules apply to multiple targets
+                                  and can vary the prerequisites according to
+                                  the target name.
+* Double-Colon::                How to use a special kind of rule to allow
+                                  several independent rules for one target.
+* Automatic Prerequisites::     How to automatically generate rules giving
+                                  prerequisites from source files themselves.
+
+
+File: make.info,  Node: Rule Example,  Next: Rule Syntax,  Prev: Rules,  Up: Rules
+
+4.1 Rule Example
+================
+
+Here is an example of a rule:
+
+     foo.o : foo.c defs.h       # module for twiddling the frobs
+             cc -c -g foo.c
+
+   Its target is `foo.o' and its prerequisites are `foo.c' and
+`defs.h'.  It has one command in the recipe: `cc -c -g foo.c'.  The
+recipe starts with a tab to identify it as a recipe.
+
+   This rule says two things:
+
+   * How to decide whether `foo.o' is out of date: it is out of date if
+     it does not exist, or if either `foo.c' or `defs.h' is more recent
+     than it.
+
+   * How to update the file `foo.o': by running `cc' as stated.  The
+     recipe does not explicitly mention `defs.h', but we presume that
+     `foo.c' includes it, and that that is why `defs.h' was added to
+     the prerequisites.
+
+
+File: make.info,  Node: Rule Syntax,  Next: Prerequisite Types,  Prev: Rule Example,  Up: Rules
+
+4.2 Rule Syntax
+===============
+
+In general, a rule looks like this:
+
+     TARGETS : PREREQUISITES
+             RECIPE
+             ...
+
+or like this:
+
+     TARGETS : PREREQUISITES ; RECIPE
+             RECIPE
+             ...
+
+   The TARGETS are file names, separated by spaces.  Wildcard
+characters may be used (*note Using Wildcard Characters in File Names:
+Wildcards.) and a name of the form `A(M)' represents member M in
+archive file A (*note Archive Members as Targets: Archive Members.).
+Usually there is only one target per rule, but occasionally there is a
+reason to have more (*note Multiple Targets in a Rule: Multiple
+Targets.).
+
+   The RECIPE lines start with a tab character (or the first character
+in the value of the `.RECIPEPREFIX' variable; *note Special
+Variables::).  The first recipe line may appear on the line after the
+prerequisites, with a tab character, or may appear on the same line,
+with a semicolon.  Either way, the effect is the same.  There are other
+differences in the syntax of recipes.  *Note Writing Recipes in Rules:
+Recipes.
+
+   Because dollar signs are used to start `make' variable references,
+if you really want a dollar sign in a target or prerequisite you must
+write two of them, `$$' (*note How to Use Variables: Using Variables.).
+If you have enabled secondary expansion (*note Secondary Expansion::)
+and you want a literal dollar sign in the prerequisites list, you must
+actually write _four_ dollar signs (`$$$$').
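+
+   A small added illustration (the file name `pay$roll.txt' is made up):
+
+     # Without secondary expansion, `$$' yields one literal `$':
+     all: pay$$roll.txt
+
+     # With secondary expansion enabled, four dollar signs are needed:
+     .SECONDEXPANSION:
+     all2: pay$$$$roll.txt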
+
+   You may split a long line by inserting a backslash followed by a
+newline, but this is not required, as `make' places no limit on the
+length of a line in a makefile.
+
+   A rule tells `make' two things: when the targets are out of date,
+and how to update them when necessary.
+
+   The criterion for being out of date is specified in terms of the
+PREREQUISITES, which consist of file names separated by spaces.
+(Wildcards and archive members (*note Archives::) are allowed here too.)
+A target is out of date if it does not exist or if it is older than any
+of the prerequisites (by comparison of last-modification times).  The
+idea is that the contents of the target file are computed based on
+information in the prerequisites, so if any of the prerequisites
+changes, the contents of the existing target file are no longer
+necessarily valid.
+
+   How to update is specified by a RECIPE.  This is one or more lines
+to be executed by the shell (normally `sh'), but with some extra
+features (*note Writing Recipes in Rules: Recipes.).
+
+
+File: make.info,  Node: Prerequisite Types,  Next: Wildcards,  Prev: Rule Syntax,  Up: Rules
+
+4.3 Types of Prerequisites
+==========================
+
+There are actually two different types of prerequisites understood by
+GNU `make': normal prerequisites such as described in the previous
+section, and "order-only" prerequisites.  A normal prerequisite makes
+two statements: first, it imposes an order in which recipes will be
+invoked: the recipes for all prerequisites of a target will be
+completed before the recipe for the target is run.  Second, it imposes
+a dependency relationship: if any prerequisite is newer than the
+target, then the target is considered out-of-date and must be rebuilt.
+
+   Normally, this is exactly what you want: if a target's prerequisite
+is updated, then the target should also be updated.
+
+   Occasionally, however, you have a situation where you want to impose
+a specific ordering on the rules to be invoked _without_ forcing the
+target to be updated if one of those rules is executed.  In that case,
+you want to define "order-only" prerequisites.  Order-only
+prerequisites can be specified by placing a pipe symbol (`|') in the
+prerequisites list: any prerequisites to the left of the pipe symbol
+are normal; any prerequisites to the right are order-only:
+
+     TARGETS : NORMAL-PREREQUISITES | ORDER-ONLY-PREREQUISITES
+
+   The normal prerequisites section may of course be empty.  Also, you
+may still declare multiple lines of prerequisites for the same target:
+they are appended appropriately (normal prerequisites are appended to
+the list of normal prerequisites; order-only prerequisites are appended
+to the list of order-only prerequisites).  Note that if you declare the
+same file to be both a normal and an order-only prerequisite, the
+normal prerequisite takes precedence (since they have a strict superset
+of the behavior of an order-only prerequisite).
+
+   Consider an example where your targets are to be placed in a separate
+directory, and that directory might not exist before `make' is run.  In
+this situation, you want the directory to be created before any targets
+are placed into it but, because the timestamps on directories change
+whenever a file is added, removed, or renamed, we certainly don't want
+to rebuild all the targets whenever the directory's timestamp changes.
+One way to manage this is with order-only prerequisites: make the
+directory an order-only prerequisite on all the targets:
+
+     OBJDIR := objdir
+     OBJS := $(addprefix $(OBJDIR)/,foo.o bar.o baz.o)
+
+     $(OBJDIR)/%.o : %.c
+             $(COMPILE.c) $(OUTPUT_OPTION) $<
+
+     all: $(OBJS)
+
+     $(OBJS): | $(OBJDIR)
+
+     $(OBJDIR):
+             mkdir $(OBJDIR)
+
+   Now the rule to create the `objdir' directory will be run, if
+needed, before any `.o' is built, but no `.o' will be built because the
+`objdir' directory timestamp changed.
+
+
+File: make.info,  Node: Wildcards,  Next: Directory Search,  Prev: Prerequisite Types,  Up: Rules
+
+4.4 Using Wildcard Characters in File Names
+===========================================
+
+A single file name can specify many files using "wildcard characters".
+The wildcard characters in `make' are `*', `?' and `[...]', the same as
+in the Bourne shell.  For example, `*.c' specifies a list of all the
+files (in the working directory) whose names end in `.c'.
+
+   The character `~' at the beginning of a file name also has special
+significance.  If alone, or followed by a slash, it represents your home
+directory.  For example `~/bin' expands to `/home/you/bin'.  If the `~'
+is followed by a word, the string represents the home directory of the
+user named by that word.  For example `~john/bin' expands to
+`/home/john/bin'.  On systems which don't have a home directory for
+each user (such as MS-DOS or MS-Windows), this functionality can be
+simulated by setting the environment variable HOME.
+
+   Wildcard expansion is performed by `make' automatically in targets
+and in prerequisites.  In recipes, the shell is responsible for
+wildcard expansion.  In other contexts, wildcard expansion happens only
+if you request it explicitly with the `wildcard' function.
+
+   The special significance of a wildcard character can be turned off by
+preceding it with a backslash.  Thus, `foo\*bar' would refer to a
+specific file whose name consists of `foo', an asterisk, and `bar'.
+
+* Menu:
+
+* Wildcard Examples::           Several examples
+* Wildcard Pitfall::            Problems to avoid.
+* Wildcard Function::           How to cause wildcard expansion where
+                                  it does not normally take place.
+
+
+File: make.info,  Node: Wildcard Examples,  Next: Wildcard Pitfall,  Prev: Wildcards,  Up: Wildcards
+
+4.4.1 Wildcard Examples
+-----------------------
+
+Wildcards can be used in the recipe of a rule, where they are expanded
+by the shell.  For example, here is a rule to delete all the object
+files:
+
+     clean:
+             rm -f *.o
+
+   Wildcards are also useful in the prerequisites of a rule.  With the
+following rule in the makefile, `make print' will print all the `.c'
+files that have changed since the last time you printed them:
+
+     print: *.c
+             lpr -p $?
+             touch print
+
+This rule uses `print' as an empty target file; see *note Empty Target
+Files to Record Events: Empty Targets.  (The automatic variable `$?' is
+used to print only those files that have changed; see *note Automatic
+Variables::.)
+
+   Wildcard expansion does not happen when you define a variable.
+Thus, if you write this:
+
+     objects = *.o
+
+then the value of the variable `objects' is the actual string `*.o'.
+However, if you use the value of `objects' in a target or prerequisite,
+wildcard expansion will take place there.  If you use the value of
+`objects' in a recipe, the shell may perform wildcard expansion when
+the recipe runs.  To set `objects' to the expansion, instead use:
+
+     objects := $(wildcard *.o)
+
+*Note Wildcard Function::.
+
+
+File: make.info,  Node: Wildcard Pitfall,  Next: Wildcard Function,  Prev: Wildcard Examples,  Up: Wildcards
+
+4.4.2 Pitfalls of Using Wildcards
+---------------------------------
+
+Now here is an example of a naive way of using wildcard expansion, that
+does not do what you would intend.  Suppose you would like to say that
+the executable file `foo' is made from all the object files in the
+directory, and you write this:
+
+     objects = *.o
+
+     foo : $(objects)
+             cc -o foo $(CFLAGS) $(objects)
+
+The value of `objects' is the actual string `*.o'.  Wildcard expansion
+happens in the rule for `foo', so that each _existing_ `.o' file
+becomes a prerequisite of `foo' and will be recompiled if necessary.
+
+   But what if you delete all the `.o' files?  When a wildcard matches
+no files, it is left as it is, so then `foo' will depend on the
+oddly-named file `*.o'.  Since no such file is likely to exist, `make'
+will give you an error saying it cannot figure out how to make `*.o'.
+This is not what you want!
+
+   Actually it is possible to obtain the desired result with wildcard
+expansion, but you need more sophisticated techniques, including the
+`wildcard' function and string substitution.  *Note The Function
+`wildcard': Wildcard Function.
+
+   Microsoft operating systems (MS-DOS and MS-Windows) use backslashes
+to separate directories in pathnames, like so:
+
+       c:\foo\bar\baz.c
+
+   This is equivalent to the Unix-style `c:/foo/bar/baz.c' (the `c:'
+part is the so-called drive letter).  When `make' runs on these
+systems, it supports backslashes as well as the Unix-style forward
+slashes in pathnames.  However, this support does _not_ include the
+wildcard expansion, where backslash is a quote character.  Therefore,
+you _must_ use Unix-style slashes in these cases.
+
+
+File: make.info,  Node: Wildcard Function,  Prev: Wildcard Pitfall,  Up: Wildcards
+
+4.4.3 The Function `wildcard'
+-----------------------------
+
+Wildcard expansion happens automatically in rules.  But wildcard
+expansion does not normally take place when a variable is set, or
+inside the arguments of a function.  If you want to do wildcard
+expansion in such places, you need to use the `wildcard' function, like
+this:
+
+     $(wildcard PATTERN...)
+
+This string, used anywhere in a makefile, is replaced by a
+space-separated list of names of existing files that match one of the
+given file name patterns.  If no existing file name matches a pattern,
+then that pattern is omitted from the output of the `wildcard'
+function.  Note that this is different from how unmatched wildcards
+behave in rules, where they are used verbatim rather than ignored
+(*note Wildcard Pitfall::).
+
+   One use of the `wildcard' function is to get a list of all the C
+source files in a directory, like this:
+
+     $(wildcard *.c)
+
+   We can change the list of C source files into a list of object files
+by replacing the `.c' suffix with `.o' in the result, like this:
+
+     $(patsubst %.c,%.o,$(wildcard *.c))
+
+(Here we have used another function, `patsubst'.  *Note Functions for
+String Substitution and Analysis: Text Functions.)
+
+   Thus, a makefile to compile all C source files in the directory and
+then link them together could be written as follows:
+
+     objects := $(patsubst %.c,%.o,$(wildcard *.c))
+
+     foo : $(objects)
+             cc -o foo $(objects)
+
+(This takes advantage of the implicit rule for compiling C programs, so
+there is no need to write explicit rules for compiling the files.
+*Note The Two Flavors of Variables: Flavors, for an explanation of
+`:=', which is a variant of `='.)
+
+
+File: make.info,  Node: Directory Search,  Next: Phony Targets,  Prev: Wildcards,  Up: Rules
+
+4.5 Searching Directories for Prerequisites
+===========================================
+
+For large systems, it is often desirable to put sources in a separate
+directory from the binaries.  The "directory search" features of `make'
+facilitate this by searching several directories automatically to find
+a prerequisite.  When you redistribute the files among directories, you
+do not need to change the individual rules, just the search paths.
+
+* Menu:
+
+* General Search::              Specifying a search path that applies
+                                  to every prerequisite.
+* Selective Search::            Specifying a search path
+                                  for a specified class of names.
+* Search Algorithm::            When and how search paths are applied.
+* Recipes/Search::              How to write recipes that work together
+                                  with search paths.
+* Implicit/Search::             How search paths affect implicit rules.
+* Libraries/Search::            Directory search for link libraries.
+
+
+File: make.info,  Node: General Search,  Next: Selective Search,  Prev: Directory Search,  Up: Directory Search
+
+4.5.1 `VPATH': Search Path for All Prerequisites
+------------------------------------------------
+
+The value of the `make' variable `VPATH' specifies a list of
+directories that `make' should search.  Most often, the directories are
+expected to contain prerequisite files that are not in the current
+directory; however, `make' uses `VPATH' as a search list for both
+prerequisites and targets of rules.
+
+   Thus, if a file that is listed as a target or prerequisite does not
+exist in the current directory, `make' searches the directories listed
+in `VPATH' for a file with that name.  If a file is found in one of
+them, that file may become the prerequisite (see below).  Rules may then
+specify the names of files in the prerequisite list as if they all
+existed in the current directory.  *Note Writing Recipes with Directory
+Search: Recipes/Search.
+
+   In the `VPATH' variable, directory names are separated by colons or
+blanks.  The order in which directories are listed is the order followed
+by `make' in its search.  (On MS-DOS and MS-Windows, semi-colons are
+used as separators of directory names in `VPATH', since the colon can
+be used in the pathname itself, after the drive letter.)
+
+   For example,
+
+     VPATH = src:../headers
+
+specifies a path containing two directories, `src' and `../headers',
+which `make' searches in that order.
+
+   With this value of `VPATH', the following rule,
+
+     foo.o : foo.c
+
+is interpreted as if it were written like this:
+
+     foo.o : src/foo.c
+
+assuming the file `foo.c' does not exist in the current directory but
+is found in the directory `src'.
+
+
+File: make.info,  Node: Selective Search,  Next: Search Algorithm,  Prev: General Search,  Up: Directory Search
+
+4.5.2 The `vpath' Directive
+---------------------------
+
+Similar to the `VPATH' variable, but more selective, is the `vpath'
+directive (note lower case), which allows you to specify a search path
+for a particular class of file names: those that match a particular
+pattern.  Thus you can supply certain search directories for one class
+of file names and other directories (or none) for other file names.
+
+   There are three forms of the `vpath' directive:
+
+`vpath PATTERN DIRECTORIES'
+     Specify the search path DIRECTORIES for file names that match
+     PATTERN.
+
+     The search path, DIRECTORIES, is a list of directories to be
+     searched, separated by colons (semi-colons on MS-DOS and
+     MS-Windows) or blanks, just like the search path used in the
+     `VPATH' variable.
+
+`vpath PATTERN'
+     Clear out the search path associated with PATTERN.
+
+`vpath'
+     Clear all search paths previously specified with `vpath'
+     directives.
+
+   A `vpath' pattern is a string containing a `%' character.  The
+string must match the file name of a prerequisite that is being searched
+for, the `%' character matching any sequence of zero or more characters
+(as in pattern rules; *note Defining and Redefining Pattern Rules:
+Pattern Rules.).  For example, `%.h' matches files that end in `.h'.
+(If there is no `%', the pattern must match the prerequisite exactly,
+which is not useful very often.)
+
+   `%' characters in a `vpath' directive's pattern can be quoted with
+preceding backslashes (`\').  Backslashes that would otherwise quote
+`%' characters can be quoted with more backslashes.  Backslashes that
+quote `%' characters or other backslashes are removed from the pattern
+before it is compared to file names.  Backslashes that are not in
+danger of quoting `%' characters go unmolested.
+
+   When a prerequisite fails to exist in the current directory, if the
+PATTERN in a `vpath' directive matches the name of the prerequisite
+file, then the DIRECTORIES in that directive are searched just like
+(and before) the directories in the `VPATH' variable.
+
+   For example,
+
+     vpath %.h ../headers
+
+tells `make' to look for any prerequisite whose name ends in `.h' in
+the directory `../headers' if the file is not found in the current
+directory.
+
+   If several `vpath' patterns match the prerequisite file's name, then
+`make' processes each matching `vpath' directive one by one, searching
+all the directories mentioned in each directive.  `make' handles
+multiple `vpath' directives in the order in which they appear in the
+makefile; multiple directives with the same pattern are independent of
+each other.
+
+   Thus,
+
+     vpath %.c foo
+     vpath %   blish
+     vpath %.c bar
+
+will look for a file ending in `.c' in `foo', then `blish', then `bar',
+while
+
+     vpath %.c foo:bar
+     vpath %   blish
+
+will look for a file ending in `.c' in `foo', then `bar', then `blish'.
+
+
+File: make.info,  Node: Search Algorithm,  Next: Recipes/Search,  Prev: Selective Search,  Up: Directory Search
+
+4.5.3 How Directory Searches are Performed
+------------------------------------------
+
+When a prerequisite is found through directory search, regardless of
+type (general or selective), the pathname located may not be the one
+that `make' actually provides you in the prerequisite list.  Sometimes
+the path discovered through directory search is thrown away.
+
+   The algorithm `make' uses to decide whether to keep or abandon a
+path found via directory search is as follows:
+
+  1. If a target file does not exist at the path specified in the
+     makefile, directory search is performed.
+
+  2. If the directory search is successful, that path is kept and this
+     file is tentatively stored as the target.
+
+  3. All prerequisites of this target are examined using this same
+     method.
+
+  4. After processing the prerequisites, the target may or may not need
+     to be rebuilt:
+
+       a. If the target does _not_ need to be rebuilt, the path to the
+          file found during directory search is used for any
+          prerequisite lists which contain this target.  In short, if
+          `make' doesn't need to rebuild the target then you use the
+          path found via directory search.
+
+       b. If the target _does_ need to be rebuilt (is out-of-date), the
+          pathname found during directory search is _thrown away_, and
+          the target is rebuilt using the file name specified in the
+          makefile.  In short, if `make' must rebuild, then the target
+          is rebuilt locally, not in the directory found via directory
+          search.
+
+   This algorithm may seem complex, but in practice it is quite often
+exactly what you want.
+
+   Other versions of `make' use a simpler algorithm: if the file does
+not exist, and it is found via directory search, then that pathname is
+always used whether or not the target needs to be built.  Thus, if the
+target is rebuilt it is created at the pathname discovered during
+directory search.
+
+   If, in fact, this is the behavior you want for some or all of your
+directories, you can use the `GPATH' variable to indicate this to
+`make'.
+
+   `GPATH' has the same syntax and format as `VPATH' (that is, a space-
+or colon-delimited list of pathnames).  If an out-of-date target is
+found by directory search in a directory that also appears in `GPATH',
+then that pathname is not thrown away.  The target is rebuilt using the
+expanded path.
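+
+   For instance (a hypothetical fragment added here), with both `VPATH'
+and `GPATH' naming `objdir', an out-of-date `foo.o' found in `objdir'
+is rebuilt there rather than in the current directory:
+
+     VPATH = objdir
+     GPATH = objdir
+
+     %.o: %.c
+             $(CC) -c $(CFLAGS) $< -o $@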
+
+
+File: make.info,  Node: Recipes/Search,  Next: Implicit/Search,  Prev: Search Algorithm,  Up: Directory Search
+
+4.5.4 Writing Recipes with Directory Search
+-------------------------------------------
+
+When a prerequisite is found in another directory through directory
+search, this cannot change the recipe of the rule; it will execute as
+written.  Therefore, you must write the recipe with care so that it
+will look for the prerequisite in the directory where `make' finds it.
+
+   This is done with the "automatic variables" such as `$^' (*note
+Automatic Variables::).  For instance, the value of `$^' is a list of
+all the prerequisites of the rule, including the names of the
+directories in which they were found, and the value of `$@' is the
+target.  Thus:
+
+     foo.o : foo.c
+             cc -c $(CFLAGS) $^ -o $@
+
+(The variable `CFLAGS' exists so you can specify flags for C
+compilation by implicit rules; we use it here for consistency so it will
+affect all C compilations uniformly; *note Variables Used by Implicit
+Rules: Implicit Variables.)
+
+   Often the prerequisites include header files as well, which you do
+not want to mention in the recipe.  The automatic variable `$<' is just
+the first prerequisite:
+
+     VPATH = src:../headers
+     foo.o : foo.c defs.h hack.h
+             cc -c $(CFLAGS) $< -o $@
+
+
+File: make.info,  Node: Implicit/Search,  Next: Libraries/Search,  Prev: Recipes/Search,  Up: Directory Search
+
+4.5.5 Directory Search and Implicit Rules
+-----------------------------------------
+
+The search through the directories specified in `VPATH' or with `vpath'
+also happens during consideration of implicit rules (*note Using
+Implicit Rules: Implicit Rules.).
+
+   For example, when a file `foo.o' has no explicit rule, `make'
+considers implicit rules, such as the built-in rule to compile `foo.c'
+if that file exists.  If such a file is lacking in the current
+directory, the appropriate directories are searched for it.  If `foo.c'
+exists (or is mentioned in the makefile) in any of the directories, the
+implicit rule for C compilation is applied.
+
+   The recipes of implicit rules normally use automatic variables as a
+matter of necessity; consequently they will use the file names found by
+directory search with no extra effort.
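+
+   For example (an added sketch with hypothetical file names), if
+`src/foo.c' exists but there is no `foo.c' in the current directory,
+the built-in C compilation rule still applies to `foo.o':
+
+     VPATH = src
+
+     foo: foo.o
+             cc -o foo foo.o
+
+The built-in `%.o: %.c' rule compiles `src/foo.c' because its recipe
+refers to the prerequisite as `$<'.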
+
+
+File: make.info,  Node: Libraries/Search,  Prev: Implicit/Search,  Up: Directory Search
+
+4.5.6 Directory Search for Link Libraries
+-----------------------------------------
+
+Directory search applies in a special way to libraries used with the
+linker.  This special feature comes into play when you write a
+prerequisite whose name is of the form `-lNAME'.  (You can tell
+something strange is going on here because the prerequisite is normally
+the name of a file, and the _file name_ of a library generally looks
+like `libNAME.a', not like `-lNAME'.)
+
+   When a prerequisite's name has the form `-lNAME', `make' handles it
+specially by searching for the file `libNAME.so', and, if it is not
+found, for the file `libNAME.a' in the current directory, in
+directories specified by matching `vpath' search paths and the `VPATH'
+search path, and then in the directories `/lib', `/usr/lib', and
+`PREFIX/lib' (normally `/usr/local/lib', but MS-DOS/MS-Windows versions
+of `make' behave as if PREFIX is defined to be the root of the DJGPP
+installation tree).
+
+   For example, if there is a `/usr/lib/libcurses.a' library on your
+system (and no `/usr/lib/libcurses.so' file), then
+
+     foo : foo.c -lcurses
+             cc $^ -o $@
+
+would cause the command `cc foo.c /usr/lib/libcurses.a -o foo' to be
+executed when `foo' is older than `foo.c' or than
+`/usr/lib/libcurses.a'.
+
+   Although the default set of files to be searched for is `libNAME.so'
+and `libNAME.a', this is customizable via the `.LIBPATTERNS' variable.
+Each word in the value of this variable is a pattern string.  When a
+prerequisite like `-lNAME' is seen, `make' will replace the percent in
+each pattern in the list with NAME and perform the above directory
+searches using each library filename.
+
+   The default value for `.LIBPATTERNS' is `lib%.so lib%.a', which
+provides the default behavior described above.
+
+   You can turn off link library expansion completely by setting this
+variable to an empty value.
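+
+   For example (a hypothetical setting added here), to have `-lNAME'
+prerequisites resolve only to static archives you could write:
+
+     .LIBPATTERNS = lib%.a
+
+     foo: foo.c -lcurses
+             cc $^ -o $@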
+
+
+File: make.info,  Node: Phony Targets,  Next: Force Targets,  Prev: Directory Search,  Up: Rules
+
+4.6 Phony Targets
+=================
+
+A phony target is one that is not really the name of a file; rather it
+is just a name for a recipe to be executed when you make an explicit
+request.  There are two reasons to use a phony target: to avoid a
+conflict with a file of the same name, and to improve performance.
+
+   If you write a rule whose recipe will not create the target file, the
+recipe will be executed every time the target comes up for remaking.
+Here is an example:
+
+     clean:
+             rm *.o temp
+
+Because the `rm' command does not create a file named `clean', probably
+no such file will ever exist.  Therefore, the `rm' command will be
+executed every time you say `make clean'.  
+
+   The phony target will cease to work if anything ever does create a
+file named `clean' in this directory.  Since it has no prerequisites,
+the file `clean' would inevitably be considered up to date, and its
+recipe would not be executed.  To avoid this problem, you can explicitly
+declare the target to be phony, using the special target `.PHONY'
+(*note Special Built-in Target Names: Special Targets.) as follows:
+
+     .PHONY : clean
+
+Once this is done, `make clean' will run the recipe regardless of
+whether there is a file named `clean'.
+
+   Since it knows that phony targets do not name actual files that
+could be remade from other files, `make' skips the implicit rule search
+for phony targets (*note Implicit Rules::).  This is why declaring a
+target phony is good for performance, even if you are not worried about
+the actual file existing.
+
+   Thus, you first write the line that states that `clean' is a phony
+target, then you write the rule, like this:
+
+     .PHONY: clean
+     clean:
+             rm *.o temp
+
+   Another example of the usefulness of phony targets is in conjunction
+with recursive invocations of `make' (for more information, see *note
+Recursive Use of `make': Recursion.).  In this case the makefile will
+often contain a variable which lists a number of subdirectories to be
+built.  One way to handle this is with one rule whose recipe is a shell
+loop over the subdirectories, like this:
+
+     SUBDIRS = foo bar baz
+
+     subdirs:
+             for dir in $(SUBDIRS); do \
+               $(MAKE) -C $$dir; \
+             done
+
+   There are problems with this method, however.  First, any error
+detected in a submake is ignored by this rule, so it will continue to
+build the rest of the directories even when one fails.  This can be
+overcome by adding shell commands to note the error and exit, but then
+it will do so even if `make' is invoked with the `-k' option, which is
+unfortunate.  Second, and perhaps more importantly, you cannot take
+advantage of `make''s ability to build targets in parallel (*note
+Parallel Execution: Parallel.), since there is only one rule.
+
+   By declaring the subdirectories as phony targets (you must do this as
+the subdirectory obviously always exists; otherwise it won't be built)
+you can remove these problems:
+
+     SUBDIRS = foo bar baz
+
+     .PHONY: subdirs $(SUBDIRS)
+
+     subdirs: $(SUBDIRS)
+
+     $(SUBDIRS):
+             $(MAKE) -C $@
+
+     foo: baz
+
+   Here we've also declared that the `foo' subdirectory cannot be built
+until after the `baz' subdirectory is complete; this kind of
+relationship declaration is particularly important when attempting
+parallel builds.
+
+   A phony target should not be a prerequisite of a real target file;
+if it is, its recipe will be run every time `make' goes to update that
+file.  As long as a phony target is never a prerequisite of a real
+target, the phony target recipe will be executed only when the phony
+target is a specified goal (*note Arguments to Specify the Goals:
+Goals.).
+
+   Phony targets can have prerequisites.  When one directory contains
+multiple programs, it is most convenient to describe all of the
+programs in one makefile `./Makefile'.  Since the target remade by
+default will be the first one in the makefile, it is common to make
+this a phony target named `all' and give it, as prerequisites, all the
+individual programs.  For example:
+
+     all : prog1 prog2 prog3
+     .PHONY : all
+
+     prog1 : prog1.o utils.o
+             cc -o prog1 prog1.o utils.o
+
+     prog2 : prog2.o
+             cc -o prog2 prog2.o
+
+     prog3 : prog3.o sort.o utils.o
+             cc -o prog3 prog3.o sort.o utils.o
+
+Now you can say just `make' to remake all three programs, or specify as
+arguments the ones to remake (as in `make prog1 prog3').  Phoniness is
+not inherited: the prerequisites of a phony target are not themselves
+phony, unless explicitly declared to be so.
+
+   When one phony target is a prerequisite of another, it serves as a
+subroutine of the other.  For example, here `make cleanall' will delete
+the object files, the difference files, and the file `program':
+
+     .PHONY: cleanall cleanobj cleandiff
+
+     cleanall : cleanobj cleandiff
+             rm program
+
+     cleanobj :
+             rm *.o
+
+     cleandiff :
+             rm *.diff
+
+
+File: make.info,  Node: Force Targets,  Next: Empty Targets,  Prev: Phony Targets,  Up: Rules
+
+4.7 Rules without Recipes or Prerequisites
+==========================================
+
+If a rule has no prerequisites or recipe, and the target of the rule is
+a nonexistent file, then `make' imagines this target to have been
+updated whenever its rule is run.  This implies that all targets
+depending on this one will always have their recipe run.
+
+   An example will illustrate this:
+
+     clean: FORCE
+             rm $(objects)
+     FORCE:
+
+   Here the target `FORCE' satisfies the special conditions, so the
+target `clean' that depends on it is forced to run its recipe.  There
+is nothing special about the name `FORCE', but that is one name
+commonly used this way.
+
+   As you can see, using `FORCE' this way has the same results as using
+`.PHONY: clean'.
+
+   Using `.PHONY' is more explicit and more efficient.  However, other
+versions of `make' do not support `.PHONY'; thus `FORCE' appears in
+many makefiles.  *Note Phony Targets::.
+
+
+File: make.info,  Node: Empty Targets,  Next: Special Targets,  Prev: Force Targets,  Up: Rules
+
+4.8 Empty Target Files to Record Events
+=======================================
+
+The "empty target" is a variant of the phony target; it is used to hold
+recipes for an action that you request explicitly from time to time.
+Unlike a phony target, this target file can really exist; but the file's
+contents do not matter, and usually are empty.
+
+   The purpose of the empty target file is to record, with its
+last-modification time, when the rule's recipe was last executed.  It
+does so because one of the commands in the recipe is a `touch' command
+to update the target file.
+
+   The empty target file should have some prerequisites (otherwise it
+doesn't make sense).  When you ask to remake the empty target, the
+recipe is executed if any prerequisite is more recent than the target;
+in other words, if a prerequisite has changed since the last time you
+remade the target.  Here is an example:
+
+     print: foo.c bar.c
+             lpr -p $?
+             touch print
+   
+With this rule, `make print' will execute the `lpr' command if either
+source file has changed since the last `make print'.  The automatic
+variable `$?' is used to print only those files that have changed
+(*note Automatic Variables::).
+
+
+File: make.info,  Node: Special Targets,  Next: Multiple Targets,  Prev: Empty Targets,  Up: Rules
+
+4.9 Special Built-in Target Names
+=================================
+
+Certain names have special meanings if they appear as targets.
+
+`.PHONY'
+     The prerequisites of the special target `.PHONY' are considered to
+     be phony targets.  When it is time to consider such a target,
+     `make' will run its recipe unconditionally, regardless of whether
+     a file with that name exists or what its last-modification time
+     is.  *Note Phony Targets: Phony Targets.
+
+`.SUFFIXES'
+     The prerequisites of the special target `.SUFFIXES' are the list
+     of suffixes to be used in checking for suffix rules.  *Note
+     Old-Fashioned Suffix Rules: Suffix Rules.
+
+`.DEFAULT'
+     The recipe specified for `.DEFAULT' is used for any target for
+     which no rules are found (either explicit rules or implicit rules).
+     *Note Last Resort::.  If a `.DEFAULT' recipe is specified, every
+     file mentioned as a prerequisite, but not as a target in a rule,
+     will have that recipe executed on its behalf.  *Note Implicit Rule
+     Search Algorithm: Implicit Rule Search.
+
+`.PRECIOUS'
+     The targets which `.PRECIOUS' depends on are given the following
+     special treatment: if `make' is killed or interrupted during the
+     execution of their recipes, the target is not deleted.  *Note
+     Interrupting or Killing `make': Interrupts.  Also, if the target
+     is an intermediate file, it will not be deleted after it is no
+     longer needed, as is normally done.  *Note Chains of Implicit
+     Rules: Chained Rules.  In this latter respect it overlaps with the
+     `.SECONDARY' special target.
+
+     You can also list the target pattern of an implicit rule (such as
+     `%.o') as a prerequisite file of the special target `.PRECIOUS' to
+     preserve intermediate files created by rules whose target patterns
+     match that file's name.
+
+`.INTERMEDIATE'
+     The targets which `.INTERMEDIATE' depends on are treated as
+     intermediate files.  *Note Chains of Implicit Rules: Chained Rules.
+     `.INTERMEDIATE' with no prerequisites has no effect.
+
+`.SECONDARY'
+     The targets which `.SECONDARY' depends on are treated as
+     intermediate files, except that they are never automatically
+     deleted.  *Note Chains of Implicit Rules: Chained Rules.
+
+     `.SECONDARY' with no prerequisites causes all targets to be treated
+     as secondary (i.e., no target is removed because it is considered
+     intermediate).
+
+`.SECONDEXPANSION'
+     If `.SECONDEXPANSION' is mentioned as a target anywhere in the
+     makefile, then all prerequisite lists defined _after_ it appears
+     will be expanded a second time after all makefiles have been read
+     in.  *Note Secondary Expansion: Secondary Expansion.
+
+`.DELETE_ON_ERROR'
+     If `.DELETE_ON_ERROR' is mentioned as a target anywhere in the
+     makefile, then `make' will delete the target of a rule if it has
+     changed and its recipe exits with a nonzero exit status, just as it
+     does when it receives a signal.  *Note Errors in Recipes: Errors.
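+     A brief, hypothetical example appears at the end of this list.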
+
+`.IGNORE'
+     If you specify prerequisites for `.IGNORE', then `make' will
+     ignore errors in execution of the recipe for those particular
+     files.  The recipe for `.IGNORE' (if any) is ignored.
+
+     If mentioned as a target with no prerequisites, `.IGNORE' says to
+     ignore errors in execution of recipes for all files.  This usage of
+     `.IGNORE' is supported only for historical compatibility.  Since
+     this affects every recipe in the makefile, it is not very useful;
+     we recommend you use the more selective ways to ignore errors in
+     specific recipes.  *Note Errors in Recipes: Errors.
+
+`.LOW_RESOLUTION_TIME'
+     If you specify prerequisites for `.LOW_RESOLUTION_TIME', `make'
+     assumes that these files are created by commands that generate low
+     resolution time stamps.  The recipe for the `.LOW_RESOLUTION_TIME'
+     target is ignored.
+
+     The high resolution file time stamps of many modern file systems
+     lessen the chance of `make' incorrectly concluding that a file is
+     up to date.  Unfortunately, some hosts do not provide a way to set
+     a high resolution file time stamp, so commands like `cp -p' that
+     explicitly set a file's time stamp must discard its subsecond part.
+     If a file is created by such a command, you should list it as a
+     prerequisite of `.LOW_RESOLUTION_TIME' so that `make' does not
+     mistakenly conclude that the file is out of date.  For example:
+
+          .LOW_RESOLUTION_TIME: dst
+          dst: src
+                  cp -p src dst
+
+     Since `cp -p' discards the subsecond part of `src''s time stamp,
+     `dst' is typically slightly older than `src' even when it is up to
+     date.  The `.LOW_RESOLUTION_TIME' line causes `make' to consider
+     `dst' to be up to date if its time stamp is at the start of the
+     same second that `src''s time stamp is in.
+
+     Due to a limitation of the archive format, archive member time
+     stamps are always low resolution.  You need not list archive
+     members as prerequisites of `.LOW_RESOLUTION_TIME', as `make' does
+     this automatically.
+
+`.SILENT'
+     If you specify prerequisites for `.SILENT', then `make' will not
+     print the recipe used to remake those particular files before
+     executing them.  The recipe for `.SILENT' is ignored.
+
+     If mentioned as a target with no prerequisites, `.SILENT' says not
+     to print any recipes before executing them.  This usage of
+     `.SILENT' is supported only for historical compatibility.  We
+     recommend you use the more selective ways to silence specific
+     recipes.  *Note Recipe Echoing: Echoing.  If you want to silence
+     all recipes for a particular run of `make', use the `-s' or
+     `--silent' option (*note Options Summary::).
+
+`.EXPORT_ALL_VARIABLES'
+     Simply by being mentioned as a target, this tells `make' to export
+     all variables to child processes by default.  *Note Communicating
+     Variables to a Sub-`make': Variables/Recursion.
+
+`.NOTPARALLEL'
+     If `.NOTPARALLEL' is mentioned as a target, then this invocation
+     of `make' will be run serially, even if the `-j' option is given.
+     Any recursively invoked `make' command will still run recipes in
+     parallel (unless its makefile also contains this target).  Any
+     prerequisites on this target are ignored.
+
+`.ONESHELL'
+     If `.ONESHELL' is mentioned as a target, then when a target is
+     built all lines of the recipe will be given to a single invocation
+     of the shell rather than each line being invoked separately (*note
+     Recipe Execution: Execution.).
+
+`.POSIX'
+     If `.POSIX' is mentioned as a target, then the makefile will be
+     parsed and run in POSIX-conforming mode.  This does _not_ mean
+     that only POSIX-conforming makefiles will be accepted: all advanced
+     GNU `make' features are still available.  Rather, this target
+     causes `make' to behave as required by POSIX in those areas where
+     `make''s default behavior differs.
+
+     In particular, if this target is mentioned then recipes will be
+     invoked as if the shell had been passed the `-e' flag: the first
+     failing command in a recipe will cause the recipe to fail
+     immediately.
+
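+   As a brief, hypothetical sketch of `.DELETE_ON_ERROR' (added here;
+the command name `generate-data' is made up):
+
+     .DELETE_ON_ERROR:
+
+     output.dat: input.dat
+             generate-data input.dat > output.dat
+
+If `generate-data' exits with an error after writing only part of
+`output.dat', `make' removes the partial file so that a later run does
+not mistake it for a good result.
+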
+   Any defined implicit rule suffix also counts as a special target if
+it appears as a target, and so does the concatenation of two suffixes,
+such as `.c.o'.  These targets are suffix rules, an obsolete way of
+defining implicit rules (but a way still widely used).  In principle,
+any target name could be special in this way if you break it in two and
+add both pieces to the suffix list.  In practice, suffixes normally
+begin with `.', so these special target names also begin with `.'.
+*Note Old-Fashioned Suffix Rules: Suffix Rules.
+
+
+File: make.info,  Node: Multiple Targets,  Next: Multiple Rules,  Prev: Special Targets,  Up: Rules
+
+4.10 Multiple Targets in a Rule
+===============================
+
+A rule with multiple targets is equivalent to writing many rules, each
+with one target, and all identical aside from that.  The same recipe
+applies to all the targets, but its effect may vary because you can
+substitute the actual target name into the recipe using `$@'.  The rule
+contributes the same prerequisites to all the targets also.
+
+   This is useful in two cases.
+
+   * You want just prerequisites, no recipe.  For example:
+
+          kbd.o command.o files.o: command.h
+
+     gives an additional prerequisite to each of the three object files
+     mentioned.
+
+   * Similar recipes work for all the targets.  The recipes do not need
+     to be absolutely identical, since the automatic variable `$@' can
+     be used to substitute the particular target to be remade into the
+     commands (*note Automatic Variables::).  For example:
+
+          bigoutput littleoutput : text.g
+                  generate text.g -$(subst output,,$@) > $@
+     
+     is equivalent to
+
+          bigoutput : text.g
+                  generate text.g -big > bigoutput
+          littleoutput : text.g
+                  generate text.g -little > littleoutput
+
+     Here we assume the hypothetical program `generate' makes two types
+     of output, one if given `-big' and one if given `-little'.  *Note
+     Functions for String Substitution and Analysis: Text Functions,
+     for an explanation of the `subst' function.
+
+   Suppose you would like to vary the prerequisites according to the
+target, much as the variable `$@' allows you to vary the recipe.  You
+cannot do this with multiple targets in an ordinary rule, but you can
+do it with a "static pattern rule".  *Note Static Pattern Rules: Static
+Pattern.
+
+
+File: make.info,  Node: Multiple Rules,  Next: Static Pattern,  Prev: Multiple Targets,  Up: Rules
+
+4.11 Multiple Rules for One Target
+==================================
+
+One file can be the target of several rules.  All the prerequisites
+mentioned in all the rules are merged into one list of prerequisites for
+the target.  If the target is older than any prerequisite from any rule,
+the recipe is executed.
+
+   There can only be one recipe to be executed for a file.  If more than
+one rule gives a recipe for the same file, `make' uses the last one
+given and prints an error message.  (As a special case, if the file's
+name begins with a dot, no error message is printed.  This odd behavior
+is only for compatibility with other implementations of `make'... you
+should avoid using it).  Occasionally it is useful to have the same
+target invoke multiple recipes which are defined in different parts of
+your makefile; you can use "double-colon rules" (*note Double-Colon::)
+for this.
+
+   An extra rule with just prerequisites can be used to give a few extra
+prerequisites to many files at once.  For example, makefiles often have
+a variable, such as `objects', containing a list of all the compiler
+output files in the system being made.  An easy way to say that all of
+them must be recompiled if `config.h' changes is to write the following:
+
+     objects = foo.o bar.o
+     foo.o : defs.h
+     bar.o : defs.h test.h
+     $(objects) : config.h
+
+   This could be inserted or taken out without changing the rules that
+really specify how to make the object files, making it a convenient
+form to use if you wish to add the additional prerequisite
+intermittently.
+
+   Another wrinkle is that the additional prerequisites could be
+specified with a variable that you set with a command line argument to
+`make' (*note Overriding Variables: Overriding.).  For example,
+
+     extradeps=
+     $(objects) : $(extradeps)
+
+means that the command `make extradeps=foo.h' will consider `foo.h' as
+a prerequisite of each object file, but plain `make' will not.
+
+   If none of the explicit rules for a target has a recipe, then `make'
+searches for an applicable implicit rule to find one (*note Using
+Implicit Rules: Implicit Rules.).
+
+
+File: make.info,  Node: Static Pattern,  Next: Double-Colon,  Prev: Multiple Rules,  Up: Rules
+
+4.12 Static Pattern Rules
+=========================
+
+"Static pattern rules" are rules which specify multiple targets and
+construct the prerequisite names for each target based on the target
+name.  They are more general than ordinary rules with multiple targets
+because the targets do not have to have identical prerequisites.  Their
+prerequisites must be _analogous_, but not necessarily _identical_.
+
+* Menu:
+
+* Static Usage::                The syntax of static pattern rules.
+* Static versus Implicit::      When are they better than implicit rules?
+
+
+File: make.info,  Node: Static Usage,  Next: Static versus Implicit,  Prev: Static Pattern,  Up: Static Pattern
+
+4.12.1 Syntax of Static Pattern Rules
+-------------------------------------
+
+Here is the syntax of a static pattern rule:
+
+     TARGETS ...: TARGET-PATTERN: PREREQ-PATTERNS ...
+             RECIPE
+             ...
+
+The TARGETS list specifies the targets that the rule applies to.  The
+targets can contain wildcard characters, just like the targets of
+ordinary rules (*note Using Wildcard Characters in File Names:
+Wildcards.).
+
+   The TARGET-PATTERN and PREREQ-PATTERNS say how to compute the
+prerequisites of each target.  Each target is matched against the
+TARGET-PATTERN to extract a part of the target name, called the "stem".
+This stem is substituted into each of the PREREQ-PATTERNS to make the
+prerequisite names (one from each PREREQ-PATTERN).
+
+   Each pattern normally contains the character `%' just once.  When the
+TARGET-PATTERN matches a target, the `%' can match any part of the
+target name; this part is called the "stem".  The rest of the pattern
+must match exactly.  For example, the target `foo.o' matches the
+pattern `%.o', with `foo' as the stem.  The targets `foo.c' and
+`foo.out' do not match that pattern.
+
+   The prerequisite names for each target are made by substituting the
+stem for the `%' in each prerequisite pattern.  For example, if one
+prerequisite pattern is `%.c', then substitution of the stem `foo'
+gives the prerequisite name `foo.c'.  It is legitimate to write a
+prerequisite pattern that does not contain `%'; then this prerequisite
+is the same for all targets.
+
+   `%' characters in pattern rules can be quoted with preceding
+backslashes (`\').  Backslashes that would otherwise quote `%'
+characters can be quoted with more backslashes.  Backslashes that quote
+`%' characters or other backslashes are removed from the pattern before
+it is compared to file names or has a stem substituted into it.
+Backslashes that are not in danger of quoting `%' characters go
+unmolested.  For example, the pattern `the\%weird\\%pattern\\' has
+`the%weird\' preceding the operative `%' character, and `pattern\\'
+following it.  The final two backslashes are left alone because they
+cannot affect any `%' character.
+
+   Here is an example, which compiles each of `foo.o' and `bar.o' from
+the corresponding `.c' file:
+
+     objects = foo.o bar.o
+
+     all: $(objects)
+
+     $(objects): %.o: %.c
+             $(CC) -c $(CFLAGS) $< -o $@
+
+Here `$<' is the automatic variable that holds the name of the
+prerequisite and `$@' is the automatic variable that holds the name of
+the target; see *note Automatic Variables::.
+
+   Each target specified must match the target pattern; a warning is
+issued for each target that does not.  If you have a list of files,
+only some of which will match the pattern, you can use the `filter'
+function to remove nonmatching file names (*note Functions for String
+Substitution and Analysis: Text Functions.):
+
+     files = foo.elc bar.o lose.o
+
+     $(filter %.o,$(files)): %.o: %.c
+             $(CC) -c $(CFLAGS) $< -o $@
+     $(filter %.elc,$(files)): %.elc: %.el
+             emacs -f batch-byte-compile $<
+
+In this example the result of `$(filter %.o,$(files))' is `bar.o
+lose.o', and the first static pattern rule causes each of these object
+files to be updated by compiling the corresponding C source file.  The
+result of `$(filter %.elc,$(files))' is `foo.elc', so that file is made
+from `foo.el'.
+
+   Another example shows how to use `$*' in static pattern rules: 
+
+     bigoutput littleoutput : %output : text.g
+             generate text.g -$* > $@
+
+When the `generate' command is run, `$*' will expand to the stem,
+either `big' or `little'.
+
+
+File: make.info,  Node: Static versus Implicit,  Prev: Static Usage,  Up: Static Pattern
+
+4.12.2 Static Pattern Rules versus Implicit Rules
+-------------------------------------------------
+
+A static pattern rule has much in common with an implicit rule defined
+as a pattern rule (*note Defining and Redefining Pattern Rules: Pattern
+Rules.).  Both have a pattern for the target and patterns for
+constructing the names of prerequisites.  The difference is in how
+`make' decides _when_ the rule applies.
+
+   An implicit rule _can_ apply to any target that matches its pattern,
+but it _does_ apply only when the target has no recipe otherwise
+specified, and only when the prerequisites can be found.  If more than
+one implicit rule appears applicable, only one applies; the choice
+depends on the order of rules.
+
+   By contrast, a static pattern rule applies to the precise list of
+targets that you specify in the rule.  It cannot apply to any other
+target and it invariably does apply to each of the targets specified.
+If two conflicting rules apply, and both have recipes, that's an error.
+
+   The static pattern rule can be better than an implicit rule for these
+reasons:
+
+   * You may wish to override the usual implicit rule for a few files
+     whose names cannot be categorized syntactically but can be given
+     in an explicit list.
+
+   * If you cannot be sure of the precise contents of the directories
+     you are using, you may not be sure which other irrelevant files
+     might lead `make' to use the wrong implicit rule.  The choice
+     might depend on the order in which the implicit rule search is
+     done.  With static pattern rules, there is no uncertainty: each
+     rule applies to precisely the targets specified.
+
+
+File: make.info,  Node: Double-Colon,  Next: Automatic Prerequisites,  Prev: Static Pattern,  Up: Rules
+
+4.13 Double-Colon Rules
+=======================
+
+"Double-colon" rules are explicit rules written with `::' instead of
+`:' after the target names.  They are handled differently from ordinary
+rules when the same target appears in more than one rule.  Pattern
+rules with double-colons have an entirely different meaning (*note
+Match-Anything Rules::).
+
+   When a target appears in multiple rules, all the rules must be the
+same type: all ordinary, or all double-colon.  If they are
+double-colon, each of them is independent of the others.  Each
+double-colon rule's recipe is executed if the target is older than any
+prerequisites of that rule.  If there are no prerequisites for that
+rule, its recipe is always executed (even if the target already
+exists).  This can result in executing none, any, or all of the
+double-colon rules.
+
+   Double-colon rules with the same target are in fact completely
+separate from one another.  Each double-colon rule is processed
+individually, just as rules with different targets are processed.
+
+   The double-colon rules for a target are executed in the order they
+appear in the makefile.  However, the cases where double-colon rules
+really make sense are those where the order of executing the recipes
+would not matter.
+
+   Double-colon rules are somewhat obscure and not often very useful;
+they provide a mechanism for cases in which the method used to update a
+target differs depending on which prerequisite files caused the update,
+and such cases are rare.
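+
+   For instance, here is a minimal sketch of such a case; the commands
+`frob-update-src' and `frob-update-cfg' are hypothetical:
+
+     frob.out :: frob.src
+             frob-update-src frob.src frob.out
+     frob.out :: frob.cfg
+             frob-update-cfg frob.cfg frob.out
+
+If only `frob.src' has changed, only the first recipe runs; if only
+`frob.cfg' has changed, only the second one runs.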
+
+   Each double-colon rule should specify a recipe; if it does not, an
+implicit rule will be used if one applies.  *Note Using Implicit Rules:
+Implicit Rules.
+
+
+File: make.info,  Node: Automatic Prerequisites,  Prev: Double-Colon,  Up: Rules
+
+4.14 Generating Prerequisites Automatically
+===========================================
+
+In the makefile for a program, many of the rules you need to write often
+say only that some object file depends on some header file.  For
+example, if `main.c' uses `defs.h' via an `#include', you would write:
+
+     main.o: defs.h
+
+You need this rule so that `make' knows that it must remake `main.o'
+whenever `defs.h' changes.  You can see that for a large program you
+would have to write dozens of such rules in your makefile.  And, you
+must always be very careful to update the makefile every time you add
+or remove an `#include'.  
+
+   To avoid this hassle, most modern C compilers can write these rules
+for you, by looking at the `#include' lines in the source files.
+Usually this is done with the `-M' option to the compiler.  For
+example, the command:
+
+     cc -M main.c
+
+generates the output:
+
+     main.o : main.c defs.h
+
+Thus you no longer have to write all those rules yourself.  The
+compiler will do it for you.
+
+   Note that such a prerequisite constitutes mentioning `main.o' in a
+makefile, so it can never be considered an intermediate file by implicit
+rule search.  This means that `make' won't ever remove the file after
+using it; *note Chains of Implicit Rules: Chained Rules.
+
+   With old `make' programs, it was traditional practice to use this
+compiler feature to generate prerequisites on demand with a command like
+`make depend'.  That command would create a file `depend' containing
+all the automatically-generated prerequisites; then the makefile could
+use `include' to read them in (*note Include::).
+
+   In GNU `make', the feature of remaking makefiles makes this practice
+obsolete--you need never tell `make' explicitly to regenerate the
+prerequisites, because it always regenerates any makefile that is out
+of date.  *Note Remaking Makefiles::.
+
+   The practice we recommend for automatic prerequisite generation is
+to have one makefile corresponding to each source file.  For each
+source file `NAME.c' there is a makefile `NAME.d' which lists what
+files the object file `NAME.o' depends on.  That way only the source
+files that have changed need to be rescanned to produce the new
+prerequisites.
+
+   Here is the pattern rule to generate a file of prerequisites (i.e.,
+a makefile) called `NAME.d' from a C source file called `NAME.c':
+
+     %.d: %.c
+             @set -e; rm -f $@; \
+              $(CC) -M $(CPPFLAGS) $< > $@.$$$$; \
+              sed 's,\($*\)\.o[ :]*,\1.o $@ : ,g' < $@.$$$$ > $@; \
+              rm -f $@.$$$$
+
+*Note Pattern Rules::, for information on defining pattern rules.  The
+`-e' flag to the shell causes it to exit immediately if the `$(CC)'
+command (or any other command) fails (exits with a nonzero status).  
+
+   With the GNU C compiler, you may wish to use the `-MM' flag instead
+of `-M'.  This omits prerequisites on system header files.  *Note
+Options Controlling the Preprocessor: (gcc.info)Preprocessor Options,
+for details.
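+
+   For example, assuming `main.c' includes `defs.h' with
+`#include "defs.h"' and also includes `<stdio.h>', the command:
+
+     cc -MM main.c
+
+would still list `defs.h' as a prerequisite of `main.o' but would omit
+`stdio.h' and the other system headers it drags in.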
+
+   The purpose of the `sed' command is to translate (for example):
+
+     main.o : main.c defs.h
+
+into:
+
+     main.o main.d : main.c defs.h
+
+This makes each `.d' file depend on all the source and header files
+that the corresponding `.o' file depends on.  `make' then knows it must
+regenerate the prerequisites whenever any of the source or header files
+changes.
+
+   Once you've defined the rule to remake the `.d' files, you then use
+the `include' directive to read them all in.  *Note Include::.  For
+example:
+
+     sources = foo.c bar.c
+
+     include $(sources:.c=.d)
+
+(This example uses a substitution variable reference to translate the
+list of source files `foo.c bar.c' into a list of prerequisite
+makefiles, `foo.d bar.d'.  *Note Substitution Refs::, for full
+information on substitution references.)  Since the `.d' files are
+makefiles like any others, `make' will remake them as necessary with no
+further work from you.  *Note Remaking Makefiles::.
+
+   Note that the `.d' files contain target definitions; you should be
+sure to place the `include' directive _after_ the first, default goal
+in your makefiles or run the risk of having a random object file become
+the default goal.  *Note How Make Works::.
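+
+   For example, here is a sketch of a safe layout, reusing the
+`sources' list above and a hypothetical program name `prog':
+
+     sources = foo.c bar.c
+
+     prog: $(sources:.c=.o)
+             $(CC) -o prog $(sources:.c=.o)
+
+     include $(sources:.c=.d)
+
+Here the rule for `prog' comes first, so it remains the default goal
+even though the included `.d' files define additional targets.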
+
+
+File: make.info,  Node: Recipes,  Next: Using Variables,  Prev: Rules,  Up: Top
+
+5 Writing Recipes in Rules
+**************************
+
+The recipe of a rule consists of one or more shell command lines to be
+executed, one at a time, in the order they appear.  Typically, the
+result of executing these commands is that the target of the rule is
+brought up to date.
+
+   Users use many different shell programs, but recipes in makefiles are
+always interpreted by `/bin/sh' unless the makefile specifies
+otherwise.  *Note Recipe Execution: Execution.
+
+* Menu:
+
+* Recipe Syntax::               Recipe syntax features and pitfalls.
+* Echoing::                     How to control when recipes are echoed.
+* Execution::                   How recipes are executed.
+* Parallel::                    How recipes can be executed in parallel.
+* Errors::                      What happens after a recipe execution error.
+* Interrupts::                  What happens when a recipe is interrupted.
+* Recursion::                   Invoking `make' from makefiles.
+* Canned Recipes::              Defining canned recipes.
+* Empty Recipes::               Defining useful, do-nothing recipes.
+
+
+File: make.info,  Node: Recipe Syntax,  Next: Echoing,  Prev: Recipes,  Up: Recipes
+
+5.1 Recipe Syntax
+=================
+
+Makefiles have the unusual property that there are really two distinct
+syntaxes in one file.  Most of the makefile uses `make' syntax (*note
+Writing Makefiles: Makefiles.).  However, recipes are meant to be
+interpreted by the shell and so they are written using shell syntax.
+The `make' program does not try to understand shell syntax: it performs
+only a very few specific translations on the content of the recipe
+before handing it to the shell.
+
+   Each line in the recipe must start with a tab (or the first character
+in the value of the `.RECIPEPREFIX' variable; *note Special
+Variables::), except that the first recipe line may be attached to the
+target-and-prerequisites line with a semicolon in between.  _Any_ line
+in the makefile that begins with a tab and appears in a "rule context"
+(that is, after a rule has been started until another rule or variable
+definition) will be considered part of a recipe for that rule.  Blank
+lines and lines of just comments may appear among the recipe lines;
+they are ignored.
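+
+   For example, in this sketch (the target name `hello' is arbitrary)
+the first recipe line is attached to the target line with a semicolon,
+and the second begins with the prefix character selected via
+`.RECIPEPREFIX':
+
+     .RECIPEPREFIX = >
+     hello: ; @echo first recipe line
+     >@echo second recipe line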
+
+   Some consequences of these rules include:
+
+   * A blank line that begins with a tab is not blank: it's an empty
+     recipe (*note Empty Recipes::).
+
+   * A comment in a recipe is not a `make' comment; it will be passed
+     to the shell as-is.  Whether the shell treats it as a comment or
+     not depends on your shell.
+
+   * A variable definition in a "rule context" which is indented by a
+     tab as the first character on the line, will be considered part of
+     a recipe, not a `make' variable definition, and passed to the
+     shell.
+
+   * A conditional expression (`ifdef', `ifeq', etc. *note Syntax of
+     Conditionals: Conditional Syntax.) in a "rule context" which is
+     indented by a tab as the first character on the line, will be
+     considered part of a recipe and be passed to the shell.
+
+
+* Menu:
+
+* Splitting Lines::             Breaking long recipe lines for readability.
+* Variables in Recipes::        Using `make' variables in recipes.
+
+
+File: make.info,  Node: Splitting Lines,  Next: Variables in Recipes,  Prev: Recipe Syntax,  Up: Recipe Syntax
+
+5.1.1 Splitting Recipe Lines
+----------------------------
+
+One of the few ways in which `make' does interpret recipes is checking
+for a backslash just before the newline.  As in normal makefile syntax,
+a single logical recipe line can be split into multiple physical lines
+in the makefile by placing a backslash before each newline.  A sequence
+of lines like this is considered a single recipe line, and one instance
+of the shell will be invoked to run it.
+
+   However, in contrast to how they are treated in other places in a
+makefile, backslash-newline pairs are _not_ removed from the recipe.
+Both the backslash and the newline characters are preserved and passed
+to the shell.  How the backslash-newline is interpreted depends on your
+shell.  If the first character of the next line after the
+backslash-newline is the recipe prefix character (a tab by default;
+*note Special Variables::), then that character (and only that
+character) is removed.  Whitespace is never added to the recipe.
+
+   For example, the recipe for the `all' target in this makefile:
+
+     all :
+             @echo no\
+     space
+             @echo no\
+             space
+             @echo one \
+             space
+             @echo one\
+              space
+
+consists of four separate shell commands where the output is:
+
+     nospace
+     nospace
+     one space
+     one space
+
+   As a more complex example, this makefile:
+
+     all : ; @echo 'hello \
+             world' ; echo "hello \
+         world"
+
+will invoke one shell with a command of:
+
+     echo 'hello \
+     world' ; echo "hello \
+         world"
+
+which, according to shell quoting rules, will yield the following
+output:
+
+     hello \
+     world
+     hello     world
+
+Notice how the backslash/newline pair was removed inside the string
+quoted with double quotes (`"..."'), but not from the string quoted
+with single quotes (`'...'').  This is the way the default shell
+(`/bin/sh') handles backslash/newline pairs.  If you specify a
+different shell in your makefiles it may treat them differently.
+
+   Sometimes you want to split a long line inside of single quotes, but
+you don't want the backslash-newline to appear in the quoted content.
+This is often the case when passing scripts to languages such as Perl,
+where extraneous backslashes inside the script can change its meaning
+or even be a syntax error.  One simple way of handling this is to place
+the quoted string, or even the entire command, into a `make' variable
+then use the variable in the recipe.  In this situation the newline
+quoting rules for makefiles will be used, and the backslash-newline
+will be removed.  If we rewrite our example above using this method:
+
+     HELLO = 'hello \
+     world'
+
+     all : ; @echo $(HELLO)
+
+we will get output like this:
+
+     hello world
+
+   If you like, you can also use target-specific variables (*note
+Target-specific Variable Values: Target-specific.) to obtain a tighter
+correspondence between the variable and the recipe that uses it.
+
+
+File: make.info,  Node: Variables in Recipes,  Prev: Splitting Lines,  Up: Recipe Syntax
+
+5.1.2 Using Variables in Recipes
+--------------------------------
+
+The other way in which `make' processes recipes is by expanding any
+variable references in them (*note Basics of Variable References:
+Reference.).  This occurs after make has finished reading all the
+makefiles and the target is determined to be out of date; so, the
+recipes for targets which are not rebuilt are never expanded.
+
+   Variable and function references in recipes have identical syntax and
+semantics to references elsewhere in the makefile.  They also have the
+same quoting rules: if you want a dollar sign to appear in your recipe,
+you must double it (`$$').  For shells like the default shell, that use
+dollar signs to introduce variables, it's important to keep clear in
+your mind whether the variable you want to reference is a `make'
+variable (use a single dollar sign) or a shell variable (use two dollar
+signs).  For example:
+
+     LIST = one two three
+     all:
+             for i in $(LIST); do \
+                 echo $$i; \
+             done
+
+results in the following command being passed to the shell:
+
+     for i in one two three; do \
+         echo $i; \
+     done
+
+which generates the expected result:
+
+     one
+     two
+     three
+
+
+File: make.info,  Node: Echoing,  Next: Execution,  Prev: Recipe Syntax,  Up: Recipes
+
+5.2 Recipe Echoing
+==================
+
+Normally `make' prints each line of the recipe before it is executed.
+We call this "echoing" because it gives the appearance that you are
+typing the lines yourself.
+
+   When a line starts with `@', the echoing of that line is suppressed.
+The `@' is discarded before the line is passed to the shell.  Typically
+you would use this for a command whose only effect is to print
+something, such as an `echo' command to indicate progress through the
+makefile:
+
+     @echo About to make distribution files
+
+   When `make' is given the flag `-n' or `--just-print' it only echoes
+most recipes, without executing them.  *Note Summary of Options:
+Options Summary.  In this case even the recipe lines starting with `@'
+are printed.  This flag is useful for finding out which recipes `make'
+thinks are necessary without actually doing them.
+
+   The `-s' or `--silent' flag to `make' prevents all echoing, as if
+all recipes started with `@'.  A rule in the makefile for the special
+target `.SILENT' without prerequisites has the same effect (*note
+Special Built-in Target Names: Special Targets.).  `.SILENT' is
+essentially obsolete since `@' is more flexible.
+
+
+File: make.info,  Node: Execution,  Next: Parallel,  Prev: Echoing,  Up: Recipes
+
+5.3 Recipe Execution
+====================
+
+When it is time to execute recipes to update a target, they are
+executed by invoking a new subshell for each line of the recipe, unless
+the `.ONESHELL' special target is in effect (*note Using One Shell: One
+Shell.)  (In practice, `make' may take shortcuts that do not affect the
+results.)
+
+   *Please note:* this implies that setting shell variables and
+invoking shell commands such as `cd' that set a context local to each
+process will not affect the following lines in the recipe.(1)  If you
+want to use `cd' to affect the next statement, put both statements in a
+single recipe line.  Then `make' will invoke one shell to run the
+entire line, and the shell will execute the statements in sequence.
+For example:
+
+     foo : bar/lose
+             cd $(@D) && gobble $(@F) > ../$@
+
+Here we use the shell AND operator (`&&') so that if the `cd' command
+fails, the script will fail without trying to invoke the `gobble'
+command in the wrong directory, which could cause problems (in this
+case it would certainly cause `../foo' to be truncated, at least).
+
+* Menu:
+
+* One Shell::                   One shell for all lines in a recipe
+* Choosing the Shell::          How `make' chooses the shell used
+                                  to run recipes.
+
+   ---------- Footnotes ----------
+
+   (1) On MS-DOS, the value of current working directory is *global*, so
+changing it _will_ affect the following recipe lines on those systems.
+
+
+File: make.info,  Node: One Shell,  Next: Choosing the Shell,  Prev: Execution,  Up: Execution
+
+5.3.1 Using One Shell
+---------------------
+
+Sometimes you would prefer that all the lines in the recipe be passed
+to a single invocation of the shell.  There are generally two
+situations where this is useful: first, it can improve performance in
+makefiles where recipes consist of many command lines, by avoiding
+extra processes.  Second, you might want newlines to be included in
+your recipe command (for example perhaps you are using a very different
+interpreter as your `SHELL').  If the `.ONESHELL' special target
+appears anywhere in the makefile then _all_ recipe lines for each
+target will be provided to a single invocation of the shell.  Newlines
+between recipe lines will be preserved.  For example:
+
+     .ONESHELL:
+     foo : bar/lose
+             cd $(@D)
+             gobble $(@F) > ../$@
+
+would now work as expected even though the commands are on different
+recipe lines.
+
+   If `.ONESHELL' is provided, then only the first line of the recipe
+will be checked for the special prefix characters (`@', `-', and `+').
+Subsequent lines will include the special characters in the recipe line
+when the `SHELL' is invoked.  If you want your recipe to start with one
+of these special characters you'll need to arrange for them to not be
+the first characters on the first line, perhaps by adding a comment or
+similar.  For example, this would be a syntax error in Perl because the
+first `@' is removed by make:
+
+     .ONESHELL:
+     SHELL = /usr/bin/perl
+     .SHELLFLAGS = -e
+     show :
+             @f = qw(a b c);
+             print "@f\n";
+
+However, either of these alternatives would work properly:
+
+     .ONESHELL:
+     SHELL = /usr/bin/perl
+     .SHELLFLAGS = -e
+     show :
+             # Make sure "@" is not the first character on the first line
+             @f = qw(a b c);
+             print "@f\n";
+
+or
+
+     .ONESHELL:
+     SHELL = /usr/bin/perl
+     .SHELLFLAGS = -e
+     show :
+             my @f = qw(a b c);
+             print "@f\n";
+
+   As a special feature, if `SHELL' is determined to be a POSIX-style
+shell, the special prefix characters in "internal" recipe lines will be
+_removed_ before the recipe is processed.  This feature is intended to
+allow existing makefiles to add the `.ONESHELL' special target and
+still run properly without extensive modifications.  Since the special
+prefix characters are not legal at the beginning of a line in a POSIX
+shell script this is not a loss in functionality.  For example, this
+works as expected:
+
+     .ONESHELL:
+     foo : bar/lose
+             @cd $(@D)
+             @gobble $(@F) > ../$@
+
+   Even with this special feature, however, makefiles with `.ONESHELL'
+will behave differently in ways that could be noticeable.  For example,
+normally if any line in the recipe fails, that causes the rule to fail
+and no more recipe lines are processed.  Under `.ONESHELL' a failure of
+any but the final recipe line will not be noticed by `make'.  You can
+modify `.SHELLFLAGS' to add the `-e' option to the shell which will
+cause any failure anywhere in the command line to cause the shell to
+fail, but this could itself cause your recipe to behave differently.
+Ultimately you may need to harden your recipe lines to allow them to
+work with `.ONESHELL'.
+
+
+File: make.info,  Node: Choosing the Shell,  Prev: One Shell,  Up: Execution
+
+5.3.2 Choosing the Shell
+------------------------
+
+The program used as the shell is taken from the variable `SHELL'.  If
+this variable is not set in your makefile, the program `/bin/sh' is
+used as the shell.  The argument(s) passed to the shell are taken from
+the variable `.SHELLFLAGS'.  The default value of `.SHELLFLAGS' is `-c'
+normally, or `-ec' in POSIX-conforming mode.
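+
+   For example, a makefile that needs `bash' features and wants every
+recipe line to stop at the first failing command might set (assuming
+`/bin/bash' exists on the build machine):
+
+     SHELL = /bin/bash
+     .SHELLFLAGS = -ec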
+
+   Unlike most variables, the variable `SHELL' is never set from the
+environment.  This is because the `SHELL' environment variable is used
+to specify your personal choice of shell program for interactive use.
+It would be very bad for personal choices like this to affect the
+functioning of makefiles.  *Note Variables from the Environment:
+Environment.
+
+   Furthermore, when you do set `SHELL' in your makefile that value is
+_not_ exported in the environment to recipe lines that `make' invokes.
+Instead, the value inherited from the user's environment, if any, is
+exported.  You can override this behavior by explicitly exporting
+`SHELL' (*note Communicating Variables to a Sub-`make':
+Variables/Recursion.), forcing it to be passed in the environment to
+recipe lines.
+
+   However, on MS-DOS and MS-Windows the value of `SHELL' in the
+environment *is* used, since on those systems most users do not set
+this variable, and therefore it is most likely set specifically to be
+used by `make'.  On MS-DOS, if the setting of `SHELL' is not suitable
+for `make', you can set the variable `MAKESHELL' to the shell that
+`make' should use; if set it will be used as the shell instead of the
+value of `SHELL'.
+
+Choosing a Shell in DOS and Windows
+...................................
+
+Choosing a shell in MS-DOS and MS-Windows is much more complex than on
+other systems.
+
+   On MS-DOS, if `SHELL' is not set, the value of the variable
+`COMSPEC' (which is always set) is used instead.
+
+   The processing of lines that set the variable `SHELL' in Makefiles
+is different on MS-DOS.  The stock shell, `command.com', is
+ridiculously limited in its functionality and many users of `make' tend
+to install a replacement shell.  Therefore, on MS-DOS, `make' examines
+the value of `SHELL', and changes its behavior based on whether it
+points to a Unix-style or DOS-style shell.  This allows reasonable
+functionality even if `SHELL' points to `command.com'.
+
+   If `SHELL' points to a Unix-style shell, `make' on MS-DOS
+additionally checks whether that shell can indeed be found; if not, it
+ignores the line that sets `SHELL'.  In MS-DOS, GNU `make' searches for
+the shell in the following places:
+
+  1. In the precise place pointed to by the value of `SHELL'.  For
+     example, if the makefile specifies `SHELL = /bin/sh', `make' will
+     look in the directory `/bin' on the current drive.
+
+  2. In the current directory.
+
+  3. In each of the directories in the `PATH' variable, in order.
+
+
+   In every directory it examines, `make' will first look for the
+specific file (`sh' in the example above).  If this is not found, it
+will also look in that directory for that file with one of the known
+extensions which identify executable files.  For example `.exe',
+`.com', `.bat', `.btm', `.sh', and some others.
+
+   If any of these attempts is successful, the value of `SHELL' will be
+set to the full pathname of the shell as found.  However, if none of
+these is found, the value of `SHELL' will not be changed, and thus the
+line that sets it will be effectively ignored.  This is so `make' will
+only support features specific to a Unix-style shell if such a shell is
+actually installed on the system where `make' runs.
+
+   Note that this extended search for the shell is limited to the cases
+where `SHELL' is set from the Makefile; if it is set in the environment
+or command line, you are expected to set it to the full pathname of the
+shell, exactly as things are on Unix.
+
+   The effect of the above DOS-specific processing is that a Makefile
+that contains `SHELL = /bin/sh' (as many Unix makefiles do), will work
+on MS-DOS unaltered if you have e.g. `sh.exe' installed in some
+directory along your `PATH'.
+
+
+File: make.info,  Node: Parallel,  Next: Errors,  Prev: Execution,  Up: Recipes
+
+5.4 Parallel Execution
+======================
+
+GNU `make' knows how to execute several recipes at once.  Normally,
+`make' will execute only one recipe at a time, waiting for it to finish
+before executing the next.  However, the `-j' or `--jobs' option tells
+`make' to execute many recipes simultaneously.  You can inhibit
+parallelism in a particular makefile with the `.NOTPARALLEL'
+pseudo-target (*note Special Built-in Target Names: Special Targets.).
+
+   On MS-DOS, the `-j' option has no effect, since that system doesn't
+support multi-processing.
+
+   If the `-j' option is followed by an integer, this is the number of
+recipes to execute at once; this is called the number of "job slots".
+If there is nothing looking like an integer after the `-j' option,
+there is no limit on the number of job slots.  The default number of job
+slots is one, which means serial execution (one thing at a time).
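+
+   For example, this command line allows up to four recipes to be run
+at once:
+
+     make -j 4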
+
+   One unpleasant consequence of running several recipes simultaneously
+is that output generated by the recipes appears whenever each recipe
+sends it, so messages from different recipes may be interspersed.
+
+   Another problem is that two processes cannot both take input from the
+same device; so to make sure that only one recipe tries to take input
+from the terminal at once, `make' will invalidate the standard input
+streams of all but one running recipe.  This means that attempting to
+read from standard input will usually be a fatal error (a `Broken pipe'
+signal) for most child processes if there are several.  
+
+   It is unpredictable which recipe will have a valid standard input
+stream (which will come from the terminal, or wherever you redirect the
+standard input of `make').  The first recipe run will always get it
+first, and the first recipe started after that one finishes will get it
+next, and so on.
+
+   We will change how this aspect of `make' works if we find a better
+alternative.  In the mean time, you should not rely on any recipe using
+standard input at all if you are using the parallel execution feature;
+but if you are not using this feature, then standard input works
+normally in all recipes.
+
+   Finally, handling recursive `make' invocations raises issues.  For
+more information on this, see *note Communicating Options to a
+Sub-`make': Options/Recursion.
+
+   If a recipe fails (is killed by a signal or exits with a nonzero
+status), and errors are not ignored for that recipe (*note Errors in
+Recipes: Errors.), the remaining recipe lines to remake the same target
+will not be run.  If a recipe fails and the `-k' or `--keep-going'
+option was not given (*note Summary of Options: Options Summary.),
+`make' aborts execution.  If `make' terminates for any reason (including
+a signal) with child processes running, it waits for them to finish
+before actually exiting.
+
+   When the system is heavily loaded, you will probably want to run
+fewer jobs than when it is lightly loaded.  You can use the `-l' option
+to tell `make' to limit the number of jobs to run at once, based on the
+load average.  The `-l' or `--max-load' option is followed by a
+floating-point number.  For example,
+
+     -l 2.5
+
+will not let `make' start more than one job if the load average is
+above 2.5.  The `-l' option with no following number removes the load
+limit, if one was given with a previous `-l' option.
+
+   More precisely, when `make' goes to start up a job, and it already
+has at least one job running, it checks the current load average; if it
+is not lower than the limit given with `-l', `make' waits until the load
+average goes below that limit, or until all the other jobs finish.
+
+   By default, there is no load limit.
+
+
+File: make.info,  Node: Errors,  Next: Interrupts,  Prev: Parallel,  Up: Recipes
+
+5.5 Errors in Recipes
+=====================
+
+After each shell invocation returns, `make' looks at its exit status.
+If the shell completed successfully (the exit status is zero), the next
+line in the recipe is executed in a new shell; after the last line is
+finished, the rule is finished.
+
+   If there is an error (the exit status is nonzero), `make' gives up on
+the current rule, and perhaps on all rules.
+
+   Sometimes the failure of a certain recipe line does not indicate a
+problem.  For example, you may use the `mkdir' command to ensure that a
+directory exists.  If the directory already exists, `mkdir' will report
+an error, but you probably want `make' to continue regardless.
+
+   To ignore errors in a recipe line, write a `-' at the beginning of
+the line's text (after the initial tab).  The `-' is discarded before
+the line is passed to the shell for execution.
+
+   For example,
+
+     clean:
+             -rm -f *.o
+
+This causes `make' to continue even if `rm' is unable to remove a file.
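+
+   Similarly, for the `mkdir' case mentioned above (with a hypothetical
+directory name `objdir'):
+
+     objdir:
+             -mkdir objdir
+
+the rule succeeds whether or not the directory already exists.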
+
+   When you run `make' with the `-i' or `--ignore-errors' flag, errors
+are ignored in all recipes of all rules.  A rule in the makefile for
+the special target `.IGNORE' has the same effect, if there are no
+prerequisites.  These ways of ignoring errors are obsolete because `-'
+is more flexible.
+
+   When errors are to be ignored, because of either a `-' or the `-i'
+flag, `make' treats an error return just like success, except that it
+prints out a message that tells you the status code the shell exited
+with, and says that the error has been ignored.
+
+   When an error happens that `make' has not been told to ignore, it
+implies that the current target cannot be correctly remade, and neither
+can any other that depends on it either directly or indirectly.  No
+further recipes will be executed for these targets, since their
+preconditions have not been achieved.
+
+   Normally `make' gives up immediately in this circumstance, returning
+a nonzero status.  However, if the `-k' or `--keep-going' flag is
+specified, `make' continues to consider the other prerequisites of the
+pending targets, remaking them if necessary, before it gives up and
+returns nonzero status.  For example, after an error in compiling one
+object file, `make -k' will continue compiling other object files even
+though it already knows that linking them will be impossible.  *Note
+Summary of Options: Options Summary.
+
+   The usual behavior assumes that your purpose is to get the specified
+targets up to date; once `make' learns that this is impossible, it
+might as well report the failure immediately.  The `-k' option says
+that the real purpose is to test as many of the changes made in the
+program as possible, perhaps to find several independent problems so
+that you can correct them all before the next attempt to compile.  This
+is why Emacs' `compile' command passes the `-k' flag by default.  
+
+   Usually when a recipe line fails, if it has changed the target file
+at all, the file is corrupted and cannot be used--or at least it is not
+completely updated.  Yet the file's time stamp says that it is now up to
+date, so the next time `make' runs, it will not try to update that
+file.  The situation is just the same as when the shell is killed by a
+signal; *note Interrupts::.  So generally the right thing to do is to
+delete the target file if the recipe fails after beginning to change
+the file.  `make' will do this if `.DELETE_ON_ERROR' appears as a
+target.  This is almost always what you want `make' to do, but it is
+not historical practice; so for compatibility, you must explicitly
+request it.
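+
+   To request it, add a line like this anywhere in the makefile:
+
+     .DELETE_ON_ERROR: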
+
+
+File: make.info,  Node: Interrupts,  Next: Recursion,  Prev: Errors,  Up: Recipes
+
+5.6 Interrupting or Killing `make'
+==================================
+
+If `make' gets a fatal signal while a shell is executing, it may delete
+the target file that the recipe was supposed to update.  This is done
+if the target file's last-modification time has changed since `make'
+first checked it.
+
+   The purpose of deleting the target is to make sure that it is remade
+from scratch when `make' is next run.  Why is this?  Suppose you type
+`Ctrl-c' while a compiler is running, and it has begun to write an
+object file `foo.o'.  The `Ctrl-c' kills the compiler, resulting in an
+incomplete file whose last-modification time is newer than the source
+file `foo.c'.  But `make' also receives the `Ctrl-c' signal and deletes
+this incomplete file.  If `make' did not do this, the next invocation
+of `make' would think that `foo.o' did not require updating--resulting
+in a strange error message from the linker when it tries to link an
+object file half of which is missing.
+
+   You can prevent the deletion of a target file in this way by making
+the special target `.PRECIOUS' depend on it.  Before remaking a target,
+`make' checks to see whether it appears on the prerequisites of
+`.PRECIOUS', and thereby decides whether the target should be deleted
+if a signal happens.  Some reasons why you might do this are that the
+target is updated in some atomic fashion, or exists only to record a
+modification-time (its contents do not matter), or must exist at all
+times to prevent other sorts of trouble.
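+
+   For example, to protect a hypothetical database file `data.db' that
+is updated in place:
+
+     .PRECIOUS: data.db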
+
+
+File: make.info,  Node: Recursion,  Next: Canned Recipes,  Prev: Interrupts,  Up: Recipes
+
+5.7 Recursive Use of `make'
+===========================
+
+Recursive use of `make' means using `make' as a command in a makefile.
+This technique is useful when you want separate makefiles for various
+subsystems that compose a larger system.  For example, suppose you have
+a subdirectory `subdir' which has its own makefile, and you would like
+the containing directory's makefile to run `make' on the subdirectory.
+You can do it by writing this:
+
+     subsystem:
+             cd subdir && $(MAKE)
+
+or, equivalently, this (*note Summary of Options: Options Summary.):
+
+     subsystem:
+             $(MAKE) -C subdir
+   
+   You can write recursive `make' commands just by copying this example,
+but there are many things to know about how they work and why, and about
+how the sub-`make' relates to the top-level `make'.  You may also find
+it useful to declare targets that invoke recursive `make' commands as
+`.PHONY' (for more discussion on when this is useful, see *note Phony
+Targets::).
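+
+   For example, combining the `-C' form shown above with a `.PHONY'
+declaration:
+
+     .PHONY: subsystem
+     subsystem:
+             $(MAKE) -C subdir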
+
+   For your convenience, when GNU `make' starts (after it has processed
+any `-C' options) it sets the variable `CURDIR' to the pathname of the
+current working directory.  This value is never touched by `make'
+again: in particular note that if you include files from other
+directories the value of `CURDIR' does not change.  The value has the
+same precedence it would have if it were set in the makefile (by
+default, an environment variable `CURDIR' will not override this
+value).  Note that setting this variable has no impact on the operation
+of `make' (it does not cause `make' to change its working directory,
+for example).
+
+* Menu:
+
+* MAKE Variable::               The special effects of using `$(MAKE)'.
+* Variables/Recursion::         How to communicate variables to a sub-`make'.
+* Options/Recursion::           How to communicate options to a sub-`make'.
+* -w Option::                   How the `-w' or `--print-directory' option
+                                  helps debug use of recursive `make' commands.
+
+
+File: make.info,  Node: MAKE Variable,  Next: Variables/Recursion,  Prev: Recursion,  Up: Recursion
+
+5.7.1 How the `MAKE' Variable Works
+-----------------------------------
+
+Recursive `make' commands should always use the variable `MAKE', not
+the explicit command name `make', as shown here:
+
+     subsystem:
+             cd subdir && $(MAKE)
+
+   The value of this variable is the file name with which `make' was
+invoked.  If this file name was `/bin/make', then the recipe executed
+is `cd subdir && /bin/make'.  If you use a special version of `make' to
+run the top-level makefile, the same special version will be executed
+for recursive invocations.  
+
+   As a special feature, using the variable `MAKE' in the recipe of a
+rule alters the effects of the `-t' (`--touch'), `-n' (`--just-print'),
+or `-q' (`--question') option.  Using the `MAKE' variable has the same
+effect as using a `+' character at the beginning of the recipe line.
+*Note Instead of Executing the Recipes: Instead of Execution.  This
+special feature is only enabled if the `MAKE' variable appears directly
+in the recipe: it does not apply if the `MAKE' variable is referenced
+through expansion of another variable.  In the latter case you must use
+the `+' token to get these special effects.
+
+   Consider the command `make -t' in the above example.  (The `-t'
+option marks targets as up to date without actually running any
+recipes; see *note Instead of Execution::.)  Following the usual
+definition of `-t', a `make -t' command in the example would create a
+file named `subsystem' and do nothing else.  What you really want it to
+do is run `cd subdir && make -t'; but that would require executing the
+recipe, and `-t' says not to execute recipes.  
+
+   The special feature makes this do what you want: whenever a recipe
+line of a rule contains the variable `MAKE', the flags `-t', `-n' and
+`-q' do not apply to that line.  Recipe lines containing `MAKE' are
+executed normally despite the presence of a flag that causes most
+recipes not to be run.  The usual `MAKEFLAGS' mechanism passes the
+flags to the sub-`make' (*note Communicating Options to a Sub-`make':
+Options/Recursion.), so your request to touch the files, or print the
+recipes, is propagated to the subsystem.
+
+
+File: make.info,  Node: Variables/Recursion,  Next: Options/Recursion,  Prev: MAKE Variable,  Up: Recursion
+
+5.7.2 Communicating Variables to a Sub-`make'
+---------------------------------------------
+
+Variable values of the top-level `make' can be passed to the sub-`make'
+through the environment by explicit request.  These variables are
+defined in the sub-`make' as defaults, but do not override what is
+specified in the makefile used by the sub-`make' unless you use the
+`-e' switch (*note Summary of Options: Options Summary.).
+
+   To pass down, or "export", a variable, `make' adds the variable and
+its value to the environment for running each line of the recipe.  The
+sub-`make', in turn, uses the environment to initialize its table of
+variable values.  *Note Variables from the Environment: Environment.
+
+   Except by explicit request, `make' exports a variable only if it is
+either defined in the environment initially or set on the command line,
+and if its name consists only of letters, numbers, and underscores.
+Some shells cannot cope with environment variable names consisting of
+characters other than letters, numbers, and underscores.
+
+   The value of the `make' variable `SHELL' is not exported.  Instead,
+the value of the `SHELL' variable from the invoking environment is
+passed to the sub-`make'.  You can force `make' to export its value for
+`SHELL' by using the `export' directive, described below.  *Note
+Choosing the Shell::.
+
+   The special variable `MAKEFLAGS' is always exported (unless you
+unexport it).  `MAKEFILES' is exported if you set it to anything.
+
+   `make' automatically passes down variable values that were defined
+on the command line, by putting them in the `MAKEFLAGS' variable.
+*Note Options/Recursion::.
+
+   Variables are _not_ normally passed down if they were created by
+default by `make' (*note Variables Used by Implicit Rules: Implicit
+Variables.).  The sub-`make' will define these for itself.
+
+   If you want to export specific variables to a sub-`make', use the
+`export' directive, like this:
+
+     export VARIABLE ...
+
+If you want to _prevent_ a variable from being exported, use the
+`unexport' directive, like this:
+
+     unexport VARIABLE ...
+
+In both of these forms, the arguments to `export' and `unexport' are
+expanded, and so could be variables or functions which expand to a
+(list of) variable names to be (un)exported.
+
+   As a convenience, you can define a variable and export it at the same
+time by writing:
+
+     export VARIABLE = value
+
+This has the same result as:
+
+     VARIABLE = value
+     export VARIABLE
+
+and
+
+     export VARIABLE := value
+
+has the same result as:
+
+     VARIABLE := value
+     export VARIABLE
+
+   Likewise,
+
+     export VARIABLE += value
+
+is just like:
+
+     VARIABLE += value
+     export VARIABLE
+
+*Note Appending More Text to Variables: Appending.
+
+   You may notice that the `export' and `unexport' directives work in
+`make' in the same way they work in the shell, `sh'.
+
+   If you want all variables to be exported by default, you can use
+`export' by itself:
+
+     export
+
+This tells `make' that variables which are not explicitly mentioned in
+an `export' or `unexport' directive should be exported.  Any variable
+given in an `unexport' directive will still _not_ be exported.  If you
+use `export' by itself to export variables by default, variables whose
+names contain characters other than alphanumerics and underscores will
+not be exported unless specifically mentioned in an `export' directive.
+
+   The behavior elicited by an `export' directive by itself was the
+default in older versions of GNU `make'.  If your makefiles depend on
+this behavior and you want to be compatible with old versions of
+`make', you can write a rule for the special target
+`.EXPORT_ALL_VARIABLES' instead of using the `export' directive.  This
+will be ignored by old `make's, while the `export' directive will cause
+a syntax error.  
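+
+   Such a rule needs no prerequisites and no recipe; a single line
+suffices:
+
+     .EXPORT_ALL_VARIABLES: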
+
+   Likewise, you can use `unexport' by itself to tell `make' _not_ to
+export variables by default.  Since this is the default behavior, you
+would only need to do this if `export' had been used by itself earlier
+(in an included makefile, perhaps).  You *cannot* use `export' and
+`unexport' by themselves to have variables exported for some recipes
+and not for others.  The last `export' or `unexport' directive that
+appears by itself determines the behavior for the entire run of `make'.
+
+   As a special feature, the variable `MAKELEVEL' is changed when it is
+passed down from level to level.  This variable's value is a string
+which is the depth of the level as a decimal number.  The value is `0'
+for the top-level `make'; `1' for a sub-`make', `2' for a
+sub-sub-`make', and so on.  The incrementation happens when `make' sets
+up the environment for a recipe.
+
+   The main use of `MAKELEVEL' is to test it in a conditional directive
+(*note Conditional Parts of Makefiles: Conditionals.); this way you can
+write a makefile that behaves one way if run recursively and another
+way if run directly by you.
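+
+   For example, this sketch adds a hypothetical preprocessor flag only
+when the makefile is run directly by the user rather than from a
+recursive invocation:
+
+     ifeq ($(MAKELEVEL),0)
+     CFLAGS += -DTOP_LEVEL_BUILD
+     endif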
+
+   You can use the variable `MAKEFILES' to cause all sub-`make'
+commands to use additional makefiles.  The value of `MAKEFILES' is a
+whitespace-separated list of file names.  This variable, if defined in
+the outer-level makefile, is passed down through the environment; then
+it serves as a list of extra makefiles for the sub-`make' to read
+before the usual or specified ones.  *Note The Variable `MAKEFILES':
+MAKEFILES Variable.
+
+
+File: make.info,  Node: Options/Recursion,  Next: -w Option,  Prev: Variables/Recursion,  Up: Recursion
+
+5.7.3 Communicating Options to a Sub-`make'
+-------------------------------------------
+
+Flags such as `-s' and `-k' are passed automatically to the sub-`make'
+through the variable `MAKEFLAGS'.  This variable is set up
+automatically by `make' to contain the flag letters that `make'
+received.  Thus, if you do `make -ks' then `MAKEFLAGS' gets the value
+`ks'.
+
+   As a consequence, every sub-`make' gets a value for `MAKEFLAGS' in
+its environment.  In response, it takes the flags from that value and
+processes them as if they had been given as arguments.  *Note Summary
+of Options: Options Summary.
+
+   Likewise variables defined on the command line are passed to the
+sub-`make' through `MAKEFLAGS'.  Words in the value of `MAKEFLAGS' that
+contain `=', `make' treats as variable definitions just as if they
+appeared on the command line.  *Note Overriding Variables: Overriding.
+
+   The options `-C', `-f', `-o', and `-W' are not put into `MAKEFLAGS';
+these options are not passed down.
+
+   The `-j' option is a special case (*note Parallel Execution:
+Parallel.).  If you set it to some numeric value `N' and your operating
+system supports it (most any UNIX system will; others typically won't),
+the parent `make' and all the sub-`make's will communicate to ensure
+that there are only `N' jobs running at the same time between them all.
+Note that any job that is marked recursive (*note Instead of Executing
+Recipes: Instead of Execution.)  doesn't count against the total jobs
+(otherwise we could get `N' sub-`make's running and have no slots left
+over for any real work!)
+
+   If your operating system doesn't support the above communication,
+then `-j 1' is always put into `MAKEFLAGS' instead of the value you
+specified.  This is because if the `-j' option were passed down to
+sub-`make's, you would get many more jobs running in parallel than you
+asked for.  If you give `-j' with no numeric argument, meaning to run
+as many jobs as possible in parallel, this is passed down, since
+multiple infinities are no more than one.
+
+   If you do not want to pass the other flags down, you must change the
+value of `MAKEFLAGS', like this:
+
+     subsystem:
+             cd subdir && $(MAKE) MAKEFLAGS=
+
+   The command line variable definitions really appear in the variable
+`MAKEOVERRIDES', and `MAKEFLAGS' contains a reference to this variable.
+If you do want to pass flags down normally, but don't want to pass down
+the command line variable definitions, you can reset `MAKEOVERRIDES' to
+empty, like this:
+
+     MAKEOVERRIDES =
+
+This is not usually useful to do.  However, some systems have a small
+fixed limit on the size of the environment, and putting so much
+information into the value of `MAKEFLAGS' can exceed it.  If you see
+the error message `Arg list too long', this may be the problem.  (For
+strict compliance with POSIX.2, changing `MAKEOVERRIDES' does not
+affect `MAKEFLAGS' if the special target `.POSIX' appears in the
+makefile.  You probably do not care about this.)
+
+   A similar variable `MFLAGS' exists also, for historical
+compatibility.  It has the same value as `MAKEFLAGS' except that it
+does not contain the command line variable definitions, and it always
+begins with a hyphen unless it is empty (`MAKEFLAGS' begins with a
+hyphen only when it begins with an option that has no single-letter
+version, such as `--warn-undefined-variables').  `MFLAGS' was
+traditionally used explicitly in the recursive `make' command, like
+this:
+
+     subsystem:
+             cd subdir && $(MAKE) $(MFLAGS)
+
+but now `MAKEFLAGS' makes this usage redundant.  If you want your
+makefiles to be compatible with old `make' programs, use this
+technique; it will work fine with more modern `make' versions too.
+
+   The `MAKEFLAGS' variable can also be useful if you want to have
+certain options, such as `-k' (*note Summary of Options: Options
+Summary.), set each time you run `make'.  You simply put a value for
+`MAKEFLAGS' in your environment.  You can also set `MAKEFLAGS' in a
+makefile, to specify additional flags that should also be in effect for
+that makefile.  (Note that you cannot use `MFLAGS' this way.  That
+variable is set only for compatibility; `make' does not interpret a
+value you set for it in any way.)
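+
+   For example, to make the `-k' behavior the default for a particular
+makefile, you could add:
+
+     MAKEFLAGS += -k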
+
+   When `make' interprets the value of `MAKEFLAGS' (either from the
+environment or from a makefile), it first prepends a hyphen if the value
+does not already begin with one.  Then it chops the value into words
+separated by blanks, and parses these words as if they were options
+given on the command line (except that `-C', `-f', `-h', `-o', `-W',
+and their long-named versions are ignored; and there is no error for an
+invalid option).
+
+   If you do put `MAKEFLAGS' in your environment, you should be sure not
+to include any options that will drastically affect the actions of
+`make' and undermine the purpose of makefiles and of `make' itself.
+For instance, the `-t', `-n', and `-q' options, if put in one of these
+variables, could have disastrous consequences and would certainly have
+at least surprising and probably annoying effects.
+
+
+File: make.info,  Node: -w Option,  Prev: Options/Recursion,  Up: Recursion
+
+5.7.4 The `--print-directory' Option
+------------------------------------
+
+If you use several levels of recursive `make' invocations, the `-w' or
+`--print-directory' option can make the output a lot easier to
+understand by showing each directory as `make' starts processing it and
+as `make' finishes processing it.  For example, if `make -w' is run in
+the directory `/u/gnu/make', `make' will print a line of the form:
+
+     make: Entering directory `/u/gnu/make'.
+
+before doing anything else, and a line of the form:
+
+     make: Leaving directory `/u/gnu/make'.
+
+when processing is completed.
+
+   Normally, you do not need to specify this option because `make' does
+it for you: `-w' is turned on automatically when you use the `-C'
+option, and in sub-`make's.  `make' will not automatically turn on `-w'
+if you also use `-s', which says to be silent, or if you use
+`--no-print-directory' to explicitly disable it.
+
+
+File: make.info,  Node: Canned Recipes,  Next: Empty Recipes,  Prev: Recursion,  Up: Recipes
+
+5.8 Defining Canned Recipes
+===========================
+
+When the same sequence of commands is useful in making various targets,
+you can define it as a canned sequence with the `define' directive, and
+refer to the canned sequence from the recipes for those targets.  The
+canned sequence is actually a variable, so the name must not conflict
+with other variable names.
+
+   Here is an example of defining a canned recipe:
+
+     define run-yacc =
+     yacc $(firstword $^)
+     mv y.tab.c $@
+     endef
+   
+Here `run-yacc' is the name of the variable being defined; `endef'
+marks the end of the definition; the lines in between are the commands.
+The `define' directive does not expand variable references and function
+calls in the canned sequence; the `$' characters, parentheses, variable
+names, and so on, all become part of the value of the variable you are
+defining.  *Note Defining Multi-Line Variables: Multi-Line, for a
+complete explanation of `define'.
+
+   The first command in this example runs Yacc on the first
+prerequisite of whichever rule uses the canned sequence.  The output
+file from Yacc is always named `y.tab.c'.  The second command moves the
+output to the rule's target file name.
+
+   To use the canned sequence, substitute the variable into the recipe
+of a rule.  You can substitute it like any other variable (*note Basics
+of Variable References: Reference.).  Because variables defined by
+`define' are recursively expanded variables, all the variable
+references you wrote inside the `define' are expanded now.  For example:
+
+     foo.c : foo.y
+             $(run-yacc)
+
+`foo.y' will be substituted for the variable `$^' when it occurs in
+`run-yacc''s value, and `foo.c' for `$@'.
+
+   This is a realistic example, but this particular one is not needed in
+practice because `make' has an implicit rule to figure out these
+commands based on the file names involved (*note Using Implicit Rules:
+Implicit Rules.).
+
+   In recipe execution, each line of a canned sequence is treated just
+as if the line appeared on its own in the rule, preceded by a tab.  In
+particular, `make' invokes a separate subshell for each line.  You can
+use the special prefix characters that affect command lines (`@', `-',
+and `+') on each line of a canned sequence.  *Note Writing Recipes in
+Rules: Recipes.  For example, using this canned sequence:
+
+     define frobnicate =
+     @echo "frobnicating target $@"
+     frob-step-1 $< -o $@-step-1
+     frob-step-2 $@-step-1 -o $@
+     endef
+
+`make' will not echo the first line, the `echo' command.  But it _will_
+echo the following two recipe lines.
+
+   On the other hand, prefix characters on the recipe line that refers
+to a canned sequence apply to every line in the sequence.  So the rule:
+
+     frob.out: frob.in
+             @$(frobnicate)
+
+does not echo _any_ recipe lines.  (*Note Recipe Echoing: Echoing, for
+a full explanation of `@'.)
+
+
+File: make.info,  Node: Empty Recipes,  Prev: Canned Recipes,  Up: Recipes
+
+5.9 Using Empty Recipes
+=======================
+
+It is sometimes useful to define recipes which do nothing.  This is done
+simply by giving a recipe that consists of nothing but whitespace.  For
+example:
+
+     target: ;
+
+defines an empty recipe for `target'.  You could also use a line
+beginning with a recipe prefix character to define an empty recipe, but
+this would be confusing because such a line looks empty.
+
+   You may be wondering why you would want to define a recipe that does
+nothing.  The only reason this is useful is to prevent a target from
+getting implicit recipes (from implicit rules or the `.DEFAULT' special
+target; *note Implicit Rules:: and *note Defining Last-Resort Default
+Rules: Last Resort.).
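+
+   For instance (an illustrative sketch), if `.DEFAULT' is defined in a
+makefile but should not apply to one particular file, an empty recipe
+keeps that file from receiving the default recipe:
+
+     special.h: ;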
+
+   You may be inclined to define empty recipes for targets that are not
+actual files, but only exist so that their prerequisites can be remade.
+However, this is not the best way to do that, because the prerequisites
+may not be remade properly if the target file actually does exist.
+*Note Phony Targets: Phony Targets, for a better way to do this.
+
+
+File: make.info,  Node: Using Variables,  Next: Conditionals,  Prev: Recipes,  Up: Top
+
+6 How to Use Variables
+**********************
+
+A "variable" is a name defined in a makefile to represent a string of
+text, called the variable's "value".  These values are substituted by
+explicit request into targets, prerequisites, recipes, and other parts
+of the makefile.  (In some other versions of `make', variables are
+called "macros".)  
+
+   Variables and functions in all parts of a makefile are expanded when
+read, except for in recipes, the right-hand sides of variable
+definitions using `=', and the bodies of variable definitions using the
+`define' directive.
+
+   Variables can represent lists of file names, options to pass to
+compilers, programs to run, directories to look in for source files,
+directories to write output in, or anything else you can imagine.
+
+   A variable name may be any sequence of characters not containing `:',
+`#', `=', or leading or trailing whitespace.  However, variable names
+containing characters other than letters, numbers, and underscores
+should be avoided, as they may be given special meanings in the future,
+and with some shells they cannot be passed through the environment to a
+sub-`make' (*note Communicating Variables to a Sub-`make':
+Variables/Recursion.).
+
+   Variable names are case-sensitive.  The names `foo', `FOO', and
+`Foo' all refer to different variables.
+
+   It is traditional to use upper case letters in variable names, but we
+recommend using lower case letters for variable names that serve
+internal purposes in the makefile, and reserving upper case for
+parameters that control implicit rules or for parameters that the user
+should override with command options (*note Overriding Variables:
+Overriding.).
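+
+   For example (illustrative only):
+
+     # internal to this makefile: lower case
+     objects := main.o utils.o
+     # meant to be overridden by the user: upper case
+     CFLAGS ?= -O2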
+
+   A few variables have names that are a single punctuation character or
+just a few characters.  These are the "automatic variables", and they
+have particular specialized uses.  *Note Automatic Variables::.
+
+* Menu:
+
+* Reference::                   How to use the value of a variable.
+* Flavors::                     Variables come in two flavors.
+* Advanced::                    Advanced features for referencing a variable.
+* Values::                      All the ways variables get their values.
+* Setting::                     How to set a variable in the makefile.
+* Appending::                   How to append more text to the old value
+                                  of a variable.
+* Override Directive::          How to set a variable in the makefile even if
+                                  the user has set it with a command argument.
+* Multi-Line::                  An alternate way to set a variable
+                                  to a multi-line string.
+* Undefine Directive::          How to undefine a variable so that it appears
+                                  as if it was never set.
+* Environment::                 Variable values can come from the environment.
+* Target-specific::             Variable values can be defined on a per-target
+                                  basis.
+* Pattern-specific::            Target-specific variable values can be applied
+                                  to a group of targets that match a pattern.
+* Suppressing Inheritance::     Suppress inheritance of variables.
+* Special Variables::           Variables with special meaning or behavior.
+
+
+File: make.info,  Node: Reference,  Next: Flavors,  Prev: Using Variables,  Up: Using Variables
+
+6.1 Basics of Variable References
+=================================
+
+To substitute a variable's value, write a dollar sign followed by the
+name of the variable in parentheses or braces: either `$(foo)' or
+`${foo}' is a valid reference to the variable `foo'.  This special
+significance of `$' is why you must write `$$' to have the effect of a
+single dollar sign in a file name or recipe.
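+
+   For example (a minimal sketch), to pass a literal dollar sign to the
+shell in a recipe:
+
+     print-home:
+             @echo $$HOME
+
+Here the shell receives `echo $HOME', because `make' collapses `$$'
+into a single `$'.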
+
+   Variable references can be used in any context: targets,
+prerequisites, recipes, most directives, and new variable values.  Here
+is an example of a common case, where a variable holds the names of all
+the object files in a program:
+
+     objects = program.o foo.o utils.o
+     program : $(objects)
+             cc -o program $(objects)
+
+     $(objects) : defs.h
+
+   Variable references work by strict textual substitution.  Thus, the
+rule
+
+     foo = c
+     prog.o : prog.$(foo)
+             $(foo)$(foo) -$(foo) prog.$(foo)
+
+could be used to compile a C program `prog.c'.  Since spaces before the
+variable value are ignored in variable assignments, the value of `foo'
+is precisely `c'.  (Don't actually write your makefiles this way!)
+
+   A dollar sign followed by a character other than a dollar sign,
+open-parenthesis or open-brace treats that single character as the
+variable name.  Thus, you could reference the variable `x' with `$x'.
+However, this practice is strongly discouraged, except in the case of
+the automatic variables (*note Automatic Variables::).
+
+
+File: make.info,  Node: Flavors,  Next: Advanced,  Prev: Reference,  Up: Using Variables
+
+6.2 The Two Flavors of Variables
+================================
+
+There are two ways that a variable in GNU `make' can have a value; we
+call them the two "flavors" of variables.  The two flavors are
+distinguished in how they are defined and in what they do when expanded.
+
+   The first flavor of variable is a "recursively expanded" variable.
+Variables of this sort are defined by lines using `=' (*note Setting
+Variables: Setting.) or by the `define' directive (*note Defining
+Multi-Line Variables: Multi-Line.).  The value you specify is installed
+verbatim; if it contains references to other variables, these
+references are expanded whenever this variable is substituted (in the
+course of expanding some other string).  When this happens, it is
+called "recursive expansion".
+
+   For example,
+
+     foo = $(bar)
+     bar = $(ugh)
+     ugh = Huh?
+
+     all:;echo $(foo)
+
+will echo `Huh?': `$(foo)' expands to `$(bar)' which expands to
+`$(ugh)' which finally expands to `Huh?'.
+
+   This flavor of variable is the only sort supported by other versions
+of `make'.  It has its advantages and its disadvantages.  An advantage
+(most would say) is that:
+
+     CFLAGS = $(include_dirs) -O
+     include_dirs = -Ifoo -Ibar
+
+will do what was intended: when `CFLAGS' is expanded in a recipe, it
+will expand to `-Ifoo -Ibar -O'.  A major disadvantage is that you
+cannot append something on the end of a variable, as in
+
+     CFLAGS = $(CFLAGS) -O
+
+because it will cause an infinite loop in the variable expansion.
+(Actually `make' detects the infinite loop and reports an error.)  
+
+   Another disadvantage is that any functions (*note Functions for
+Transforming Text: Functions.)  referenced in the definition will be
+executed every time the variable is expanded.  This makes `make' run
+slower; worse, it causes the `wildcard' and `shell' functions to give
+unpredictable results because you cannot easily control when they are
+called, or even how many times.
+
+   To avoid all the problems and inconveniences of recursively expanded
+variables, there is another flavor: simply expanded variables.
+
+   "Simply expanded variables" are defined by lines using `:=' (*note
+Setting Variables: Setting.).  The value of a simply expanded variable
+is scanned once and for all, expanding any references to other
+variables and functions, when the variable is defined.  The actual
+value of the simply expanded variable is the result of expanding the
+text that you write.  It does not contain any references to other
+variables; it contains their values _as of the time this variable was
+defined_.  Therefore,
+
+     x := foo
+     y := $(x) bar
+     x := later
+
+is equivalent to
+
+     y := foo bar
+     x := later
+
+   When a simply expanded variable is referenced, its value is
+substituted verbatim.
+
+   Here is a somewhat more complicated example, illustrating the use of
+`:=' in conjunction with the `shell' function.  (*Note The `shell'
+Function: Shell Function.)  This example also shows use of the variable
+`MAKELEVEL', which is changed when it is passed down from level to
+level.  (*Note Communicating Variables to a Sub-`make':
+Variables/Recursion, for information about `MAKELEVEL'.)
+
+     ifeq (0,${MAKELEVEL})
+     whoami    := $(shell whoami)
+     host-type := $(shell arch)
+     MAKE := ${MAKE} host-type=${host-type} whoami=${whoami}
+     endif
+
+An advantage of this use of `:=' is that a typical `descend into a
+directory' recipe then looks like this:
+
+     ${subdirs}:
+             ${MAKE} -C $@ all
+
+   Simply expanded variables generally make complicated makefile
+programming more predictable because they work like variables in most
+programming languages.  They allow you to redefine a variable using its
+own value (or its value processed in some way by one of the expansion
+functions) and to use the expansion functions much more efficiently
+(*note Functions for Transforming Text: Functions.).
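+
+   For example (a minimal sketch), a simply expanded variable can safely
+be redefined in terms of its own previous value:
+
+     objects := main.o foo.o bar.o
+     objects := $(filter-out foo.o,$(objects))
+
+With `=' instead of `:=', the second line would create the kind of
+infinite loop described above.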
+
+   You can also use them to introduce controlled leading whitespace into
+variable values.  Leading whitespace characters are discarded from your
+input before substitution of variable references and function calls;
+this means you can include leading spaces in a variable value by
+protecting them with variable references, like this:
+
+     nullstring :=
+     space := $(nullstring) # end of the line
+
+Here the value of the variable `space' is precisely one space.  The
+comment `# end of the line' is included here just for clarity.  Since
+trailing space characters are _not_ stripped from variable values, just
+a space at the end of the line would have the same effect (but be
+rather hard to read).  If you put whitespace at the end of a variable
+value, it is a good idea to put a comment like that at the end of the
+line to make your intent clear.  Conversely, if you do _not_ want any
+whitespace characters at the end of your variable value, you must
+remember not to put a random comment on the end of the line after some
+whitespace, such as this:
+
+     dir := /foo/bar    # directory to put the frobs in
+
+Here the value of the variable `dir' is `/foo/bar    ' (with four
+trailing spaces), which was probably not the intention.  (Imagine
+something like `$(dir)/file' with this definition!)
+
+   There is another assignment operator for variables, `?='.  This is
+called a conditional variable assignment operator, because it only has
+an effect if the variable is not yet defined.  This statement:
+
+     FOO ?= bar
+
+is exactly equivalent to this (*note The `origin' Function: Origin
+Function.):
+
+     ifeq ($(origin FOO), undefined)
+       FOO = bar
+     endif
+
+   Note that a variable set to an empty value is still defined, so `?='
+will not set that variable.
+
+
+File: make.info,  Node: Advanced,  Next: Values,  Prev: Flavors,  Up: Using Variables
+
+6.3 Advanced Features for Reference to Variables
+================================================
+
+This section describes some advanced features you can use to reference
+variables in more flexible ways.
+
+* Menu:
+
+* Substitution Refs::           Referencing a variable with
+                                  substitutions on the value.
+* Computed Names::              Computing the name of the variable to refer to.
+
+
+File: make.info,  Node: Substitution Refs,  Next: Computed Names,  Prev: Advanced,  Up: Advanced
+
+6.3.1 Substitution References
+-----------------------------
+
+A "substitution reference" substitutes the value of a variable with
+alterations that you specify.  It has the form `$(VAR:A=B)' (or
+`${VAR:A=B}') and its meaning is to take the value of the variable VAR,
+replace every A at the end of a word with B in that value, and
+substitute the resulting string.
+
+   When we say "at the end of a word", we mean that A must appear
+either followed by whitespace or at the end of the value in order to be
+replaced; other occurrences of A in the value are unaltered.  For
+example:
+
+     foo := a.o b.o c.o
+     bar := $(foo:.o=.c)
+
+sets `bar' to `a.c b.c c.c'.  *Note Setting Variables: Setting.
+
+   A substitution reference is actually an abbreviation for use of the
+`patsubst' expansion function (*note Functions for String Substitution
+and Analysis: Text Functions.).  We provide substitution references as
+well as `patsubst' for compatibility with other implementations of
+`make'.
+
+   Another type of substitution reference lets you use the full power of
+the `patsubst' function.  It has the same form `$(VAR:A=B)' described
+above, except that now A must contain a single `%' character.  This
+case is equivalent to `$(patsubst A,B,$(VAR))'.  *Note Functions for
+String Substitution and Analysis: Text Functions, for a description of
+the `patsubst' function.
+
+For example:
+
+     foo := a.o b.o c.o
+     bar := $(foo:%.o=%.c)
+
+sets `bar' to `a.c b.c c.c'.
+
+
+File: make.info,  Node: Computed Names,  Prev: Substitution Refs,  Up: Advanced
+
+6.3.2 Computed Variable Names
+-----------------------------
+
+Computed variable names are a complicated concept needed only for
+sophisticated makefile programming.  For most purposes you need not
+consider them, except to know that making a variable with a dollar sign
+in its name might have strange results.  However, if you are the type
+that wants to understand everything, or you are actually interested in
+what they do, read on.
+
+   Variables may be referenced inside the name of a variable.  This is
+called a "computed variable name" or a "nested variable reference".
+For example,
+
+     x = y
+     y = z
+     a := $($(x))
+
+defines `a' as `z': the `$(x)' inside `$($(x))' expands to `y', so
+`$($(x))' expands to `$(y)' which in turn expands to `z'.  Here the
+name of the variable to reference is not stated explicitly; it is
+computed by expansion of `$(x)'.  The reference `$(x)' here is nested
+within the outer variable reference.
+
+   The previous example shows two levels of nesting, but any number of
+levels is possible.  For example, here are three levels:
+
+     x = y
+     y = z
+     z = u
+     a := $($($(x)))
+
+Here the innermost `$(x)' expands to `y', so `$($(x))' expands to
+`$(y)' which in turn expands to `z'; now we have `$(z)', which becomes
+`u'.
+
+   References to recursively-expanded variables within a variable name
+are reexpanded in the usual fashion.  For example:
+
+     x = $(y)
+     y = z
+     z = Hello
+     a := $($(x))
+
+defines `a' as `Hello': `$($(x))' becomes `$($(y))' which becomes
+`$(z)' which becomes `Hello'.
+
+   Nested variable references can also contain modified references and
+function invocations (*note Functions for Transforming Text:
+Functions.), just like any other reference.  For example, using the
+`subst' function (*note Functions for String Substitution and Analysis:
+Text Functions.):
+
+     x = variable1
+     variable2 := Hello
+     y = $(subst 1,2,$(x))
+     z = y
+     a := $($($(z)))
+
+eventually defines `a' as `Hello'.  It is doubtful that anyone would
+ever want to write a nested reference as convoluted as this one, but it
+works: `$($($(z)))' expands to `$($(y))' which becomes `$($(subst
+1,2,$(x)))'.  This gets the value `variable1' from `x' and changes it
+by substitution to `variable2', so that the entire string becomes
+`$(variable2)', a simple variable reference whose value is `Hello'.
+
+   A computed variable name need not consist entirely of a single
+variable reference.  It can contain several variable references, as
+well as some invariant text.  For example,
+
+     a_dirs := dira dirb
+     1_dirs := dir1 dir2
+
+     a_files := filea fileb
+     1_files := file1 file2
+
+     ifeq "$(use_a)" "yes"
+     a1 := a
+     else
+     a1 := 1
+     endif
+
+     ifeq "$(use_dirs)" "yes"
+     df := dirs
+     else
+     df := files
+     endif
+
+     dirs := $($(a1)_$(df))
+
+will give `dirs' the same value as `a_dirs', `1_dirs', `a_files' or
+`1_files' depending on the settings of `use_a' and `use_dirs'.
+
+   Computed variable names can also be used in substitution references:
+
+     a_objects := a.o b.o c.o
+     1_objects := 1.o 2.o 3.o
+
+     sources := $($(a1)_objects:.o=.c)
+
+defines `sources' as either `a.c b.c c.c' or `1.c 2.c 3.c', depending
+on the value of `a1'.
+
+   The only restriction on this sort of use of nested variable
+references is that they cannot specify part of the name of a function
+to be called.  This is because the test for a recognized function name
+is done before the expansion of nested references.  For example,
+
+     ifdef do_sort
+     func := sort
+     else
+     func := strip
+     endif
+
+     bar := a d b g q c
+
+     foo := $($(func) $(bar))
+
+attempts to give `foo' the value of the variable `sort a d b g q c' or
+`strip a d b g q c', rather than giving `a d b g q c' as the argument
+to either the `sort' or the `strip' function.  This restriction could
+be removed in the future if that change is shown to be a good idea.
+
+   You can also use computed variable names in the left-hand side of a
+variable assignment, or in a `define' directive, as in:
+
+     dir = foo
+     $(dir)_sources := $(wildcard $(dir)/*.c)
+     define $(dir)_print =
+     lpr $($(dir)_sources)
+     endef
+
+This example defines the variables `dir', `foo_sources', and
+`foo_print'.
+
+   Note that "nested variable references" are quite different from
+"recursively expanded variables" (*note The Two Flavors of Variables:
+Flavors.), though both are used together in complex ways when doing
+makefile programming.
+
+
+File: make.info,  Node: Values,  Next: Setting,  Prev: Advanced,  Up: Using Variables
+
+6.4 How Variables Get Their Values
+==================================
+
+Variables can get values in several different ways:
+
+   * You can specify an overriding value when you run `make'.  *Note
+     Overriding Variables: Overriding.
+
+   * You can specify a value in the makefile, either with an assignment
+     (*note Setting Variables: Setting.) or with a verbatim definition
+     (*note Defining Multi-Line Variables: Multi-Line.).
+
+   * Variables in the environment become `make' variables.  *Note
+     Variables from the Environment: Environment.
+
+   * Several "automatic" variables are given new values for each rule.
+     Each of these has a single conventional use.  *Note Automatic
+     Variables::.
+
+   * Several variables have constant initial values.  *Note Variables
+     Used by Implicit Rules: Implicit Variables.
+
+
+File: make.info,  Node: Setting,  Next: Appending,  Prev: Values,  Up: Using Variables
+
+6.5 Setting Variables
+=====================
+
+To set a variable from the makefile, write a line starting with the
+variable name followed by `=' or `:='.  Whatever follows the `=' or
+`:=' on the line becomes the value.  For example,
+
+     objects = main.o foo.o bar.o utils.o
+
+defines a variable named `objects'.  Whitespace around the variable
+name and immediately after the `=' is ignored.
+
+   Variables defined with `=' are "recursively expanded" variables.
+Variables defined with `:=' are "simply expanded" variables; these
+definitions can contain variable references which will be expanded
+before the definition is made.  *Note The Two Flavors of Variables:
+Flavors.
+
+   The variable name may contain function and variable references, which
+are expanded when the line is read to find the actual variable name to
+use.
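+
+   For example (illustrative only):
+
+     prefix := my
+     $(prefix)_objects := a.o b.o
+
+defines a variable named `my_objects'.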
+
+   There is no limit on the length of the value of a variable except the
+amount of swapping space on the computer.  When a variable definition is
+long, it is a good idea to break it into several lines by inserting
+backslash-newline at convenient places in the definition.  This will not
+affect the functioning of `make', but it will make the makefile easier
+to read.
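+
+   For example (illustrative only):
+
+     objects = main.o foo.o bar.o utils.o \
+               display.o network.o
+
+is read as a single logical line defining `objects'.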
+
+   Most variable names are considered to have the empty string as a
+value if you have never set them.  Several variables have built-in
+initial values that are not empty, but you can set them in the usual
+ways (*note Variables Used by Implicit Rules: Implicit Variables.).
+Several special variables are set automatically to a new value for each
+rule; these are called the "automatic" variables (*note Automatic
+Variables::).
+
+   If you'd like a variable to be set to a value only if it's not
+already set, then you can use the shorthand operator `?=' instead of
+`='.  These two settings of the variable `FOO' are identical (*note The
+`origin' Function: Origin Function.):
+
+     FOO ?= bar
+
+and
+
+     ifeq ($(origin FOO), undefined)
+     FOO = bar
+     endif
+
+
+File: make.info,  Node: Appending,  Next: Override Directive,  Prev: Setting,  Up: Using Variables
+
+6.6 Appending More Text to Variables
+====================================
+
+Often it is useful to add more text to the value of a variable already
+defined.  You do this with a line containing `+=', like this:
+
+     objects += another.o
+
+This takes the value of the variable `objects', and adds the text
+`another.o' to it (preceded by a single space).  Thus:
+
+     objects = main.o foo.o bar.o utils.o
+     objects += another.o
+
+sets `objects' to `main.o foo.o bar.o utils.o another.o'.
+
+   Using `+=' is similar to:
+
+     objects = main.o foo.o bar.o utils.o
+     objects := $(objects) another.o
+
+but differs in ways that become important when you use more complex
+values.
+
+   When the variable in question has not been defined before, `+=' acts
+just like normal `=': it defines a recursively-expanded variable.
+However, when there _is_ a previous definition, exactly what `+=' does
+depends on what flavor of variable you defined originally.  *Note The
+Two Flavors of Variables: Flavors, for an explanation of the two
+flavors of variables.
+
+   When you add to a variable's value with `+=', `make' acts
+essentially as if you had included the extra text in the initial
+definition of the variable.  If you defined it first with `:=', making
+it a simply-expanded variable, `+=' adds to that simply-expanded
+definition, and expands the new text before appending it to the old
+value just as `:=' does (see *note Setting Variables: Setting, for a
+full explanation of `:=').  In fact,
+
+     variable := value
+     variable += more
+
+is exactly equivalent to:
+
+     variable := value
+     variable := $(variable) more
+
+   On the other hand, when you use `+=' with a variable that you defined
+first to be recursively-expanded using plain `=', `make' does something
+a bit different.  Recall that when you define a recursively-expanded
+variable, `make' does not expand the value you set for variable and
+function references immediately.  Instead it stores the text verbatim,
+and saves these variable and function references to be expanded later,
+when you refer to the new variable (*note The Two Flavors of Variables:
+Flavors.).  When you use `+=' on a recursively-expanded variable, it is
+this unexpanded text to which `make' appends the new text you specify.
+
+     variable = value
+     variable += more
+
+is roughly equivalent to:
+
+     temp = value
+     variable = $(temp) more
+
+except that of course it never defines a variable called `temp'.  The
+importance of this comes when the variable's old value contains
+variable references.  Take this common example:
+
+     CFLAGS = $(includes) -O
+     ...
+     CFLAGS += -pg # enable profiling
+
+The first line defines the `CFLAGS' variable with a reference to another
+variable, `includes'.  (`CFLAGS' is used by the rules for C
+compilation; *note Catalogue of Implicit Rules: Catalogue of Rules.)
+Using `=' for the definition makes `CFLAGS' a recursively-expanded
+variable, meaning `$(includes) -O' is _not_ expanded when `make'
+processes the definition of `CFLAGS'.  Thus, `includes' need not be
+defined yet for its value to take effect.  It only has to be defined
+before any reference to `CFLAGS'.  If we tried to append to the value
+of `CFLAGS' without using `+=', we might do it like this:
+
+     CFLAGS := $(CFLAGS) -pg # enable profiling
+
+This is pretty close, but not quite what we want.  Using `:=' redefines
+`CFLAGS' as a simply-expanded variable; this means `make' expands the
+text `$(CFLAGS) -pg' before setting the variable.  If `includes' is not
+yet defined, we get ` -O -pg', and a later definition of `includes'
+will have no effect.  Conversely, by using `+=' we set `CFLAGS' to the
+_unexpanded_ value `$(includes) -O -pg'.  Thus we preserve the
+reference to `includes', so if that variable gets defined at any later
+point, a reference like `$(CFLAGS)' still uses its value.
+
+
+File: make.info,  Node: Override Directive,  Next: Multi-Line,  Prev: Appending,  Up: Using Variables
+
+6.7 The `override' Directive
+============================
+
+If a variable has been set with a command argument (*note Overriding
+Variables: Overriding.), then ordinary assignments in the makefile are
+ignored.  If you want to set the variable in the makefile even though
+it was set with a command argument, you can use an `override'
+directive, which is a line that looks like this:
+
+     override VARIABLE = VALUE
+
+or
+
+     override VARIABLE := VALUE
+
+   To append more text to a variable defined on the command line, use:
+
+     override VARIABLE += MORE TEXT
+
+*Note Appending More Text to Variables: Appending.
+
+   Variable assignments marked with the `override' flag have a higher
+priority than all other assignments, except another `override'.
+Subsequent assignments or appends to this variable which are not marked
+`override' will be ignored.
+
+   The `override' directive was not invented for escalation in the war
+between makefiles and command arguments.  It was invented so you can
+alter and add to values that the user specifies with command arguments.
+
+   For example, suppose you always want the `-g' switch when you run the
+C compiler, but you would like to allow the user to specify the other
+switches with a command argument just as usual.  You could use this
+`override' directive:
+
+     override CFLAGS += -g
+
+   You can also use `override' directives with `define' directives.
+This is done as you might expect:
+
+     override define foo =
+     bar
+     endef
+
+*Note Defining Multi-Line Variables: Multi-Line.
+
+
+File: make.info,  Node: Multi-Line,  Next: Undefine Directive,  Prev: Override Directive,  Up: Using Variables
+
+6.8 Defining Multi-Line Variables
+=================================
+
+Another way to set the value of a variable is to use the `define'
+directive.  This directive has an unusual syntax which allows newline
+characters to be included in the value, which is convenient for
+defining both canned sequences of commands (*note Defining Canned
+Recipes: Canned Recipes.), and also sections of makefile syntax to use
+with `eval' (*note Eval Function::).
+
+   The `define' directive is followed on the same line by the name of
+the variable being defined and an (optional) assignment operator, and
+nothing more.  The value to give the variable appears on the following
+lines.  The end of the value is marked by a line containing just the
+word `endef'.  Aside from this difference in syntax, `define' works
+just like any other variable definition.  The variable name may contain
+function and variable references, which are expanded when the directive
+is read to find the actual variable name to use.
+
+   You may omit the variable assignment operator if you prefer.  If
+omitted, `make' assumes it to be `=' and creates a recursively-expanded
+variable (*note The Two Flavors of Variables: Flavors.).  When using a
+`+=' operator, the value is appended to the previous value as with any
+other append operation: with a single space separating the old and new
+values.
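+
+   For instance (a minimal sketch), this definition omits the operator
+and therefore creates a recursively-expanded variable, just as `='
+would:
+
+     define print-target
+     @echo now making $@
+     endef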
+
+   You may nest `define' directives: `make' will keep track of nested
+directives and report an error if they are not all properly closed with
+`endef'.  Note that lines beginning with the recipe prefix character
+are considered part of a recipe, so any `define' or `endef' strings
+appearing on such a line will not be considered `make' directives.
+
+     define two-lines =
+     echo foo
+     echo $(bar)
+     endef
+
+   The value in an ordinary assignment cannot contain a newline; but the
+newlines that separate the lines of the value in a `define' become part
+of the variable's value (except for the final newline which precedes
+the `endef' and is not considered part of the value).
+
+   When used in a recipe, the previous example is functionally
+equivalent to this:
+
+     two-lines = echo foo; echo $(bar)
+
+since two commands separated by semicolon behave much like two separate
+shell commands.  However, note that using two separate lines means
+`make' will invoke the shell twice, running an independent subshell for
+each line.  *Note Recipe Execution: Execution.
+
+   If you want variable definitions made with `define' to take
+precedence over command-line variable definitions, you can use the
+`override' directive together with `define':
+
+     override define two-lines =
+     foo
+     $(bar)
+     endef
+
+*Note The `override' Directive: Override Directive.
+
+
+File: make.info,  Node: Undefine Directive,  Next: Environment,  Prev: Multi-Line,  Up: Using Variables
+
+6.9 Undefining Variables
+========================
+
+If you want to clear a variable, setting its value to empty is usually
+sufficient. Expanding such a variable will yield the same result (empty
+string) regardless of whether it was set or not. However, if you are
+using the `flavor' (*note Flavor Function::) and `origin' (*note Origin
+Function::) functions, there is a difference between a variable that
+was never set and a variable with an empty value.  In such situations
+you may want to use the `undefine' directive to make a variable appear
+as if it was never set. For example:
+
+     foo := foo
+     bar = bar
+
+     undefine foo
+     undefine bar
+
+     $(info $(origin foo))
+     $(info $(flavor bar))
+
+   This example will print "undefined" for both variables.
+
+   If you want to undefine a command-line variable definition, you can
+use the `override' directive together with `undefine', similar to how
+this is done for variable definitions:
+
+     override undefine CFLAGS
+
+
+File: make.info,  Node: Environment,  Next: Target-specific,  Prev: Undefine Directive,  Up: Using Variables
+
+6.10 Variables from the Environment
+===================================
+
+Variables in `make' can come from the environment in which `make' is
+run.  Every environment variable that `make' sees when it starts up is
+transformed into a `make' variable with the same name and value.
+However, an explicit assignment in the makefile, or with a command
+argument, overrides the environment.  (If the `-e' flag is specified,
+then values from the environment override assignments in the makefile.
+*Note Summary of Options: Options Summary.  But this is not recommended
+practice.)
+
+   Thus, by setting the variable `CFLAGS' in your environment, you can
+cause all C compilations in most makefiles to use the compiler switches
+you prefer.  This is safe for variables with standard or conventional
+meanings because you know that no makefile will use them for other
+things.  (Note this is not totally reliable; some makefiles set
+`CFLAGS' explicitly and therefore are not affected by the value in the
+environment.)
+
+   When `make' runs a recipe, variables defined in the makefile are
+placed into the environment of each shell.  This allows you to pass
+values to sub-`make' invocations (*note Recursive Use of `make':
+Recursion.).  By default, only variables that came from the environment
+or the command line are passed to recursive invocations.  You can use
+the `export' directive to pass other variables.  *Note Communicating
+Variables to a Sub-`make': Variables/Recursion, for full details.
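+
+   For example (a hedged sketch):
+
+     export EXTRA_DEFS = -DDEBUG
+
+makes `EXTRA_DEFS' available in the environment of every recipe shell
+and of any sub-`make', even though it came from neither the environment
+nor the command line.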
+
+   Other use of variables from the environment is not recommended.  It
+is not wise for makefiles to depend for their functioning on
+environment variables set up outside their control, since this would
+cause different users to get different results from the same makefile.
+This is against the whole purpose of most makefiles.
+
+   Such problems would be especially likely with the variable `SHELL',
+which is normally present in the environment to specify the user's
+choice of interactive shell.  It would be very undesirable for this
+choice to affect `make'; so, `make' handles the `SHELL' environment
+variable in a special way; see *note Choosing the Shell::.
+
+
+File: make.info,  Node: Target-specific,  Next: Pattern-specific,  Prev: Environment,  Up: Using Variables
+
+6.11 Target-specific Variable Values
+====================================
+
+Variable values in `make' are usually global; that is, they are the
+same regardless of where they are evaluated (unless they're reset, of
+course).  One exception to that is automatic variables (*note Automatic
+Variables::).
+
+   The other exception is "target-specific variable values".  This
+feature allows you to define different values for the same variable,
+based on the target that `make' is currently building.  As with
+automatic variables, these values are only available within the context
+of a target's recipe (and in other target-specific assignments).
+
+   Set a target-specific variable value like this:
+
+     TARGET ... : VARIABLE-ASSIGNMENT
+
+   Target-specific variable assignments can be prefixed with any or all
+of the special keywords `export', `override', or `private'; these apply
+their normal behavior to this instance of the variable only.
+
+   Multiple TARGET values create a target-specific variable value for
+each member of the target list individually.
+
+   The VARIABLE-ASSIGNMENT can be any valid form of assignment;
+recursive (`='), static (`:='), appending (`+='), or conditional
+(`?=').  All variables that appear within the VARIABLE-ASSIGNMENT are
+evaluated within the context of the target: thus, any
+previously-defined target-specific variable values will be in effect.
+Note that this variable is actually distinct from any "global" value:
+the two variables do not have to have the same flavor (recursive vs.
+static).
+
+   Target-specific variables have the same priority as any other
+makefile variable.  Variables provided on the command line (and in the
+environment if the `-e' option is in force) will take precedence.
+Specifying the `override' directive will allow the target-specific
+variable value to be preferred.
+
+   There is one more special feature of target-specific variables: when
+you define a target-specific variable that variable value is also in
+effect for all prerequisites of this target, and all their
+prerequisites, etc. (unless those prerequisites override that variable
+with their own target-specific variable value).  So, for example, a
+statement like this:
+
+     prog : CFLAGS = -g
+     prog : prog.o foo.o bar.o
+
+will set `CFLAGS' to `-g' in the recipe for `prog', but it will also
+set `CFLAGS' to `-g' in the recipes that create `prog.o', `foo.o', and
+`bar.o', and any recipes which create their prerequisites.
+
+   Be aware that a given prerequisite will only be built once per
+invocation of make, at most.  If the same file is a prerequisite of
+multiple targets, and each of those targets has a different value for
+the same target-specific variable, then the first target to be built
+will cause that prerequisite to be built and the prerequisite will
+inherit the target-specific value from the first target.  It will
+ignore the target-specific values from any other targets.
+
+
+File: make.info,  Node: Pattern-specific,  Next: Suppressing Inheritance,  Prev: Target-specific,  Up: Using Variables
+
+6.12 Pattern-specific Variable Values
+=====================================
+
+In addition to target-specific variable values (*note Target-specific
+Variable Values: Target-specific.), GNU `make' supports
+pattern-specific variable values.  In this form, the variable is
+defined for any target that matches the pattern specified.
+
+   Set a pattern-specific variable value like this:
+
+     PATTERN ... : VARIABLE-ASSIGNMENT
+
+where PATTERN is a %-pattern.  As with target-specific variable
+values, multiple PATTERN values create a pattern-specific variable
+value for each pattern individually.  The VARIABLE-ASSIGNMENT can be
+any valid form of assignment.  Any command line variable setting will
+take precedence, unless `override' is specified.
+
+   For example:
+
+     %.o : CFLAGS = -O
+
+will assign `CFLAGS' the value of `-O' for all targets matching the
+pattern `%.o'.
+
+   If a target matches more than one pattern, the matching
+pattern-specific variables with longer stems are interpreted first.
+This results in more specific variables taking precedence over the more
+generic ones, for example:
+
+     %.o: %.c
+             $(CC) -c $(CFLAGS) $(CPPFLAGS) $< -o $@
+
+     lib/%.o: CFLAGS := -fPIC -g
+     %.o: CFLAGS := -g
+
+     all: foo.o lib/bar.o
+
+   In this example the first definition of the `CFLAGS' variable will
+be used to update `lib/bar.o' even though the second one also applies
+to this target. Pattern-specific variables which result in the same
+stem length are considered in the order in which they were defined in
+the makefile.
+
+   Pattern-specific variables are searched after any target-specific
+variables defined explicitly for that target, and before target-specific
+variables defined for the parent target.
+
+
+File: make.info,  Node: Suppressing Inheritance,  Next: Special Variables,  Prev: Pattern-specific,  Up: Using Variables
+
+6.13 Suppressing Inheritance
+============================
+
+As described in previous sections, `make' variables are inherited by
+prerequisites.  This capability allows you to modify the behavior of a
+prerequisite based on which targets caused it to be rebuilt.  For
+example, you might set a target-specific variable on a `debug' target,
+then running `make debug' will cause that variable to be inherited by
+all prerequisites of `debug', while just running `make all' (for
+example) would not have that assignment.
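+
+   For example (an illustrative sketch):
+
+     debug: CFLAGS += -g -O0
+     debug: all
+
+Running `make debug' builds the same targets as `make all', but the
+extra flags are inherited by every prerequisite of `debug'.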
+
+   Sometimes, however, you may not want a variable to be inherited.  For
+these situations, `make' provides the `private' modifier.  Although
+this modifier can be used with any variable assignment, it makes the
+most sense with target- and pattern-specific variables.  Any variable
+marked `private' will be visible to its local target but will not be
+inherited by prerequisites of that target.  A global variable marked
+`private' will be visible in the global scope but will not be inherited
+by any target, and hence will not be visible in any recipe.
+
+   As an example, consider this makefile:
+     EXTRA_CFLAGS =
+
+     prog: private EXTRA_CFLAGS = -L/usr/local/lib
+     prog: a.o b.o
+
+   Due to the `private' modifier, `a.o' and `b.o' will not inherit the
+`EXTRA_CFLAGS' variable assignment from the `prog' target.
+
+
+File: make.info,  Node: Special Variables,  Prev: Suppressing Inheritance,  Up: Using Variables
+
+6.14 Other Special Variables
+============================
+
+GNU `make' supports some variables that have special properties.
+
+`MAKEFILE_LIST'
+     Contains the name of each makefile that is parsed by `make', in
+     the order in which it was parsed.  The name is appended just
+     before `make' begins to parse the makefile.  Thus, if the first
+     thing a makefile does is examine the last word in this variable, it
+     will be the name of the current makefile.  Once the current
+     makefile has used `include', however, the last word will be the
+     just-included makefile.
+
+     If a makefile named `Makefile' has this content:
+
+          name1 := $(lastword $(MAKEFILE_LIST))
+
+          include inc.mk
+
+          name2 := $(lastword $(MAKEFILE_LIST))
+
+          all:
+                  @echo name1 = $(name1)
+                  @echo name2 = $(name2)
+
+     then you would expect to see this output:
+
+          name1 = Makefile
+          name2 = inc.mk
+
+`.DEFAULT_GOAL'
+     Sets the default goal to be used if no targets were specified on
+     the command line (*note Arguments to Specify the Goals: Goals.).
+     The `.DEFAULT_GOAL' variable allows you to discover the current
+     default goal, restart the default goal selection algorithm by
+     clearing its value, or to explicitly set the default goal.  The
+     following example illustrates these cases:
+
+          # Query the default goal.
+          ifeq ($(.DEFAULT_GOAL),)
+            $(warning no default goal is set)
+          endif
+
+          .PHONY: foo
+          foo: ; @echo $@
+
+          $(warning default goal is $(.DEFAULT_GOAL))
+
+          # Reset the default goal.
+          .DEFAULT_GOAL :=
+
+          .PHONY: bar
+          bar: ; @echo $@
+
+          $(warning default goal is $(.DEFAULT_GOAL))
+
+          # Set our own.
+          .DEFAULT_GOAL := foo
+
+     This makefile prints:
+
+          no default goal is set
+          default goal is foo
+          default goal is bar
+          foo
+
+     Note that assigning more than one target name to `.DEFAULT_GOAL' is
+     illegal and will result in an error.
+
+`MAKE_RESTARTS'
+     This variable is set only if this instance of `make' has restarted
+     (*note How Makefiles Are Remade: Remaking Makefiles.): it will
+     contain the number of times this instance has restarted.  Note
+     this is not the same as recursion (counted by the `MAKELEVEL'
+     variable).  You should not set, modify, or export this variable.
+
+`.RECIPEPREFIX'
+     The first character of the value of this variable is used as the
+     character make assumes is introducing a recipe line.  If the
+     variable is empty (as it is by default) that character is the
+     standard tab character.  For example, this is a valid makefile:
+
+          .RECIPEPREFIX = >
+          all:
+          > @echo Hello, world
+
+     The value of `.RECIPEPREFIX' can be changed multiple times; once
+     set it stays in effect for all rules parsed until it is modified.
+
+`.VARIABLES'
+     Expands to a list of the _names_ of all global variables defined
+     so far.  This includes variables which have empty values, as well
+     as built-in variables (*note Variables Used by Implicit Rules:
+     Implicit Variables.), but does not include any variables which are
+     only defined in a target-specific context.  Note that any value
+     you assign to this variable will be ignored; it will always return
+     its special value.
+
+`.FEATURES'
+     Expands to a list of special features supported by this version of
+     `make'.  Possible values include:
+
+    `archives'
+          Supports `ar' (archive) files using special filename syntax.
+          *Note Using `make' to Update Archive Files: Archives.
+
+    `check-symlink'
+          Supports the `-L' (`--check-symlink-times') flag.  *Note
+          Summary of Options: Options Summary.
+
+    `else-if'
+          Supports "else if" non-nested conditionals.  *Note Syntax of
+          Conditionals: Conditional Syntax.
+
+    `jobserver'
+          Supports "job server" enhanced parallel builds.  *Note
+          Parallel Execution: Parallel.
+
+    `second-expansion'
+          Supports secondary expansion of prerequisite lists.
+
+    `order-only'
+          Supports order-only prerequisites.  *Note Types of
+          Prerequisites: Prerequisite Types.
+
+    `target-specific'
+          Supports target-specific and pattern-specific variable
+          assignments.  *Note Target-specific Variable Values:
+          Target-specific.
+
+
+`.INCLUDE_DIRS'
+     Expands to a list of directories that `make' searches for included
+     makefiles (*note Including Other Makefiles: Include.).
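+
+   As an illustration (a sketch, not drawn from the list above), several
+of these variables can be inspected directly from a makefile:
+
+     $(info makefiles read so far: $(MAKEFILE_LIST))
+     $(info this make supports: $(.FEATURES))
+     $(info include search path: $(.INCLUDE_DIRS))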
+
+
+
+File: make.info,  Node: Conditionals,  Next: Functions,  Prev: Using Variables,  Up: Top
+
+7 Conditional Parts of Makefiles
+********************************
+
+A "conditional" directive causes part of a makefile to be obeyed or
+ignored depending on the values of variables.  Conditionals can compare
+the value of one variable to another, or the value of a variable to a
+constant string.  Conditionals control what `make' actually "sees" in
+the makefile, so they _cannot_ be used to control recipes at the time
+of execution.
+
+* Menu:
+
+* Conditional Example::         Example of a conditional
+* Conditional Syntax::          The syntax of conditionals.
+* Testing Flags::               Conditionals that test flags.
+
+
+File: make.info,  Node: Conditional Example,  Next: Conditional Syntax,  Prev: Conditionals,  Up: Conditionals
+
+7.1 Example of a Conditional
+============================
+
+The following example of a conditional tells `make' to use one set of
+libraries if the `CC' variable is `gcc', and a different set of
+libraries otherwise.  It works by controlling which of two recipe lines
+will be used for the rule.  The result is that `CC=gcc' as an argument
+to `make' changes not only which compiler is used but also which
+libraries are linked.
+
+     libs_for_gcc = -lgnu
+     normal_libs =
+
+     foo: $(objects)
+     ifeq ($(CC),gcc)
+             $(CC) -o foo $(objects) $(libs_for_gcc)
+     else
+             $(CC) -o foo $(objects) $(normal_libs)
+     endif
+
+   This conditional uses three directives: one `ifeq', one `else' and
+one `endif'.
+
+   The `ifeq' directive begins the conditional, and specifies the
+condition.  It contains two arguments, separated by a comma and
+surrounded by parentheses.  Variable substitution is performed on both
+arguments and then they are compared.  The lines of the makefile
+following the `ifeq' are obeyed if the two arguments match; otherwise
+they are ignored.
+
+   The `else' directive causes the following lines to be obeyed if the
+previous conditional failed.  In the example above, this means that the
+second alternative linking command is used whenever the first
+alternative is not used.  It is optional to have an `else' in a
+conditional.
+
+   The `endif' directive ends the conditional.  Every conditional must
+end with an `endif'.  Unconditional makefile text follows.
+
+   As this example illustrates, conditionals work at the textual level:
+the lines of the conditional are treated as part of the makefile, or
+ignored, according to the condition.  This is why the larger syntactic
+units of the makefile, such as rules, may cross the beginning or the
+end of the conditional.
+
+   When the variable `CC' has the value `gcc', the above example has
+this effect:
+
+     foo: $(objects)
+             $(CC) -o foo $(objects) $(libs_for_gcc)
+
+When the variable `CC' has any other value, the effect is this:
+
+     foo: $(objects)
+             $(CC) -o foo $(objects) $(normal_libs)
+
+   Equivalent results can be obtained in another way by
+conditionalizing a variable assignment and then using the variable
+unconditionally:
+
+     libs_for_gcc = -lgnu
+     normal_libs =
+
+     ifeq ($(CC),gcc)
+       libs=$(libs_for_gcc)
+     else
+       libs=$(normal_libs)
+     endif
+
+     foo: $(objects)
+             $(CC) -o foo $(objects) $(libs)
+
+
+File: make.info,  Node: Conditional Syntax,  Next: Testing Flags,  Prev: Conditional Example,  Up: Conditionals
+
+7.2 Syntax of Conditionals
+==========================
+
+The syntax of a simple conditional with no `else' is as follows:
+
+     CONDITIONAL-DIRECTIVE
+     TEXT-IF-TRUE
+     endif
+
+The TEXT-IF-TRUE may be any lines of text, to be considered as part of
+the makefile if the condition is true.  If the condition is false, no
+text is used instead.
+
+   The syntax of a complex conditional is as follows:
+
+     CONDITIONAL-DIRECTIVE
+     TEXT-IF-TRUE
+     else
+     TEXT-IF-FALSE
+     endif
+
+   or:
+
+     CONDITIONAL-DIRECTIVE
+     TEXT-IF-ONE-IS-TRUE
+     else CONDITIONAL-DIRECTIVE
+     TEXT-IF-TRUE
+     else
+     TEXT-IF-FALSE
+     endif
+
+There can be as many "`else' CONDITIONAL-DIRECTIVE" clauses as
+necessary.  Once a given condition is true, TEXT-IF-TRUE is used and no
+other clause is used; if no condition is true then TEXT-IF-FALSE is
+used.  The TEXT-IF-TRUE and TEXT-IF-FALSE can be any number of lines of
+text.
+
+   The syntax of the CONDITIONAL-DIRECTIVE is the same whether the
+conditional is simple or complex; after an `else' or not.  There are
+four different directives that test different conditions.  Here is a
+table of them:
+
+`ifeq (ARG1, ARG2)'
+`ifeq 'ARG1' 'ARG2''
+`ifeq "ARG1" "ARG2"'
+`ifeq "ARG1" 'ARG2''
+`ifeq 'ARG1' "ARG2"'
+     Expand all variable references in ARG1 and ARG2 and compare them.
+     If they are identical, the TEXT-IF-TRUE is effective; otherwise,
+     the TEXT-IF-FALSE, if any, is effective.
+
+     Often you want to test if a variable has a non-empty value.  When
+     the value results from complex expansions of variables and
+     functions, expansions you would consider empty may actually
+     contain whitespace characters and thus are not seen as empty.
+     However, you can use the `strip' function (*note Text Functions::)
+     to avoid interpreting whitespace as a non-empty value.  For
+     example:
+
+          ifeq ($(strip $(foo)),)
+          TEXT-IF-EMPTY
+          endif
+
+     will evaluate TEXT-IF-EMPTY even if the expansion of `$(foo)'
+     contains whitespace characters.
+
+`ifneq (ARG1, ARG2)'
+`ifneq 'ARG1' 'ARG2''
+`ifneq "ARG1" "ARG2"'
+`ifneq "ARG1" 'ARG2''
+`ifneq 'ARG1' "ARG2"'
+     Expand all variable references in ARG1 and ARG2 and compare them.
+     If they are different, the TEXT-IF-TRUE is effective; otherwise,
+     the TEXT-IF-FALSE, if any, is effective.
+
+`ifdef VARIABLE-NAME'
+     The `ifdef' form takes the _name_ of a variable as its argument,
+     not a reference to a variable.  If the value of that variable is
+     non-empty, the TEXT-IF-TRUE is effective; otherwise, the
+     TEXT-IF-FALSE, if any, is effective.  Variables that have never
+     been defined have an empty value.  The text VARIABLE-NAME is
+     expanded, so it could be a variable or function that expands to
+     the name of a variable.  For example:
+
+          bar = true
+          foo = bar
+          ifdef $(foo)
+          frobozz = yes
+          endif
+
+     The variable reference `$(foo)' is expanded, yielding `bar', which
+     is considered to be the name of a variable.  The variable `bar' is
+     not expanded, but its value is examined to determine if it is
+     non-empty.
+
+     Note that `ifdef' only tests whether a variable has a value.  It
+     does not expand the variable to see if that value is nonempty.
+     Consequently, tests using `ifdef' return true for all definitions
+     except those like `foo ='.  To test for an empty value, use
+     `ifeq ($(foo),)'.  For example,
+
+          bar =
+          foo = $(bar)
+          ifdef foo
+          frobozz = yes
+          else
+          frobozz = no
+          endif
+
+     sets `frobozz' to `yes', while:
+
+          foo =
+          ifdef foo
+          frobozz = yes
+          else
+          frobozz = no
+          endif
+
+     sets `frobozz' to `no'.
+
+`ifndef VARIABLE-NAME'
+     If the variable VARIABLE-NAME has an empty value, the TEXT-IF-TRUE
+     is effective; otherwise, the TEXT-IF-FALSE, if any, is effective.
+     The rules for expansion and testing of VARIABLE-NAME are identical
+     to the `ifdef' directive.
+
+   Extra spaces are allowed and ignored at the beginning of the
+conditional directive line, but a tab is not allowed.  (If the line
+begins with a tab, it will be considered part of a recipe for a rule.)
+Aside from this, extra spaces or tabs may be inserted with no effect
+anywhere except within the directive name or within an argument.  A
+comment starting with `#' may appear at the end of the line.
+
+   The other two directives that play a part in a conditional are `else'
+and `endif'.  Each of these directives is written as one word, with no
+arguments.  Extra spaces are allowed and ignored at the beginning of the
+line, and spaces or tabs at the end.  A comment starting with `#' may
+appear at the end of the line.
+
+   Conditionals affect which lines of the makefile `make' uses.  If the
+condition is true, `make' reads the lines of the TEXT-IF-TRUE as part
+of the makefile; if the condition is false, `make' ignores those lines
+completely.  It follows that syntactic units of the makefile, such as
+rules, may safely be split across the beginning or the end of the
+conditional.
+
+   `make' evaluates conditionals when it reads a makefile.
+Consequently, you cannot use automatic variables in the tests of
+conditionals because they are not defined until recipes are run (*note
+Automatic Variables::).
+
+   To prevent intolerable confusion, it is not permitted to start a
+conditional in one makefile and end it in another.  However, you may
+write an `include' directive within a conditional, provided you do not
+attempt to terminate the conditional inside the included file.
+
+
+File: make.info,  Node: Testing Flags,  Prev: Conditional Syntax,  Up: Conditionals
+
+7.3 Conditionals that Test Flags
+================================
+
+You can write a conditional that tests `make' command flags such as
+`-t' by using the variable `MAKEFLAGS' together with the `findstring'
+function (*note Functions for String Substitution and Analysis: Text
+Functions.).  This is useful when `touch' is not enough to make a file
+appear up to date.
+
+   The `findstring' function determines whether one string appears as a
+substring of another.  If you want to test for the `-t' flag, use `t'
+as the first string and the value of `MAKEFLAGS' as the other.
+
+   For example, here is how to arrange to use `ranlib -t' to finish
+marking an archive file up to date:
+
+     archive.a: ...
+     ifneq (,$(findstring t,$(MAKEFLAGS)))
+             +touch archive.a
+             +ranlib -t archive.a
+     else
+             ranlib archive.a
+     endif
+
+The `+' prefix marks those recipe lines as "recursive" so that they
+will be executed despite use of the `-t' flag.  *Note Recursive Use of
+`make': Recursion.
+
+
+File: make.info,  Node: Functions,  Next: Running,  Prev: Conditionals,  Up: Top
+
+8 Functions for Transforming Text
+*********************************
+
+"Functions" allow you to do text processing in the makefile to compute
+the files to operate on or the commands to use in recipes.  You use a
+function in a "function call", where you give the name of the function
+and some text (the "arguments") for the function to operate on.  The
+result of the function's processing is substituted into the makefile at
+the point of the call, just as a variable might be substituted.
+
+* Menu:
+
+* Syntax of Functions::         How to write a function call.
+* Text Functions::              General-purpose text manipulation functions.
+* File Name Functions::         Functions for manipulating file names.
+* Conditional Functions::       Functions that implement conditions.
+* Foreach Function::            Repeat some text with controlled variation.
+* Call Function::               Expand a user-defined function.
+* Value Function::              Return the un-expanded value of a variable.
+* Eval Function::               Evaluate the arguments as makefile syntax.
+* Origin Function::             Find where a variable got its value.
+* Flavor Function::             Find out the flavor of a variable.
+* Shell Function::              Substitute the output of a shell command.
+* Make Control Functions::      Functions that control how make runs.
+
+
+File: make.info,  Node: Syntax of Functions,  Next: Text Functions,  Prev: Functions,  Up: Functions
+
+8.1 Function Call Syntax
+========================
+
+A function call resembles a variable reference.  It looks like this:
+
+     $(FUNCTION ARGUMENTS)
+
+or like this:
+
+     ${FUNCTION ARGUMENTS}
+
+   Here FUNCTION is a function name; one of a short list of names that
+are part of `make'.  You can also essentially create your own functions
+by using the `call' builtin function.
+
+   The ARGUMENTS are the arguments of the function.  They are separated
+from the function name by one or more spaces or tabs, and if there is
+more than one argument, then they are separated by commas.  Such
+whitespace and commas are not part of an argument's value.  The
+delimiters which you use to surround the function call, whether
+parentheses or braces, can appear in an argument only in matching pairs;
+the other kind of delimiters may appear singly.  If the arguments
+themselves contain other function calls or variable references, it is
+wisest to use the same kind of delimiters for all the references; write
+`$(subst a,b,$(x))', not `$(subst a,b,${x})'.  This is because it is
+clearer, and because only one type of delimiter is matched to find the
+end of the reference.
+
+   The text written for each argument is processed by substitution of
+variables and function calls to produce the argument value, which is
+the text on which the function acts.  The substitution is done in the
+order in which the arguments appear.
+
+   Commas and unmatched parentheses or braces cannot appear in the text
+of an argument as written; leading spaces cannot appear in the text of
+the first argument as written.  These characters can be put into the
+argument value by variable substitution.  First define variables
+`comma' and `space' whose values are isolated comma and space
+characters, then substitute these variables where such characters are
+wanted, like this:
+
+     comma:= ,
+     empty:=
+     space:= $(empty) $(empty)
+     foo:= a b c
+     bar:= $(subst $(space),$(comma),$(foo))
+     # bar is now `a,b,c'.
+
+Here the `subst' function replaces each space with a comma, through the
+value of `foo', and substitutes the result.
+
+
+File: make.info,  Node: Text Functions,  Next: File Name Functions,  Prev: Syntax of Functions,  Up: Functions
+
+8.2 Functions for String Substitution and Analysis
+==================================================
+
+Here are some functions that operate on strings:
+
+`$(subst FROM,TO,TEXT)'
+     Performs a textual replacement on the text TEXT: each occurrence
+     of FROM is replaced by TO.  The result is substituted for the
+     function call.  For example,
+
+          $(subst ee,EE,feet on the street)
+
+     substitutes the string `fEEt on the strEEt'.
+
+`$(patsubst PATTERN,REPLACEMENT,TEXT)'
+     Finds whitespace-separated words in TEXT that match PATTERN and
+     replaces them with REPLACEMENT.  Here PATTERN may contain a `%'
+     which acts as a wildcard, matching any number of any characters
+     within a word.  If REPLACEMENT also contains a `%', the `%' is
+     replaced by the text that matched the `%' in PATTERN.  Only the
+     first `%' in the PATTERN and REPLACEMENT is treated this way; any
+     subsequent `%' is unchanged.
+
+     `%' characters in `patsubst' function invocations can be quoted
+     with preceding backslashes (`\').  Backslashes that would
+     otherwise quote `%' characters can be quoted with more backslashes.
+     Backslashes that quote `%' characters or other backslashes are
+     removed from the pattern before it is compared against file names
+     or has a stem substituted into it.  Backslashes that are not in danger of
+     quoting `%' characters go unmolested.  For example, the pattern
+     `the\%weird\\%pattern\\' has `the%weird\' preceding the operative
+     `%' character, and `pattern\\' following it.  The final two
+     backslashes are left alone because they cannot affect any `%'
+     character.
+
+     Whitespace between words is folded into single space characters;
+     leading and trailing whitespace is discarded.
+
+     For example,
+
+          $(patsubst %.c,%.o,x.c.c bar.c)
+
+     produces the value `x.c.o bar.o'.
+
+     Substitution references (*note Substitution References:
+     Substitution Refs.) are a simpler way to get the effect of the
+     `patsubst' function:
+
+          $(VAR:PATTERN=REPLACEMENT)
+
+     is equivalent to
+
+          $(patsubst PATTERN,REPLACEMENT,$(VAR))
+
+     The second shorthand simplifies one of the most common uses of
+     `patsubst': replacing the suffix at the end of file names.
+
+          $(VAR:SUFFIX=REPLACEMENT)
+
+     is equivalent to
+
+          $(patsubst %SUFFIX,%REPLACEMENT,$(VAR))
+
+     For example, you might have a list of object files:
+
+          objects = foo.o bar.o baz.o
+
+     To get the list of corresponding source files, you could simply
+     write:
+
+          $(objects:.o=.c)
+
+     instead of using the general form:
+
+          $(patsubst %.o,%.c,$(objects))
+
+`$(strip STRING)'
+     Removes leading and trailing whitespace from STRING and replaces
+     each internal sequence of one or more whitespace characters with a
+     single space.  Thus, `$(strip a b  c )' results in `a b c'.
+
+     The function `strip' can be very useful when used in conjunction
+     with conditionals.  When comparing something with the empty string
+     `' using `ifeq' or `ifneq', you usually want a string of just
+     whitespace to match the empty string (*note Conditionals::).
+
+     Thus, the following may fail to have the desired results:
+
+          .PHONY: all
+          ifneq   "$(needs_made)" ""
+          all: $(needs_made)
+          else
+          all:;@echo 'Nothing to make!'
+          endif
+
+     Replacing the variable reference `$(needs_made)' with the function
+     call `$(strip $(needs_made))' in the `ifneq' directive would make
+     it more robust.
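+
+     Written out, the more robust form of the example might read:
+
+          .PHONY: all
+          ifneq "$(strip $(needs_made))" ""
+          all: $(needs_made)
+          else
+          all:;@echo 'Nothing to make!'
+          endif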
+
+`$(findstring FIND,IN)'
+     Searches IN for an occurrence of FIND.  If it occurs, the value is
+     FIND; otherwise, the value is empty.  You can use this function in
+     a conditional to test for the presence of a specific substring in
+     a given string.  Thus, the two examples,
+
+          $(findstring a,a b c)
+          $(findstring a,b c)
+
+     produce the values `a' and `' (the empty string), respectively.
+     *Note Testing Flags::, for a practical application of `findstring'.
+
+`$(filter PATTERN...,TEXT)'
+     Returns all whitespace-separated words in TEXT that _do_ match any
+     of the PATTERN words, removing any words that _do not_ match.  The
+     patterns are written using `%', just like the patterns used in the
+     `patsubst' function above.
+
+     The `filter' function can be used to separate out different types
+     of strings (such as file names) in a variable.  For example:
+
+          sources := foo.c bar.c baz.s ugh.h
+          foo: $(sources)
+                  cc $(filter %.c %.s,$(sources)) -o foo
+
+     says that `foo' depends on `foo.c', `bar.c', `baz.s' and `ugh.h'
+     but only `foo.c', `bar.c' and `baz.s' should be specified in the
+     command to the compiler.
+
+`$(filter-out PATTERN...,TEXT)'
+     Returns all whitespace-separated words in TEXT that _do not_ match
+     any of the PATTERN words, removing the words that _do_ match one
+     or more.  This is the exact opposite of the `filter' function.
+
+     For example, given:
+
+          objects=main1.o foo.o main2.o bar.o
+          mains=main1.o main2.o
+
+     the following generates a list which contains all the object files
+     not in `mains':
+
+          $(filter-out $(mains),$(objects))
+
+`$(sort LIST)'
+     Sorts the words of LIST in lexical order, removing duplicate
+     words.  The output is a list of words separated by single spaces.
+     Thus,
+
+          $(sort foo bar lose)
+
+     returns the value `bar foo lose'.
+
+     Incidentally, since `sort' removes duplicate words, you can use it
+     for this purpose even if you don't care about the sort order.
+
+`$(word N,TEXT)'
+     Returns the Nth word of TEXT.  The legitimate values of N start
+     from 1.  If N is bigger than the number of words in TEXT, the
+     value is empty.  For example,
+
+          $(word 2, foo bar baz)
+
+     returns `bar'.
+
+`$(wordlist S,E,TEXT)'
+     Returns the list of words in TEXT starting with word S and ending
+     with word E (inclusive).  The legitimate values of S start from 1;
+     E may start from 0.  If S is bigger than the number of words in
+     TEXT, the value is empty.  If E is bigger than the number of words
+     in TEXT, words up to the end of TEXT are returned.  If S is
+     greater than E, nothing is returned.  For example,
+
+          $(wordlist 2, 3, foo bar baz)
+
+     returns `bar baz'.
+
+`$(words TEXT)'
+     Returns the number of words in TEXT.  Thus, the last word of TEXT
+     is `$(word $(words TEXT),TEXT)'.
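+
+     For example,
+
+          $(words foo bar baz)
+
+     returns `3'; accordingly, the last word of that text is given by
+     `$(word $(words foo bar baz),foo bar baz)', which is `baz'.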
+
+`$(firstword NAMES...)'
+     The argument NAMES is regarded as a series of names, separated by
+     whitespace.  The value is the first name in the series.  The rest
+     of the names are ignored.
+
+     For example,
+
+          $(firstword foo bar)
+
+     produces the result `foo'.  Although `$(firstword TEXT)' is the
+     same as `$(word 1,TEXT)', the `firstword' function is retained for
+     its simplicity.
+
+`$(lastword NAMES...)'
+     The argument NAMES is regarded as a series of names, separated by
+     whitespace.  The value is the last name in the series.
+
+     For example,
+
+          $(lastword foo bar)
+
+     produces the result `bar'.  Although `$(lastword TEXT)' is the
+     same as `$(word $(words TEXT),TEXT)', the `lastword' function was
+     added for its simplicity and better performance.
+
+   Here is a realistic example of the use of `subst' and `patsubst'.
+Suppose that a makefile uses the `VPATH' variable to specify a list of
+directories that `make' should search for prerequisite files (*note
+`VPATH' Search Path for All Prerequisites: General Search.).  This
+example shows how to tell the C compiler to search for header files in
+the same list of directories.
+
+   The value of `VPATH' is a list of directories separated by colons,
+such as `src:../headers'.  First, the `subst' function is used to
+change the colons to spaces:
+
+     $(subst :, ,$(VPATH))
+
+This produces `src ../headers'.  Then `patsubst' is used to turn each
+directory name into a `-I' flag.  These can be added to the value of
+the variable `CFLAGS', which is passed automatically to the C compiler,
+like this:
+
+     override CFLAGS += $(patsubst %,-I%,$(subst :, ,$(VPATH)))
+
+The effect is to append the text `-Isrc -I../headers' to the previously
+given value of `CFLAGS'.  The `override' directive is used so that the
+new value is assigned even if the previous value of `CFLAGS' was
+specified with a command argument (*note The `override' Directive:
+Override Directive.).
+
+
+File: make.info,  Node: File Name Functions,  Next: Conditional Functions,  Prev: Text Functions,  Up: Functions
+
+8.3 Functions for File Names
+============================
+
+Several of the built-in expansion functions relate specifically to
+taking apart file names or lists of file names.
+
+   Each of the following functions performs a specific transformation
+on a file name.  The argument of the function is regarded as a series
+of file names, separated by whitespace.  (Leading and trailing
+whitespace is ignored.)  Each file name in the series is transformed in
+the same way and the results are concatenated with single spaces
+between them.
+
+`$(dir NAMES...)'
+     Extracts the directory-part of each file name in NAMES.  The
+     directory-part of the file name is everything up through (and
+     including) the last slash in it.  If the file name contains no
+     slash, the directory part is the string `./'.  For example,
+
+          $(dir src/foo.c hacks)
+
+     produces the result `src/ ./'.
+
+`$(notdir NAMES...)'
+     Extracts all but the directory-part of each file name in NAMES.
+     If the file name contains no slash, it is left unchanged.
+     Otherwise, everything through the last slash is removed from it.
+
+     A file name that ends with a slash becomes an empty string.  This
+     is unfortunate, because it means that the result does not always
+     have the same number of whitespace-separated file names as the
+     argument had; but we do not see any other valid alternative.
+
+     For example,
+
+          $(notdir src/foo.c hacks)
+
+     produces the result `foo.c hacks'.
+
+`$(suffix NAMES...)'
+     Extracts the suffix of each file name in NAMES.  If the file name
+     contains a period, the suffix is everything starting with the last
+     period.  Otherwise, the suffix is the empty string.  This
+     frequently means that the result will be empty when NAMES is not,
+     and if NAMES contains multiple file names, the result may contain
+     fewer file names.
+
+     For example,
+
+          $(suffix src/foo.c src-1.0/bar.c hacks)
+
+     produces the result `.c .c'.
+
+`$(basename NAMES...)'
+     Extracts all but the suffix of each file name in NAMES.  If the
+     file name contains a period, the basename is everything up to
+     (but not including) the last period.  Periods in the
+     directory part are ignored.  If there is no period, the basename
+     is the entire file name.  For example,
+
+          $(basename src/foo.c src-1.0/bar hacks)
+
+     produces the result `src/foo src-1.0/bar hacks'.
+
+`$(addsuffix SUFFIX,NAMES...)'
+     The argument NAMES is regarded as a series of names, separated by
+     whitespace; SUFFIX is used as a unit.  The value of SUFFIX is
+     appended to the end of each individual name and the resulting
+     larger names are concatenated with single spaces between them.
+     For example,
+
+          $(addsuffix .c,foo bar)
+
+     produces the result `foo.c bar.c'.
+
+`$(addprefix PREFIX,NAMES...)'
+     The argument NAMES is regarded as a series of names, separated by
+     whitespace; PREFIX is used as a unit.  The value of PREFIX is
+     prepended to the front of each individual name and the resulting
+     larger names are concatenated with single spaces between them.
+     For example,
+
+          $(addprefix src/,foo bar)
+
+     produces the result `src/foo src/bar'.
+
+`$(join LIST1,LIST2)'
+     Concatenates the two arguments word by word: the two first words
+     (one from each argument) concatenated form the first word of the
+     result, the two second words form the second word of the result,
+     and so on.  So the Nth word of the result comes from the Nth word
+     of each argument.  If one argument has more words than the other,
+     the extra words are copied unchanged into the result.
+
+     For example, `$(join a b,.c .o)' produces `a.c b.o'.
+
+     Whitespace between the words in the lists is not preserved; it is
+     replaced with a single space.
+
+     This function can merge the results of the `dir' and `notdir'
+     functions, to produce the original list of files which was given
+     to those two functions.
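+
+     For example, with a list of file names in a variable (`files' and
+     `rejoined' are arbitrary names used only for illustration):
+
+          files := src/foo.c lib/bar.c
+          rejoined := $(join $(dir $(files)),$(notdir $(files)))
+
+     the value of `rejoined' is again `src/foo.c lib/bar.c'.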
+
+`$(wildcard PATTERN)'
+     The argument PATTERN is a file name pattern, typically containing
+     wildcard characters (as in shell file name patterns).  The result
+     of `wildcard' is a space-separated list of the names of existing
+     files that match the pattern.  *Note Using Wildcard Characters in
+     File Names: Wildcards.
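+
+     For example, if the current directory contains `foo.c' and
+     `bar.c',
+
+          sources := $(wildcard *.c)
+
+     sets `sources' to a list containing those two files (together
+     with any other existing `.c' files).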
+
+`$(realpath NAMES...)'
+     For each file name in NAMES return the canonical absolute name.  A
+     canonical name does not contain any `.' or `..' components, nor
+     any repeated path separators (`/') or symlinks.  In case of a
+     failure the empty string is returned.  Consult the `realpath(3)'
+     documentation for a list of possible failure causes.
+
+`$(abspath NAMES...)'
+     For each file name in NAMES return an absolute name that does not
+     contain any `.' or `..' components, nor any repeated path
+     separators (`/').  Note that, in contrast to the `realpath'
+     function, `abspath' does not resolve symlinks and does not require
+     the file
+     names to refer to an existing file or directory.  Use the
+     `wildcard' function to test for existence.
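+
+     For example, if `make' is running in the directory
+     `/home/user/proj' (a directory name chosen purely for
+     illustration),
+
+          $(abspath ./src/../include/foo.h)
+
+     evaluates to `/home/user/proj/include/foo.h' whether or not that
+     file exists, whereas `realpath' applied to the same name would
+     return the empty string if the file does not exist.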
+
+
+File: make.info,  Node: Conditional Functions,  Next: Foreach Function,  Prev: File Name Functions,  Up: Functions
+
+8.4 Functions for Conditionals
+==============================
+
+There are three functions that provide conditional expansion.  A key
+aspect of these functions is that not all of the arguments are expanded
+initially.  Only those arguments which need to be expanded will be
+expanded.
+
+`$(if CONDITION,THEN-PART[,ELSE-PART])'
+     The `if' function provides support for conditional expansion in a
+     functional context (as opposed to the GNU `make' makefile
+     conditionals such as `ifeq' (*note Syntax of Conditionals:
+     Conditional Syntax.).
+
+     The first argument, CONDITION, first has all preceding and
+     trailing whitespace stripped, then is expanded.  If it expands to
+     any non-empty string, then the condition is considered to be true.
+     If it expands to an empty string, the condition is considered to
+     be false.
+
+     If the condition is true then the second argument, THEN-PART, is
+     evaluated and this is used as the result of the evaluation of the
+     entire `if' function.
+
+     If the condition is false then the third argument, ELSE-PART, is
+     evaluated and this is the result of the `if' function.  If there is
+     no third argument, the `if' function evaluates to nothing (the
+     empty string).
+
+     Note that only one of the THEN-PART or the ELSE-PART will be
+     evaluated, never both.  Thus, either can contain side-effects
+     (such as `shell' function calls, etc.)
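+
+     For example (`BUILDDIR' and `OUTDIR' are illustrative variable
+     names with no special meaning to `make'),
+
+          OUTDIR := $(if $(BUILDDIR),$(BUILDDIR),.)
+
+     sets `OUTDIR' to the value of `BUILDDIR' if that value is
+     non-empty after stripping whitespace, and to `.' otherwise.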
+
+`$(or CONDITION1[,CONDITION2[,CONDITION3...]])'
+     The `or' function provides a "short-circuiting" OR operation.
+     Each argument is expanded, in order.  If an argument expands to a
+     non-empty string the processing stops and the result of the
+     expansion is that string.  If, after all arguments are expanded,
+     all of them are false (empty), then the result of the expansion is
+     the empty string.
+
+`$(and CONDITION1[,CONDITION2[,CONDITION3...]])'
+     The `and' function provides a "short-circuiting" AND operation.
+     Each argument is expanded, in order.  If an argument expands to an
+     empty string the processing stops and the result of the expansion
+     is the empty string.  If all arguments expand to a non-empty
+     string then the result of the expansion is the expansion of the
+     last argument.
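+
+     For example, `or' gives a compact way to choose the first
+     non-empty value, and `and' can guard a reference (again, the
+     variable names are only illustrative):
+
+          OUTDIR := $(or $(BUILDDIR),.)
+          config := $(and $(wildcard config.mk),config.mk)
+
+     Here `OUTDIR' gets the value of `BUILDDIR' if it is non-empty and
+     `.' otherwise, while `config' is `config.mk' when that file
+     exists and the empty string when it does not.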
+
+
+
+File: make.info,  Node: Foreach Function,  Next: Call Function,  Prev: Conditional Functions,  Up: Functions
+
+8.5 The `foreach' Function
+==========================
+
+The `foreach' function is very different from other functions.  It
+causes one piece of text to be used repeatedly, each time with a
+different substitution performed on it.  It resembles the `for' command
+in the shell `sh' and the `foreach' command in the C-shell `csh'.
+
+   The syntax of the `foreach' function is:
+
+     $(foreach VAR,LIST,TEXT)
+
+The first two arguments, VAR and LIST, are expanded before anything
+else is done; note that the last argument, TEXT, is *not* expanded at
+the same time.  Then for each word of the expanded value of LIST, the
+variable named by the expanded value of VAR is set to that word, and
+TEXT is expanded.  Presumably TEXT contains references to that
+variable, so its expansion will be different each time.
+
+   The result is that TEXT is expanded as many times as there are
+whitespace-separated words in LIST.  The multiple expansions of TEXT
+are concatenated, with spaces between them, to make the result of
+`foreach'.
+
+   This simple example sets the variable `files' to the list of all
+files in the directories in the list `dirs':
+
+     dirs := a b c d
+     files := $(foreach dir,$(dirs),$(wildcard $(dir)/*))
+
+   Here TEXT is `$(wildcard $(dir)/*)'.  The first repetition finds the
+value `a' for `dir', so it produces the same result as `$(wildcard
+a/*)'; the second repetition produces the result of `$(wildcard b/*)';
+and the third, that of `$(wildcard c/*)'.
+
+   This example has the same result (except for setting `dirs') as the
+following example:
+
+     files := $(wildcard a/* b/* c/* d/*)
+
+   When TEXT is complicated, you can improve readability by giving it a
+name, with an additional variable:
+
+     find_files = $(wildcard $(dir)/*)
+     dirs := a b c d
+     files := $(foreach dir,$(dirs),$(find_files))
+
+Here we use the variable `find_files' this way.  We use plain `=' to
+define a recursively-expanding variable, so that its value contains an
+actual function call to be reexpanded under the control of `foreach'; a
+simply-expanded variable would not do, since `wildcard' would be called
+only once at the time of defining `find_files'.
+
+   The `foreach' function has no permanent effect on the variable VAR;
+its value and flavor after the `foreach' function call are the same as
+they were beforehand.  The other values which are taken from LIST are
+in effect only temporarily, during the execution of `foreach'.  The
+variable VAR is a simply-expanded variable during the execution of
+`foreach'.  If VAR was undefined before the `foreach' function call, it
+is undefined after the call.  *Note The Two Flavors of Variables:
+Flavors.
+
+   You must take care when using complex variable expressions that
+result in variable names because many strange things are valid variable
+names, but are probably not what you intended.  For example,
+
+     files := $(foreach Esta escrito en espanol!,b c ch,$(find_files))
+
+might be useful if the value of `find_files' references the variable
+whose name is `Esta escrito en espanol!' (quite a long name, isn't
+it?), but it is more likely to be a mistake.
+
+
+File: make.info,  Node: Call Function,  Next: Value Function,  Prev: Foreach Function,  Up: Functions
+
+8.6 The `call' Function
+=======================
+
+The `call' function is unique in that it can be used to create new
+parameterized functions.  You can write a complex expression as the
+value of a variable, then use `call' to expand it with different values.
+
+   The syntax of the `call' function is:
+
+     $(call VARIABLE,PARAM,PARAM,...)
+
+   When `make' expands this function, it assigns each PARAM to
+temporary variables `$(1)', `$(2)', etc.  The variable `$(0)' will
+contain VARIABLE.  There is no maximum number of parameter arguments.
+There is no minimum, either, but it doesn't make sense to use `call'
+with no parameters.
+
+   Then VARIABLE is expanded as a `make' variable in the context of
+these temporary assignments.  Thus, any reference to `$(1)' in the
+value of VARIABLE will resolve to the first PARAM in the invocation of
+`call'.
+
+   Note that VARIABLE is the _name_ of a variable, not a _reference_ to
+that variable.  Therefore you would not normally use a `$' or
+parentheses when writing it.  (You can, however, use a variable
+reference in the name if you want the name not to be a constant.)
+
+   If VARIABLE is the name of a builtin function, the builtin function
+is always invoked (even if a `make' variable by that name also exists).
+
+   The `call' function expands the PARAM arguments before assigning
+them to temporary variables.  This means that VARIABLE values
+containing references to builtin functions that have special expansion
+rules, like `foreach' or `if', may not work as you expect.
+
+   Some examples may make this clearer.
+
+   This macro simply reverses its arguments:
+
+     reverse = $(2) $(1)
+
+     foo = $(call reverse,a,b)
+
+Here FOO will contain `b a'.
+
+   This one is slightly more interesting: it defines a macro to search
+for the first instance of a program in `PATH':
+
+     pathsearch = $(firstword $(wildcard $(addsuffix /$(1),$(subst :, ,$(PATH)))))
+
+     LS := $(call pathsearch,ls)
+
+Now the variable LS contains `/bin/ls' or similar.
+
+   The `call' function can be nested.  Each recursive invocation gets
+its own local values for `$(1)', etc. that mask the values of
+higher-level `call'.  For example, here is an implementation of a "map"
+function:
+
+     map = $(foreach a,$(2),$(call $(1),$(a)))
+
+   Now you can MAP a function that normally takes only one argument,
+such as `origin', to multiple values in one step:
+
+     o = $(call map,origin,o map MAKE)
+
+   and end up with O containing something like `file file default'.
+
+   A final caution: be careful when adding whitespace to the arguments
+to `call'.  As with other functions, any whitespace contained in the
+second and subsequent arguments is kept; this can cause strange
+effects.  It's generally safest to remove all extraneous whitespace when
+providing parameters to `call'.
+
+
+File: make.info,  Node: Value Function,  Next: Eval Function,  Prev: Call Function,  Up: Functions
+
+8.7 The `value' Function
+========================
+
+The `value' function provides a way for you to use the value of a
+variable _without_ having it expanded.  Please note that this does not
+undo expansions which have already occurred; for example if you create
+a simply expanded variable its value is expanded during the definition;
+in that case the `value' function will return the same result as using
+the variable directly.
+
+   The syntax of the `value' function is:
+
+     $(value VARIABLE)
+
+   Note that VARIABLE is the _name_ of a variable; not a _reference_ to
+that variable.  Therefore you would not normally use a `$' or
+parentheses when writing it.  (You can, however, use a variable
+reference in the name if you want the name not to be a constant.)
+
+   The result of this function is a string containing the value of
+VARIABLE, without any expansion occurring.  For example, in this
+makefile:
+
+     FOO = $PATH
+
+     all:
+             @echo $(FOO)
+             @echo $(value FOO)
+
+The first output line would be `ATH', since the "$P" would be expanded
+as a `make' variable, while the second output line would be the current
+value of your `$PATH' environment variable, since the `value' function
+avoided the expansion.
+
+   The `value' function is most often used in conjunction with the
+`eval' function (*note Eval Function::).
+
+
+File: make.info,  Node: Eval Function,  Next: Origin Function,  Prev: Value Function,  Up: Functions
+
+8.8 The `eval' Function
+=======================
+
+The `eval' function is very special: it allows you to define new
+makefile constructs that are not constant; which are the result of
+evaluating other variables and functions.  The argument to the `eval'
+function is expanded, then the results of that expansion are parsed as
+makefile syntax.  The expanded results can define new `make' variables,
+targets, implicit or explicit rules, etc.
+
+   The result of the `eval' function is always the empty string; thus,
+it can be placed virtually anywhere in a makefile without causing
+syntax errors.
+
+   It's important to realize that the `eval' argument is expanded
+_twice_; first by the `eval' function, then the results of that
+expansion are expanded again when they are parsed as makefile syntax.
+This means you may need to provide extra levels of escaping for "$"
+characters when using `eval'.  The `value' function (*note Value
+Function::) can sometimes be useful in these situations, to circumvent
+unwanted expansions.
+
+   Here is an example of how `eval' can be used; this example combines
+a number of concepts and other functions.  Although it might seem
+overly complex to use `eval' in this example, rather than just writing
+out the rules, consider two things: first, the template definition (in
+`PROGRAM_template') could need to be much more complex than it is here;
+and second, you might put the complex, "generic" part of this example
+into another makefile, then include it in all the individual makefiles.
+Now your individual makefiles are quite straightforward.
+
+     PROGRAMS    = server client
+
+     server_OBJS = server.o server_priv.o server_access.o
+     server_LIBS = priv protocol
+
+     client_OBJS = client.o client_api.o client_mem.o
+     client_LIBS = protocol
+
+     # Everything after this is generic
+
+     .PHONY: all
+     all: $(PROGRAMS)
+
+     define PROGRAM_template =
+      $(1): $$($(1)_OBJS) $$($(1)_LIBS:%=-l%)
+      ALL_OBJS   += $$($(1)_OBJS)
+     endef
+
+     $(foreach prog,$(PROGRAMS),$(eval $(call PROGRAM_template,$(prog))))
+
+     $(PROGRAMS):
+             $(LINK.o) $^ $(LDLIBS) -o $@
+
+     clean:
+             rm -f $(ALL_OBJS) $(PROGRAMS)
+
+
+File: make.info,  Node: Origin Function,  Next: Flavor Function,  Prev: Eval Function,  Up: Functions
+
+8.9 The `origin' Function
+=========================
+
+The `origin' function is unlike most other functions in that it does
+not operate on the values of variables; it tells you something _about_
+a variable.  Specifically, it tells you where it came from.
+
+   The syntax of the `origin' function is:
+
+     $(origin VARIABLE)
+
+   Note that VARIABLE is the _name_ of a variable to inquire about; not
+a _reference_ to that variable.  Therefore you would not normally use a
+`$' or parentheses when writing it.  (You can, however, use a variable
+reference in the name if you want the name not to be a constant.)
+
+   The result of this function is a string telling you how the variable
+VARIABLE was defined:
+
+`undefined'
+     if VARIABLE was never defined.
+
+`default'
+     if VARIABLE has a default definition, as is usual with `CC' and so
+     on.  *Note Variables Used by Implicit Rules: Implicit Variables.
+     Note that if you have redefined a default variable, the `origin'
+     function will return the origin of the later definition.
+
+`environment'
+     if VARIABLE was inherited from the environment provided to `make'.
+
+`environment override'
+     if VARIABLE was inherited from the environment provided to `make',
+     and is overriding a setting for VARIABLE in the makefile as a
+     result of the `-e' option (*note Summary of Options: Options
+     Summary.).
+
+`file'
+     if VARIABLE was defined in a makefile.
+
+`command line'
+     if VARIABLE was defined on the command line.
+
+`override'
+     if VARIABLE was defined with an `override' directive in a makefile
+     (*note The `override' Directive: Override Directive.).
+
+`automatic'
+     if VARIABLE is an automatic variable defined for the execution of
+     the recipe for each rule (*note Automatic Variables::).
+
+   This information is primarily useful (other than for your curiosity)
+to determine if you want to believe the value of a variable.  For
+example, suppose you have a makefile `foo' that includes another
+makefile `bar'.  You want a variable `bletch' to be defined in `bar' if
+you run the command `make -f bar', even if the environment contains a
+definition of `bletch'.  However, if `foo' defined `bletch' before
+including `bar', you do not want to override that definition.  This
+could be done by using an `override' directive in `foo', giving that
+definition precedence over the later definition in `bar';
+unfortunately, the `override' directive would also override any command
+line definitions.  So, `bar' could include:
+
+     ifdef bletch
+     ifeq "$(origin bletch)" "environment"
+     bletch = barf, gag, etc.
+     endif
+     endif
+
+If `bletch' has been defined from the environment, this will redefine
+it.
+
+   If you want to override a previous definition of `bletch' if it came
+from the environment, even under `-e', you could instead write:
+
+     ifneq "$(findstring environment,$(origin bletch))" ""
+     bletch = barf, gag, etc.
+     endif
+
+   Here the redefinition takes place if `$(origin bletch)' returns
+either `environment' or `environment override'.  *Note Functions for
+String Substitution and Analysis: Text Functions.
+
+
+File: make.info,  Node: Flavor Function,  Next: Shell Function,  Prev: Origin Function,  Up: Functions
+
+8.10 The `flavor' Function
+==========================
+
+The `flavor' function is unlike most other functions (and like the
+`origin' function) in that it does not operate on the values of
+variables; it
+tells you something _about_ a variable.  Specifically, it tells you the
+flavor of a variable (*note The Two Flavors of Variables: Flavors.).
+
+   The syntax of the `flavor' function is:
+
+     $(flavor VARIABLE)
+
+   Note that VARIABLE is the _name_ of a variable to inquire about; not
+a _reference_ to that variable.  Therefore you would not normally use a
+`$' or parentheses when writing it.  (You can, however, use a variable
+reference in the name if you want the name not to be a constant.)
+
+   The result of this function is a string that identifies the flavor
+of the variable VARIABLE:
+
+`undefined'
+     if VARIABLE was never defined.
+
+`recursive'
+     if VARIABLE is a recursively expanded variable.
+
+`simple'
+     if VARIABLE is a simply expanded variable.
+
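+   For example, given the definitions (the names `foo' and `bar' are
+arbitrary):
+
+     foo = value
+     bar := value
+
+`$(flavor foo)' is `recursive', `$(flavor bar)' is `simple', and the
+flavor of any variable that has never been set is `undefined'.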
+
+
+File: make.info,  Node: Shell Function,  Next: Make Control Functions,  Prev: Flavor Function,  Up: Functions
+
+8.11 The `shell' Function
+=========================
+
+The `shell' function is unlike any other function other than the
+`wildcard' function (*note The Function `wildcard': Wildcard Function.)
+in that it communicates with the world outside of `make'.
+
+   The `shell' function performs the same function that backquotes
+(``') perform in most shells: it does "command expansion".  This means
+that it takes as an argument a shell command and evaluates to the
+output of the command.  The only processing `make' does on the result
+is to convert each newline (or carriage-return / newline pair) to a
+single space.  If there is a trailing (carriage-return and) newline it
+will simply be removed.
+
+   The commands run by calls to the `shell' function are run when the
+function calls are expanded (*note How `make' Reads a Makefile: Reading
+Makefiles.).  Because this function involves spawning a new shell, you
+should carefully consider the performance implications of using the
+`shell' function within recursively expanded variables vs. simply
+expanded variables (*note The Two Flavors of Variables: Flavors.).
+
+   Here are some examples of the use of the `shell' function:
+
+     contents := $(shell cat foo)
+
+sets `contents' to the contents of the file `foo', with a space (rather
+than a newline) separating each line.
+
+     files := $(shell echo *.c)
+
+sets `files' to the expansion of `*.c'.  Unless `make' is using a very
+strange shell, this has the same result as `$(wildcard *.c)' (as long
+as at least one `.c' file exists).
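+
+   To illustrate the performance point above, compare the two flavors
+of assignment (the variable names and the `date' command are purely
+illustrative):
+
+     # `NOW1' runs the command once, when this line is read.
+     NOW1 := $(shell date)
+
+     # `NOW2' runs the command again each time $(NOW2) is expanded.
+     NOW2 = $(shell date)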
+
+
+File: make.info,  Node: Make Control Functions,  Prev: Shell Function,  Up: Functions
+
+8.12 Functions That Control Make
+================================
+
+These functions control the way make runs.  Generally, they are used to
+provide information to the user of the makefile or to cause make to stop
+if some sort of environmental error is detected.
+
+`$(error TEXT...)'
+     Generates a fatal error where the message is TEXT.  Note that the
+     error is generated whenever this function is evaluated.  So, if
+     you put it inside a recipe or on the right side of a recursive
+     variable assignment, it won't be evaluated until later.  The TEXT
+     will be expanded before the error is generated.
+
+     For example,
+
+          ifdef ERROR1
+          $(error error is $(ERROR1))
+          endif
+
+     will generate a fatal error during the read of the makefile if the
+     `make' variable `ERROR1' is defined.  Or,
+
+          ERR = $(error found an error!)
+
+          .PHONY: err
+          err: ; $(ERR)
+
+     will generate a fatal error while `make' is running, if the `err'
+     target is invoked.
+
+`$(warning TEXT...)'
+     This function works similarly to the `error' function, above,
+     except that `make' doesn't exit.  Instead, TEXT is expanded and
+     the resulting message is displayed, but processing of the makefile
+     continues.
+
+     The result of the expansion of this function is the empty string.
+
+`$(info TEXT...)'
+     This function does nothing more than print its (expanded)
+     argument(s) to standard output.  No makefile name or line number
+     is added.  The result of the expansion of this function is the
+     empty string.
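+
+   For example, a makefile might report and check a setting as it is
+read (`BUILD_TYPE' is an illustrative variable with no special meaning
+to `make'):
+
+     BUILD_TYPE ?= debug
+     $(info Building in $(BUILD_TYPE) mode)
+     ifeq ($(filter debug release,$(BUILD_TYPE)),)
+     $(warning unexpected BUILD_TYPE `$(BUILD_TYPE)')
+     endif
+
+The `info' line is printed each time the makefile is read; the
+`warning' line is printed only for an unexpected value, and in either
+case processing of the makefile continues.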
+
+
+File: make.info,  Node: Running,  Next: Implicit Rules,  Prev: Functions,  Up: Top
+
+9 How to Run `make'
+*******************
+
+A makefile that says how to recompile a program can be used in more
+than one way.  The simplest use is to recompile every file that is out
+of date.  Usually, makefiles are written so that if you run `make' with
+no arguments, it does just that.
+
+   But you might want to update only some of the files; you might want
+to use a different compiler or different compiler options; you might
+want just to find out which files are out of date without changing them.
+
+   By giving arguments when you run `make', you can do any of these
+things and many others.
+
+   The exit status of `make' is always one of three values:
+`0'
+     The exit status is zero if `make' is successful.
+
+`2'
+     The exit status is two if `make' encounters any errors.  It will
+     print messages describing the particular errors.
+
+`1'
+     The exit status is one if you use the `-q' flag and `make'
+     determines that some target is not already up to date.  *Note
+     Instead of Executing Recipes: Instead of Execution.
+
+* Menu:
+
+* Makefile Arguments::          How to specify which makefile to use.
+* Goals::                       How to use goal arguments to specify which
+                                  parts of the makefile to use.
+* Instead of Execution::        How to use mode flags to specify what
+                                  kind of thing to do with the recipes
+                                  in the makefile other than simply
+                                  execute them.
+* Avoiding Compilation::        How to avoid recompiling certain files.
+* Overriding::                  How to override a variable to specify
+                                  an alternate compiler and other things.
+* Testing::                     How to proceed past some errors, to
+                                  test compilation.
+* Options Summary::             Summary of Options
+
+
+File: make.info,  Node: Makefile Arguments,  Next: Goals,  Prev: Running,  Up: Running
+
+9.1 Arguments to Specify the Makefile
+=====================================
+
+The way to specify the name of the makefile is with the `-f' or
+`--file' option (`--makefile' also works).  For example, `-f altmake'
+says to use the file `altmake' as the makefile.
+
+   If you use the `-f' flag several times and follow each `-f' with an
+argument, all the specified files are used jointly as makefiles.
+
+   If you do not use the `-f' or `--file' flag, the default is to try
+`GNUmakefile', `makefile', and `Makefile', in that order, and use the
+first of these three which exists or can be made (*note Writing
+Makefiles: Makefiles.).
+
+
+File: make.info,  Node: Goals,  Next: Instead of Execution,  Prev: Makefile Arguments,  Up: Running
+
+9.2 Arguments to Specify the Goals
+==================================
+
+The "goals" are the targets that `make' should strive ultimately to
+update.  Other targets are updated as well if they appear as
+prerequisites of goals, or prerequisites of prerequisites of goals, etc.
+
+   By default, the goal is the first target in the makefile (not
+counting targets that start with a period).  Therefore, makefiles are
+usually written so that the first target is for compiling the entire
+program or programs they describe.  If the first rule in the makefile
+has several targets, only the first target in the rule becomes the
+default goal, not the whole list.  You can manage the selection of the
+default goal from within your makefile using the `.DEFAULT_GOAL'
+variable (*note Other Special Variables: Special Variables.).
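+
+   For example, in a makefile whose first rule is a small helper
+target (the target names here are illustrative),
+
+     debug: ; @echo CFLAGS=$(CFLAGS)
+
+     all: ; @echo building everything
+
+     .DEFAULT_GOAL := all
+
+the last line makes plain `make' build `all', even though `debug' is
+the first target in the file.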
+
+   You can also specify a different goal or goals with command line
+arguments to `make'.  Use the name of the goal as an argument.  If you
+specify several goals, `make' processes each of them in turn, in the
+order you name them.
+
+   Any target in the makefile may be specified as a goal (unless it
+starts with `-' or contains an `=', in which case it will be parsed as
+a switch or variable definition, respectively).  Even targets not in
+the makefile may be specified, if `make' can find implicit rules that
+say how to make them.
+
+   `Make' will set the special variable `MAKECMDGOALS' to the list of
+goals you specified on the command line.  If no goals were given on the
+command line, this variable is empty.  Note that this variable should
+be used only in special circumstances.
+
+   An example of appropriate use is to avoid including `.d' files
+during `clean' rules (*note Automatic Prerequisites::), so `make' won't
+create them only to immediately remove them again:
+
+     sources = foo.c bar.c
+
+     ifneq ($(MAKECMDGOALS),clean)
+     include $(sources:.c=.d)
+     endif
+
+   One use of specifying a goal is if you want to compile only a part of
+the program, or only one of several programs.  Specify as a goal each
+file that you wish to remake.  For example, consider a directory
+containing several programs, with a makefile that starts like this:
+
+     .PHONY: all
+     all: size nm ld ar as
+
+   If you are working on the program `size', you might want to say
+`make size' so that only the files of that program are recompiled.
+
+   Another use of specifying a goal is to make files that are not
+normally made.  For example, there may be a file of debugging output,
+or a version of the program that is compiled specially for testing,
+which has a rule in the makefile but is not a prerequisite of the
+default goal.
+
+   Another use of specifying a goal is to run the recipe associated with
+a phony target (*note Phony Targets::) or empty target (*note Empty
+Target Files to Record Events: Empty Targets.).  Many makefiles contain
+a phony target named `clean' which deletes everything except source
+files.  Naturally, this is done only if you request it explicitly with
+`make clean'.  Following is a list of typical phony and empty target
+names.  *Note Standard Targets::, for a detailed list of all the
+standard target names which GNU software packages use.
+
+`all'
+     Make all the top-level targets the makefile knows about.
+
+`clean'
+     Delete all files that are normally created by running `make'.
+
+`mostlyclean'
+     Like `clean', but may refrain from deleting a few files that people
+     normally don't want to recompile.  For example, the `mostlyclean'
+     target for GCC does not delete `libgcc.a', because recompiling it
+     is rarely necessary and takes a lot of time.
+
+`distclean'
+`realclean'
+`clobber'
+     Any of these targets might be defined to delete _more_ files than
+     `clean' does.  For example, this would delete configuration files
+     or links that you would normally create as preparation for
+     compilation, even if the makefile itself cannot create these files.
+
+`install'
+     Copy the executable file into a directory that users typically
+     search for commands; copy any auxiliary files that the executable
+     uses into the directories where it will look for them.
+
+`print'
+     Print listings of the source files that have changed.
+
+`tar'
+     Create a tar file of the source files.
+
+`shar'
+     Create a shell archive (shar file) of the source files.
+
+`dist'
+     Create a distribution file of the source files.  This might be a
+     tar file, or a shar file, or a compressed version of one of the
+     above, or even more than one of the above.
+
+`TAGS'
+     Update a tags table for this program.
+
+`check'
+`test'
+     Perform self tests on the program this makefile builds.
+
+
+File: make.info,  Node: Instead of Execution,  Next: Avoiding Compilation,  Prev: Goals,  Up: Running
+
+9.3 Instead of Executing Recipes
+================================
+
+The makefile tells `make' how to tell whether a target is up to date,
+and how to update each target.  But updating the targets is not always
+what you want.  Certain options specify other activities for `make'.
+
+`-n'
+`--just-print'
+`--dry-run'
+`--recon'
+     "No-op".  The activity is to print what recipe would be used to
+     make the targets up to date, but not actually execute it.  Some
+     recipes are still executed, even with this flag (*note How the
+     `MAKE' Variable Works: MAKE Variable.).
+
+`-t'
+`--touch'
+     "Touch".  The activity is to mark the targets as up to date without
+     actually changing them.  In other words, `make' pretends to compile
+     the targets but does not really change their contents.
+
+`-q'
+`--question'
+     "Question".  The activity is to find out silently whether the
+     targets are up to date already; but execute no recipe in either
+     case.  In other words, neither compilation nor output will occur.
+
+`-W FILE'
+`--what-if=FILE'
+`--assume-new=FILE'
+`--new-file=FILE'
+     "What if".  Each `-W' flag is followed by a file name.  The given
+     files' modification times are recorded by `make' as being the
+     present time, although the actual modification times remain the
+     same.  You can use the `-W' flag in conjunction with the `-n' flag
+     to see what would happen if you were to modify specific files.
+
+   With the `-n' flag, `make' prints the recipe that it would normally
+execute but usually does not execute it.
+
+   With the `-t' flag, `make' ignores the recipes in the rules and uses
+(in effect) the command `touch' for each target that needs to be
+remade.  The `touch' command is also printed, unless `-s' or `.SILENT'
+is used.  For speed, `make' does not actually invoke the program
+`touch'.  It does the work directly.
+
+   With the `-q' flag, `make' prints nothing and executes no recipes,
+but the exit status code it returns is zero if and only if the targets
+to be considered are already up to date.  If the exit status is one,
+then some updating needs to be done.  If `make' encounters an error,
+the exit status is two, so you can distinguish an error from a target
+that is not up to date.
+
+   It is an error to use more than one of these three flags in the same
+invocation of `make'.
+
+   The `-n', `-t', and `-q' options do not affect recipe lines that
+begin with `+' characters or contain the strings `$(MAKE)' or
+`${MAKE}'.  Note that only the line containing the `+' character or the
+strings `$(MAKE)' or `${MAKE}' is run regardless of these options.
+Other lines in the same rule are not run unless they too begin with `+'
+or contain `$(MAKE)' or `${MAKE}' (*Note How the `MAKE' Variable Works:
+MAKE Variable.)
+
+   The `-t' flag prevents phony targets (*note Phony Targets::) from
+being updated, unless there are recipe lines beginning with `+' or
+containing `$(MAKE)' or `${MAKE}'.
+
+   The `-W' flag provides two features:
+
+   * If you also use the `-n' or `-q' flag, you can see what `make'
+     would do if you were to modify some files.
+
+   * Without the `-n' or `-q' flag, when `make' is actually executing
+     recipes, the `-W' flag can direct `make' to act as if some files
+     had been modified, without actually running the recipes for those
+     files.
+
+   Note that the options `-p' and `-v' allow you to obtain other
+information about `make' or about the makefiles in use (*note Summary
+of Options: Options Summary.).
+
+
+File: make.info,  Node: Avoiding Compilation,  Next: Overriding,  Prev: Instead of Execution,  Up: Running
+
+9.4 Avoiding Recompilation of Some Files
+========================================
+
+Sometimes you may have changed a source file but you do not want to
+recompile all the files that depend on it.  For example, suppose you add
+a macro or a declaration to a header file that many other files depend
+on.  Being conservative, `make' assumes that any change in the header
+file requires recompilation of all dependent files, but you know that
+they do not need to be recompiled and you would rather not waste the
+time waiting for them to compile.
+
+   If you anticipate the problem before changing the header file, you
+can use the `-t' flag.  This flag tells `make' not to run the recipes
+in the rules, but rather to mark the target up to date by changing its
+last-modification date.  You would follow this procedure:
+
+  1. Use the command `make' to recompile the source files that really
+     need recompilation, ensuring that the object files are up-to-date
+     before you begin.
+
+  2. Make the changes in the header files.
+
+  3. Use the command `make -t' to mark all the object files as up to
+     date.  The next time you run `make', the changes in the header
+     files will not cause any recompilation.
+
+   If you have already changed the header file at a time when some files
+do need recompilation, it is too late to do this.  Instead, you can use
+the `-o FILE' flag, which marks a specified file as "old" (*note
+Summary of Options: Options Summary.).  This means that the file itself
+will not be remade, and nothing else will be remade on its account.
+Follow this procedure:
+
+  1. Recompile the source files that need compilation for reasons
+     independent of the particular header file, with `make -o
+     HEADERFILE'.  If several header files are involved, use a separate
+     `-o' option for each header file.
+
+  2. Touch all the object files with `make -t'.
+
+
+File: make.info,  Node: Overriding,  Next: Testing,  Prev: Avoiding Compilation,  Up: Running
+
+9.5 Overriding Variables
+========================
+
+An argument that contains `=' specifies the value of a variable: `V=X'
+sets the value of the variable V to X.  If you specify a value in this
+way, all ordinary assignments of the same variable in the makefile are
+ignored; we say they have been "overridden" by the command line
+argument.
+
+   The most common way to use this facility is to pass extra flags to
+compilers.  For example, in a properly written makefile, the variable
+`CFLAGS' is included in each recipe that runs the C compiler, so a file
+`foo.c' would be compiled something like this:
+
+     cc -c $(CFLAGS) foo.c
+
+   Thus, whatever value you set for `CFLAGS' affects each compilation
+that occurs.  The makefile probably specifies the usual value for
+`CFLAGS', like this:
+
+     CFLAGS=-g
+
+   Each time you run `make', you can override this value if you wish.
+For example, if you say `make CFLAGS='-g -O'', each C compilation will
+be done with `cc -c -g -O'.  (This also illustrates how you can use
+quoting in the shell to enclose spaces and other special characters in
+the value of a variable when you override it.)
+
+   The variable `CFLAGS' is only one of many standard variables that
+exist just so that you can change them this way.  *Note Variables Used
+by Implicit Rules: Implicit Variables, for a complete list.
+
+   You can also program the makefile to look at additional variables of
+your own, giving the user the ability to control other aspects of how
+the makefile works by changing the variables.
+
+   When you override a variable with a command line argument, you can
+define either a recursively-expanded variable or a simply-expanded
+variable.  The examples shown above make a recursively-expanded
+variable; to make a simply-expanded variable, write `:=' instead of
+`='.  But, unless you want to include a variable reference or function
+call in the _value_ that you specify, it makes no difference which kind
+of variable you create.
+
+   There is one way that the makefile can change a variable that you
+have overridden.  This is to use the `override' directive, which is a
+line that looks like this: `override VARIABLE = VALUE' (*note The
+`override' Directive: Override Directive.).
+
+
+File: make.info,  Node: Testing,  Next: Options Summary,  Prev: Overriding,  Up: Running
+
+9.6 Testing the Compilation of a Program
+========================================
+
+Normally, when an error happens in executing a shell command, `make'
+gives up immediately, returning a nonzero status.  No further recipes
+are executed for any target.  The error implies that the goal cannot be
+correctly remade, and `make' reports this as soon as it knows.
+
+   When you are compiling a program that you have just changed, this is
+not what you want.  Instead, you would rather that `make' try compiling
+every file that can be tried, to show you as many compilation errors as
+possible.
+
+   On these occasions, you should use the `-k' or `--keep-going' flag.
+This tells `make' to continue to consider the other prerequisites of
+the pending targets, remaking them if necessary, before it gives up and
+returns nonzero status.  For example, after an error in compiling one
+object file, `make -k' will continue compiling other object files even
+though it already knows that linking them will be impossible.  In
+addition to continuing after failed shell commands, `make -k' will
+continue as much as possible after discovering that it does not know
+how to make a target or prerequisite file.  This will always cause an
+error message, but without `-k', it is a fatal error (*note Summary of
+Options: Options Summary.).
+
+   The usual behavior of `make' assumes that your purpose is to get the
+goals up to date; once `make' learns that this is impossible, it might
+as well report the failure immediately.  The `-k' flag says that the
+real purpose is to test as much as possible of the changes made in the
+program, perhaps to find several independent problems so that you can
+correct them all before the next attempt to compile.  This is why Emacs'
+`M-x compile' command passes the `-k' flag by default.
+
+
+File: make.info,  Node: Options Summary,  Prev: Testing,  Up: Running
+
+9.7 Summary of Options
+======================
+
+Here is a table of all the options `make' understands:
+
+`-b'
+`-m'
+     These options are ignored for compatibility with other versions of
+     `make'.
+
+`-B'
+`--always-make'
+     Consider all targets out-of-date.  GNU `make' proceeds to consider
+     targets and their prerequisites using the normal algorithms;
+     however, all targets so considered are always remade regardless of
+     the status of their prerequisites.  To avoid infinite recursion, if
+     `MAKE_RESTARTS' (*note Other Special Variables: Special
+     Variables.) is set to a number greater than 0 this option is
+     disabled when considering whether to remake makefiles (*note How
+     Makefiles Are Remade: Remaking Makefiles.).
+
+`-C DIR'
+`--directory=DIR'
+     Change to directory DIR before reading the makefiles.  If multiple
+     `-C' options are specified, each is interpreted relative to the
+     previous one: `-C / -C etc' is equivalent to `-C /etc'.  This is
+     typically used with recursive invocations of `make' (*note
+     Recursive Use of `make': Recursion.).
+
+`-d'
+     Print debugging information in addition to normal processing.  The
+     debugging information says which files are being considered for
+     remaking, which file-times are being compared and with what
+     results, which files actually need to be remade, which implicit
+     rules are considered and which are applied--everything interesting
+     about how `make' decides what to do.  The `-d' option is
+     equivalent to `--debug=a' (see below).
+
+`--debug[=OPTIONS]'
+     Print debugging information in addition to normal processing.
+     Various levels and types of output can be chosen.  With no
+     arguments, print the "basic" level of debugging.  Possible
+     arguments are below; only the first character is considered, and
+     values must be comma- or space-separated.
+
+    `a (all)'
+          All types of debugging output are enabled.  This is
+          equivalent to using `-d'.
+
+    `b (basic)'
+          Basic debugging prints each target that was found to be
+          out-of-date, and whether the build was successful or not.
+
+    `v (verbose)'
+          A level above `basic'; includes messages about which
+          makefiles were parsed, prerequisites that did not need to be
+          rebuilt, etc.  This option also enables `basic' messages.
+
+    `i (implicit)'
+          Prints messages describing the implicit rule searches for
+          each target.  This option also enables `basic' messages.
+
+    `j (jobs)'
+          Prints messages giving details on the invocation of specific
+          subcommands.
+
+    `m (makefile)'
+          By default, the above messages are not enabled while trying
+          to remake the makefiles.  This option enables messages while
+          rebuilding makefiles, too.  Note that the `all' option does
+          enable this option.  This option also enables `basic'
+          messages.
+
+`-e'
+`--environment-overrides'
+     Give variables taken from the environment precedence over
+     variables from makefiles.  *Note Variables from the Environment:
+     Environment.
+
+`--eval=STRING'
+     Evaluate STRING as makefile syntax.  This is a command-line
+     version of the `eval' function (*note Eval Function::).  The
+     evaluation is performed after the default rules and variables have
+     been defined, but before any makefiles are read.
+
+`-f FILE'
+`--file=FILE'
+`--makefile=FILE'
+     Read the file named FILE as a makefile.  *Note Writing Makefiles:
+     Makefiles.
+
+`-h'
+`--help'
+     Remind you of the options that `make' understands and then exit.
+
+`-i'
+`--ignore-errors'
+     Ignore all errors in recipes executed to remake files.  *Note
+     Errors in Recipes: Errors.
+
+`-I DIR'
+`--include-dir=DIR'
+     Specifies a directory DIR to search for included makefiles.  *Note
+     Including Other Makefiles: Include.  If several `-I' options are
+     used to specify several directories, the directories are searched
+     in the order specified.
+
+`-j [JOBS]'
+`--jobs[=JOBS]'
+     Specifies the number of recipes (jobs) to run simultaneously.
+     With no argument, `make' runs as many recipes simultaneously as
+     possible.  If there is more than one `-j' option, the last one is
+     effective.  *Note Parallel Execution: Parallel, for more
+     information on how recipes are run.  Note that this option is
+     ignored on MS-DOS.
+
+`-k'
+`--keep-going'
+     Continue as much as possible after an error.  While the target that
+     failed, and those that depend on it, cannot be remade, the other
+     prerequisites of these targets can be processed all the same.
+     *Note Testing the Compilation of a Program: Testing.
+
+`-l [LOAD]'
+`--load-average[=LOAD]'
+`--max-load[=LOAD]'
+     Specifies that no new recipes should be started if there are other
+     recipes running and the load average is at least LOAD (a
+     floating-point number).  With no argument, removes a previous load
+     limit.  *Note Parallel Execution: Parallel.
+
+`-L'
+`--check-symlink-times'
+     On systems that support symbolic links, this option causes `make'
+     to consider the timestamps on any symbolic links in addition to the
+     timestamp on the file referenced by those links.  When this option
+     is provided, the most recent timestamp among the file and the
+     symbolic links is taken as the modification time for this target
+     file.
+
+`-n'
+`--just-print'
+`--dry-run'
+`--recon'
+     Print the recipe that would be executed, but do not execute it
+     (except in certain circumstances).  *Note Instead of Executing
+     Recipes: Instead of Execution.
+
+`-o FILE'
+`--old-file=FILE'
+`--assume-old=FILE'
+     Do not remake the file FILE even if it is older than its
+     prerequisites, and do not remake anything on account of changes in
+     FILE.  Essentially the file is treated as very old and its rules
+     are ignored.  *Note Avoiding Recompilation of Some Files: Avoiding
+     Compilation.
+
+`-p'
+`--print-data-base'
+     Print the data base (rules and variable values) that results from
+     reading the makefiles; then execute as usual or as otherwise
+     specified.  This also prints the version information given by the
+     `-v' switch (see below).  To print the data base without trying to
+     remake any files, use `make -qp'.  To print the data base of
+     predefined rules and variables, use `make -p -f /dev/null'.  The
+     data base output contains file name and line number information for
+     recipe and variable definitions, so it can be a useful debugging
+     tool in complex environments.
+
+`-q'
+`--question'
+     "Question mode".  Do not run any recipes, or print anything; just
+     return an exit status that is zero if the specified targets are
+     already up to date, one if any remaking is required, or two if an
+     error is encountered.  *Note Instead of Executing Recipes: Instead
+     of Execution.
+
+`-r'
+`--no-builtin-rules'
+     Eliminate use of the built-in implicit rules (*note Using Implicit
+     Rules: Implicit Rules.).  You can still define your own by writing
+     pattern rules (*note Defining and Redefining Pattern Rules:
+     Pattern Rules.).  The `-r' option also clears out the default list
+     of suffixes for suffix rules (*note Old-Fashioned Suffix Rules:
+     Suffix Rules.).  But you can still define your own suffixes with a
+     rule for `.SUFFIXES', and then define your own suffix rules.  Note
+     that only _rules_ are affected by the `-r' option; default
+     variables remain in effect (*note Variables Used by Implicit
+     Rules: Implicit Variables.); see the `-R' option below.
+
+`-R'
+`--no-builtin-variables'
+     Eliminate use of the built-in rule-specific variables (*note
+     Variables Used by Implicit Rules: Implicit Variables.).  You can
+     still define your own, of course.  The `-R' option also
+     automatically enables the `-r' option (see above), since it
+     doesn't make sense to have implicit rules without any definitions
+     for the variables that they use.
+
+`-s'
+`--silent'
+`--quiet'
+     Silent operation; do not print the recipes as they are executed.
+     *Note Recipe Echoing: Echoing.
+
+`-S'
+`--no-keep-going'
+`--stop'
+     Cancel the effect of the `-k' option.  This is never necessary
+     except in a recursive `make' where `-k' might be inherited from
+     the top-level `make' via `MAKEFLAGS' (*note Recursive Use of
+     `make': Recursion.)  or if you set `-k' in `MAKEFLAGS' in your
+     environment.
+
+`-t'
+`--touch'
+     Touch files (mark them up to date without really changing them)
+     instead of running their recipes.  This is used to pretend that the
+     recipes were done, in order to fool future invocations of `make'.
+     *Note Instead of Executing Recipes: Instead of Execution.
+
+`-v'
+`--version'
+     Print the version of the `make' program plus a copyright, a list
+     of authors, and a notice that there is no warranty; then exit.
+
+`-w'
+`--print-directory'
+     Print a message containing the working directory both before and
+     after executing the makefile.  This may be useful for tracking
+     down errors from complicated nests of recursive `make' commands.
+     *Note Recursive Use of `make': Recursion.  (In practice, you
+     rarely need to specify this option since `make' does it for you;
+     see *note The `--print-directory' Option: -w Option.)
+
+`--no-print-directory'
+     Disable printing of the working directory under `-w'.  This option
+     is useful when `-w' is turned on automatically, but you do not
+     want to see the extra messages.  *Note The `--print-directory'
+     Option: -w Option.
+
+`-W FILE'
+`--what-if=FILE'
+`--new-file=FILE'
+`--assume-new=FILE'
+     Pretend that the target FILE has just been modified.  When used
+     with the `-n' flag, this shows you what would happen if you were
+     to modify that file.  Without `-n', it is almost the same as
+     running a `touch' command on the given file before running `make',
+     except that the modification time is changed only in the
+     imagination of `make'.  *Note Instead of Executing Recipes:
+     Instead of Execution.
+
+`--warn-undefined-variables'
+     Issue a warning message whenever `make' sees a reference to an
+     undefined variable.  This can be helpful when you are trying to
+     debug makefiles which use variables in complex ways.
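+
+   For example, assuming an ordinary makefile in the current directory
+(the directory and output file names below are illustrative), a few
+common combinations of these options are:
+
+     make -j4 -k             # run 4 recipes at once, keep going on errors
+     make -n                 # print recipes without executing them
+     make -C src --debug=b   # build in `src' with basic debug output
+     make -qp > db.txt       # dump the data base without remaking anything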
+
+
+File: make.info,  Node: Implicit Rules,  Next: Archives,  Prev: Running,  Up: Top
+
+10 Using Implicit Rules
+***********************
+
+Certain standard ways of remaking target files are used very often.  For
+example, one customary way to make an object file is from a C source
+file using the C compiler, `cc'.
+
+   "Implicit rules" tell `make' how to use customary techniques so that
+you do not have to specify them in detail when you want to use them.
+For example, there is an implicit rule for C compilation.  File names
+determine which implicit rules are run.  For example, C compilation
+typically takes a `.c' file and makes a `.o' file.  So `make' applies
+the implicit rule for C compilation when it sees this combination of
+file name endings.
+
+   A chain of implicit rules can apply in sequence; for example, `make'
+will remake a `.o' file from a `.y' file by way of a `.c' file.
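+
+   For instance, assuming only `parse.y' exists, asking for `parse.o'
+runs something along these lines with the default rules (a sketch; the
+file name is illustrative):
+
+     bison -y parse.y         # yields y.tab.c, which is moved to parse.c
+     cc -c parse.c -o parse.o
+
+The generated `.c' file is treated as an intermediate file and may be
+deleted once `make' is finished with it.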
+
+   The built-in implicit rules use several variables in their recipes so
+that, by changing the values of the variables, you can change the way
+the implicit rule works.  For example, the variable `CFLAGS' controls
+the flags given to the C compiler by the implicit rule for C
+compilation.
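+
+   For example, assuming the built-in C rule is what compiles `foo.o',
+either of the following changes the flags passed to the compiler (a
+minimal sketch):
+
+     CFLAGS = -g -O2
+
+in the makefile, or on the command line:
+
+     make CFLAGS='-g -O2'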
+
+   You can define your own implicit rules by writing "pattern rules".
+
+   "Suffix rules" are a more limited way to define implicit rules.
+Pattern rules are more general and clearer, but suffix rules are
+retained for compatibility.
+
+* Menu:
+
+* Using Implicit::              How to use an existing implicit rule
+                                  to get the recipes for updating a file.
+* Catalogue of Rules::          A list of built-in implicit rules.
+* Implicit Variables::          How to change what predefined rules do.
+* Chained Rules::               How to use a chain of implicit rules.
+* Pattern Rules::               How to define new implicit rules.
+* Last Resort::                 How to define recipes for rules which
+                                  cannot find any.
+* Suffix Rules::                The old-fashioned style of implicit rule.
+* Implicit Rule Search::        The precise algorithm for applying
+                                  implicit rules.
+
+
+File: make.info,  Node: Using Implicit,  Next: Catalogue of Rules,  Prev: Implicit Rules,  Up: Implicit Rules
+
+10.1 Using Implicit Rules
+=========================
+
+To allow `make' to find a customary method for updating a target file,
+all you have to do is refrain from specifying recipes yourself.  Either
+write a rule with no recipe, or don't write a rule at all.  Then `make'
+will figure out which implicit rule to use based on which kind of
+source file exists or can be made.
+
+   For example, suppose the makefile looks like this:
+
+     foo : foo.o bar.o
+             cc -o foo foo.o bar.o $(CFLAGS) $(LDFLAGS)
+
+Because you mention `foo.o' but do not give a rule for it, `make' will
+automatically look for an implicit rule that tells how to update it.
+This happens whether or not the file `foo.o' currently exists.
+
+   If an implicit rule is found, it can supply both a recipe and one or
+more prerequisites (the source files).  You would want to write a rule
+for `foo.o' with no recipe if you need to specify additional
+prerequisites, such as header files, that the implicit rule cannot
+supply.
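+
+   For example, assuming `foo.c' includes `foo.h' and `defs.h', such a
+recipe-less rule might look like:
+
+     foo.o : foo.h defs.h
+
+The headers become extra prerequisites of `foo.o', while the recipe
+still comes from the built-in rule for C compilation.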
+
+   Each implicit rule has a target pattern and prerequisite patterns.
+There may be many implicit rules with the same target pattern.  For
+example, numerous rules make `.o' files: one, from a `.c' file with the
+C compiler; another, from a `.p' file with the Pascal compiler; and so
+on.  The rule that actually applies is the one whose prerequisites
+exist or can be made.  So, if you have a file `foo.c', `make' will run
+the C compiler; otherwise, if you have a file `foo.p', `make' will run
+the Pascal compiler; and so on.
+
+   Of course, when you write the makefile, you know which implicit rule
+you want `make' to use, and you know it will choose that one because you
+know which possible prerequisite files are supposed to exist.  *Note
+Catalogue of Implicit Rules: Catalogue of Rules, for a catalogue of all
+the predefined implicit rules.
+
+   Above, we said an implicit rule applies if the required
+prerequisites "exist or can be made".  A file "can be made" if it is
+mentioned explicitly in the makefile as a target or a prerequisite, or
+if an implicit rule can be recursively found for how to make it.  When
+an implicit prerequisite is the result of another implicit rule, we say
+that "chaining" is occurring.  *Note Chains of Implicit Rules: Chained
+Rules.
+
+   In general, `make' searches for an implicit rule for each target, and
+for each double-colon rule, that has no recipe.  A file that is
+mentioned only as a prerequisite is considered a target whose rule
+specifies nothing, so implicit rule search happens for it.  *Note
+Implicit Rule Search Algorithm: Implicit Rule Search, for the details
+of how the search is done.
+
+   Note that explicit prerequisites do not influence implicit rule
+search.  For example, consider this explicit rule:
+
+     foo.o: foo.p
+
+The prerequisite on `foo.p' does not necessarily mean that `make' will
+remake `foo.o' according to the implicit rule to make an object file, a
+`.o' file, from a Pascal source file, a `.p' file.  For example, if
+`foo.c' also exists, the implicit rule to make an object file from a C
+source file is used instead, because it appears before the Pascal rule
+in the list of predefined implicit rules (*note Catalogue of Implicit
+Rules: Catalogue of Rules.).
+
+   If you do not want an implicit rule to be used for a target that has
+no recipe, you can give that target an empty recipe by writing a
+semicolon (*note Defining Empty Recipes: Empty Recipes.).
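+
+   For example, writing
+
+     foo.o : ;
+
+gives `foo.o' an empty recipe, so no implicit rule search is done for
+it.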
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info-2 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info-2
new file mode 100644
index 0000000..cd95ff0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/make.info-2
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/standards.info b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/standards.info
new file mode 100644
index 0000000..be301b2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/info/standards.info
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/compile b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/compile
new file mode 100755
index 0000000..bac481c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/compile
@@ -0,0 +1,310 @@
+#! /bin/sh
+# Wrapper for compilers which do not understand `-c -o'.
+
+scriptversion=2010-11-15.09; # UTC
+
+# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2009, 2010 Free Software
+# Foundation, Inc.
+# Written by Tom Tromey <tromey@cygnus.com>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+nl='
+'
+
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent tools from complaining about whitespace usage.
+IFS=" ""	$nl"
+
+file_conv=
+
+# func_file_conv build_file lazy
+# Convert a $build file to $host form and store it in $file
+# Currently only supports Win32 hosts. If the determined conversion
+# type is listed in (the comma separated) LAZY, no conversion will
+# take place.
+func_file_conv ()
+{
+  file=$1
+  case $file in
+    / | /[!/]*) # absolute file, and not a UNC file
+      if test -z "$file_conv"; then
+	# lazily determine how to convert abs files
+	case `uname -s` in
+	  MINGW*)
+	    file_conv=mingw
+	    ;;
+	  CYGWIN*)
+	    file_conv=cygwin
+	    ;;
+	  *)
+	    file_conv=wine
+	    ;;
+	esac
+      fi
+      case $file_conv/,$2, in
+	*,$file_conv,*)
+	  ;;
+	mingw/*)
+	  file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
+	  ;;
+	cygwin/*)
+	  file=`cygpath -m "$file" || echo "$file"`
+	  ;;
+	wine/*)
+	  file=`winepath -w "$file" || echo "$file"`
+	  ;;
+      esac
+      ;;
+  esac
+}
+
+# func_cl_wrapper cl arg...
+# Adjust compile command to suit cl
+func_cl_wrapper ()
+{
+  # Assume a capable shell
+  lib_path=
+  shared=:
+  linker_opts=
+  for arg
+  do
+    if test -n "$eat"; then
+      eat=
+    else
+      case $1 in
+	-o)
+	  # configure might choose to run compile as `compile cc -o foo foo.c'.
+	  eat=1
+	  case $2 in
+	    *.o | *.[oO][bB][jJ])
+	      func_file_conv "$2"
+	      set x "$@" -Fo"$file"
+	      shift
+	      ;;
+	    *)
+	      func_file_conv "$2"
+	      set x "$@" -Fe"$file"
+	      shift
+	      ;;
+	  esac
+	  ;;
+	-I*)
+	  func_file_conv "${1#-I}" mingw
+	  set x "$@" -I"$file"
+	  shift
+	  ;;
+	-l*)
+	  lib=${1#-l}
+	  found=no
+	  save_IFS=$IFS
+	  IFS=';'
+	  for dir in $lib_path $LIB
+	  do
+	    IFS=$save_IFS
+	    if $shared && test -f "$dir/$lib.dll.lib"; then
+	      found=yes
+	      set x "$@" "$dir/$lib.dll.lib"
+	      break
+	    fi
+	    if test -f "$dir/$lib.lib"; then
+	      found=yes
+	      set x "$@" "$dir/$lib.lib"
+	      break
+	    fi
+	  done
+	  IFS=$save_IFS
+
+	  test "$found" != yes && set x "$@" "$lib.lib"
+	  shift
+	  ;;
+	-L*)
+	  func_file_conv "${1#-L}"
+	  if test -z "$lib_path"; then
+	    lib_path=$file
+	  else
+	    lib_path="$lib_path;$file"
+	  fi
+	  linker_opts="$linker_opts -LIBPATH:$file"
+	  ;;
+	-static)
+	  shared=false
+	  ;;
+	-Wl,*)
+	  arg=${1#-Wl,}
+	  save_ifs="$IFS"; IFS=','
+	  for flag in $arg; do
+	    IFS="$save_ifs"
+	    linker_opts="$linker_opts $flag"
+	  done
+	  IFS="$save_ifs"
+	  ;;
+	-Xlinker)
+	  eat=1
+	  linker_opts="$linker_opts $2"
+	  ;;
+	-*)
+	  set x "$@" "$1"
+	  shift
+	  ;;
+	*.cc | *.CC | *.cxx | *.CXX | *.[cC]++)
+	  func_file_conv "$1"
+	  set x "$@" -Tp"$file"
+	  shift
+	  ;;
+	*.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO])
+	  func_file_conv "$1" mingw
+	  set x "$@" "$file"
+	  shift
+	  ;;
+	*)
+	  set x "$@" "$1"
+	  shift
+	  ;;
+      esac
+    fi
+    shift
+  done
+  if test -n "$linker_opts"; then
+    linker_opts="-link$linker_opts"
+  fi
+  exec "$@" $linker_opts
+  exit 1
+}
+
+eat=
+
+case $1 in
+  '')
+     echo "$0: No command.  Try \`$0 --help' for more information." 1>&2
+     exit 1;
+     ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: compile [--help] [--version] PROGRAM [ARGS]
+
+Wrapper for compilers which do not understand `-c -o'.
+Remove `-o dest.o' from ARGS, run PROGRAM with the remaining
+arguments, and rename the output as expected.
+
+If you are trying to build a whole package this is not the
+right script to run: please start by reading the file `INSTALL'.
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "compile $scriptversion"
+    exit $?
+    ;;
+  cl | *[/\\]cl | cl.exe | *[/\\]cl.exe )
+    func_cl_wrapper "$@"      # Doesn't return...
+    ;;
+esac
+
+ofile=
+cfile=
+
+for arg
+do
+  if test -n "$eat"; then
+    eat=
+  else
+    case $1 in
+      -o)
+	# configure might choose to run compile as `compile cc -o foo foo.c'.
+	# So we strip `-o arg' only if arg is an object.
+	eat=1
+	case $2 in
+	  *.o | *.obj)
+	    ofile=$2
+	    ;;
+	  *)
+	    set x "$@" -o "$2"
+	    shift
+	    ;;
+	esac
+	;;
+      *.c)
+	cfile=$1
+	set x "$@" "$1"
+	shift
+	;;
+      *)
+	set x "$@" "$1"
+	shift
+	;;
+    esac
+  fi
+  shift
+done
+
+if test -z "$ofile" || test -z "$cfile"; then
+  # If no `-o' option was seen then we might have been invoked from a
+  # pattern rule where we don't need one.  That is ok -- this is a
+  # normal compilation that the losing compiler can handle.  If no
+  # `.c' file was seen then we are probably linking.  That is also
+  # ok.
+  exec "$@"
+fi
+
+# Name of file we expect compiler to create.
+cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`
+
+# Create the lock directory.
+# Note: use `[/\\:.-]' here to ensure that we don't use the same name
+# that we are using for the .o file.  Also, base the name on the expected
+# object file name, since that is what matters with a parallel build.
+lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
+while true; do
+  if mkdir "$lockdir" >/dev/null 2>&1; then
+    break
+  fi
+  sleep 1
+done
+# FIXME: race condition here if user kills between mkdir and trap.
+trap "rmdir '$lockdir'; exit 1" 1 2 15
+
+# Run the compile.
+"$@"
+ret=$?
+
+if test -f "$cofile"; then
+  test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
+elif test -f "${cofile}bj"; then
+  test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
+fi
+
+rmdir "$lockdir"
+exit $ret
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/config.guess b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/config.guess
new file mode 100755
index 0000000..43f0cdb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/config.guess
@@ -0,0 +1,1519 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+#   2011 Free Software Foundation, Inc.
+
+timestamp='2011-10-01'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Originally written by Per Bothner.  Please send patches (context
+# diff format) to <config-patches@gnu.org> and include a ChangeLog
+# entry.
+#
+# This script attempts to guess a canonical system name similar to
+# config.sub.  If it succeeds, it prints the system name on stdout, and
+# exits with 0.  Otherwise, it exits with 1.
+#
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free
+Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+    * )
+       break ;;
+  esac
+done
+
+if test $# != 0; then
+  echo "$me: too many arguments$help" >&2
+  exit 1
+fi
+
+trap 'exit 1' 1 2 15
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+set_cc_for_build='
+trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
+: ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
+dummy=$tmp/dummy ;
+tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
+case $CC_FOR_BUILD,$HOST_CC,$CC in
+ ,,)    echo "int x;" > $dummy.c ;
+	for c in cc gcc c89 c99 ; do
+	  if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
+	     CC_FOR_BUILD="$c"; break ;
+	  fi ;
+	done ;
+	if test x"$CC_FOR_BUILD" = x ; then
+	  CC_FOR_BUILD=no_compiler_found ;
+	fi
+	;;
+ ,,*)   CC_FOR_BUILD=$CC ;;
+ ,*,*)  CC_FOR_BUILD=$HOST_CC ;;
+esac ; set_cc_for_build= ;'
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi@noc.rutgers.edu 1994-08-24)
+if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
+	PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null`  || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+# Note: order is significant - the case branches are not exclusive.
+
+case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+    *:NetBSD:*:*)
+	# NetBSD (nbsd) targets should (where applicable) match one or
+	# more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*,
+	# *-*-netbsdecoff* and *-*-netbsd*.  For targets that recently
+	# switched to ELF, *-*-netbsd* would select the old
+	# object file format.  This provides both forward
+	# compatibility and a consistent mechanism for selecting the
+	# object file format.
+	#
+	# Note: NetBSD doesn't particularly care about the vendor
+	# portion of the name.  We always set it to "unknown".
+	sysctl="sysctl -n hw.machine_arch"
+	UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
+	    /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
+	case "${UNAME_MACHINE_ARCH}" in
+	    armeb) machine=armeb-unknown ;;
+	    arm*) machine=arm-unknown ;;
+	    sh3el) machine=shl-unknown ;;
+	    sh3eb) machine=sh-unknown ;;
+	    sh5el) machine=sh5le-unknown ;;
+	    *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
+	esac
+	# The Operating System including object format, if it has switched
+	# to ELF recently, or will in the future.
+	case "${UNAME_MACHINE_ARCH}" in
+	    arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+		eval $set_cc_for_build
+		if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+			| grep -q __ELF__
+		then
+		    # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+		    # Return netbsd for either.  FIX?
+		    os=netbsd
+		else
+		    os=netbsdelf
+		fi
+		;;
+	    *)
+		os=netbsd
+		;;
+	esac
+	# The OS release
+	# Debian GNU/NetBSD machines have a different userland, and
+	# thus, need a distinct triplet. However, they do not need
+	# kernel version information, so it can be replaced with a
+	# suitable tag, in the style of linux-gnu.
+	case "${UNAME_VERSION}" in
+	    Debian*)
+		release='-gnu'
+		;;
+	    *)
+		release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
+		;;
+	esac
+	# Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+	# contains redundant information, the shorter form:
+	# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+	echo "${machine}-${os}${release}"
+	exit ;;
+    *:OpenBSD:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+	echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
+	exit ;;
+    *:ekkoBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
+	exit ;;
+    *:SolidBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE}
+	exit ;;
+    macppc:MirBSD:*:*)
+	echo powerpc-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    *:MirBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    alpha:OSF1:*:*)
+	case $UNAME_RELEASE in
+	*4.0)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+		;;
+	*5.*)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+		;;
+	esac
+	# According to Compaq, /usr/sbin/psrinfo has been available on
+	# OSF/1 and Tru64 systems produced since 1995.  I hope that
+	# covers most systems running today.  This code pipes the CPU
+	# types through head -n 1, so we only detect the type of CPU 0.
+	ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^  The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+	case "$ALPHA_CPU_TYPE" in
+	    "EV4 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV4.5 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "LCA4 (21066/21068)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV5 (21164)")
+		UNAME_MACHINE="alphaev5" ;;
+	    "EV5.6 (21164A)")
+		UNAME_MACHINE="alphaev56" ;;
+	    "EV5.6 (21164PC)")
+		UNAME_MACHINE="alphapca56" ;;
+	    "EV5.7 (21164PC)")
+		UNAME_MACHINE="alphapca57" ;;
+	    "EV6 (21264)")
+		UNAME_MACHINE="alphaev6" ;;
+	    "EV6.7 (21264A)")
+		UNAME_MACHINE="alphaev67" ;;
+	    "EV6.8CB (21264C)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8AL (21264B)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8CX (21264D)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.9A (21264/EV69A)")
+		UNAME_MACHINE="alphaev69" ;;
+	    "EV7 (21364)")
+		UNAME_MACHINE="alphaev7" ;;
+	    "EV7.9 (21364A)")
+		UNAME_MACHINE="alphaev79" ;;
+	esac
+	# A Pn.n version is a patched version.
+	# A Vn.n version is a released version.
+	# A Tn.n version is a released field test version.
+	# A Xn.n version is an unreleased experimental baselevel.
+	# 1.2 uses "1.2" for uname -r.
+	echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	# Reset EXIT trap before exiting to avoid spurious non-zero exit code.
+	exitcode=$?
+	trap '' 0
+	exit $exitcode ;;
+    Alpha\ *:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# Should we change UNAME_MACHINE based on the output of uname instead
+	# of the specific Alpha model?
+	echo alpha-pc-interix
+	exit ;;
+    21064:Windows_NT:50:3)
+	echo alpha-dec-winnt3.5
+	exit ;;
+    Amiga*:UNIX_System_V:4.0:*)
+	echo m68k-unknown-sysv4
+	exit ;;
+    *:[Aa]miga[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-amigaos
+	exit ;;
+    *:[Mm]orph[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-morphos
+	exit ;;
+    *:OS/390:*:*)
+	echo i370-ibm-openedition
+	exit ;;
+    *:z/VM:*:*)
+	echo s390-ibm-zvmoe
+	exit ;;
+    *:OS400:*:*)
+	echo powerpc-ibm-os400
+	exit ;;
+    arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+	echo arm-acorn-riscix${UNAME_RELEASE}
+	exit ;;
+    arm:riscos:*:*|arm:RISCOS:*:*)
+	echo arm-unknown-riscos
+	exit ;;
+    SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+	echo hppa1.1-hitachi-hiuxmpp
+	exit ;;
+    Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+	# akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+	if test "`(/bin/universe) 2>/dev/null`" = att ; then
+		echo pyramid-pyramid-sysv3
+	else
+		echo pyramid-pyramid-bsd
+	fi
+	exit ;;
+    NILE*:*:*:dcosx)
+	echo pyramid-pyramid-svr4
+	exit ;;
+    DRS?6000:unix:4.0:6*)
+	echo sparc-icl-nx6
+	exit ;;
+    DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+	case `/usr/bin/uname -p` in
+	    sparc) echo sparc-icl-nx7; exit ;;
+	esac ;;
+    s390x:SunOS:*:*)
+	echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4H:SunOS:5.*:*)
+	echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+	echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
+	echo i386-pc-auroraux${UNAME_RELEASE}
+	exit ;;
+    i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+	eval $set_cc_for_build
+	SUN_ARCH="i386"
+	# If there is a compiler, see if it is configured for 64-bit objects.
+	# Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+	# This test works for both compilers.
+	if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+	    if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+		(CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+		grep IS_64BIT_ARCH >/dev/null
+	    then
+		SUN_ARCH="x86_64"
+	    fi
+	fi
+	echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:6*:*)
+	# According to config.sub, this is the proper way to canonicalize
+	# SunOS6.  Hard to guess exactly what SunOS6 will be like, but
+	# it's likely to be more like Solaris than SunOS4.
+	echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:*:*)
+	case "`/usr/bin/arch -k`" in
+	    Series*|S4*)
+		UNAME_RELEASE=`uname -v`
+		;;
+	esac
+	# Japanese Language versions have a version number like `4.1.3-JL'.
+	echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
+	exit ;;
+    sun3*:SunOS:*:*)
+	echo m68k-sun-sunos${UNAME_RELEASE}
+	exit ;;
+    sun*:*:4.2BSD:*)
+	UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+	test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
+	case "`/bin/arch`" in
+	    sun3)
+		echo m68k-sun-sunos${UNAME_RELEASE}
+		;;
+	    sun4)
+		echo sparc-sun-sunos${UNAME_RELEASE}
+		;;
+	esac
+	exit ;;
+    aushp:SunOS:*:*)
+	echo sparc-auspex-sunos${UNAME_RELEASE}
+	exit ;;
+    # The situation for MiNT is a little confusing.  The machine name
+    # can be virtually everything (everything which is not
+    # "atarist" or "atariste" at least should have a processor
+    # > m68000).  The system name ranges from "MiNT" over "FreeMiNT"
+    # to the lowercase version "mint" (or "freemint").  Finally
+    # the system name "TOS" denotes a system which is actually not
+    # MiNT.  But MiNT is downward compatible to TOS, so this should
+    # be no problem.
+    atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+	echo m68k-milan-mint${UNAME_RELEASE}
+	exit ;;
+    hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+	echo m68k-hades-mint${UNAME_RELEASE}
+	exit ;;
+    *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+	echo m68k-unknown-mint${UNAME_RELEASE}
+	exit ;;
+    m68k:machten:*:*)
+	echo m68k-apple-machten${UNAME_RELEASE}
+	exit ;;
+    powerpc:machten:*:*)
+	echo powerpc-apple-machten${UNAME_RELEASE}
+	exit ;;
+    RISC*:Mach:*:*)
+	echo mips-dec-mach_bsd4.3
+	exit ;;
+    RISC*:ULTRIX:*:*)
+	echo mips-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    VAX*:ULTRIX*:*:*)
+	echo vax-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    2020:CLIX:*:* | 2430:CLIX:*:*)
+	echo clipper-intergraph-clix${UNAME_RELEASE}
+	exit ;;
+    mips:*:*:UMIPS | mips:*:*:RISCos)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+#ifdef __cplusplus
+#include <stdio.h>  /* for printf() prototype */
+	int main (int argc, char *argv[]) {
+#else
+	int main (argc, argv) int argc; char *argv[]; {
+#endif
+	#if defined (host_mips) && defined (MIPSEB)
+	#if defined (SYSTYPE_SYSV)
+	  printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_SVR4)
+	  printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+	  printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
+	#endif
+	#endif
+	  exit (-1);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c &&
+	  dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+	  SYSTEM_NAME=`$dummy $dummyarg` &&
+	    { echo "$SYSTEM_NAME"; exit; }
+	echo mips-mips-riscos${UNAME_RELEASE}
+	exit ;;
+    Motorola:PowerMAX_OS:*:*)
+	echo powerpc-motorola-powermax
+	exit ;;
+    Motorola:*:4.3:PL8-*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:Power_UNIX:*:*)
+	echo powerpc-harris-powerunix
+	exit ;;
+    m88k:CX/UX:7*:*)
+	echo m88k-harris-cxux7
+	exit ;;
+    m88k:*:4*:R4*)
+	echo m88k-motorola-sysv4
+	exit ;;
+    m88k:*:3*:R3*)
+	echo m88k-motorola-sysv3
+	exit ;;
+    AViiON:dgux:*:*)
+	# DG/UX returns AViiON for all architectures
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+	then
+	    if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+	       [ ${TARGET_BINARY_INTERFACE}x = x ]
+	    then
+		echo m88k-dg-dgux${UNAME_RELEASE}
+	    else
+		echo m88k-dg-dguxbcs${UNAME_RELEASE}
+	    fi
+	else
+	    echo i586-dg-dgux${UNAME_RELEASE}
+	fi
+	exit ;;
+    M88*:DolphinOS:*:*)	# DolphinOS (SVR3)
+	echo m88k-dolphin-sysv3
+	exit ;;
+    M88*:*:R3*:*)
+	# Delta 88k system running SVR3
+	echo m88k-motorola-sysv3
+	exit ;;
+    XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+	echo m88k-tektronix-sysv3
+	exit ;;
+    Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+	echo m68k-tektronix-bsd
+	exit ;;
+    *:IRIX*:*:*)
+	echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
+	exit ;;
+    ????????:AIX?:[12].1:2)   # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+	echo romp-ibm-aix     # uname -m gives an 8 hex-code CPU id
+	exit ;;               # Note that: echo "'`uname -s`'" gives 'AIX '
+    i*86:AIX:*:*)
+	echo i386-ibm-aix
+	exit ;;
+    ia64:AIX:*:*)
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:2:3)
+	if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+		eval $set_cc_for_build
+		sed 's/^		//' << EOF >$dummy.c
+		#include <sys/systemcfg.h>
+
+		main()
+			{
+			if (!__power_pc())
+				exit(1);
+			puts("powerpc-ibm-aix3.2.5");
+			exit(0);
+			}
+EOF
+		if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
+		then
+			echo "$SYSTEM_NAME"
+		else
+			echo rs6000-ibm-aix3.2.5
+		fi
+	elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+		echo rs6000-ibm-aix3.2.4
+	else
+		echo rs6000-ibm-aix3.2
+	fi
+	exit ;;
+    *:AIX:*:[4567])
+	IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+	if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
+		IBM_ARCH=rs6000
+	else
+		IBM_ARCH=powerpc
+	fi
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${IBM_ARCH}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:*:*)
+	echo rs6000-ibm-aix
+	exit ;;
+    ibmrt:4.4BSD:*|romp-ibm:BSD:*)
+	echo romp-ibm-bsd4.4
+	exit ;;
+    ibmrt:*BSD:*|romp-ibm:BSD:*)            # covers RT/PC BSD and
+	echo romp-ibm-bsd${UNAME_RELEASE}   # 4.3 with uname added to
+	exit ;;                             # report: romp-ibm BSD 4.3
+    *:BOSX:*:*)
+	echo rs6000-bull-bosx
+	exit ;;
+    DPX/2?00:B.O.S.:*:*)
+	echo m68k-bull-sysv3
+	exit ;;
+    9000/[34]??:4.3bsd:1.*:*)
+	echo m68k-hp-bsd
+	exit ;;
+    hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+	echo m68k-hp-bsd4.4
+	exit ;;
+    9000/[34678]??:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	case "${UNAME_MACHINE}" in
+	    9000/31? )            HP_ARCH=m68000 ;;
+	    9000/[34]?? )         HP_ARCH=m68k ;;
+	    9000/[678][0-9][0-9])
+		if [ -x /usr/bin/getconf ]; then
+		    sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+		    sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+		    case "${sc_cpu_version}" in
+		      523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
+		      528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
+		      532)                      # CPU_PA_RISC2_0
+			case "${sc_kernel_bits}" in
+			  32) HP_ARCH="hppa2.0n" ;;
+			  64) HP_ARCH="hppa2.0w" ;;
+			  '') HP_ARCH="hppa2.0" ;;   # HP-UX 10.20
+			esac ;;
+		    esac
+		fi
+		if [ "${HP_ARCH}" = "" ]; then
+		    eval $set_cc_for_build
+		    sed 's/^		//' << EOF >$dummy.c
+
+		#define _HPUX_SOURCE
+		#include <stdlib.h>
+		#include <unistd.h>
+
+		int main ()
+		{
+		#if defined(_SC_KERNEL_BITS)
+		    long bits = sysconf(_SC_KERNEL_BITS);
+		#endif
+		    long cpu  = sysconf (_SC_CPU_VERSION);
+
+		    switch (cpu)
+			{
+			case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+			case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+			case CPU_PA_RISC2_0:
+		#if defined(_SC_KERNEL_BITS)
+			    switch (bits)
+				{
+				case 64: puts ("hppa2.0w"); break;
+				case 32: puts ("hppa2.0n"); break;
+				default: puts ("hppa2.0"); break;
+				} break;
+		#else  /* !defined(_SC_KERNEL_BITS) */
+			    puts ("hppa2.0"); break;
+		#endif
+			default: puts ("hppa1.0"); break;
+			}
+		    exit (0);
+		}
+EOF
+		    (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
+		    test -z "$HP_ARCH" && HP_ARCH=hppa
+		fi ;;
+	esac
+	if [ ${HP_ARCH} = "hppa2.0w" ]
+	then
+	    eval $set_cc_for_build
+
+	    # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+	    # 32-bit code.  hppa64-hp-hpux* has the same kernel and a compiler
+	    # generating 64-bit code.  GNU and HP use different nomenclature:
+	    #
+	    # $ CC_FOR_BUILD=cc ./config.guess
+	    # => hppa2.0w-hp-hpux11.23
+	    # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+	    # => hppa64-hp-hpux11.23
+
+	    if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
+		grep -q __LP64__
+	    then
+		HP_ARCH="hppa2.0w"
+	    else
+		HP_ARCH="hppa64"
+	    fi
+	fi
+	echo ${HP_ARCH}-hp-hpux${HPUX_REV}
+	exit ;;
+    ia64:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	echo ia64-hp-hpux${HPUX_REV}
+	exit ;;
+    3050*:HI-UX:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#include <unistd.h>
+	int
+	main ()
+	{
+	  long cpu = sysconf (_SC_CPU_VERSION);
+	  /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+	     true for CPU_PA_RISC1_0.  CPU_IS_PA_RISC returns correct
+	     results, however.  */
+	  if (CPU_IS_PA_RISC (cpu))
+	    {
+	      switch (cpu)
+		{
+		  case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+		  default: puts ("hppa-hitachi-hiuxwe2"); break;
+		}
+	    }
+	  else if (CPU_IS_HP_MC68K (cpu))
+	    puts ("m68k-hitachi-hiuxwe2");
+	  else puts ("unknown-hitachi-hiuxwe2");
+	  exit (0);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
+		{ echo "$SYSTEM_NAME"; exit; }
+	echo unknown-hitachi-hiuxwe2
+	exit ;;
+    9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
+	echo hppa1.1-hp-bsd
+	exit ;;
+    9000/8??:4.3bsd:*:*)
+	echo hppa1.0-hp-bsd
+	exit ;;
+    *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+	echo hppa1.0-hp-mpeix
+	exit ;;
+    hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
+	echo hppa1.1-hp-osf
+	exit ;;
+    hp8??:OSF1:*:*)
+	echo hppa1.0-hp-osf
+	exit ;;
+    i*86:OSF1:*:*)
+	if [ -x /usr/sbin/sysversion ] ; then
+	    echo ${UNAME_MACHINE}-unknown-osf1mk
+	else
+	    echo ${UNAME_MACHINE}-unknown-osf1
+	fi
+	exit ;;
+    parisc*:Lites*:*:*)
+	echo hppa1.1-hp-lites
+	exit ;;
+    C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+	echo c1-convex-bsd
+	exit ;;
+    C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+	echo c34-convex-bsd
+	exit ;;
+    C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+	echo c38-convex-bsd
+	exit ;;
+    C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+	echo c4-convex-bsd
+	exit ;;
+    CRAY*Y-MP:*:*:*)
+	echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*[A-Z]90:*:*:*)
+	echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
+	| sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+	      -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+	      -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*TS:*:*:*)
+	echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*T3E:*:*:*)
+	echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*SV1:*:*:*)
+	echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    *:UNICOS/mp:*:*)
+	echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+	FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+	FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
+	echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit ;;
+    5000:UNIX_System_V:4.*:*)
+	FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+	FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
+	echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit ;;
+    i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+	echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
+	exit ;;
+    sparc*:BSD/OS:*:*)
+	echo sparc-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:BSD/OS:*:*)
+	echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:FreeBSD:*:*)
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	case ${UNAME_PROCESSOR} in
+	    amd64)
+		echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	    *)
+		echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	esac
+	exit ;;
+    i*:CYGWIN*:*)
+	echo ${UNAME_MACHINE}-pc-cygwin
+	exit ;;
+    *:MINGW*:*)
+	echo ${UNAME_MACHINE}-pc-mingw32
+	exit ;;
+    i*:windows32*:*)
+	# uname -m includes "-pc" on this system.
+	echo ${UNAME_MACHINE}-mingw32
+	exit ;;
+    i*:PW*:*)
+	echo ${UNAME_MACHINE}-pc-pw32
+	exit ;;
+    *:Interix*:*)
+	case ${UNAME_MACHINE} in
+	    x86)
+		echo i586-pc-interix${UNAME_RELEASE}
+		exit ;;
+	    authenticamd | genuineintel | EM64T)
+		echo x86_64-unknown-interix${UNAME_RELEASE}
+		exit ;;
+	    IA64)
+		echo ia64-unknown-interix${UNAME_RELEASE}
+		exit ;;
+	esac ;;
+    [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
+	echo i${UNAME_MACHINE}-pc-mks
+	exit ;;
+    8664:Windows_NT:*)
+	echo x86_64-pc-mks
+	exit ;;
+    i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
+	# UNAME_MACHINE based on the output of uname instead of i386?
+	echo i586-pc-interix
+	exit ;;
+    i*:UWIN*:*)
+	echo ${UNAME_MACHINE}-pc-uwin
+	exit ;;
+    amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+	echo x86_64-unknown-cygwin
+	exit ;;
+    p*:CYGWIN*:*)
+	echo powerpcle-unknown-cygwin
+	exit ;;
+    prep*:SunOS:5.*:*)
+	echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    *:GNU:*:*)
+	# the GNU system
+	echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
+	exit ;;
+    *:GNU/*:*:*)
+	# other systems with GNU libc and userland
+	echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
+	exit ;;
+    i*86:Minix:*:*)
+	echo ${UNAME_MACHINE}-pc-minix
+	exit ;;
+    alpha:Linux:*:*)
+	case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+	  EV5)   UNAME_MACHINE=alphaev5 ;;
+	  EV56)  UNAME_MACHINE=alphaev56 ;;
+	  PCA56) UNAME_MACHINE=alphapca56 ;;
+	  PCA57) UNAME_MACHINE=alphapca56 ;;
+	  EV6)   UNAME_MACHINE=alphaev6 ;;
+	  EV67)  UNAME_MACHINE=alphaev67 ;;
+	  EV68*) UNAME_MACHINE=alphaev68 ;;
+	esac
+	objdump --private-headers /bin/sh | grep -q ld.so.1
+	if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
+	echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
+	exit ;;
+    arm*:Linux:*:*)
+	eval $set_cc_for_build
+	if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+	    | grep -q __ARM_EABI__
+	then
+	    echo ${UNAME_MACHINE}-unknown-linux-gnu
+	else
+	    if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+		| grep -q __ARM_PCS_VFP
+	    then
+		echo ${UNAME_MACHINE}-unknown-linux-gnueabi
+	    else
+		echo ${UNAME_MACHINE}-unknown-linux-gnueabihf
+	    fi
+	fi
+	exit ;;
+    avr32*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    cris:Linux:*:*)
+	echo cris-axis-linux-gnu
+	exit ;;
+    crisv32:Linux:*:*)
+	echo crisv32-axis-linux-gnu
+	exit ;;
+    frv:Linux:*:*)
+	echo frv-unknown-linux-gnu
+	exit ;;
+    hexagon:Linux:*:*)
+	echo hexagon-unknown-linux-gnu
+	exit ;;
+    i*86:Linux:*:*)
+	LIBC=gnu
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#ifdef __dietlibc__
+	LIBC=dietlibc
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'`
+	echo "${UNAME_MACHINE}-pc-linux-${LIBC}"
+	exit ;;
+    ia64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    m32r*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    m68*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    mips:Linux:*:* | mips64:Linux:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#undef CPU
+	#undef ${UNAME_MACHINE}
+	#undef ${UNAME_MACHINE}el
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	CPU=${UNAME_MACHINE}el
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	CPU=${UNAME_MACHINE}
+	#else
+	CPU=
+	#endif
+	#endif
+EOF
+	eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'`
+	test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
+	;;
+    or32:Linux:*:*)
+	echo or32-unknown-linux-gnu
+	exit ;;
+    padre:Linux:*:*)
+	echo sparc-unknown-linux-gnu
+	exit ;;
+    parisc64:Linux:*:* | hppa64:Linux:*:*)
+	echo hppa64-unknown-linux-gnu
+	exit ;;
+    parisc:Linux:*:* | hppa:Linux:*:*)
+	# Look for CPU level
+	case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+	  PA7*) echo hppa1.1-unknown-linux-gnu ;;
+	  PA8*) echo hppa2.0-unknown-linux-gnu ;;
+	  *)    echo hppa-unknown-linux-gnu ;;
+	esac
+	exit ;;
+    ppc64:Linux:*:*)
+	echo powerpc64-unknown-linux-gnu
+	exit ;;
+    ppc:Linux:*:*)
+	echo powerpc-unknown-linux-gnu
+	exit ;;
+    s390:Linux:*:* | s390x:Linux:*:*)
+	echo ${UNAME_MACHINE}-ibm-linux
+	exit ;;
+    sh64*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    sh*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    sparc:Linux:*:* | sparc64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    tile*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    vax:Linux:*:*)
+	echo ${UNAME_MACHINE}-dec-linux-gnu
+	exit ;;
+    x86_64:Linux:*:*)
+	echo x86_64-unknown-linux-gnu
+	exit ;;
+    xtensa*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    i*86:DYNIX/ptx:4*:*)
+	# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+	# earlier versions are messed up and put the nodename in both
+	# sysname and nodename.
+	echo i386-sequent-sysv4
+	exit ;;
+    i*86:UNIX_SV:4.2MP:2.*)
+	# Unixware is an offshoot of SVR4, but it has its own version
+	# number series starting with 2...
+	# I am not positive that other SVR4 systems won't match this,
+	# I just have to hope.  -- rms.
+	# Use sysv4.2uw... so that sysv4* matches it.
+	echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
+	exit ;;
+    i*86:OS/2:*:*)
+	# If we were able to find `uname', then EMX Unix compatibility
+	# is probably installed.
+	echo ${UNAME_MACHINE}-pc-os2-emx
+	exit ;;
+    i*86:XTS-300:*:STOP)
+	echo ${UNAME_MACHINE}-unknown-stop
+	exit ;;
+    i*86:atheos:*:*)
+	echo ${UNAME_MACHINE}-unknown-atheos
+	exit ;;
+    i*86:syllable:*:*)
+	echo ${UNAME_MACHINE}-pc-syllable
+	exit ;;
+    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
+	echo i386-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    i*86:*DOS:*:*)
+	echo ${UNAME_MACHINE}-pc-msdosdjgpp
+	exit ;;
+    i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
+	UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
+	if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+		echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
+	else
+		echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
+	fi
+	exit ;;
+    i*86:*:5:[678]*)
+	# UnixWare 7.x, OpenUNIX and OpenServer 6.
+	case `/bin/uname -X | grep "^Machine"` in
+	    *486*)	     UNAME_MACHINE=i486 ;;
+	    *Pentium)	     UNAME_MACHINE=i586 ;;
+	    *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+	esac
+	echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+	exit ;;
+    i*86:*:3.2:*)
+	if test -f /usr/options/cb.name; then
+		UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+		echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
+	elif /bin/uname -X 2>/dev/null >/dev/null ; then
+		UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+		(/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+		(/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+			&& UNAME_MACHINE=i586
+		(/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		(/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
+	else
+		echo ${UNAME_MACHINE}-pc-sysv32
+	fi
+	exit ;;
+    pc:*:*:*)
+	# Left here for compatibility:
+	# uname -m always prints 'pc' for DJGPP but says nothing about the
+	# processor, so we play it safe by assuming i586.
+	# Note: whatever this is, it MUST be the same as what config.sub
+	# prints for the "djgpp" host, or else GDB configury will decide that
+	# this is a cross-build.
+	echo i586-pc-msdosdjgpp
+	exit ;;
+    Intel:Mach:3*:*)
+	echo i386-pc-mach3
+	exit ;;
+    paragon:*:*:*)
+	echo i860-intel-osf1
+	exit ;;
+    i860:*:4.*:*) # i860-SVR4
+	if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+	  echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
+	else # Add other i860-SVR4 vendors below as they are discovered.
+	  echo i860-unknown-sysv${UNAME_RELEASE}  # Unknown i860-SVR4
+	fi
+	exit ;;
+    mini*:CTIX:SYS*5:*)
+	# "miniframe"
+	echo m68010-convergent-sysv
+	exit ;;
+    mc68k:UNIX:SYSTEM5:3.51m)
+	echo m68k-convergent-sysv
+	exit ;;
+    M680?0:D-NIX:5.3:*)
+	echo m68k-diab-dnix
+	exit ;;
+    M68*:*:R3V[5678]*:*)
+	test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+	OS_REL=''
+	test -r /etc/.relid \
+	&& OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	  && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4; exit; } ;;
+    NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+	OS_REL='.3'
+	test -r /etc/.relid \
+	    && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	    && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	    && { echo i586-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
+	    && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+	echo m68k-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    mc68030:UNIX_System_V:4.*:*)
+	echo m68k-atari-sysv4
+	exit ;;
+    TSUNAMI:LynxOS:2.*:*)
+	echo sparc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    rs6000:LynxOS:2.*:*)
+	echo rs6000-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
+	echo powerpc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    SM[BE]S:UNIX_SV:*:*)
+	echo mips-dde-sysv${UNAME_RELEASE}
+	exit ;;
+    RM*:ReliantUNIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    RM*:SINIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    *:SINIX-*:*:*)
+	if uname -p 2>/dev/null >/dev/null ; then
+		UNAME_MACHINE=`(uname -p) 2>/dev/null`
+		echo ${UNAME_MACHINE}-sni-sysv4
+	else
+		echo ns32k-sni-sysv
+	fi
+	exit ;;
+    PENTIUM:*:4.0*:*)	# Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+			# says <Richard.M.Bartel@ccMail.Census.GOV>
+	echo i586-unisys-sysv4
+	exit ;;
+    *:UNIX_System_V:4*:FTX*)
+	# From Gerald Hewes <hewes@openmarket.com>.
+	# How about differentiating between stratus architectures? -djm
+	echo hppa1.1-stratus-sysv4
+	exit ;;
+    *:*:*:FTX*)
+	# From seanf@swdc.stratus.com.
+	echo i860-stratus-sysv4
+	exit ;;
+    i*86:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	echo ${UNAME_MACHINE}-stratus-vos
+	exit ;;
+    *:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	echo hppa1.1-stratus-vos
+	exit ;;
+    mc68*:A/UX:*:*)
+	echo m68k-apple-aux${UNAME_RELEASE}
+	exit ;;
+    news*:NEWS-OS:6*:*)
+	echo mips-sony-newsos6
+	exit ;;
+    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+	if [ -d /usr/nec ]; then
+		echo mips-nec-sysv${UNAME_RELEASE}
+	else
+		echo mips-unknown-sysv${UNAME_RELEASE}
+	fi
+	exit ;;
+    BeBox:BeOS:*:*)	# BeOS running on hardware made by Be, PPC only.
+	echo powerpc-be-beos
+	exit ;;
+    BeMac:BeOS:*:*)	# BeOS running on Mac or Mac clone, PPC only.
+	echo powerpc-apple-beos
+	exit ;;
+    BePC:BeOS:*:*)	# BeOS running on Intel PC compatible.
+	echo i586-pc-beos
+	exit ;;
+    BePC:Haiku:*:*)	# Haiku running on Intel PC compatible.
+	echo i586-pc-haiku
+	exit ;;
+    SX-4:SUPER-UX:*:*)
+	echo sx4-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-5:SUPER-UX:*:*)
+	echo sx5-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-6:SUPER-UX:*:*)
+	echo sx6-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-7:SUPER-UX:*:*)
+	echo sx7-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-8:SUPER-UX:*:*)
+	echo sx8-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-8R:SUPER-UX:*:*)
+	echo sx8r-nec-superux${UNAME_RELEASE}
+	exit ;;
+    Power*:Rhapsody:*:*)
+	echo powerpc-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Rhapsody:*:*)
+	echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Darwin:*:*)
+	UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+	case $UNAME_PROCESSOR in
+	    i386)
+		eval $set_cc_for_build
+		if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+		  if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+		      (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+		      grep IS_64BIT_ARCH >/dev/null
+		  then
+		      UNAME_PROCESSOR="x86_64"
+		  fi
+		fi ;;
+	    unknown) UNAME_PROCESSOR=powerpc ;;
+	esac
+	echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
+	exit ;;
+    *:procnto*:*:* | *:QNX:[0123456789]*:*)
+	UNAME_PROCESSOR=`uname -p`
+	if test "$UNAME_PROCESSOR" = "x86"; then
+		UNAME_PROCESSOR=i386
+		UNAME_MACHINE=pc
+	fi
+	echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
+	exit ;;
+    *:QNX:*:4*)
+	echo i386-pc-qnx
+	exit ;;
+    NEO-?:NONSTOP_KERNEL:*:*)
+	echo neo-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    NSE-?:NONSTOP_KERNEL:*:*)
+	echo nse-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    NSR-?:NONSTOP_KERNEL:*:*)
+	echo nsr-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    *:NonStop-UX:*:*)
+	echo mips-compaq-nonstopux
+	exit ;;
+    BS2000:POSIX*:*:*)
+	echo bs2000-siemens-sysv
+	exit ;;
+    DS/*:UNIX_System_V:*:*)
+	echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
+	exit ;;
+    *:Plan9:*:*)
+	# "uname -m" is not consistent, so use $cputype instead. 386
+	# is converted to i386 for consistency with other x86
+	# operating systems.
+	if test "$cputype" = "386"; then
+	    UNAME_MACHINE=i386
+	else
+	    UNAME_MACHINE="$cputype"
+	fi
+	echo ${UNAME_MACHINE}-unknown-plan9
+	exit ;;
+    *:TOPS-10:*:*)
+	echo pdp10-unknown-tops10
+	exit ;;
+    *:TENEX:*:*)
+	echo pdp10-unknown-tenex
+	exit ;;
+    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+	echo pdp10-dec-tops20
+	exit ;;
+    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+	echo pdp10-xkl-tops20
+	exit ;;
+    *:TOPS-20:*:*)
+	echo pdp10-unknown-tops20
+	exit ;;
+    *:ITS:*:*)
+	echo pdp10-unknown-its
+	exit ;;
+    SEI:*:*:SEIUX)
+	echo mips-sei-seiux${UNAME_RELEASE}
+	exit ;;
+    *:DragonFly:*:*)
+	echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+	exit ;;
+    *:*VMS:*:*)
+	UNAME_MACHINE=`(uname -p) 2>/dev/null`
+	case "${UNAME_MACHINE}" in
+	    A*) echo alpha-dec-vms ; exit ;;
+	    I*) echo ia64-dec-vms ; exit ;;
+	    V*) echo vax-dec-vms ; exit ;;
+	esac ;;
+    *:XENIX:*:SysV)
+	echo i386-pc-xenix
+	exit ;;
+    i*86:skyos:*:*)
+	echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
+	exit ;;
+    i*86:rdos:*:*)
+	echo ${UNAME_MACHINE}-pc-rdos
+	exit ;;
+    i*86:AROS:*:*)
+	echo ${UNAME_MACHINE}-pc-aros
+	exit ;;
+esac
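+
+# Illustrative example (not part of the upstream script): on a typical
+# x86_64 Linux host, uname reports "x86_64:Linux:<release>:<version>",
+# which matches the x86_64:Linux:*:* branch above and prints
+# x86_64-unknown-linux-gnu.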
+
+#echo '(No uname command or uname output not recognized.)' 1>&2
+#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
+
+eval $set_cc_for_build
+cat >$dummy.c <<EOF
+#ifdef _SEQUENT_
+# include <sys/types.h>
+# include <sys/utsname.h>
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
+     I don't know....  */
+  printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+  printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+	"4"
+#else
+	""
+#endif
+	); exit (0);
+#endif
+#endif
+
+#if defined (__arm) && defined (__acorn) && defined (__unix)
+  printf ("arm-acorn-riscix\n"); exit (0);
+#endif
+
+#if defined (hp300) && !defined (hpux)
+  printf ("m68k-hp-bsd\n"); exit (0);
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+  int version;
+  version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+  if (version < 4)
+    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+  else
+    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+  exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+  printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+  printf ("ns32k-encore-mach\n"); exit (0);
+#else
+  printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+  printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+  printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+  printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+    struct utsname un;
+
+    uname(&un);
+
+    if (strncmp(un.version, "V2", 2) == 0) {
+	printf ("i386-sequent-ptx2\n"); exit (0);
+    }
+    if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+	printf ("i386-sequent-ptx1\n"); exit (0);
+    }
+    printf ("i386-sequent-ptx\n"); exit (0);
+
+#endif
+
+#if defined (vax)
+# if !defined (ultrix)
+#  include <sys/param.h>
+#  if defined (BSD)
+#   if BSD == 43
+      printf ("vax-dec-bsd4.3\n"); exit (0);
+#   else
+#    if BSD == 199006
+      printf ("vax-dec-bsd4.3reno\n"); exit (0);
+#    else
+      printf ("vax-dec-bsd\n"); exit (0);
+#    endif
+#   endif
+#  else
+    printf ("vax-dec-bsd\n"); exit (0);
+#  endif
+# else
+    printf ("vax-dec-ultrix\n"); exit (0);
+# endif
+#endif
+
+#if defined (alliant) && defined (i860)
+  printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+  exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
+	{ echo "$SYSTEM_NAME"; exit; }
+
+# Apollos put the system type in the environment.
+
+test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
+
+# Convex versions that predate uname can use getsysinfo(1)
+
+if [ -x /usr/convex/getsysinfo ]
+then
+    case `getsysinfo -f cpu_type` in
+    c1*)
+	echo c1-convex-bsd
+	exit ;;
+    c2*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    c34*)
+	echo c34-convex-bsd
+	exit ;;
+    c38*)
+	echo c38-convex-bsd
+	exit ;;
+    c4*)
+	echo c4-convex-bsd
+	exit ;;
+    esac
+fi
+
+cat >&2 <<EOF
+$0: unable to guess system type
+
+This script, last modified $timestamp, has failed to recognize
+the operating system you are using. Please download the most
+up-to-date version of the config scripts from
+
+  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
+and
+  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+If the version you run ($0) is already up to date, please
+send the following data and any information you think might be
+pertinent to <config-patches@gnu.org> in order to provide the needed
+information to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo               = `(hostinfo) 2>/dev/null`
+/bin/universe          = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch              = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = ${UNAME_MACHINE}
+UNAME_RELEASE = ${UNAME_RELEASE}
+UNAME_SYSTEM  = ${UNAME_SYSTEM}
+UNAME_VERSION = ${UNAME_VERSION}
+EOF
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/config.sub b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/config.sub
new file mode 100755
index 0000000..5b87368
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/config.sub
@@ -0,0 +1,1767 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+#   2011 Free Software Foundation, Inc.
+
+timestamp='2011-10-08'
+
+# This file is (in principle) common to ALL GNU software.
+# The presence of a machine in this file suggests that SOME GNU software
+# can handle that machine.  It does not imply ALL GNU software can.
+#
+# This file is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Please send patches to <config-patches@gnu.org>.  Submit a context
+# diff and a properly formatted GNU ChangeLog entry.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support.  The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+#	CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+#	CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
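+#
+# Illustrative examples (not part of the upstream comments): a short alias
+# is expanded to the full form, e.g.
+#   ./config.sub amd64-linux  =>  x86_64-pc-linux-gnu
+#   ./config.sub sun4sol2     =>  sparc-sun-solaris2
+#   ./config.sub decstation   =>  mips-dec-ultrix4.2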
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS
+       $0 [OPTION] ALIAS
+
+Canonicalize a configuration name.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free
+Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help"
+       exit 1 ;;
+
+    *local*)
+       # First pass through any local machine types.
+       echo $1
+       exit ;;
+
+    * )
+       break ;;
+  esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+    exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+    exit 1;;
+esac
+
+# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
+# Here we must recognize all the valid KERNEL-OS combinations.
+maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
+case $maybe_os in
+  nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \
+  linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \
+  knetbsd*-gnu* | netbsd*-gnu* | \
+  kopensolaris*-gnu* | \
+  storm-chaos* | os2-emx* | rtmk-nova*)
+    os=-$maybe_os
+    basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
+    ;;
+  *)
+    basic_machine=`echo $1 | sed 's/-[^-]*$//'`
+    if [ $basic_machine != $1 ]
+    then os=`echo $1 | sed 's/.*-/-/'`
+    else os=; fi
+    ;;
+esac
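+
+# For example (illustrative only): for "arm-unknown-linux-gnu" the trailing
+# "linux-gnu" is a known KERNEL-OS pair, so os=-linux-gnu and
+# basic_machine=arm-unknown; for "sparc-sun-sunos4" only the last component
+# is an OS, so os=-sunos4 and basic_machine=sparc-sun.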
+
+### Let's recognize common machines as not being operating systems so
+### that things like config.sub decstation-3100 work.  We also
+### recognize some manufacturers as not being operating systems, so we
+### can provide default operating systems below.
+case $os in
+	-sun*os*)
+		# Prevent following clause from handling this invalid input.
+		;;
+	-dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
+	-att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
+	-unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
+	-convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
+	-c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
+	-harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
+	-apple | -axis | -knuth | -cray | -microblaze)
+		os=
+		basic_machine=$1
+		;;
+	-bluegene*)
+		os=-cnk
+		;;
+	-sim | -cisco | -oki | -wec | -winbond)
+		os=
+		basic_machine=$1
+		;;
+	-scout)
+		;;
+	-wrs)
+		os=-vxworks
+		basic_machine=$1
+		;;
+	-chorusos*)
+		os=-chorusos
+		basic_machine=$1
+		;;
+	-chorusrdb)
+		os=-chorusrdb
+		basic_machine=$1
+		;;
+	-hiux*)
+		os=-hiuxwe2
+		;;
+	-sco6)
+		os=-sco5v6
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5)
+		os=-sco3.2v5
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco4)
+		os=-sco3.2v4
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2.[4-9]*)
+		os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2v[4-9]*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5v6*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco*)
+		os=-sco3.2v2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-udk*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-isc)
+		os=-isc2.2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-clix*)
+		basic_machine=clipper-intergraph
+		;;
+	-isc*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-lynx*)
+		os=-lynxos
+		;;
+	-ptx*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
+		;;
+	-windowsnt*)
+		os=`echo $os | sed -e 's/windowsnt/winnt/'`
+		;;
+	-psos*)
+		os=-psos
+		;;
+	-mint | -mint[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+esac
+
+# Decode aliases for certain CPU-COMPANY combinations.
+case $basic_machine in
+	# Recognize the basic CPU types without company name.
+	# Some are omitted here because they have special meanings below.
+	1750a | 580 \
+	| a29k \
+	| alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
+	| alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
+	| am33_2.0 \
+	| arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
+        | be32 | be64 \
+	| bfin \
+	| c4x | clipper \
+	| d10v | d30v | dlx | dsp16xx \
+	| epiphany \
+	| fido | fr30 | frv \
+	| h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+	| hexagon \
+	| i370 | i860 | i960 | ia64 \
+	| ip2k | iq2000 \
+	| le32 | le64 \
+	| lm32 \
+	| m32c | m32r | m32rle | m68000 | m68k | m88k \
+	| maxq | mb | microblaze | mcore | mep | metag \
+	| mips | mipsbe | mipseb | mipsel | mipsle \
+	| mips16 \
+	| mips64 | mips64el \
+	| mips64octeon | mips64octeonel \
+	| mips64orion | mips64orionel \
+	| mips64r5900 | mips64r5900el \
+	| mips64vr | mips64vrel \
+	| mips64vr4100 | mips64vr4100el \
+	| mips64vr4300 | mips64vr4300el \
+	| mips64vr5000 | mips64vr5000el \
+	| mips64vr5900 | mips64vr5900el \
+	| mipsisa32 | mipsisa32el \
+	| mipsisa32r2 | mipsisa32r2el \
+	| mipsisa64 | mipsisa64el \
+	| mipsisa64r2 | mipsisa64r2el \
+	| mipsisa64sb1 | mipsisa64sb1el \
+	| mipsisa64sr71k | mipsisa64sr71kel \
+	| mipstx39 | mipstx39el \
+	| mn10200 | mn10300 \
+	| moxie \
+	| mt \
+	| msp430 \
+	| nds32 | nds32le | nds32be \
+	| nios | nios2 \
+	| ns16k | ns32k \
+	| open8 \
+	| or32 \
+	| pdp10 | pdp11 | pj | pjl \
+	| powerpc | powerpc64 | powerpc64le | powerpcle \
+	| pyramid \
+	| rx \
+	| score \
+	| sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
+	| sh64 | sh64le \
+	| sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
+	| sparcv8 | sparcv9 | sparcv9b | sparcv9v \
+	| spu \
+	| tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \
+	| ubicom32 \
+	| v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \
+	| we32k \
+	| x86 | xc16x | xstormy16 | xtensa \
+	| z8k | z80)
+		basic_machine=$basic_machine-unknown
+		;;
+	c54x)
+		basic_machine=tic54x-unknown
+		;;
+	c55x)
+		basic_machine=tic55x-unknown
+		;;
+	c6x)
+		basic_machine=tic6x-unknown
+		;;
+	m6811 | m68hc11 | m6812 | m68hc12 | picochip)
+		# Motorola 68HC11/12.
+		basic_machine=$basic_machine-unknown
+		os=-none
+		;;
+	m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
+		;;
+	ms1)
+		basic_machine=mt-unknown
+		;;
+
+	strongarm | thumb | xscale)
+		basic_machine=arm-unknown
+		;;
+
+	xscaleeb)
+		basic_machine=armeb-unknown
+		;;
+
+	xscaleel)
+		basic_machine=armel-unknown
+		;;
+
+	# We use `pc' rather than `unknown'
+	# because (1) that's what they normally are, and
+	# (2) the word "unknown" tends to confuse beginning users.
+	i*86 | x86_64)
+	  basic_machine=$basic_machine-pc
+	  ;;
+	# Object if more than one company name word.
+	*-*-*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+	# Recognize the basic CPU types with company name.
+	580-* \
+	| a29k-* \
+	| alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
+	| alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
+	| alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
+	| arm-*  | armbe-* | armle-* | armeb-* | armv*-* \
+	| avr-* | avr32-* \
+	| be32-* | be64-* \
+	| bfin-* | bs2000-* \
+	| c[123]* | c30-* | [cjt]90-* | c4x-* \
+	| clipper-* | craynv-* | cydra-* \
+	| d10v-* | d30v-* | dlx-* \
+	| elxsi-* \
+	| f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \
+	| h8300-* | h8500-* \
+	| hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
+	| hexagon-* \
+	| i*86-* | i860-* | i960-* | ia64-* \
+	| ip2k-* | iq2000-* \
+	| le32-* | le64-* \
+	| lm32-* \
+	| m32c-* | m32r-* | m32rle-* \
+	| m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
+	| m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \
+	| mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
+	| mips16-* \
+	| mips64-* | mips64el-* \
+	| mips64octeon-* | mips64octeonel-* \
+	| mips64orion-* | mips64orionel-* \
+	| mips64r5900-* | mips64r5900el-* \
+	| mips64vr-* | mips64vrel-* \
+	| mips64vr4100-* | mips64vr4100el-* \
+	| mips64vr4300-* | mips64vr4300el-* \
+	| mips64vr5000-* | mips64vr5000el-* \
+	| mips64vr5900-* | mips64vr5900el-* \
+	| mipsisa32-* | mipsisa32el-* \
+	| mipsisa32r2-* | mipsisa32r2el-* \
+	| mipsisa64-* | mipsisa64el-* \
+	| mipsisa64r2-* | mipsisa64r2el-* \
+	| mipsisa64sb1-* | mipsisa64sb1el-* \
+	| mipsisa64sr71k-* | mipsisa64sr71kel-* \
+	| mipstx39-* | mipstx39el-* \
+	| mmix-* \
+	| mt-* \
+	| msp430-* \
+	| nds32-* | nds32le-* | nds32be-* \
+	| nios-* | nios2-* \
+	| none-* | np1-* | ns16k-* | ns32k-* \
+	| open8-* \
+	| orion-* \
+	| pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
+	| powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \
+	| pyramid-* \
+	| romp-* | rs6000-* | rx-* \
+	| sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
+	| shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
+	| sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
+	| sparclite-* \
+	| sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \
+	| tahoe-* \
+	| tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
+	| tile*-* \
+	| tron-* \
+	| ubicom32-* \
+	| v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \
+	| vax-* \
+	| we32k-* \
+	| x86-* | x86_64-* | xc16x-* | xps100-* \
+	| xstormy16-* | xtensa*-* \
+	| ymp-* \
+	| z8k-* | z80-*)
+		;;
+	# Recognize the basic CPU types without company name, with glob match.
+	xtensa*)
+		basic_machine=$basic_machine-unknown
+		;;
+	# Recognize the various machine names and aliases which stand
+	# for a CPU type and a company and sometimes even an OS.
+	386bsd)
+		basic_machine=i386-unknown
+		os=-bsd
+		;;
+	3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+		basic_machine=m68000-att
+		;;
+	3b*)
+		basic_machine=we32k-att
+		;;
+	a29khif)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	abacus)
+		basic_machine=abacus-unknown
+		;;
+	adobe68k)
+		basic_machine=m68010-adobe
+		os=-scout
+		;;
+	alliant | fx80)
+		basic_machine=fx80-alliant
+		;;
+	altos | altos3068)
+		basic_machine=m68k-altos
+		;;
+	am29k)
+		basic_machine=a29k-none
+		os=-bsd
+		;;
+	amd64)
+		basic_machine=x86_64-pc
+		;;
+	amd64-*)
+		basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	amdahl)
+		basic_machine=580-amdahl
+		os=-sysv
+		;;
+	amiga | amiga-*)
+		basic_machine=m68k-unknown
+		;;
+	amigaos | amigados)
+		basic_machine=m68k-unknown
+		os=-amigaos
+		;;
+	amigaunix | amix)
+		basic_machine=m68k-unknown
+		os=-sysv4
+		;;
+	apollo68)
+		basic_machine=m68k-apollo
+		os=-sysv
+		;;
+	apollo68bsd)
+		basic_machine=m68k-apollo
+		os=-bsd
+		;;
+	aros)
+		basic_machine=i386-pc
+		os=-aros
+		;;
+	aux)
+		basic_machine=m68k-apple
+		os=-aux
+		;;
+	balance)
+		basic_machine=ns32k-sequent
+		os=-dynix
+		;;
+	blackfin)
+		basic_machine=bfin-unknown
+		os=-linux
+		;;
+	blackfin-*)
+		basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	bluegene*)
+		basic_machine=powerpc-ibm
+		os=-cnk
+		;;
+	c54x-*)
+		basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c55x-*)
+		basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c6x-*)
+		basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	c90)
+		basic_machine=c90-cray
+		os=-unicos
+		;;
+	cegcc)
+		basic_machine=arm-unknown
+		os=-cegcc
+		;;
+	convex-c1)
+		basic_machine=c1-convex
+		os=-bsd
+		;;
+	convex-c2)
+		basic_machine=c2-convex
+		os=-bsd
+		;;
+	convex-c32)
+		basic_machine=c32-convex
+		os=-bsd
+		;;
+	convex-c34)
+		basic_machine=c34-convex
+		os=-bsd
+		;;
+	convex-c38)
+		basic_machine=c38-convex
+		os=-bsd
+		;;
+	cray | j90)
+		basic_machine=j90-cray
+		os=-unicos
+		;;
+	craynv)
+		basic_machine=craynv-cray
+		os=-unicosmp
+		;;
+	cr16 | cr16-*)
+		basic_machine=cr16-unknown
+		os=-elf
+		;;
+	crds | unos)
+		basic_machine=m68k-crds
+		;;
+	crisv32 | crisv32-* | etraxfs*)
+		basic_machine=crisv32-axis
+		;;
+	cris | cris-* | etrax*)
+		basic_machine=cris-axis
+		;;
+	crx)
+		basic_machine=crx-unknown
+		os=-elf
+		;;
+	da30 | da30-*)
+		basic_machine=m68k-da30
+		;;
+	decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
+		basic_machine=mips-dec
+		;;
+	decsystem10* | dec10*)
+		basic_machine=pdp10-dec
+		os=-tops10
+		;;
+	decsystem20* | dec20*)
+		basic_machine=pdp10-dec
+		os=-tops20
+		;;
+	delta | 3300 | motorola-3300 | motorola-delta \
+	      | 3300-motorola | delta-motorola)
+		basic_machine=m68k-motorola
+		;;
+	delta88)
+		basic_machine=m88k-motorola
+		os=-sysv3
+		;;
+	dicos)
+		basic_machine=i686-pc
+		os=-dicos
+		;;
+	djgpp)
+		basic_machine=i586-pc
+		os=-msdosdjgpp
+		;;
+	dpx20 | dpx20-*)
+		basic_machine=rs6000-bull
+		os=-bosx
+		;;
+	dpx2* | dpx2*-bull)
+		basic_machine=m68k-bull
+		os=-sysv3
+		;;
+	ebmon29k)
+		basic_machine=a29k-amd
+		os=-ebmon
+		;;
+	elxsi)
+		basic_machine=elxsi-elxsi
+		os=-bsd
+		;;
+	encore | umax | mmax)
+		basic_machine=ns32k-encore
+		;;
+	es1800 | OSE68k | ose68k | ose | OSE)
+		basic_machine=m68k-ericsson
+		os=-ose
+		;;
+	fx2800)
+		basic_machine=i860-alliant
+		;;
+	genix)
+		basic_machine=ns32k-ns
+		;;
+	gmicro)
+		basic_machine=tron-gmicro
+		os=-sysv
+		;;
+	go32)
+		basic_machine=i386-pc
+		os=-go32
+		;;
+	h3050r* | hiux*)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	h8300hms)
+		basic_machine=h8300-hitachi
+		os=-hms
+		;;
+	h8300xray)
+		basic_machine=h8300-hitachi
+		os=-xray
+		;;
+	h8500hms)
+		basic_machine=h8500-hitachi
+		os=-hms
+		;;
+	harris)
+		basic_machine=m88k-harris
+		os=-sysv3
+		;;
+	hp300-*)
+		basic_machine=m68k-hp
+		;;
+	hp300bsd)
+		basic_machine=m68k-hp
+		os=-bsd
+		;;
+	hp300hpux)
+		basic_machine=m68k-hp
+		os=-hpux
+		;;
+	hp3k9[0-9][0-9] | hp9[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k2[0-9][0-9] | hp9k31[0-9])
+		basic_machine=m68000-hp
+		;;
+	hp9k3[2-9][0-9])
+		basic_machine=m68k-hp
+		;;
+	hp9k6[0-9][0-9] | hp6[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k7[0-79][0-9] | hp7[0-79][0-9])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k78[0-9] | hp78[0-9])
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][13679] | hp8[0-9][13679])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][0-9] | hp8[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hppa-next)
+		os=-nextstep3
+		;;
+	hppaosf)
+		basic_machine=hppa1.1-hp
+		os=-osf
+		;;
+	hppro)
+		basic_machine=hppa1.1-hp
+		os=-proelf
+		;;
+	i370-ibm* | ibm*)
+		basic_machine=i370-ibm
+		;;
+# I'm not sure what "Sysv32" means.  Should this be sysv3.2?
+	i*86v32)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv32
+		;;
+	i*86v4*)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv4
+		;;
+	i*86v)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv
+		;;
+	i*86sol2)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-solaris2
+		;;
+	i386mach)
+		basic_machine=i386-mach
+		os=-mach
+		;;
+	i386-vsta | vsta)
+		basic_machine=i386-unknown
+		os=-vsta
+		;;
+	iris | iris4d)
+		basic_machine=mips-sgi
+		case $os in
+		    -irix*)
+			;;
+		    *)
+			os=-irix4
+			;;
+		esac
+		;;
+	isi68 | isi)
+		basic_machine=m68k-isi
+		os=-sysv
+		;;
+	m68knommu)
+		basic_machine=m68k-unknown
+		os=-linux
+		;;
+	m68knommu-*)
+		basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	m88k-omron*)
+		basic_machine=m88k-omron
+		;;
+	magnum | m3230)
+		basic_machine=mips-mips
+		os=-sysv
+		;;
+	merlin)
+		basic_machine=ns32k-utek
+		os=-sysv
+		;;
+	microblaze)
+		basic_machine=microblaze-xilinx
+		;;
+	mingw32)
+		basic_machine=i386-pc
+		os=-mingw32
+		;;
+	mingw32ce)
+		basic_machine=arm-unknown
+		os=-mingw32ce
+		;;
+	miniframe)
+		basic_machine=m68000-convergent
+		;;
+	*mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+	mips3*-*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
+		;;
+	mips3*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
+		;;
+	monitor)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	morphos)
+		basic_machine=powerpc-unknown
+		os=-morphos
+		;;
+	msdos)
+		basic_machine=i386-pc
+		os=-msdos
+		;;
+	ms1-*)
+		basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'`
+		;;
+	mvs)
+		basic_machine=i370-ibm
+		os=-mvs
+		;;
+	nacl)
+		basic_machine=le32-unknown
+		os=-nacl
+		;;
+	ncr3000)
+		basic_machine=i486-ncr
+		os=-sysv4
+		;;
+	netbsd386)
+		basic_machine=i386-unknown
+		os=-netbsd
+		;;
+	netwinder)
+		basic_machine=armv4l-rebel
+		os=-linux
+		;;
+	news | news700 | news800 | news900)
+		basic_machine=m68k-sony
+		os=-newsos
+		;;
+	news1000)
+		basic_machine=m68030-sony
+		os=-newsos
+		;;
+	news-3600 | risc-news)
+		basic_machine=mips-sony
+		os=-newsos
+		;;
+	necv70)
+		basic_machine=v70-nec
+		os=-sysv
+		;;
+	next | m*-next )
+		basic_machine=m68k-next
+		case $os in
+		    -nextstep* )
+			;;
+		    -ns2*)
+		      os=-nextstep2
+			;;
+		    *)
+		      os=-nextstep3
+			;;
+		esac
+		;;
+	nh3000)
+		basic_machine=m68k-harris
+		os=-cxux
+		;;
+	nh[45]000)
+		basic_machine=m88k-harris
+		os=-cxux
+		;;
+	nindy960)
+		basic_machine=i960-intel
+		os=-nindy
+		;;
+	mon960)
+		basic_machine=i960-intel
+		os=-mon960
+		;;
+	nonstopux)
+		basic_machine=mips-compaq
+		os=-nonstopux
+		;;
+	np1)
+		basic_machine=np1-gould
+		;;
+	neo-tandem)
+		basic_machine=neo-tandem
+		;;
+	nse-tandem)
+		basic_machine=nse-tandem
+		;;
+	nsr-tandem)
+		basic_machine=nsr-tandem
+		;;
+	op50n-* | op60c-*)
+		basic_machine=hppa1.1-oki
+		os=-proelf
+		;;
+	openrisc | openrisc-*)
+		basic_machine=or32-unknown
+		;;
+	os400)
+		basic_machine=powerpc-ibm
+		os=-os400
+		;;
+	OSE68000 | ose68000)
+		basic_machine=m68000-ericsson
+		os=-ose
+		;;
+	os68k)
+		basic_machine=m68k-none
+		os=-os68k
+		;;
+	pa-hitachi)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	paragon)
+		basic_machine=i860-intel
+		os=-osf
+		;;
+	parisc)
+		basic_machine=hppa-unknown
+		os=-linux
+		;;
+	parisc-*)
+		basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'`
+		os=-linux
+		;;
+	pbd)
+		basic_machine=sparc-tti
+		;;
+	pbb)
+		basic_machine=m68k-tti
+		;;
+	pc532 | pc532-*)
+		basic_machine=ns32k-pc532
+		;;
+	pc98)
+		basic_machine=i386-pc
+		;;
+	pc98-*)
+		basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium | p5 | k5 | k6 | nexgen | viac3)
+		basic_machine=i586-pc
+		;;
+	pentiumpro | p6 | 6x86 | athlon | athlon_*)
+		basic_machine=i686-pc
+		;;
+	pentiumii | pentium2 | pentiumiii | pentium3)
+		basic_machine=i686-pc
+		;;
+	pentium4)
+		basic_machine=i786-pc
+		;;
+	pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+		basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumpro-* | p6-* | 6x86-* | athlon-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium4-*)
+		basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pn)
+		basic_machine=pn-gould
+		;;
+	power)	basic_machine=power-ibm
+		;;
+	ppc | ppcbe)	basic_machine=powerpc-unknown
+		;;
+	ppc-* | ppcbe-*)
+		basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppcle | powerpclittle | ppc-le | powerpc-little)
+		basic_machine=powerpcle-unknown
+		;;
+	ppcle-* | powerpclittle-*)
+		basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64)	basic_machine=powerpc64-unknown
+		;;
+	ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64le | powerpc64little | ppc64-le | powerpc64-little)
+		basic_machine=powerpc64le-unknown
+		;;
+	ppc64le-* | powerpc64little-*)
+		basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ps2)
+		basic_machine=i386-ibm
+		;;
+	pw32)
+		basic_machine=i586-unknown
+		os=-pw32
+		;;
+	rdos)
+		basic_machine=i386-pc
+		os=-rdos
+		;;
+	rom68k)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	rm[46]00)
+		basic_machine=mips-siemens
+		;;
+	rtpc | rtpc-*)
+		basic_machine=romp-ibm
+		;;
+	s390 | s390-*)
+		basic_machine=s390-ibm
+		;;
+	s390x | s390x-*)
+		basic_machine=s390x-ibm
+		;;
+	sa29200)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	sb1)
+		basic_machine=mipsisa64sb1-unknown
+		;;
+	sb1el)
+		basic_machine=mipsisa64sb1el-unknown
+		;;
+	sde)
+		basic_machine=mipsisa32-sde
+		os=-elf
+		;;
+	sei)
+		basic_machine=mips-sei
+		os=-seiux
+		;;
+	sequent)
+		basic_machine=i386-sequent
+		;;
+	sh)
+		basic_machine=sh-hitachi
+		os=-hms
+		;;
+	sh5el)
+		basic_machine=sh5le-unknown
+		;;
+	sh64)
+		basic_machine=sh64-unknown
+		;;
+	sparclite-wrs | simso-wrs)
+		basic_machine=sparclite-wrs
+		os=-vxworks
+		;;
+	sps7)
+		basic_machine=m68k-bull
+		os=-sysv2
+		;;
+	spur)
+		basic_machine=spur-unknown
+		;;
+	st2000)
+		basic_machine=m68k-tandem
+		;;
+	stratus)
+		basic_machine=i860-stratus
+		os=-sysv4
+		;;
+	strongarm-* | thumb-*)
+		basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	sun2)
+		basic_machine=m68000-sun
+		;;
+	sun2os3)
+		basic_machine=m68000-sun
+		os=-sunos3
+		;;
+	sun2os4)
+		basic_machine=m68000-sun
+		os=-sunos4
+		;;
+	sun3os3)
+		basic_machine=m68k-sun
+		os=-sunos3
+		;;
+	sun3os4)
+		basic_machine=m68k-sun
+		os=-sunos4
+		;;
+	sun4os3)
+		basic_machine=sparc-sun
+		os=-sunos3
+		;;
+	sun4os4)
+		basic_machine=sparc-sun
+		os=-sunos4
+		;;
+	sun4sol2)
+		basic_machine=sparc-sun
+		os=-solaris2
+		;;
+	sun3 | sun3-*)
+		basic_machine=m68k-sun
+		;;
+	sun4)
+		basic_machine=sparc-sun
+		;;
+	sun386 | sun386i | roadrunner)
+		basic_machine=i386-sun
+		;;
+	sv1)
+		basic_machine=sv1-cray
+		os=-unicos
+		;;
+	symmetry)
+		basic_machine=i386-sequent
+		os=-dynix
+		;;
+	t3e)
+		basic_machine=alphaev5-cray
+		os=-unicos
+		;;
+	t90)
+		basic_machine=t90-cray
+		os=-unicos
+		;;
+	tile*)
+		basic_machine=$basic_machine-unknown
+		os=-linux-gnu
+		;;
+	tx39)
+		basic_machine=mipstx39-unknown
+		;;
+	tx39el)
+		basic_machine=mipstx39el-unknown
+		;;
+	toad1)
+		basic_machine=pdp10-xkl
+		os=-tops20
+		;;
+	tower | tower-32)
+		basic_machine=m68k-ncr
+		;;
+	tpf)
+		basic_machine=s390x-ibm
+		os=-tpf
+		;;
+	udi29k)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	ultra3)
+		basic_machine=a29k-nyu
+		os=-sym1
+		;;
+	v810 | necv810)
+		basic_machine=v810-nec
+		os=-none
+		;;
+	vaxv)
+		basic_machine=vax-dec
+		os=-sysv
+		;;
+	vms)
+		basic_machine=vax-dec
+		os=-vms
+		;;
+	vpp*|vx|vx-*)
+		basic_machine=f301-fujitsu
+		;;
+	vxworks960)
+		basic_machine=i960-wrs
+		os=-vxworks
+		;;
+	vxworks68)
+		basic_machine=m68k-wrs
+		os=-vxworks
+		;;
+	vxworks29k)
+		basic_machine=a29k-wrs
+		os=-vxworks
+		;;
+	w65*)
+		basic_machine=w65-wdc
+		os=-none
+		;;
+	w89k-*)
+		basic_machine=hppa1.1-winbond
+		os=-proelf
+		;;
+	xbox)
+		basic_machine=i686-pc
+		os=-mingw32
+		;;
+	xps | xps100)
+		basic_machine=xps100-honeywell
+		;;
+	xscale-* | xscalee[bl]-*)
+		basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'`
+		;;
+	ymp)
+		basic_machine=ymp-cray
+		os=-unicos
+		;;
+	z8k-*-coff)
+		basic_machine=z8k-unknown
+		os=-sim
+		;;
+	z80-*-coff)
+		basic_machine=z80-unknown
+		os=-sim
+		;;
+	none)
+		basic_machine=none-none
+		os=-none
+		;;
+
+# Here we handle the default manufacturer of certain CPU types.  It is in
+# some cases the only manufacturer, in others, it is the most popular.
+	w89k)
+		basic_machine=hppa1.1-winbond
+		;;
+	op50n)
+		basic_machine=hppa1.1-oki
+		;;
+	op60c)
+		basic_machine=hppa1.1-oki
+		;;
+	romp)
+		basic_machine=romp-ibm
+		;;
+	mmix)
+		basic_machine=mmix-knuth
+		;;
+	rs6000)
+		basic_machine=rs6000-ibm
+		;;
+	vax)
+		basic_machine=vax-dec
+		;;
+	pdp10)
+		# there are many clones, so DEC is not a safe bet
+		basic_machine=pdp10-unknown
+		;;
+	pdp11)
+		basic_machine=pdp11-dec
+		;;
+	we32k)
+		basic_machine=we32k-att
+		;;
+	sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele)
+		basic_machine=sh-unknown
+		;;
+	sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v)
+		basic_machine=sparc-sun
+		;;
+	cydra)
+		basic_machine=cydra-cydrome
+		;;
+	orion)
+		basic_machine=orion-highlevel
+		;;
+	orion105)
+		basic_machine=clipper-highlevel
+		;;
+	mac | mpw | mac-mpw)
+		basic_machine=m68k-apple
+		;;
+	pmac | pmac-mpw)
+		basic_machine=powerpc-apple
+		;;
+	*-unknown)
+		# Make sure to match an already-canonicalized machine name.
+		;;
+	*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $basic_machine in
+	*-digital*)
+		basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
+		;;
+	*-commodore*)
+		basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
+		;;
+	*)
+		;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if [ x"$os" != x"" ]
+then
+case $os in
+	# First match some system type aliases
+	# that might get confused with valid system types.
+	# -solaris* is a basic system type, with this one exception.
+	-auroraux)
+		os=-auroraux
+		;;
+	-solaris1 | -solaris1.*)
+		os=`echo $os | sed -e 's|solaris1|sunos4|'`
+		;;
+	-solaris)
+		os=-solaris2
+		;;
+	-svr4*)
+		os=-sysv4
+		;;
+	-unixware*)
+		os=-sysv4.2uw
+		;;
+	-gnu/linux*)
+		os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
+		;;
+	# First accept the basic system types.
+	# The portable systems come first.
+	# Each alternative MUST END IN A *, to match a version number.
+	# -sysv* is not here because it comes later, after sysvr4.
+	-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
+	      | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\
+	      | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \
+	      | -sym* | -kopensolaris* \
+	      | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
+	      | -aos* | -aros* \
+	      | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
+	      | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
+	      | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \
+	      | -openbsd* | -solidbsd* \
+	      | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
+	      | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
+	      | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
+	      | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
+	      | -chorusos* | -chorusrdb* | -cegcc* \
+	      | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
+	      | -mingw32* | -linux-gnu* | -linux-android* \
+	      | -linux-newlib* | -linux-uclibc* \
+	      | -uxpv* | -beos* | -mpeix* | -udk* \
+	      | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
+	      | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
+	      | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
+	      | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
+	      | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
+	      | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
+	      | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*)
+	# Remember, each alternative MUST END IN *, to match a version number.
+		;;
+	-qnx*)
+		case $basic_machine in
+		    x86-* | i*86-*)
+			;;
+		    *)
+			os=-nto$os
+			;;
+		esac
+		;;
+	-nto-qnx*)
+		;;
+	-nto*)
+		os=`echo $os | sed -e 's|nto|nto-qnx|'`
+		;;
+	-sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
+	      | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
+	      | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
+		;;
+	-mac*)
+		os=`echo $os | sed -e 's|mac|macos|'`
+		;;
+	-linux-dietlibc)
+		os=-linux-dietlibc
+		;;
+	-linux*)
+		os=`echo $os | sed -e 's|linux|linux-gnu|'`
+		;;
+	-sunos5*)
+		os=`echo $os | sed -e 's|sunos5|solaris2|'`
+		;;
+	-sunos6*)
+		os=`echo $os | sed -e 's|sunos6|solaris3|'`
+		;;
+	-opened*)
+		os=-openedition
+		;;
+	-os400*)
+		os=-os400
+		;;
+	-wince*)
+		os=-wince
+		;;
+	-osfrose*)
+		os=-osfrose
+		;;
+	-osf*)
+		os=-osf
+		;;
+	-utek*)
+		os=-bsd
+		;;
+	-dynix*)
+		os=-bsd
+		;;
+	-acis*)
+		os=-aos
+		;;
+	-atheos*)
+		os=-atheos
+		;;
+	-syllable*)
+		os=-syllable
+		;;
+	-386bsd)
+		os=-bsd
+		;;
+	-ctix* | -uts*)
+		os=-sysv
+		;;
+	-nova*)
+		os=-rtmk-nova
+		;;
+	-ns2 )
+		os=-nextstep2
+		;;
+	-nsk*)
+		os=-nsk
+		;;
+	# Preserve the version number of sinix5.
+	-sinix5.*)
+		os=`echo $os | sed -e 's|sinix|sysv|'`
+		;;
+	-sinix*)
+		os=-sysv4
+		;;
+	-tpf*)
+		os=-tpf
+		;;
+	-triton*)
+		os=-sysv3
+		;;
+	-oss*)
+		os=-sysv3
+		;;
+	-svr4)
+		os=-sysv4
+		;;
+	-svr3)
+		os=-sysv3
+		;;
+	-sysvr4)
+		os=-sysv4
+		;;
+	# This must come after -sysvr4.
+	-sysv*)
+		;;
+	-ose*)
+		os=-ose
+		;;
+	-es1800*)
+		os=-ose
+		;;
+	-xenix)
+		os=-xenix
+		;;
+	-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+		os=-mint
+		;;
+	-aros*)
+		os=-aros
+		;;
+	-kaos*)
+		os=-kaos
+		;;
+	-zvmoe)
+		os=-zvmoe
+		;;
+	-dicos*)
+		os=-dicos
+		;;
+	-nacl*)
+		;;
+	-none)
+		;;
+	*)
+		# Get rid of the `-' at the beginning of $os.
+		os=`echo $os | sed 's/[^-]*-//'`
+		echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
+		exit 1
+		;;
+esac
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine, or, put another way, the most popular OS provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system.  Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
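+#
+# Illustrative examples (not part of the upstream comments): "sparc-sun"
+# with no OS picks up os=-sunos4.1.1 from the sparc-* | *-sun entry below,
+# giving sparc-sun-sunos4.1.1; "pdp10-dec" picks up os=-tops20, giving
+# pdp10-dec-tops20.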
+
+case $basic_machine in
+	score-*)
+		os=-elf
+		;;
+	spu-*)
+		os=-elf
+		;;
+	*-acorn)
+		os=-riscix1.2
+		;;
+	arm*-rebel)
+		os=-linux
+		;;
+	arm*-semi)
+		os=-aout
+		;;
+	c4x-* | tic4x-*)
+		os=-coff
+		;;
+	tic54x-*)
+		os=-coff
+		;;
+	tic55x-*)
+		os=-coff
+		;;
+	tic6x-*)
+		os=-coff
+		;;
+	# This must come before the *-dec entry.
+	pdp10-*)
+		os=-tops20
+		;;
+	pdp11-*)
+		os=-none
+		;;
+	*-dec | vax-*)
+		os=-ultrix4.2
+		;;
+	m68*-apollo)
+		os=-domain
+		;;
+	i386-sun)
+		os=-sunos4.0.2
+		;;
+	m68000-sun)
+		os=-sunos3
+		# This also exists in the configure program, but was not the
+		# default.
+		# os=-sunos4
+		;;
+	m68*-cisco)
+		os=-aout
+		;;
+	mep-*)
+		os=-elf
+		;;
+	mips*-cisco)
+		os=-elf
+		;;
+	mips*-*)
+		os=-elf
+		;;
+	or32-*)
+		os=-coff
+		;;
+	*-tti)	# must be before sparc entry or we get the wrong os.
+		os=-sysv3
+		;;
+	sparc-* | *-sun)
+		os=-sunos4.1.1
+		;;
+	*-be)
+		os=-beos
+		;;
+	*-haiku)
+		os=-haiku
+		;;
+	*-ibm)
+		os=-aix
+		;;
+	*-knuth)
+		os=-mmixware
+		;;
+	*-wec)
+		os=-proelf
+		;;
+	*-winbond)
+		os=-proelf
+		;;
+	*-oki)
+		os=-proelf
+		;;
+	*-hp)
+		os=-hpux
+		;;
+	*-hitachi)
+		os=-hiux
+		;;
+	i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+		os=-sysv
+		;;
+	*-cbm)
+		os=-amigaos
+		;;
+	*-dg)
+		os=-dgux
+		;;
+	*-dolphin)
+		os=-sysv3
+		;;
+	m68k-ccur)
+		os=-rtu
+		;;
+	m88k-omron*)
+		os=-luna
+		;;
+	*-next )
+		os=-nextstep
+		;;
+	*-sequent)
+		os=-ptx
+		;;
+	*-crds)
+		os=-unos
+		;;
+	*-ns)
+		os=-genix
+		;;
+	i370-*)
+		os=-mvs
+		;;
+	*-next)
+		os=-nextstep3
+		;;
+	*-gould)
+		os=-sysv
+		;;
+	*-highlevel)
+		os=-bsd
+		;;
+	*-encore)
+		os=-bsd
+		;;
+	*-sgi)
+		os=-irix
+		;;
+	*-siemens)
+		os=-sysv4
+		;;
+	*-masscomp)
+		os=-rtu
+		;;
+	f30[01]-fujitsu | f700-fujitsu)
+		os=-uxpv
+		;;
+	*-rom68k)
+		os=-coff
+		;;
+	*-*bug)
+		os=-coff
+		;;
+	*-apple)
+		os=-macos
+		;;
+	*-atari*)
+		os=-mint
+		;;
+	*)
+		os=-none
+		;;
+esac
+fi
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer.  We pick the logical manufacturer.
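+#
+# Illustrative example (not part of the upstream comments): "sparc-sunos4.1"
+# was canonicalized above to sparc-unknown with os=-sunos4.1; the -sunos*
+# entry below rewrites the unknown vendor to sun, so the script prints
+# sparc-sun-sunos4.1.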
+vendor=unknown
+case $basic_machine in
+	*-unknown)
+		case $os in
+			-riscix*)
+				vendor=acorn
+				;;
+			-sunos*)
+				vendor=sun
+				;;
+			-cnk*|-aix*)
+				vendor=ibm
+				;;
+			-beos*)
+				vendor=be
+				;;
+			-hpux*)
+				vendor=hp
+				;;
+			-mpeix*)
+				vendor=hp
+				;;
+			-hiux*)
+				vendor=hitachi
+				;;
+			-unos*)
+				vendor=crds
+				;;
+			-dgux*)
+				vendor=dg
+				;;
+			-luna*)
+				vendor=omron
+				;;
+			-genix*)
+				vendor=ns
+				;;
+			-mvs* | -opened*)
+				vendor=ibm
+				;;
+			-os400*)
+				vendor=ibm
+				;;
+			-ptx*)
+				vendor=sequent
+				;;
+			-tpf*)
+				vendor=ibm
+				;;
+			-vxsim* | -vxworks* | -windiss*)
+				vendor=wrs
+				;;
+			-aux*)
+				vendor=apple
+				;;
+			-hms*)
+				vendor=hitachi
+				;;
+			-mpw* | -macos*)
+				vendor=apple
+				;;
+			-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+				vendor=atari
+				;;
+			-vos*)
+				vendor=stratus
+				;;
+		esac
+		basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
+		;;
+esac
+
+echo $basic_machine$os
+exit
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/depcomp b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/depcomp
new file mode 100755
index 0000000..df8eea7
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/depcomp
@@ -0,0 +1,630 @@
+#! /bin/sh
+# depcomp - compile a program generating dependencies as side-effects
+
+scriptversion=2009-04-28.21; # UTC
+
+# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2009 Free
+# Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
+
+case $1 in
+  '')
+     echo "$0: No command.  Try \`$0 --help' for more information." 1>&2
+     exit 1;
+     ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: depcomp [--help] [--version] PROGRAM [ARGS]
+
+Run PROGRAM ARGS to compile a file, generating dependencies
+as side-effects.
+
+Environment variables:
+  depmode     Dependency tracking mode.
+  source      Source file read by `PROGRAM ARGS'.
+  object      Object file output by `PROGRAM ARGS'.
+  DEPDIR      Directory in which to store dependencies.
+  depfile     Dependency file to output.
+  tmpdepfile  Temporary file to use when outputting dependencies.
+  libtool     Whether libtool is used (yes/no).
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "depcomp $scriptversion"
+    exit $?
+    ;;
+esac
+
+if test -z "$depmode" || test -z "$source" || test -z "$object"; then
+  echo "depcomp: Variables source, object and depmode must be set" 1>&2
+  exit 1
+fi
+
+# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
+depfile=${depfile-`echo "$object" |
+  sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
+tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
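+
+# Illustrative example (not part of the upstream script): with
+# object=sub/bar.o and DEPDIR unset, depfile defaults to sub/.deps/bar.Po
+# and tmpdepfile to sub/.deps/bar.TPo.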
+
+rm -f "$tmpdepfile"
+
+# Some modes work just like other modes, but use different flags.  We
+# parameterize here, but still list the modes in the big case below,
+# to make depend.m4 easier to write.  Note that we *cannot* use a case
+# here, because this file can only contain one case statement.
+if test "$depmode" = hp; then
+  # HP compiler uses -M and no extra arg.
+  gccflag=-M
+  depmode=gcc
+fi
+
+if test "$depmode" = dashXmstdout; then
+   # This is just like dashmstdout with a different argument.
+   dashmflag=-xM
+   depmode=dashmstdout
+fi
+
+cygpath_u="cygpath -u -f -"
+if test "$depmode" = msvcmsys; then
+   # This is just like msvisualcpp but w/o cygpath translation.
+   # Just convert the backslash-escaped backslashes to single forward
+   # slashes to satisfy depend.m4
+   cygpath_u="sed s,\\\\\\\\,/,g"
+   depmode=msvisualcpp
+fi
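+
+# Illustrative invocation (not from the upstream script): the generated
+# Makefile exports the control variables and passes the real compile
+# command as arguments, for example:
+#   depmode=gcc3 source=foo.c object=foo.o depfile=.deps/foo.Po \
+#     ./depcomp gcc -c -o foo.o foo.c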
+
+case "$depmode" in
+gcc3)
+## gcc 3 implements dependency tracking that does exactly what
+## we want.  Yay!  Note: for some reason libtool 1.4 doesn't like
+## it if -MD -MP comes after the -MF stuff.  Hmm.
+## Unfortunately, FreeBSD c89 acceptance of flags depends upon
+## the command line argument order; so add the flags where they
+## appear in depend2.am.  Note that the slowdown incurred here
+## affects only configure: in makefiles, %FASTDEP% shortcuts this.
+  for arg
+  do
+    case $arg in
+    -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
+    *)  set fnord "$@" "$arg" ;;
+    esac
+    shift # fnord
+    shift # $arg
+  done
+  "$@"
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  mv "$tmpdepfile" "$depfile"
+  ;;
+
+gcc)
+## There are various ways to get dependency output from gcc.  Here's
+## why we pick this rather obscure method:
+## - Don't want to use -MD because we'd like the dependencies to end
+##   up in a subdir.  Having to rename by hand is ugly.
+##   (We might end up doing this anyway to support other compilers.)
+## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
+##   -MM, not -M (despite what the docs say).
+## - Using -M directly means running the compiler twice (even worse
+##   than renaming).
+  if test -z "$gccflag"; then
+    gccflag=-MD,
+  fi
+  "$@" -Wp,"$gccflag$tmpdepfile"
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
+## The second -e expression handles DOS-style file names with drive letters.
+  sed -e 's/^[^:]*: / /' \
+      -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
+## This next piece of magic avoids the `deleted header file' problem.
+## The problem is that when a header file which appears in a .P file
+## is deleted, the dependency causes make to die (because there is
+## typically no way to rebuild the header).  We avoid this by adding
+## dummy dependencies for each header file.  Too bad gcc doesn't do
+## this for us directly.
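+##
+## For example (illustrative, not from the upstream comments), a tmpdepfile
+## line such as "foo.o: foo.c foo.h" ends up in $depfile roughly as
+##   foo.o : \
+##    foo.c foo.h
+##   foo.c :
+##   foo.h :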
+  tr ' ' '
+' < "$tmpdepfile" |
+## Some versions of gcc put a space before the `:'.  On the theory
+## that the space means something, we add a space to the output as
+## well.
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+    sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+hp)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+sgi)
+  if test "$libtool" = yes; then
+    "$@" "-Wp,-MDupdate,$tmpdepfile"
+  else
+    "$@" -MDupdate "$tmpdepfile"
+  fi
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+
+  if test -f "$tmpdepfile"; then  # yes, the sourcefile depend on other files
+    echo "$object : \\" > "$depfile"
+
+    # Clip off the initial element (the dependent).  Don't try to be
+    # clever and replace this with sed code, as IRIX sed won't handle
+    # lines with more than a fixed number of characters (4096 in
+    # IRIX 6.2 sed, 8192 in IRIX 6.5).  We also remove comment lines;
+    # the IRIX cc adds comments like `#:fec' to the end of the
+    # dependency line.
+    tr ' ' '
+' < "$tmpdepfile" \
+    | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \
+    tr '
+' ' ' >> "$depfile"
+    echo >> "$depfile"
+
+    # The second pass generates a dummy entry for each header file.
+    tr ' ' '
+' < "$tmpdepfile" \
+   | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
+   >> "$depfile"
+  else
+    # The sourcefile does not contain any dependencies, so just
+    # store a dummy comment line, to avoid errors with the Makefile
+    # "include basename.Plo" scheme.
+    echo "#dummy" > "$depfile"
+  fi
+  rm -f "$tmpdepfile"
+  ;;
+
+aix)
+  # The C for AIX Compiler uses -M and outputs the dependencies
+  # in a .u file.  In older versions, this file always lives in the
+  # current directory.  Also, the AIX compiler puts `$object:' at the
+  # start of each line; $object doesn't have directory information.
+  # Version 6 uses the directory in both cases.
+  dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
+  test "x$dir" = "x$object" && dir=
+  base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
+  if test "$libtool" = yes; then
+    tmpdepfile1=$dir$base.u
+    tmpdepfile2=$base.u
+    tmpdepfile3=$dir.libs/$base.u
+    "$@" -Wc,-M
+  else
+    tmpdepfile1=$dir$base.u
+    tmpdepfile2=$dir$base.u
+    tmpdepfile3=$dir$base.u
+    "$@" -M
+  fi
+  stat=$?
+
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+    exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  if test -f "$tmpdepfile"; then
+    # Each line is of the form `foo.o: dependent.h'.
+    # Do two passes, one to just change these to
+    # `$object: dependent.h' and one to simply `dependent.h:'.
+    sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
+    # That's a tab and a space in the [].
+    sed -e 's,^.*\.[a-z]*:[	 ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
+  else
+    # The sourcefile does not contain any dependencies, so just
+    # store a dummy comment line, to avoid errors with the Makefile
+    # "include basename.Plo" scheme.
+    echo "#dummy" > "$depfile"
+  fi
+  rm -f "$tmpdepfile"
+  ;;
+
+icc)
+  # Intel's C compiler understands `-MD -MF file'.  However on
+  #    icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c
+  # ICC 7.0 will fill foo.d with something like
+  #    foo.o: sub/foo.c
+  #    foo.o: sub/foo.h
+  # which is wrong.  We want:
+  #    sub/foo.o: sub/foo.c
+  #    sub/foo.o: sub/foo.h
+  #    sub/foo.c:
+  #    sub/foo.h:
+  # ICC 7.1 will output
+  #    foo.o: sub/foo.c sub/foo.h
+  # and will wrap long lines using \ :
+  #    foo.o: sub/foo.c ... \
+  #     sub/foo.h ... \
+  #     ...
+
+  "$@" -MD -MF "$tmpdepfile"
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  # Each line is of the form `foo.o: dependent.h',
+  # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
+  # Do two passes, one to just change these to
+  # `$object: dependent.h' and one to simply `dependent.h:'.
+  sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process this invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" |
+    sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+hp2)
+  # The "hp" stanza above does not work with aCC (C++) and HP's ia64
+  # compilers, which have integrated preprocessors.  The correct option
+  # to use with these is +Maked; it writes dependencies to a file named
+  # 'foo.d', which lands next to the object file, wherever that
+  # happens to be.
+  # Much of this is similar to the tru64 case; see comments there.
+  dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
+  test "x$dir" = "x$object" && dir=
+  base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
+  if test "$libtool" = yes; then
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir.libs/$base.d
+    "$@" -Wc,+Maked
+  else
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir$base.d
+    "$@" +Maked
+  fi
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+     rm -f "$tmpdepfile1" "$tmpdepfile2"
+     exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  if test -f "$tmpdepfile"; then
+    sed -e "s,^.*\.[a-z]*:,$object:," "$tmpdepfile" > "$depfile"
+    # Add `dependent.h:' lines.
+    sed -ne '2,${
+	       s/^ *//
+	       s/ \\*$//
+	       s/$/:/
+	       p
+	     }' "$tmpdepfile" >> "$depfile"
+  else
+    echo "#dummy" > "$depfile"
+  fi
+  rm -f "$tmpdepfile" "$tmpdepfile2"
+  ;;
+
+tru64)
+   # The Tru64 compiler uses -MD to generate dependencies as a side
+   # effect.  `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'.
+   # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
+   # dependencies in `foo.d' instead, so we check for that too.
+   # Subdirectories are respected.
+   dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
+   test "x$dir" = "x$object" && dir=
+   base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
+
+   if test "$libtool" = yes; then
+      # With Tru64 cc, shared objects can also be used to make a
+      # static library.  This mechanism is used in libtool 1.4 series to
+      # handle both shared and static libraries in a single compilation.
+      # With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d.
+      #
+      # With libtool 1.5 this exception was removed, and libtool now
+      # generates 2 separate objects for the 2 libraries.  These two
+      # compilations output dependencies in $dir.libs/$base.o.d and
+      # in $dir$base.o.d.  We have to check for both files, because
+      # one of the two compilations can be disabled.  We should prefer
+      # $dir$base.o.d over $dir.libs/$base.o.d because the latter is
+      # automatically cleaned when .libs/ is deleted, while ignoring
+      # the former would cause a distcleancheck panic.
+      tmpdepfile1=$dir.libs/$base.lo.d   # libtool 1.4
+      tmpdepfile2=$dir$base.o.d          # libtool 1.5
+      tmpdepfile3=$dir.libs/$base.o.d    # libtool 1.5
+      tmpdepfile4=$dir.libs/$base.d      # Compaq CCC V6.2-504
+      "$@" -Wc,-MD
+   else
+      tmpdepfile1=$dir$base.o.d
+      tmpdepfile2=$dir$base.d
+      tmpdepfile3=$dir$base.d
+      tmpdepfile4=$dir$base.d
+      "$@" -MD
+   fi
+
+   stat=$?
+   if test $stat -eq 0; then :
+   else
+      rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
+      exit $stat
+   fi
+
+   for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
+   do
+     test -f "$tmpdepfile" && break
+   done
+   if test -f "$tmpdepfile"; then
+      sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
+      # That's a tab and a space in the [].
+      sed -e 's,^.*\.[a-z]*:[	 ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
+   else
+      echo "#dummy" > "$depfile"
+   fi
+   rm -f "$tmpdepfile"
+   ;;
+
+#nosideeffect)
+  # This comment above is used by automake to tell side-effect
+  # dependency tracking mechanisms from slower ones.
+
+dashmstdout)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout, regardless of -o.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove `-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  test -z "$dashmflag" && dashmflag=-M
+  # Require at least two characters before searching for `:'
+  # in the target name.  This is to cope with DOS-style filenames:
+  # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise.
+  "$@" $dashmflag |
+    sed 's:^[  ]*[^: ][^:][^:]*\:[    ]*:'"$object"'\: :' > "$tmpdepfile"
+  rm -f "$depfile"
+  cat < "$tmpdepfile" > "$depfile"
+  tr ' ' '
+' < "$tmpdepfile" | \
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+    sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+dashXmstdout)
+  # This case only exists to satisfy depend.m4.  It is never actually
+  # run, as this mode is specially recognized in the preamble.
+  exit 1
+  ;;
+
+makedepend)
+  "$@" || exit $?
+  # Remove any Libtool call
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+  # X makedepend
+  shift
+  cleared=no eat=no
+  for arg
+  do
+    case $cleared in
+    no)
+      set ""; shift
+      cleared=yes ;;
+    esac
+    if test $eat = yes; then
+      eat=no
+      continue
+    fi
+    case "$arg" in
+    -D*|-I*)
+      set fnord "$@" "$arg"; shift ;;
+    # Strip any option that makedepend may not understand.  Remove
+    # the object too, otherwise makedepend will parse it as a source file.
+    -arch)
+      eat=yes ;;
+    -*|$object)
+      ;;
+    *)
+      set fnord "$@" "$arg"; shift ;;
+    esac
+  done
+  obj_suffix=`echo "$object" | sed 's/^.*\././'`
+  touch "$tmpdepfile"
+  ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
+  rm -f "$depfile"
+  cat < "$tmpdepfile" > "$depfile"
+  sed '1,2d' "$tmpdepfile" | tr ' ' '
+' | \
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+    sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile" "$tmpdepfile".bak
+  ;;
+
+cpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove `-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  "$@" -E |
+    sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
+       -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' |
+    sed '$ s: \\$::' > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  cat < "$tmpdepfile" >> "$depfile"
+  sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+msvisualcpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test "X$1" != 'X--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  IFS=" "
+  for arg
+  do
+    case "$arg" in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
+	set fnord "$@"
+	shift
+	shift
+	;;
+    *)
+	set fnord "$@" "$arg"
+	shift
+	shift
+	;;
+    esac
+  done
+  "$@" -E 2>/dev/null |
+  sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::	\1 \\:p' >> "$depfile"
+  echo "	" >> "$depfile"
+  sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+msvcmsys)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+none)
+  exec "$@"
+  ;;
+
+*)
+  echo "Unknown depmode $depmode" 1>&2
+  exit 1
+  ;;
+esac
+
+exit 0
+
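+# Illustrative invocation (a sketch only: it assumes the standard depcomp
+# preamble, which is not part of this hunk, reads source, object, libtool,
+# DEPDIR and depmode from the environment and derives depfile/tmpdepfile
+# from them; the compiler, paths and flags below are placeholders):
+#
+#   source='foo.c' object='foo.o' libtool=no DEPDIR=.deps depmode=gcc3 \
+#     /bin/sh ./depcomp gcc -I. -c -o foo.o foo.c
+#
+# With depmode=gcc3 the stanza above inserts -MT/-MD/-MP/-MF before -c, so
+# the dependency file is written during the single compilation and then
+# moved into place as .deps/foo.Po.
+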
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/install-sh b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/install-sh
new file mode 100755
index 0000000..a9244eb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/install-sh
@@ -0,0 +1,527 @@
+#!/bin/sh
+# install - install a program, script, or datafile
+
+scriptversion=2011-01-19.21; # UTC
+
+# This originates from X11R5 (mit/util/scripts/install.sh), which was
+# later released in X11R6 (xc/config/util/install.sh) with the
+# following copyright and license.
+#
+# Copyright (C) 1994 X Consortium
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
+# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+# Except as contained in this notice, the name of the X Consortium shall not
+# be used in advertising or otherwise to promote the sale, use or other deal-
+# ings in this Software without prior written authorization from the X Consor-
+# tium.
+#
+#
+# FSF changes to this file are in the public domain.
+#
+# Calling this script install-sh is preferred over install.sh, to prevent
+# `make' implicit rules from creating a file called install from it
+# when there is no Makefile.
+#
+# This script is compatible with the BSD install script, but was written
+# from scratch.
+
+nl='
+'
+IFS=" ""	$nl"
+
+# set DOITPROG to echo to test this script
+
+# Don't use :- since 4.3BSD and earlier shells don't like it.
+doit=${DOITPROG-}
+if test -z "$doit"; then
+  doit_exec=exec
+else
+  doit_exec=$doit
+fi
+
+# Put in absolute file names if you don't have them in your path;
+# or use environment vars.
+
+chgrpprog=${CHGRPPROG-chgrp}
+chmodprog=${CHMODPROG-chmod}
+chownprog=${CHOWNPROG-chown}
+cmpprog=${CMPPROG-cmp}
+cpprog=${CPPROG-cp}
+mkdirprog=${MKDIRPROG-mkdir}
+mvprog=${MVPROG-mv}
+rmprog=${RMPROG-rm}
+stripprog=${STRIPPROG-strip}
+
+posix_glob='?'
+initialize_posix_glob='
+  test "$posix_glob" != "?" || {
+    if (set -f) 2>/dev/null; then
+      posix_glob=
+    else
+      posix_glob=:
+    fi
+  }
+'
+
+posix_mkdir=
+
+# Desired mode of installed file.
+mode=0755
+
+chgrpcmd=
+chmodcmd=$chmodprog
+chowncmd=
+mvcmd=$mvprog
+rmcmd="$rmprog -f"
+stripcmd=
+
+src=
+dst=
+dir_arg=
+dst_arg=
+
+copy_on_change=false
+no_target_directory=
+
+usage="\
+Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
+   or: $0 [OPTION]... SRCFILES... DIRECTORY
+   or: $0 [OPTION]... -t DIRECTORY SRCFILES...
+   or: $0 [OPTION]... -d DIRECTORIES...
+
+In the 1st form, copy SRCFILE to DSTFILE.
+In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
+In the 4th, create DIRECTORIES.
+
+Options:
+     --help     display this help and exit.
+     --version  display version info and exit.
+
+  -c            (ignored)
+  -C            install only if different (preserve the last data modification time)
+  -d            create directories instead of installing files.
+  -g GROUP      $chgrpprog installed files to GROUP.
+  -m MODE       $chmodprog installed files to MODE.
+  -o USER       $chownprog installed files to USER.
+  -s            $stripprog installed files.
+  -t DIRECTORY  install into DIRECTORY.
+  -T            report an error if DSTFILE is a directory.
+
+Environment variables override the default commands:
+  CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
+  RMPROG STRIPPROG
+"
+
+while test $# -ne 0; do
+  case $1 in
+    -c) ;;
+
+    -C) copy_on_change=true;;
+
+    -d) dir_arg=true;;
+
+    -g) chgrpcmd="$chgrpprog $2"
+	shift;;
+
+    --help) echo "$usage"; exit $?;;
+
+    -m) mode=$2
+	case $mode in
+	  *' '* | *'	'* | *'
+'*	  | *'*'* | *'?'* | *'['*)
+	    echo "$0: invalid mode: $mode" >&2
+	    exit 1;;
+	esac
+	shift;;
+
+    -o) chowncmd="$chownprog $2"
+	shift;;
+
+    -s) stripcmd=$stripprog;;
+
+    -t) dst_arg=$2
+	# Protect names problematic for `test' and other utilities.
+	case $dst_arg in
+	  -* | [=\(\)!]) dst_arg=./$dst_arg;;
+	esac
+	shift;;
+
+    -T) no_target_directory=true;;
+
+    --version) echo "$0 $scriptversion"; exit $?;;
+
+    --)	shift
+	break;;
+
+    -*)	echo "$0: invalid option: $1" >&2
+	exit 1;;
+
+    *)  break;;
+  esac
+  shift
+done
+
+if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
+  # When -d is used, all remaining arguments are directories to create.
+  # When -t is used, the destination is already specified.
+  # Otherwise, the last argument is the destination.  Remove it from $@.
+  for arg
+  do
+    if test -n "$dst_arg"; then
+      # $@ is not empty: it contains at least $arg.
+      set fnord "$@" "$dst_arg"
+      shift # fnord
+    fi
+    shift # arg
+    dst_arg=$arg
+    # Protect names problematic for `test' and other utilities.
+    case $dst_arg in
+      -* | [=\(\)!]) dst_arg=./$dst_arg;;
+    esac
+  done
+fi
+
+if test $# -eq 0; then
+  if test -z "$dir_arg"; then
+    echo "$0: no input file specified." >&2
+    exit 1
+  fi
+  # It's OK to call `install-sh -d' without argument.
+  # This can happen when creating conditional directories.
+  exit 0
+fi
+
+if test -z "$dir_arg"; then
+  do_exit='(exit $ret); exit $ret'
+  trap "ret=129; $do_exit" 1
+  trap "ret=130; $do_exit" 2
+  trap "ret=141; $do_exit" 13
+  trap "ret=143; $do_exit" 15
+
+  # Set umask so as not to create temps with too-generous modes.
+  # However, 'strip' requires both read and write access to temps.
+  case $mode in
+    # Optimize common cases.
+    *644) cp_umask=133;;
+    *755) cp_umask=22;;
+
+    *[0-7])
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw='% 200'
+      fi
+      cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
+    *)
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw=,u+rw
+      fi
+      cp_umask=$mode$u_plus_rw;;
+  esac
+fi
+
+for src
+do
+  # Protect names problematic for `test' and other utilities.
+  case $src in
+    -* | [=\(\)!]) src=./$src;;
+  esac
+
+  if test -n "$dir_arg"; then
+    dst=$src
+    dstdir=$dst
+    test -d "$dstdir"
+    dstdir_status=$?
+  else
+
+    # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
+    # might cause directories to be created, which would be especially bad
+    # if $src (and thus $dsttmp) contains '*'.
+    if test ! -f "$src" && test ! -d "$src"; then
+      echo "$0: $src does not exist." >&2
+      exit 1
+    fi
+
+    if test -z "$dst_arg"; then
+      echo "$0: no destination specified." >&2
+      exit 1
+    fi
+    dst=$dst_arg
+
+    # If destination is a directory, append the input filename; won't work
+    # if double slashes aren't ignored.
+    if test -d "$dst"; then
+      if test -n "$no_target_directory"; then
+	echo "$0: $dst_arg: Is a directory" >&2
+	exit 1
+      fi
+      dstdir=$dst
+      dst=$dstdir/`basename "$src"`
+      dstdir_status=0
+    else
+      # Prefer dirname, but fall back on a substitute if dirname fails.
+      dstdir=`
+	(dirname "$dst") 2>/dev/null ||
+	expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	     X"$dst" : 'X\(//\)[^/]' \| \
+	     X"$dst" : 'X\(//\)$' \| \
+	     X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
+	echo X"$dst" |
+	    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)[^/].*/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\).*/{
+		   s//\1/
+		   q
+		 }
+		 s/.*/./; q'
+      `
+
+      test -d "$dstdir"
+      dstdir_status=$?
+    fi
+  fi
+
+  obsolete_mkdir_used=false
+
+  if test $dstdir_status != 0; then
+    case $posix_mkdir in
+      '')
+	# Create intermediate dirs using mode 755 as modified by the umask.
+	# This is like FreeBSD 'install' as of 1997-10-28.
+	umask=`umask`
+	case $stripcmd.$umask in
+	  # Optimize common cases.
+	  *[2367][2367]) mkdir_umask=$umask;;
+	  .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
+
+	  *[0-7])
+	    mkdir_umask=`expr $umask + 22 \
+	      - $umask % 100 % 40 + $umask % 20 \
+	      - $umask % 10 % 4 + $umask % 2
+	    `;;
+	  *) mkdir_umask=$umask,go-w;;
+	esac
+
+	# With -d, create the new directory with the user-specified mode.
+	# Otherwise, rely on $mkdir_umask.
+	if test -n "$dir_arg"; then
+	  mkdir_mode=-m$mode
+	else
+	  mkdir_mode=
+	fi
+
+	posix_mkdir=false
+	case $umask in
+	  *[123567][0-7][0-7])
+	    # POSIX mkdir -p sets u+wx bits regardless of umask, which
+	    # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
+	    ;;
+	  *)
+	    tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
+	    trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
+
+	    if (umask $mkdir_umask &&
+		exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
+	    then
+	      if test -z "$dir_arg" || {
+		   # Check for POSIX incompatibilities with -m.
+		   # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
+		   # other-writeable bit of parent directory when it shouldn't.
+		   # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
+		   ls_ld_tmpdir=`ls -ld "$tmpdir"`
+		   case $ls_ld_tmpdir in
+		     d????-?r-*) different_mode=700;;
+		     d????-?--*) different_mode=755;;
+		     *) false;;
+		   esac &&
+		   $mkdirprog -m$different_mode -p -- "$tmpdir" && {
+		     ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
+		     test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
+		   }
+		 }
+	      then posix_mkdir=:
+	      fi
+	      rmdir "$tmpdir/d" "$tmpdir"
+	    else
+	      # Remove any dirs left behind by ancient mkdir implementations.
+	      rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
+	    fi
+	    trap '' 0;;
+	esac;;
+    esac
+
+    if
+      $posix_mkdir && (
+	umask $mkdir_umask &&
+	$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
+      )
+    then :
+    else
+
+      # The umask is ridiculous, or mkdir does not conform to POSIX,
+      # or it failed possibly due to a race condition.  Create the
+      # directory the slow way, step by step, checking for races as we go.
+
+      case $dstdir in
+	/*) prefix='/';;
+	[-=\(\)!]*) prefix='./';;
+	*)  prefix='';;
+      esac
+
+      eval "$initialize_posix_glob"
+
+      oIFS=$IFS
+      IFS=/
+      $posix_glob set -f
+      set fnord $dstdir
+      shift
+      $posix_glob set +f
+      IFS=$oIFS
+
+      prefixes=
+
+      for d
+      do
+	test X"$d" = X && continue
+
+	prefix=$prefix$d
+	if test -d "$prefix"; then
+	  prefixes=
+	else
+	  if $posix_mkdir; then
+	    (umask $mkdir_umask &&
+	     $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
+	    # Don't fail if two instances are running concurrently.
+	    test -d "$prefix" || exit 1
+	  else
+	    case $prefix in
+	      *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
+	      *) qprefix=$prefix;;
+	    esac
+	    prefixes="$prefixes '$qprefix'"
+	  fi
+	fi
+	prefix=$prefix/
+      done
+
+      if test -n "$prefixes"; then
+	# Don't fail if two instances are running concurrently.
+	(umask $mkdir_umask &&
+	 eval "\$doit_exec \$mkdirprog $prefixes") ||
+	  test -d "$dstdir" || exit 1
+	obsolete_mkdir_used=true
+      fi
+    fi
+  fi
+
+  if test -n "$dir_arg"; then
+    { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
+    { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
+    { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
+      test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
+  else
+
+    # Make a couple of temp file names in the proper directory.
+    dsttmp=$dstdir/_inst.$$_
+    rmtmp=$dstdir/_rm.$$_
+
+    # Trap to clean up those temp files at exit.
+    trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
+
+    # Copy the file name to the temp name.
+    (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
+
+    # and set any options; do chmod last to preserve setuid bits.
+    #
+    # If any of these fail, we abort the whole thing.  If we want to
+    # ignore errors from any of these, just make sure not to ignore
+    # errors from the above "$doit $cpprog $src $dsttmp" command.
+    #
+    { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
+    { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
+    { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
+    { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
+
+    # If -C, don't bother to copy if it wouldn't change the file.
+    if $copy_on_change &&
+       old=`LC_ALL=C ls -dlL "$dst"	2>/dev/null` &&
+       new=`LC_ALL=C ls -dlL "$dsttmp"	2>/dev/null` &&
+
+       eval "$initialize_posix_glob" &&
+       $posix_glob set -f &&
+       set X $old && old=:$2:$4:$5:$6 &&
+       set X $new && new=:$2:$4:$5:$6 &&
+       $posix_glob set +f &&
+
+       test "$old" = "$new" &&
+       $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
+    then
+      rm -f "$dsttmp"
+    else
+      # Rename the file to the real destination.
+      $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
+
+      # The rename failed, perhaps because mv can't rename something else
+      # to itself, or perhaps because mv is so ancient that it does not
+      # support -f.
+      {
+	# Now remove or move aside any old file at destination location.
+	# We try this two ways since rm can't unlink itself on some
+	# systems and the destination file might be busy for other
+	# reasons.  In this case, the final cleanup might fail but the new
+	# file should still install successfully.
+	{
+	  test ! -f "$dst" ||
+	  $doit $rmcmd -f "$dst" 2>/dev/null ||
+	  { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
+	    { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
+	  } ||
+	  { echo "$0: cannot unlink or rename $dst" >&2
+	    (exit 1); exit 1
+	  }
+	} &&
+
+	# Now rename the file to the real destination.
+	$doit $mvcmd "$dsttmp" "$dst"
+      }
+    fi || exit 1
+
+    trap '' 0
+  fi
+done
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/ltmain.sh b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/ltmain.sh
new file mode 100644
index 0000000..63ae69d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/ltmain.sh
@@ -0,0 +1,9655 @@
+
+# libtool (GNU libtool) 2.4.2
+# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]... [MODE-ARG]...
+#
+# Provide generalized library-building support services.
+#
+#       --config             show all configuration variables
+#       --debug              enable verbose shell tracing
+#   -n, --dry-run            display commands without modifying any files
+#       --features           display basic configuration information and exit
+#       --mode=MODE          use operation mode MODE
+#       --preserve-dup-deps  don't remove duplicate dependency libraries
+#       --quiet, --silent    don't print informational messages
+#       --no-quiet, --no-silent
+#                            print informational messages (default)
+#       --no-warn            don't display warning messages
+#       --tag=TAG            use configuration variables from tag TAG
+#   -v, --verbose            print more informational messages than default
+#       --no-verbose         don't print the extra informational messages
+#       --version            print version information
+#   -h, --help, --help-all   print short, long, or detailed help message
+#
+# MODE must be one of the following:
+#
+#         clean              remove files from the build directory
+#         compile            compile a source file into a libtool object
+#         execute            automatically set library path, then run a program
+#         finish             complete the installation of libtool libraries
+#         install            install libraries or executables
+#         link               create a library or an executable
+#         uninstall          remove libraries from an installed directory
+#
+# MODE-ARGS vary depending on the MODE.  When passed as first option,
+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#         host-triplet:	$host
+#         shell:		$SHELL
+#         compiler:		$LTCC
+#         compiler flags:		$LTCFLAGS
+#         linker:		$LD (gnu? $with_gnu_ld)
+#         $progname:	(GNU libtool) 2.4.2
+#         automake:	$automake_version
+#         autoconf:	$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
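+#
+# For example (illustrative only; the compiler, flags and paths are
+# placeholders), the configured `libtool' wrapper generated around this
+# script is typically invoked as:
+#
+#   libtool --mode=compile gcc -g -O -c foo.c
+#   libtool --mode=link gcc -g -O -o libfoo.la foo.lo -rpath /usr/local/lib
+#   libtool --mode=install install -c libfoo.la /usr/local/lib/libfoo.la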
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.4.2
+TIMESTAMP=""
+package_revision=1.3337
+
+# Be Bourne compatible
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# NLS nuisances: We save the old values to restore during execute mode.
+lt_user_locale=
+lt_safe_locale=
+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+  eval "if test \"\${$lt_var+set}\" = set; then
+          save_$lt_var=\$$lt_var
+          $lt_var=C
+	  export $lt_var
+	  lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
+	  lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+	fi"
+done
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+$lt_unset CDPATH
+
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+} # func_dirname may be replaced by extended shell implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+} # func_basename may be replaced by extended shell implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    # Extract subdirectory from the argument.
+    func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+    func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+} # func_dirname_and_basename may be replaced by extended shell implementation
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    case ${2} in
+      .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+      *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+    esac
+} # func_stripname may be replaced by extended shell implementation
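+# For example (illustrative only):
+#   func_stripname 'lib' '.la' 'libfoo.la'
+# sets func_stripname_result to "foo".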
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
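+# For example (illustrative only):
+#   func_normal_abspath '/a/./b//../c'
+# sets func_normal_abspath_result to "/a/c".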
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same.  If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$'.  Escape that '$' with `\'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon delimited
+	# list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution, and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
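+# For example (illustrative only):
+#   func_quote_for_eval 'a b$c'
+# sets func_quote_for_eval_result to "a b\$c" (surrounding double quotes
+# included), which can safely be interpolated into a string that is
+# later eval'ed.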
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, then output CMD.  Then, if opt_dryrun is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, then output CMD.  Then, if opt_dryrun is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
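+# For example (illustrative only):
+#   func_tr_sh '9foo-bar'
+# sets func_tr_sh_result to "_9foo_bar".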
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    my_sed_short_opt='1s/^\(..\).*$/\1/;q'
+    my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
+
+    func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
+    func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
+} # func_split_short_opt may be replaced by extended shell implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
+    my_sed_long_arg='1s/^--[^=]*=//'
+
+    func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
+    func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
+} # func_split_long_opt may be replaced by extended shell implementation
+
+exit_cmd=:
+
+
+
+
+
+magic="%%%MAGIC variable%%%"
+magic_exe="%%%MAGIC EXE variable%%%"
+
+# Global variables.
+nonopt=
+preserve_args=
+lo2o="s/\\.lo\$/.${objext}/"
+o2lo="s/\\.${objext}\$/.lo/"
+extracted_archives=
+extracted_serial=0
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end.  This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+    eval "${1}=\$${1}\${2}"
+} # func_append may be replaced by extended shell implementation
+
+# func_append_quoted var value
+# Quote VALUE and append to the end of shell variable VAR, separated
+# by a space.
+func_append_quoted ()
+{
+    func_quote_for_eval "${2}"
+    eval "${1}=\$${1}\\ \$func_quote_for_eval_result"
+} # func_append_quoted may be replaced by extended shell implementation
+
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+    func_arith_result=`expr "${@}"`
+} # func_arith may be replaced by extended shell implementation
+
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+    func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len`
+} # func_len may be replaced by extended shell implementation
+
+
+# func_lo2o object
+func_lo2o ()
+{
+    func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+} # func_lo2o may be replaced by extended shell implementation
+
+
+# func_xform libobj-or-source
+func_xform ()
+{
+    func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+} # func_xform may be replaced by extended shell implementation
+
+
+# func_fatal_configuration arg...
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+    func_error ${1+"$@"}
+    func_error "See the $PACKAGE documentation for more information."
+    func_fatal_error "Fatal configuration error."
+}
+
+
+# func_config
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+    re_begincf='^# ### BEGIN LIBTOOL'
+    re_endcf='^# ### END LIBTOOL'
+
+    # Default configuration.
+    $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+    # Now print the configurations for the tags.
+    for tagname in $taglist; do
+      $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+    done
+
+    exit $?
+}
+
+# func_features
+# Display the features supported by this script.
+func_features ()
+{
+    echo "host: $host"
+    if test "$build_libtool_libs" = yes; then
+      echo "enable shared libraries"
+    else
+      echo "disable shared libraries"
+    fi
+    if test "$build_old_libs" = yes; then
+      echo "enable static libraries"
+    else
+      echo "disable static libraries"
+    fi
+
+    exit $?
+}
+
+# func_enable_tag tagname
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag.  We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+  # Global variable:
+  tagname="$1"
+
+  re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+  re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+  sed_extractcf="/$re_begincf/,/$re_endcf/p"
+
+  # Validate tagname.
+  case $tagname in
+    *[!-_A-Za-z0-9,/]*)
+      func_fatal_error "invalid tag name: $tagname"
+      ;;
+  esac
+
+  # Don't test for the "default" C tag, as we know it's
+  # there but not specially marked.
+  case $tagname in
+    CC) ;;
+    *)
+      if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+	taglist="$taglist $tagname"
+
+	# Evaluate the configuration.  Be careful to quote the path
+	# and the sed script, to avoid splitting on whitespace, but
+	# also don't use non-portable quotes within backquotes within
+	# quotes; we have to do it in two steps:
+	extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+	eval "$extractedcf"
+      else
+	func_error "ignoring unknown tag $tagname"
+      fi
+      ;;
+  esac
+}
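+
+# Illustrative: `--tag CXX' makes this function extract and eval the block
+# between "# ### BEGIN LIBTOOL TAG CONFIG: CXX" and the matching END marker
+# in this script (if present), and appends CXX to $taglist.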
+
+# func_check_version_match
+# Ensure that we are using m4 macros, and libtool script from the same
+# release of libtool.
+func_check_version_match ()
+{
+  if test "$package_revision" != "$macro_revision"; then
+    if test "$VERSION" != "$macro_version"; then
+      if test -z "$macro_version"; then
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      else
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      fi
+    else
+      cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+    fi
+
+    exit $EXIT_MISMATCH
+  fi
+}
+
+
+# Shorthand for --mode=foo, only valid as the first argument
+case $1 in
+clean|clea|cle|cl)
+  shift; set dummy --mode clean ${1+"$@"}; shift
+  ;;
+compile|compil|compi|comp|com|co|c)
+  shift; set dummy --mode compile ${1+"$@"}; shift
+  ;;
+execute|execut|execu|exec|exe|ex|e)
+  shift; set dummy --mode execute ${1+"$@"}; shift
+  ;;
+finish|finis|fini|fin|fi|f)
+  shift; set dummy --mode finish ${1+"$@"}; shift
+  ;;
+install|instal|insta|inst|ins|in|i)
+  shift; set dummy --mode install ${1+"$@"}; shift
+  ;;
+link|lin|li|l)
+  shift; set dummy --mode link ${1+"$@"}; shift
+  ;;
+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+  shift; set dummy --mode uninstall ${1+"$@"}; shift
+  ;;
+esac
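+
+# Illustrative: an invocation such as `libtool co foo.c' is rewritten by the
+# case statement above into `libtool --mode compile foo.c' before the option
+# loop below runs.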
+
+
+
+# Option defaults:
+opt_debug=:
+opt_dry_run=false
+opt_config=false
+opt_preserve_dup_deps=false
+opt_features=false
+opt_finish=false
+opt_help=false
+opt_help_all=false
+opt_silent=:
+opt_warning=:
+opt_verbose=:
+opt_silent=false
+opt_verbose=false
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+			;;
+      --config)
+			opt_config=:
+func_config
+			;;
+      --dlopen|-dlopen)
+			optarg="$1"
+			opt_dlopen="${opt_dlopen+$opt_dlopen
+}$optarg"
+			shift
+			;;
+      --preserve-dup-deps)
+			opt_preserve_dup_deps=:
+			;;
+      --features)
+			opt_features=:
+func_features
+			;;
+      --finish)
+			opt_finish=:
+set dummy --mode finish ${1+"$@"}; shift
+			;;
+      --help)
+			opt_help=:
+			;;
+      --help-all)
+			opt_help_all=:
+opt_help=': help-all'
+			;;
+      --mode)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_mode="$optarg"
+case $optarg in
+  # Valid mode arguments:
+  clean|compile|execute|finish|install|link|relink|uninstall) ;;
+
+  # Catch anything else as an error
+  *) func_error "invalid argument for $opt"
+     exit_cmd=exit
+     break
+     ;;
+esac
+			shift
+			;;
+      --no-silent|--no-quiet)
+			opt_silent=false
+func_append preserve_args " $opt"
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+func_append preserve_args " $opt"
+			;;
+      --no-verbose)
+			opt_verbose=false
+func_append preserve_args " $opt"
+			;;
+      --silent|--quiet)
+			opt_silent=:
+func_append preserve_args " $opt"
+        opt_verbose=false
+			;;
+      --verbose|-v)
+			opt_verbose=:
+func_append preserve_args " $opt"
+opt_silent=false
+			;;
+      --tag)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_tag="$optarg"
+func_append preserve_args " $opt $optarg"
+func_enable_tag "$optarg"
+			shift
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-n*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # save first non-option argument
+  if test "$#" -gt 0; then
+    nonopt="$opt"
+    shift
+  fi
+
+  # preserve --debug
+  test "$opt_debug" = : || func_append preserve_args " --debug"
+
+  case $host in
+    *cygwin* | *mingw* | *pw32* | *cegcc*)
+      # don't eliminate duplications in $postdeps and $predeps
+      opt_duplicate_compiler_generated_deps=:
+      ;;
+    *)
+      opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+      ;;
+  esac
+
+  $opt_help || {
+    # Sanity checks first:
+    func_check_version_match
+
+    if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+      func_fatal_configuration "not configured to build any kind of library"
+    fi
+
+    # Darwin sucks
+    eval std_shrext=\"$shrext_cmds\"
+
+    # Only execute mode is allowed to have -dlopen flags.
+    if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
+      func_error "unrecognized option \`-dlopen'"
+      $ECHO "$help" 1>&2
+      exit $EXIT_FAILURE
+    fi
+
+    # Change the help message to a mode-specific one.
+    generic_help="$help"
+    help="Try \`$progname --help --mode=$opt_mode' for more information."
+  }
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+# func_lalib_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+    test -f "$1" &&
+      $SED -e 4q "$1" 2>/dev/null \
+        | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs.  To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway.  Works if `file' does not exist.
+func_lalib_unsafe_p ()
+{
+    lalib_p=no
+    if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+	for lalib_p_l in 1 2 3 4
+	do
+	    read lalib_p_line
+	    case "$lalib_p_line" in
+		\#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+	    esac
+	done
+	exec 0<&5 5<&-
+    fi
+    test "$lalib_p" = yes
+}
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+    func_lalib_p "$1"
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+    func_ltwrapper_exec_suffix=
+    case $1 in
+    *.exe) ;;
+    *) func_ltwrapper_exec_suffix=.exe ;;
+    esac
+    $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes file is an ltwrapper_executable
+# uses $file to determine the appropriate filename for a
+# temporary ltwrapper_script.
+func_ltwrapper_scriptname ()
+{
+    func_dirname_and_basename "$1" "" "."
+    func_stripname '' '.exe' "$func_basename_result"
+    func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+}
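+
+# Illustrative (assuming the usual objdir of `.libs'):
+#   func_ltwrapper_scriptname ./foo.exe
+#     => func_ltwrapper_scriptname_result=./.libs/foo_ltshwrapper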
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+    func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+    $opt_debug
+    save_ifs=$IFS; IFS='~'
+    for cmd in $1; do
+      IFS=$save_ifs
+      eval cmd=\"$cmd\"
+      func_show_eval "$cmd" "${2-:}"
+    done
+    IFS=$save_ifs
+}
+
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)!  Also, sourcing
+# `FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+    $opt_debug
+    case $1 in
+    */* | *\\*)	. "$1" ;;
+    *)		. "./$1" ;;
+    esac
+}
+
+
+# func_resolve_sysroot PATH
+# Replace a leading = in PATH with a sysroot.  Store the result into
+# func_resolve_sysroot_result
+func_resolve_sysroot ()
+{
+  func_resolve_sysroot_result=$1
+  case $func_resolve_sysroot_result in
+  =*)
+    func_stripname '=' '' "$func_resolve_sysroot_result"
+    func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
+    ;;
+  esac
+}
+
+# func_replace_sysroot PATH
+# If PATH begins with the sysroot, replace it with = and
+# store the result into func_replace_sysroot_result.
+func_replace_sysroot ()
+{
+  case "$lt_sysroot:$1" in
+  ?*:"$lt_sysroot"*)
+    func_stripname "$lt_sysroot" '' "$1"
+    func_replace_sysroot_result="=$func_stripname_result"
+    ;;
+  *)
+    # Including no sysroot.
+    func_replace_sysroot_result=$1
+    ;;
+  esac
+}
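+
+# Illustrative (assuming lt_sysroot=/opt/sysroot):
+#   func_resolve_sysroot =/usr/lib/libfoo.la
+#     => func_resolve_sysroot_result=/opt/sysroot/usr/lib/libfoo.la
+#   func_replace_sysroot /opt/sysroot/usr/lib
+#     => func_replace_sysroot_result='=/usr/lib'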
+
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+    $opt_debug
+    if test -n "$available_tags" && test -z "$tagname"; then
+      CC_quoted=
+      for arg in $CC; do
+	func_append_quoted CC_quoted "$arg"
+      done
+      CC_expanded=`func_echo_all $CC`
+      CC_quoted_expanded=`func_echo_all $CC_quoted`
+      case $@ in
+      # Blanks in the command may have been stripped by the calling shell,
+      # but not from the CC environment variable when configure was run.
+      " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+      " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+      # Blanks at the start of $base_compile will cause this to fail
+      # if we don't check for them as well.
+      *)
+	for z in $available_tags; do
+	  if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+	    # Evaluate the configuration.
+	    eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+	    CC_quoted=
+	    for arg in $CC; do
+	      # Double-quote args containing other shell metacharacters.
+	      func_append_quoted CC_quoted "$arg"
+	    done
+	    CC_expanded=`func_echo_all $CC`
+	    CC_quoted_expanded=`func_echo_all $CC_quoted`
+	    case "$@ " in
+	    " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+	    " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+	      # The compiler in the base compile command matches
+	      # the one in the tagged configuration.
+	      # Assume this is the tagged configuration we want.
+	      tagname=$z
+	      break
+	      ;;
+	    esac
+	  fi
+	done
+	# If $tagname still isn't set, then no tagged configuration
+	# was found and let the user know that the "--tag" command
+	# line option must be used.
+	if test -z "$tagname"; then
+	  func_echo "unable to infer tagged configuration"
+	  func_fatal_error "specify a tag with \`--tag'"
+#	else
+#	  func_verbose "using $tagname tagged configuration"
+	fi
+	;;
+      esac
+    fi
+}
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+    write_libobj=${1}
+    if test "$build_libtool_libs" = yes; then
+      write_lobj=\'${2}\'
+    else
+      write_lobj=none
+    fi
+
+    if test "$build_old_libs" = yes; then
+      write_oldobj=\'${3}\'
+    else
+      write_oldobj=none
+    fi
+
+    $opt_dry_run || {
+      cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+      $MV "${write_libobj}T" "${write_libobj}"
+    }
+}
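+
+# Illustrative: `func_write_libtool_object foo.lo .libs/foo.o foo.o' writes a
+# small foo.lo control file recording the PIC and non-PIC object names (either
+# may be `none' depending on $build_libtool_libs and $build_old_libs).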
+
+
+##################################################
+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
+##################################################
+
+# func_convert_core_file_wine_to_w32 ARG
+# Helper function used by file name conversion functions when $build is *nix,
+# and $host is mingw, cygwin, or some other w32 environment. Relies on a
+# correctly configured wine environment being available, with the winepath
+# program in $build's $PATH.
+#
+# ARG is the $build file name to be converted to w32 format.
+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
+# be empty on error (or when ARG is empty)
+func_convert_core_file_wine_to_w32 ()
+{
+  $opt_debug
+  func_convert_core_file_wine_to_w32_result="$1"
+  if test -n "$1"; then
+    # Unfortunately, winepath does not exit with a non-zero error code, so we
+    # are forced to check the contents of stdout. On the other hand, if the
+    # command is not found, the shell will set an exit code of 127 and print
+    # *an error message* to stdout. So we must check for both error code of
+    # zero AND non-empty stdout, which explains the odd construction:
+    func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
+    if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
+      func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
+        $SED -e "$lt_sed_naive_backslashify"`
+    else
+      func_convert_core_file_wine_to_w32_result=
+    fi
+  fi
+}
+# end: func_convert_core_file_wine_to_w32
+
+
+# func_convert_core_path_wine_to_w32 ARG
+# Helper function used by path conversion functions when $build is *nix, and
+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
+# configured wine environment being available, with the winepath program in
+# $build's $PATH. Assumes ARG has no leading or trailing path separator characters.
+#
+# ARG is path to be converted from $build format to win32.
+# Result is available in $func_convert_core_path_wine_to_w32_result.
+# Unconvertible file (directory) names in ARG are skipped; if no directory names
+# are convertible, then the result may be empty.
+func_convert_core_path_wine_to_w32 ()
+{
+  $opt_debug
+  # unfortunately, winepath doesn't convert paths, only file names
+  func_convert_core_path_wine_to_w32_result=""
+  if test -n "$1"; then
+    oldIFS=$IFS
+    IFS=:
+    for func_convert_core_path_wine_to_w32_f in $1; do
+      IFS=$oldIFS
+      func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
+      if test -n "$func_convert_core_file_wine_to_w32_result" ; then
+        if test -z "$func_convert_core_path_wine_to_w32_result"; then
+          func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
+        else
+          func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
+        fi
+      fi
+    done
+    IFS=$oldIFS
+  fi
+}
+# end: func_convert_core_path_wine_to_w32
+
+
+# func_cygpath ARGS...
+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used
+# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
+# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
+# (2), returns the Cygwin file name or path in func_cygpath_result (input
+# file name or path is assumed to be in w32 format, as previously converted
+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
+# or path in func_cygpath_result (input file name or path is assumed to be in
+# Cygwin format). Returns an empty string on error.
+#
+# ARGS are passed to cygpath, with the last one being the file name or path to
+# be converted.
+#
+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
+# environment variable; do not put it in $PATH.
+func_cygpath ()
+{
+  $opt_debug
+  if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
+    func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
+    if test "$?" -ne 0; then
+      # on failure, ensure result is empty
+      func_cygpath_result=
+    fi
+  else
+    func_cygpath_result=
+    func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
+  fi
+}
+#end: func_cygpath
+
+
+# func_convert_core_msys_to_w32 ARG
+# Convert file name or path ARG from MSYS format to w32 format.  Return
+# result in func_convert_core_msys_to_w32_result.
+func_convert_core_msys_to_w32 ()
+{
+  $opt_debug
+  # awkward: cmd appends spaces to result
+  func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
+    $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+}
+#end: func_convert_core_msys_to_w32
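+
+# Illustrative: on a typical MSYS build machine, converting `/c/foo/bar' with
+# the helper above yields something like `c:\foo\bar' (the exact result
+# depends on the local MSYS path-translation rules).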
+
+
+# func_convert_file_check ARG1 ARG2
+# Verify that ARG1 (a file name in $build format) was converted to $host
+# format in ARG2. Otherwise, emit an error message, but continue (resetting
+# func_to_host_file_result to ARG1).
+func_convert_file_check ()
+{
+  $opt_debug
+  if test -z "$2" && test -n "$1" ; then
+    func_error "Could not determine host file name corresponding to"
+    func_error "  \`$1'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback:
+    func_to_host_file_result="$1"
+  fi
+}
+# end func_convert_file_check
+
+
+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
+# Verify that FROM_PATH (a path in $build format) was converted to $host
+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
+# func_to_host_file_result to a simplistic fallback value (see below).
+func_convert_path_check ()
+{
+  $opt_debug
+  if test -z "$4" && test -n "$3"; then
+    func_error "Could not determine the host path corresponding to"
+    func_error "  \`$3'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback.  This is a deliberately simplistic "conversion" and
+    # should not be "improved".  See libtool.info.
+    if test "x$1" != "x$2"; then
+      lt_replace_pathsep_chars="s|$1|$2|g"
+      func_to_host_path_result=`echo "$3" |
+        $SED -e "$lt_replace_pathsep_chars"`
+    else
+      func_to_host_path_result="$3"
+    fi
+  fi
+}
+# end func_convert_path_check
+
+
+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
+# and appending REPL if ORIG matches BACKPAT.
+func_convert_path_front_back_pathsep ()
+{
+  $opt_debug
+  case $4 in
+  $1 ) func_to_host_path_result="$3$func_to_host_path_result"
+    ;;
+  esac
+  case $4 in
+  $2 ) func_append func_to_host_path_result "$3"
+    ;;
+  esac
+}
+# end func_convert_path_front_back_pathsep
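+
+# Illustrative: `func_convert_path_front_back_pathsep ":*" "*:" ";" ":/a:/b:"'
+# both prepends and appends `;' to func_to_host_path_result, because the
+# original argument starts and ends with `:'.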
+
+
+##################################################
+# $build to $host FILE NAME CONVERSION FUNCTIONS #
+##################################################
+# invoked via `$to_host_file_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# Result will be available in $func_to_host_file_result.
+
+
+# func_to_host_file ARG
+# Converts the file name ARG from $build format to $host format. Return result
+# in func_to_host_file_result.
+func_to_host_file ()
+{
+  $opt_debug
+  $to_host_file_cmd "$1"
+}
+# end func_to_host_file
+
+
+# func_to_tool_file ARG LAZY
+# Converts the file name ARG from $build format to toolchain format. Return
+# result in func_to_tool_file_result.  If the conversion in use is listed
+# in (the comma separated) LAZY, no conversion takes place.
+func_to_tool_file ()
+{
+  $opt_debug
+  case ,$2, in
+    *,"$to_tool_file_cmd",*)
+      func_to_tool_file_result=$1
+      ;;
+    *)
+      $to_tool_file_cmd "$1"
+      func_to_tool_file_result=$func_to_host_file_result
+      ;;
+  esac
+}
+# end func_to_tool_file
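+
+# Illustrative: `func_to_tool_file "$srcfile" func_convert_file_msys_to_w32'
+# (as used in compile mode below) leaves the name untouched when the active
+# $to_tool_file_cmd is func_convert_file_msys_to_w32, since that conversion is
+# listed as LAZY; otherwise it applies $to_tool_file_cmd.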
+
+
+# func_convert_file_noop ARG
+# Copy ARG to func_to_host_file_result.
+func_convert_file_noop ()
+{
+  func_to_host_file_result="$1"
+}
+# end func_convert_file_noop
+
+
+# func_convert_file_msys_to_w32 ARG
+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_file_result.
+func_convert_file_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_msys_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_w32
+
+
+# func_convert_file_cygwin_to_w32 ARG
+# Convert file name ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_file_result.
+func_convert_file_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
+    # LT_CYGPATH in this case.
+    func_to_host_file_result=`cygpath -m "$1"`
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_cygwin_to_w32
+
+
+# func_convert_file_nix_to_w32 ARG
+# Convert file name ARG from *nix to w32 format.  Requires a wine environment
+# and a working winepath. Returns result in func_to_host_file_result.
+func_convert_file_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_file_wine_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_w32
+
+
+# func_convert_file_msys_to_cygwin ARG
+# Convert file name ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_file_result.
+func_convert_file_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_msys_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_cygwin
+
+
+# func_convert_file_nix_to_cygwin ARG
+# Convert file name ARG from *nix to Cygwin format.  Requires Cygwin installed
+# in a wine environment, working winepath, and LT_CYGPATH set.  Returns result
+# in func_to_host_file_result.
+func_convert_file_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
+    func_convert_core_file_wine_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_cygwin
+
+
+#############################################
+# $build to $host PATH CONVERSION FUNCTIONS #
+#############################################
+# invoked via `$to_host_path_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# The result will be available in $func_to_host_path_result.
+#
+# Path separators are also converted from $build format to $host format.  If
+# ARG begins or ends with a path separator character, it is preserved (but
+# converted to $host format) on output.
+#
+# All path conversion functions are named using the following convention:
+#   file name conversion function    : func_convert_file_X_to_Y ()
+#   path conversion function         : func_convert_path_X_to_Y ()
+# where, for any given $build/$host combination the 'X_to_Y' value is the
+# same.  If conversion functions are added for new $build/$host combinations,
+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
+# will break.
+
+
+# func_init_to_host_path_cmd
+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
+# appropriate value, based on the value of $to_host_file_cmd.
+to_host_path_cmd=
+func_init_to_host_path_cmd ()
+{
+  $opt_debug
+  if test -z "$to_host_path_cmd"; then
+    func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
+    to_host_path_cmd="func_convert_path_${func_stripname_result}"
+  fi
+}
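+
+# Illustrative: if $to_host_file_cmd is `func_convert_file_cygwin_to_w32',
+# this initializer sets to_host_path_cmd to `func_convert_path_cygwin_to_w32'.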
+
+
+# func_to_host_path ARG
+# Converts the path ARG from $build format to $host format. Return result
+# in func_to_host_path_result.
+func_to_host_path ()
+{
+  $opt_debug
+  func_init_to_host_path_cmd
+  $to_host_path_cmd "$1"
+}
+# end func_to_host_path
+
+
+# func_convert_path_noop ARG
+# Copy ARG to func_to_host_path_result.
+func_convert_path_noop ()
+{
+  func_to_host_path_result="$1"
+}
+# end func_convert_path_noop
+
+
+# func_convert_path_msys_to_w32 ARG
+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_path_result.
+func_convert_path_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from ARG.  MSYS
+    # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
+    # and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_msys_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_msys_to_w32
+
+
+# func_convert_path_cygwin_to_w32 ARG
+# Convert path ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_path_result.
+func_convert_path_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_cygwin_to_w32
+
+
+# func_convert_path_nix_to_w32 ARG
+# Convert path ARG from *nix to w32 format.  Requires a wine environment and
+# a working winepath.  Returns result in func_to_host_path_result.
+func_convert_path_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_nix_to_w32
+
+
+# func_convert_path_msys_to_cygwin ARG
+# Convert path ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_path_result.
+func_convert_path_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_msys_to_cygwin
+
+
+# func_convert_path_nix_to_cygwin ARG
+# Convert path ARG from *nix to Cygwin format.  Requires Cygwin installed in
+# a wine environment, working winepath, and LT_CYGPATH set.  Returns result in
+# func_to_host_path_result.
+func_convert_path_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from
+    # ARG. msys behavior is inconsistent here, cygpath turns them
+    # into '.;' and ';.', and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_nix_to_cygwin
+
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+    $opt_debug
+    # Get the compilation command and the source file.
+    base_compile=
+    srcfile="$nonopt"  #  always keep a non-empty value in "srcfile"
+    suppress_opt=yes
+    suppress_output=
+    arg_mode=normal
+    libobj=
+    later=
+    pie_flag=
+
+    for arg
+    do
+      case $arg_mode in
+      arg  )
+	# do not "continue".  Instead, add this to base_compile
+	lastarg="$arg"
+	arg_mode=normal
+	;;
+
+      target )
+	libobj="$arg"
+	arg_mode=normal
+	continue
+	;;
+
+      normal )
+	# Accept any command-line options.
+	case $arg in
+	-o)
+	  test -n "$libobj" && \
+	    func_fatal_error "you cannot specify \`-o' more than once"
+	  arg_mode=target
+	  continue
+	  ;;
+
+	-pie | -fpie | -fPIE)
+          func_append pie_flag " $arg"
+	  continue
+	  ;;
+
+	-shared | -static | -prefer-pic | -prefer-non-pic)
+	  func_append later " $arg"
+	  continue
+	  ;;
+
+	-no-suppress)
+	  suppress_opt=no
+	  continue
+	  ;;
+
+	-Xcompiler)
+	  arg_mode=arg  #  the next one goes into the "base_compile" arg list
+	  continue      #  The current "srcfile" will either be retained or
+	  ;;            #  replaced later.  I would guess that would be a bug.
+
+	-Wc,*)
+	  func_stripname '-Wc,' '' "$arg"
+	  args=$func_stripname_result
+	  lastarg=
+	  save_ifs="$IFS"; IFS=','
+	  for arg in $args; do
+	    IFS="$save_ifs"
+	    func_append_quoted lastarg "$arg"
+	  done
+	  IFS="$save_ifs"
+	  func_stripname ' ' '' "$lastarg"
+	  lastarg=$func_stripname_result
+
+	  # Add the arguments to base_compile.
+	  func_append base_compile " $lastarg"
+	  continue
+	  ;;
+
+	*)
+	  # Accept the current argument as the source file.
+	  # The previous "srcfile" becomes the current argument.
+	  #
+	  lastarg="$srcfile"
+	  srcfile="$arg"
+	  ;;
+	esac  #  case $arg
+	;;
+      esac    #  case $arg_mode
+
+      # Aesthetically quote the previous argument.
+      func_append_quoted base_compile "$lastarg"
+    done # for arg
+
+    case $arg_mode in
+    arg)
+      func_fatal_error "you must specify an argument for -Xcompiler"
+      ;;
+    target)
+      func_fatal_error "you must specify a target with \`-o'"
+      ;;
+    *)
+      # Get the name of the library object.
+      test -z "$libobj" && {
+	func_basename "$srcfile"
+	libobj="$func_basename_result"
+      }
+      ;;
+    esac
+
+    # Recognize several different file suffixes.
+    # If the user specifies -o file.o, it is replaced with file.lo
+    case $libobj in
+    *.[cCFSifmso] | \
+    *.ada | *.adb | *.ads | *.asm | \
+    *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+    *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
+      func_xform "$libobj"
+      libobj=$func_xform_result
+      ;;
+    esac
+
+    case $libobj in
+    *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+    *)
+      func_fatal_error "cannot determine name of library object from \`$libobj'"
+      ;;
+    esac
+
+    func_infer_tag $base_compile
+
+    for arg in $later; do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "cannot build a shared library"
+	build_old_libs=no
+	continue
+	;;
+
+      -static)
+	build_libtool_libs=no
+	build_old_libs=yes
+	continue
+	;;
+
+      -prefer-pic)
+	pic_mode=yes
+	continue
+	;;
+
+      -prefer-non-pic)
+	pic_mode=no
+	continue
+	;;
+      esac
+    done
+
+    func_quote_for_eval "$libobj"
+    test "X$libobj" != "X$func_quote_for_eval_result" \
+      && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"'	 &()|`$[]' \
+      && func_warning "libobj name \`$libobj' may not contain shell special characters."
+    func_dirname_and_basename "$obj" "/" ""
+    objname="$func_basename_result"
+    xdir="$func_dirname_result"
+    lobj=${xdir}$objdir/$objname
+
+    test -z "$base_compile" && \
+      func_fatal_help "you must specify a compilation command"
+
+    # Delete any leftover library objects.
+    if test "$build_old_libs" = yes; then
+      removelist="$obj $lobj $libobj ${libobj}T"
+    else
+      removelist="$lobj $libobj ${libobj}T"
+    fi
+
+    # On Cygwin there's no "real" PIC flag so we must build both object types
+    case $host_os in
+    cygwin* | mingw* | pw32* | os2* | cegcc*)
+      pic_mode=default
+      ;;
+    esac
+    if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+      # non-PIC code in shared libraries is not supported
+      pic_mode=default
+    fi
+
+    # Calculate the filename of the output object if compiler does
+    # not support -o with -c
+    if test "$compiler_c_o" = no; then
+      output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext}
+      lockfile="$output_obj.lock"
+    else
+      output_obj=
+      need_locks=no
+      lockfile=
+    fi
+
+    # Lock this critical section if it is needed
+    # We use this script file to make the link, it avoids creating a new file
+    if test "$need_locks" = yes; then
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    elif test "$need_locks" = warn; then
+      if test -f "$lockfile"; then
+	$ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+      func_append removelist " $output_obj"
+      $ECHO "$srcfile" > "$lockfile"
+    fi
+
+    $opt_dry_run || $RM $removelist
+    func_append removelist " $lockfile"
+    trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+    func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+    srcfile=$func_to_tool_file_result
+    func_quote_for_eval "$srcfile"
+    qsrcfile=$func_quote_for_eval_result
+
+    # Only build a PIC object if we are building libtool libraries.
+    if test "$build_libtool_libs" = yes; then
+      # Without this assignment, base_compile gets emptied.
+      fbsd_hideous_sh_bug=$base_compile
+
+      if test "$pic_mode" != no; then
+	command="$base_compile $qsrcfile $pic_flag"
+      else
+	# Don't build PIC code
+	command="$base_compile $qsrcfile"
+      fi
+
+      func_mkdir_p "$xdir$objdir"
+
+      if test -z "$output_obj"; then
+	# Place PIC objects in $objdir
+	func_append command " -o $lobj"
+      fi
+
+      func_show_eval_locale "$command"	\
+          'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed, then go on to compile the next one
+      if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+	func_show_eval '$MV "$output_obj" "$lobj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+
+      # Allow error messages only from the first compilation.
+      if test "$suppress_opt" = yes; then
+	suppress_output=' >/dev/null 2>&1'
+      fi
+    fi
+
+    # Only build a position-dependent object if we build old libraries.
+    if test "$build_old_libs" = yes; then
+      if test "$pic_mode" != yes; then
+	# Don't build PIC code
+	command="$base_compile $qsrcfile$pie_flag"
+      else
+	command="$base_compile $qsrcfile $pic_flag"
+      fi
+      if test "$compiler_c_o" = yes; then
+	func_append command " -o $obj"
+      fi
+
+      # Suppress compiler output if we already did a PIC compilation.
+      func_append command "$suppress_output"
+      func_show_eval_locale "$command" \
+        '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed
+      if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+	func_show_eval '$MV "$output_obj" "$obj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+    fi
+
+    $opt_dry_run || {
+      func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+      # Unlock the critical section if it was locked
+      if test "$need_locks" != no; then
+	removelist=$lockfile
+        $RM "$lockfile"
+      fi
+    }
+
+    exit $EXIT_SUCCESS
+}
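+
+# Illustrative compile-mode invocation handled by func_mode_compile above:
+#   libtool --mode=compile gcc -c foo.c
+# typically produces foo.lo plus a PIC object .libs/foo.o and/or a non-PIC
+# foo.o, depending on how this copy of libtool was configured.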
+
+$opt_help || {
+  test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
+}
+
+func_mode_help ()
+{
+    # We need to display help for each of the modes.
+    case $opt_mode in
+      "")
+        # Generic help is extracted from the usage comments
+        # at the start of this file.
+        func_help
+        ;;
+
+      clean)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      compile)
+      $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+  -o OUTPUT-FILE    set the output file name to OUTPUT-FILE
+  -no-suppress      do not suppress compiler output for multiple passes
+  -prefer-pic       try to build PIC objects only
+  -prefer-non-pic   try to build non-PIC objects only
+  -shared           do not build a \`.o' file suitable for static linking
+  -static           only build a \`.o' file suitable for static linking
+  -Wc,FLAG          pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a \`standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix \`.c' with the
+library object suffix, \`.lo'."
+        ;;
+
+      execute)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+  -dlopen FILE      add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to \`-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+        ;;
+
+      finish)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges.  Use
+the \`--dry-run' option if you just want to see what would be executed."
+        ;;
+
+      install)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command.  The first component should be
+either the \`install' or \`cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+  -inst-prefix-dir PREFIX-DIR  Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+        ;;
+
+      link)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+  -all-static       do not do any dynamic linking at all
+  -avoid-version    do not add a version suffix if possible
+  -bindir BINDIR    specify path to binaries directory (for systems where
+                    libraries must be found in the PATH setting at runtime)
+  -dlopen FILE      \`-dlpreopen' FILE if it cannot be dlopened at runtime
+  -dlpreopen FILE   link in FILE and add its symbols to lt_preloaded_symbols
+  -export-dynamic   allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+  -export-symbols SYMFILE
+                    try to export only the symbols listed in SYMFILE
+  -export-symbols-regex REGEX
+                    try to export only the symbols matching REGEX
+  -LLIBDIR          search LIBDIR for required installed libraries
+  -lNAME            OUTPUT-FILE requires the installed library libNAME
+  -module           build a library that can be dlopened
+  -no-fast-install  disable the fast-install mode
+  -no-install       link a not-installable executable
+  -no-undefined     declare that a library does not refer to external symbols
+  -o OUTPUT-FILE    create OUTPUT-FILE from the specified objects
+  -objectlist FILE  Use a list of object files found in FILE to specify objects
+  -precious-files-regex REGEX
+                    don't remove output files matching REGEX
+  -release RELEASE  specify package release information
+  -rpath LIBDIR     the created library will eventually be installed in LIBDIR
+  -R[ ]LIBDIR       add LIBDIR to the runtime path of programs and libraries
+  -shared           only do dynamic linking of libtool libraries
+  -shrext SUFFIX    override the standard shared library file extension
+  -static           do not do any dynamic linking of uninstalled libtool libraries
+  -static-libtool-libs
+                    do not do any dynamic linking of libtool libraries
+  -version-info CURRENT[:REVISION[:AGE]]
+                    specify library version info [each variable defaults to 0]
+  -weak LIBNAME     declare that the target provides the LIBNAME interface
+  -Wc,FLAG
+  -Xcompiler FLAG   pass compiler-specific FLAG directly to the compiler
+  -Wl,FLAG
+  -Xlinker FLAG     pass linker-specific FLAG directly to the linker
+  -XCClinker FLAG   pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with \`-') are ignored.
+
+Every other argument is treated as a filename.  Files ending in \`.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
+only library objects (\`.lo' files) may be specified, and \`-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
+using \`ar' and \`ranlib', or on Windows using \`lib'.
+
+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
+is created, otherwise an executable program is created."
+        ;;
+
+      uninstall)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      *)
+        func_fatal_help "invalid operation mode \`$opt_mode'"
+        ;;
+    esac
+
+    echo
+    $ECHO "Try \`$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+  if test "$opt_help" = :; then
+    func_mode_help
+  else
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	func_mode_help
+      done
+    } | sed -n '1p; 2,$s/^Usage:/  or: /p'
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	echo
+	func_mode_help
+      done
+    } |
+    sed '1d
+      /^When reporting/,/^Report/{
+	H
+	d
+      }
+      $x
+      /information about other modes/d
+      /more detailed .*MODE/d
+      s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+  fi
+  exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+    $opt_debug
+    # The first argument is the command name.
+    cmd="$nonopt"
+    test -z "$cmd" && \
+      func_fatal_help "you must specify a COMMAND"
+
+    # Handle -dlopen flags immediately.
+    for file in $opt_dlopen; do
+      test -f "$file" \
+	|| func_fatal_help "\`$file' is not a file"
+
+      dir=
+      case $file in
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$lib' is not a valid libtool archive"
+
+	# Read the libtool library.
+	dlname=
+	library_names=
+	func_source "$file"
+
+	# Skip this library if it cannot be dlopened.
+	if test -z "$dlname"; then
+	  # Warn if it was a shared library.
+	  test -n "$library_names" && \
+	    func_warning "\`$file' was not linked with \`-export-dynamic'"
+	  continue
+	fi
+
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+
+	if test -f "$dir/$objdir/$dlname"; then
+	  func_append dir "/$objdir"
+	else
+	  if test ! -f "$dir/$dlname"; then
+	    func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+	  fi
+	fi
+	;;
+
+      *.lo)
+	# Just add the directory containing the .lo file.
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+	;;
+
+      *)
+	func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
+	continue
+	;;
+      esac
+
+      # Get the absolute pathname.
+      absdir=`cd "$dir" && pwd`
+      test -n "$absdir" && dir="$absdir"
+
+      # Now add the directory to shlibpath_var.
+      if eval "test -z \"\$$shlibpath_var\""; then
+	eval "$shlibpath_var=\"\$dir\""
+      else
+	eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+      fi
+    done
+
+    # This variable tells wrapper scripts just to set shlibpath_var
+    # rather than running their programs.
+    libtool_execute_magic="$magic"
+
+    # Check if any of the arguments is a wrapper script.
+    args=
+    for file
+    do
+      case $file in
+      -* | *.la | *.lo ) ;;
+      *)
+	# Do a test to see if this is really a libtool program.
+	if func_ltwrapper_script_p "$file"; then
+	  func_source "$file"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	elif func_ltwrapper_executable_p "$file"; then
+	  func_ltwrapper_scriptname "$file"
+	  func_source "$func_ltwrapper_scriptname_result"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	fi
+	;;
+      esac
+      # Quote arguments (to preserve shell metacharacters).
+      func_append_quoted args "$file"
+    done
+
+    if test "X$opt_dry_run" = Xfalse; then
+      if test -n "$shlibpath_var"; then
+	# Export the shlibpath_var.
+	eval "export $shlibpath_var"
+      fi
+
+      # Restore saved environment variables
+      for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+      do
+	eval "if test \"\${save_$lt_var+set}\" = set; then
+                $lt_var=\$save_$lt_var; export $lt_var
+	      else
+		$lt_unset $lt_var
+	      fi"
+      done
+
+      # Now prepare to actually exec the command.
+      exec_cmd="\$cmd$args"
+    else
+      # Display what would be done.
+      if test -n "$shlibpath_var"; then
+	eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+	echo "export $shlibpath_var"
+      fi
+      $ECHO "$cmd$args"
+      exit $EXIT_SUCCESS
+    fi
+}
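+
+# Illustrative execute-mode invocation handled by func_mode_execute above:
+#   libtool --mode=execute -dlopen libfoo.la ./prog arg1 arg2
+# adds libfoo.la's .libs directory to the shared-library path variable and
+# then runs ./prog (or the wrapped binary, if ./prog is a libtool wrapper).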
+
+test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
+
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+    $opt_debug
+    libs=
+    libdirs=
+    admincmds=
+
+    for opt in "$nonopt" ${1+"$@"}
+    do
+      if test -d "$opt"; then
+	func_append libdirs " $opt"
+
+      elif test -f "$opt"; then
+	if func_lalib_unsafe_p "$opt"; then
+	  func_append libs " $opt"
+	else
+	  func_warning "\`$opt' is not a valid libtool archive"
+	fi
+
+      else
+	func_fatal_error "invalid argument \`$opt'"
+      fi
+    done
+
+    if test -n "$libs"; then
+      if test -n "$lt_sysroot"; then
+        sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
+        sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
+      else
+        sysroot_cmd=
+      fi
+
+      # Remove sysroot references
+      if $opt_dry_run; then
+        for lib in $libs; do
+          echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
+        done
+      else
+        tmpdir=`func_mktempdir`
+        for lib in $libs; do
+	  sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
+	    > $tmpdir/tmp-la
+	  mv -f $tmpdir/tmp-la $lib
+	done
+        ${RM}r "$tmpdir"
+      fi
+    fi
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      for libdir in $libdirs; do
+	if test -n "$finish_cmds"; then
+	  # Do each command in the finish commands.
+	  func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+	fi
+	if test -n "$finish_eval"; then
+	  # Do the single finish_eval.
+	  eval cmds=\"$finish_eval\"
+	  $opt_dry_run || eval "$cmds" || func_append admincmds "
+       $cmds"
+	fi
+      done
+    fi
+
+    # Exit here if they wanted silent mode.
+    $opt_silent && exit $EXIT_SUCCESS
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      echo "----------------------------------------------------------------------"
+      echo "Libraries have been installed in:"
+      for libdir in $libdirs; do
+	$ECHO "   $libdir"
+      done
+      echo
+      echo "If you ever happen to want to link against installed libraries"
+      echo "in a given directory, LIBDIR, you must either use libtool, and"
+      echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+      echo "flag during linking and do at least one of the following:"
+      if test -n "$shlibpath_var"; then
+	echo "   - add LIBDIR to the \`$shlibpath_var' environment variable"
+	echo "     during execution"
+      fi
+      if test -n "$runpath_var"; then
+	echo "   - add LIBDIR to the \`$runpath_var' environment variable"
+	echo "     during linking"
+      fi
+      if test -n "$hardcode_libdir_flag_spec"; then
+	libdir=LIBDIR
+	eval flag=\"$hardcode_libdir_flag_spec\"
+
+	$ECHO "   - use the \`$flag' linker flag"
+      fi
+      if test -n "$admincmds"; then
+	$ECHO "   - have your system administrator run these commands:$admincmds"
+      fi
+      if test -f /etc/ld.so.conf; then
+	echo "   - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+      fi
+      echo
+
+      echo "See any operating system documentation about shared libraries for"
+      case $host in
+	solaris2.[6789]|solaris2.1[0-9])
+	  echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+	  echo "pages."
+	  ;;
+	*)
+	  echo "more information, such as the ld(1) and ld.so(8) manual pages."
+	  ;;
+      esac
+      echo "----------------------------------------------------------------------"
+    fi
+    exit $EXIT_SUCCESS
+}
+
+test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
+
+
+# func_mode_install arg...
+func_mode_install ()
+{
+    $opt_debug
+    # There may be an optional sh(1) argument at the beginning of
+    # install_prog (especially on Windows NT).
+    if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
+       # Allow the use of GNU shtool's install command.
+       case $nonopt in *shtool*) :;; *) false;; esac; then
+      # Aesthetically quote it.
+      func_quote_for_eval "$nonopt"
+      install_prog="$func_quote_for_eval_result "
+      arg=$1
+      shift
+    else
+      install_prog=
+      arg=$nonopt
+    fi
+
+    # The real first argument should be the name of the installation program.
+    # Aesthetically quote it.
+    func_quote_for_eval "$arg"
+    func_append install_prog "$func_quote_for_eval_result"
+    install_shared_prog=$install_prog
+    case " $install_prog " in
+      *[\\\ /]cp\ *) install_cp=: ;;
+      *) install_cp=false ;;
+    esac
+
+    # We need to accept at least all the BSD install flags.
+    dest=
+    files=
+    opts=
+    prev=
+    install_type=
+    isdir=no
+    stripme=
+    no_mode=:
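+    # After the options, each remaining argument becomes the tentative destination,
+    # pushing the previous one onto the file list, so the final argument is the destination.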
+    for arg
+    do
+      arg2=
+      if test -n "$dest"; then
+	func_append files " $dest"
+	dest=$arg
+	continue
+      fi
+
+      case $arg in
+      -d) isdir=yes ;;
+      -f)
+	if $install_cp; then :; else
+	  prev=$arg
+	fi
+	;;
+      -g | -m | -o)
+	prev=$arg
+	;;
+      -s)
+	stripme=" -s"
+	continue
+	;;
+      -*)
+	;;
+      *)
+	# If the previous option needed an argument, then skip it.
+	if test -n "$prev"; then
+	  if test "x$prev" = x-m && test -n "$install_override_mode"; then
+	    arg2=$install_override_mode
+	    no_mode=false
+	  fi
+	  prev=
+	else
+	  dest=$arg
+	  continue
+	fi
+	;;
+      esac
+
+      # Aesthetically quote the argument.
+      func_quote_for_eval "$arg"
+      func_append install_prog " $func_quote_for_eval_result"
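+      # When '-m MODE' is overridden, install_shared_prog gets the override mode
+      # (arg2) instead of the user-supplied argument quoted above.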
+      if test -n "$arg2"; then
+	func_quote_for_eval "$arg2"
+      fi
+      func_append install_shared_prog " $func_quote_for_eval_result"
+    done
+
+    test -z "$install_prog" && \
+      func_fatal_help "you must specify an install program"
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prev' option requires an argument"
+
+    if test -n "$install_override_mode" && $no_mode; then
+      if $install_cp; then :; else
+	func_quote_for_eval "$install_override_mode"
+	func_append install_shared_prog " -m $func_quote_for_eval_result"
+      fi
+    fi
+
+    if test -z "$files"; then
+      if test -z "$dest"; then
+	func_fatal_help "no file or destination specified"
+      else
+	func_fatal_help "you must specify a destination"
+      fi
+    fi
+
+    # Strip any trailing slash from the destination.
+    func_stripname '' '/' "$dest"
+    dest=$func_stripname_result
+
+    # Check to see that the destination is a directory.
+    test -d "$dest" && isdir=yes
+    if test "$isdir" = yes; then
+      destdir="$dest"
+      destname=
+    else
+      func_dirname_and_basename "$dest" "" "."
+      destdir="$func_dirname_result"
+      destname="$func_basename_result"
+
+      # Not a directory, so check to see that there is only one file specified.
+      set dummy $files; shift
+      test "$#" -gt 1 && \
+	func_fatal_help "\`$dest' is not a directory"
+    fi
+    case $destdir in
+    [\\/]* | [A-Za-z]:[\\/]*) ;;
+    *)
+      for file in $files; do
+	case $file in
+	*.lo) ;;
+	*)
+	  func_fatal_help "\`$destdir' must be an absolute directory name"
+	  ;;
+	esac
+      done
+      ;;
+    esac
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    staticlibs=
+    future_libdirs=
+    current_libdirs=
+    for file in $files; do
+
+      # Do each installation.
+      case $file in
+      *.$libext)
+	# Do the static libraries later.
+	func_append staticlibs " $file"
+	;;
+
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$file' is not a valid libtool archive"
+
+	library_names=
+	old_library=
+	relink_command=
+	func_source "$file"
+
+	# Add the libdir to current_libdirs if it is the destination.
+	if test "X$destdir" = "X$libdir"; then
+	  case "$current_libdirs " in
+	  *" $libdir "*) ;;
+	  *) func_append current_libdirs " $libdir" ;;
+	  esac
+	else
+	  # Note the libdir as a future libdir.
+	  case "$future_libdirs " in
+	  *" $libdir "*) ;;
+	  *) func_append future_libdirs " $libdir" ;;
+	  esac
+	fi
+
+	func_dirname "$file" "/" ""
+	dir="$func_dirname_result"
+	func_append dir "$objdir"
+
+	if test -n "$relink_command"; then
+	  # Determine the prefix the user has applied to our future dir.
+	  inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+	  # Don't allow the user to place us outside of our expected
+	  # location because this prevents finding dependent libraries that
+	  # are installed to the same prefix.
+	  # At present, this check doesn't affect windows .dll's that
+	  # are installed into $libdir/../bin (currently, that works fine)
+	  # but it's something to keep an eye on.
+	  test "$inst_prefix_dir" = "$destdir" && \
+	    func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
+
+	  if test -n "$inst_prefix_dir"; then
+	    # Stick the inst_prefix_dir data into the link command.
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+	  else
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+	  fi
+
+	  func_warning "relinking \`$file'"
+	  func_show_eval "$relink_command" \
+	    'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
+	fi
+
+	# See the names of the shared library.
+	set dummy $library_names; shift
+	if test -n "$1"; then
+	  realname="$1"
+	  shift
+
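+	  # If the library was relinked above for installation, the relinked copy
+	  # in $objdir carries a 'T' suffix; install that copy instead.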
+	  srcname="$realname"
+	  test -n "$relink_command" && srcname="$realname"T
+
+	  # Install the shared library and build the symlinks.
+	  func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+	      'exit $?'
+	  tstripme="$stripme"
+	  case $host_os in
+	  cygwin* | mingw* | pw32* | cegcc*)
+	    case $realname in
+	    *.dll.a)
+	      tstripme=""
+	      ;;
+	    esac
+	    ;;
+	  esac
+	  if test -n "$tstripme" && test -n "$striplib"; then
+	    func_show_eval "$striplib $destdir/$realname" 'exit $?'
+	  fi
+
+	  if test "$#" -gt 0; then
+	    # Delete the old symlinks, and create new ones.
+	    # Try `ln -sf' first, because the `ln' binary might depend on
+	    # the symlink we replace!  Solaris /bin/ln does not understand -f,
+	    # so we also need to try rm && ln -s.
+	    for linkname
+	    do
+	      test "$linkname" != "$realname" \
+		&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+	    done
+	  fi
+
+	  # Do each command in the postinstall commands.
+	  lib="$destdir/$realname"
+	  func_execute_cmds "$postinstall_cmds" 'exit $?'
+	fi
+
+	# Install the pseudo-library for information purposes.
+	func_basename "$file"
+	name="$func_basename_result"
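+	# The 'i'-suffixed file in $objdir is the install-ready copy of the .la
+	# (with installed rather than build-tree paths); install it under the plain name.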
+	instname="$dir/$name"i
+	func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+	# Maybe install the static library, too.
+	test -n "$old_library" && func_append staticlibs " $dir/$old_library"
+	;;
+
+      *.lo)
+	# Install (i.e. copy) a libtool object.
+
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# Deduce the name of the destination old-style object file.
+	case $destfile in
+	*.lo)
+	  func_lo2o "$destfile"
+	  staticdest=$func_lo2o_result
+	  ;;
+	*.$objext)
+	  staticdest="$destfile"
+	  destfile=
+	  ;;
+	*)
+	  func_fatal_help "cannot copy a libtool object to \`$destfile'"
+	  ;;
+	esac
+
+	# Install the libtool object if requested.
+	test -n "$destfile" && \
+	  func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+	# Install the old object if enabled.
+	if test "$build_old_libs" = yes; then
+	  # Deduce the name of the old-style object file.
+	  func_lo2o "$file"
+	  staticobj=$func_lo2o_result
+	  func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+	fi
+	exit $EXIT_SUCCESS
+	;;
+
+      *)
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# If the file is missing, and there is a .exe on the end, strip it
+	# because it is most likely a libtool script we actually want to
+	# install
+	stripped_ext=""
+	case $file in
+	  *.exe)
+	    if test ! -f "$file"; then
+	      func_stripname '' '.exe' "$file"
+	      file=$func_stripname_result
+	      stripped_ext=".exe"
+	    fi
+	    ;;
+	esac
+
+	# Do a test to see if this is really a libtool program.
+	case $host in
+	*cygwin* | *mingw*)
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      wrapper=$func_ltwrapper_scriptname_result
+	    else
+	      func_stripname '' '.exe' "$file"
+	      wrapper=$func_stripname_result
+	    fi
+	    ;;
+	*)
+	    wrapper=$file
+	    ;;
+	esac
+	if func_ltwrapper_script_p "$wrapper"; then
+	  notinst_deplibs=
+	  relink_command=
+
+	  func_source "$wrapper"
+
+	  # Check the variables that should have been set.
+	  test -z "$generated_by_libtool_version" && \
+	    func_fatal_error "invalid libtool wrapper script \`$wrapper'"
+
+	  finalize=yes
+	  for lib in $notinst_deplibs; do
+	    # Check to see that each library is installed.
+	    libdir=
+	    if test -f "$lib"; then
+	      func_source "$lib"
+	    fi
+	    libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test
+	    if test -n "$libdir" && test ! -f "$libfile"; then
+	      func_warning "\`$lib' has not been installed in \`$libdir'"
+	      finalize=no
+	    fi
+	  done
+
+	  relink_command=
+	  func_source "$wrapper"
+
+	  outputname=
+	  if test "$fast_install" = no && test -n "$relink_command"; then
+	    $opt_dry_run || {
+	      if test "$finalize" = yes; then
+	        tmpdir=`func_mktempdir`
+		func_basename "$file$stripped_ext"
+		file="$func_basename_result"
+	        outputname="$tmpdir/$file"
+	        # Replace the output file specification.
+	        relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+	        $opt_silent || {
+	          func_quote_for_expand "$relink_command"
+		  eval "func_echo $func_quote_for_expand_result"
+	        }
+	        if eval "$relink_command"; then :
+	          else
+		  func_error "error: relink \`$file' with the above command before installing it"
+		  $opt_dry_run || ${RM}r "$tmpdir"
+		  continue
+	        fi
+	        file="$outputname"
+	      else
+	        func_warning "cannot relink \`$file'"
+	      fi
+	    }
+	  else
+	    # Install the binary that we compiled earlier.
+	    file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+	  fi
+	fi
+
+	# remove .exe since cygwin /usr/bin/install will append another
+	# one anyway
+	case $install_prog,$host in
+	*/usr/bin/install*,*cygwin*)
+	  case $file:$destfile in
+	  *.exe:*.exe)
+	    # this is ok
+	    ;;
+	  *.exe:*)
+	    destfile=$destfile.exe
+	    ;;
+	  *:*.exe)
+	    func_stripname '' '.exe' "$destfile"
+	    destfile=$func_stripname_result
+	    ;;
+	  esac
+	  ;;
+	esac
+	func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+	$opt_dry_run || if test -n "$outputname"; then
+	  ${RM}r "$tmpdir"
+	fi
+	;;
+      esac
+    done
+
+    for file in $staticlibs; do
+      func_basename "$file"
+      name="$func_basename_result"
+
+      # Set up the ranlib parameters.
+      oldlib="$destdir/$name"
+      func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+      tool_oldlib=$func_to_tool_file_result
+
+      func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+      if test -n "$stripme" && test -n "$old_striplib"; then
+	func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
+      fi
+
+      # Do each command in the postinstall commands.
+      func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+    done
+
+    test -n "$future_libdirs" && \
+      func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+    if test -n "$current_libdirs"; then
+      # Maybe just do a dry run.
+      $opt_dry_run && current_libdirs=" -n$current_libdirs"
+      exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+    else
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = install && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+    $opt_debug
+    my_outputname="$1"
+    my_originator="$2"
+    my_pic_p="${3-no}"
+    my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
+    my_dlsyms=
+
+    if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+      if test -n "$NM" && test -n "$global_symbol_pipe"; then
+	my_dlsyms="${my_outputname}S.c"
+      else
+	func_error "not configured to extract global symbols from dlpreopened files"
+      fi
+    fi
+
+    if test -n "$my_dlsyms"; then
+      case $my_dlsyms in
+      "") ;;
+      *.c)
+	# Discover the nlist of each of the dlfiles.
+	nlist="$output_objdir/${my_outputname}.nm"
+
+	func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+	# Parse the name list into a source file.
+	func_verbose "creating $output_objdir/$my_dlsyms"
+
+	$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* External symbol declarations for the compiler. */\
+"
+
+	if test "$dlself" = yes; then
+	  func_verbose "generating symbol list for \`$output'"
+
+	  $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
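+	  # The ': @PROGRAM@ ' line is a marker entry; ':'-prefixed lines are stripped
+	  # again when the export list and the C declarations are generated below.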
+
+	  # Add our own program objects to the symbol list.
+	  progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	  for progfile in $progfiles; do
+	    func_to_tool_file "$progfile" func_convert_file_msys_to_w32
+	    func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
+	    $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+	  done
+
+	  if test -n "$exclude_expsyms"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  if test -n "$export_symbols_regex"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  # Prepare the list of exported symbols
+	  if test -z "$export_symbols"; then
+	    export_symbols="$output_objdir/$outputname.exp"
+	    $opt_dry_run || {
+	      $RM $export_symbols
+	      eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+	      case $host in
+	      *cygwin* | *mingw* | *cegcc* )
+                eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+                eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+	        ;;
+	      esac
+	    }
+	  else
+	    $opt_dry_run || {
+	      eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+	      eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	      case $host in
+	        *cygwin* | *mingw* | *cegcc* )
+	          eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+	          eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+	          ;;
+	      esac
+	    }
+	  fi
+	fi
+
+	for dlprefile in $dlprefiles; do
+	  func_verbose "extracting global C symbols from \`$dlprefile'"
+	  func_basename "$dlprefile"
+	  name="$func_basename_result"
+          case $host in
+	    *cygwin* | *mingw* | *cegcc* )
+	      # if an import library, we need to obtain dlname
+	      if func_win32_import_lib_p "$dlprefile"; then
+	        func_tr_sh "$dlprefile"
+	        eval "curr_lafile=\$libfile_$func_tr_sh_result"
+	        dlprefile_dlbasename=""
+	        if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
+	          # Use subshell, to avoid clobbering current variable values
+	          dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
+	          if test -n "$dlprefile_dlname" ; then
+	            func_basename "$dlprefile_dlname"
+	            dlprefile_dlbasename="$func_basename_result"
+	          else
+	            # no lafile. user explicitly requested -dlpreopen <import library>.
+	            $sharedlib_from_linklib_cmd "$dlprefile"
+	            dlprefile_dlbasename=$sharedlib_from_linklib_result
+	          fi
+	        fi
+	        $opt_dry_run || {
+	          if test -n "$dlprefile_dlbasename" ; then
+	            eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
+	          else
+	            func_warning "Could not compute DLL name from $name"
+	            eval '$ECHO ": $name " >> "$nlist"'
+	          fi
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
+	            $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
+	        }
+	      else # not an import lib
+	        $opt_dry_run || {
+	          eval '$ECHO ": $name " >> "$nlist"'
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	        }
+	      fi
+	    ;;
+	    *)
+	      $opt_dry_run || {
+	        eval '$ECHO ": $name " >> "$nlist"'
+	        func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	        eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	      }
+	    ;;
+          esac
+	done
+
+	$opt_dry_run || {
+	  # Make sure we have at least an empty file.
+	  test -f "$nlist" || : > "$nlist"
+
+	  if test -n "$exclude_expsyms"; then
+	    $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+	    $MV "$nlist"T "$nlist"
+	  fi
+
+	  # Try sorting and uniquifying the output.
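+	  # 'sort -k 3' is the POSIX spelling; probe for it and fall back to the
+	  # obsolete '+2' field syntax on older sort implementations.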
+	  if $GREP -v "^: " < "$nlist" |
+	      if sort -k 3 </dev/null >/dev/null 2>&1; then
+		sort -k 3
+	      else
+		sort +2
+	      fi |
+	      uniq > "$nlist"S; then
+	    :
+	  else
+	    $GREP -v "^: " < "$nlist" > "$nlist"S
+	  fi
+
+	  if test -f "$nlist"S; then
+	    eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+	  else
+	    echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+	  fi
+
+	  echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols.  */
+typedef struct {
+  const char *name;
+  void *address;
+} lt_dlsymlist;
+extern LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];
+LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{\
+  { \"$my_originator\", (void *) 0 },"
+
+	  case $need_lib_prefix in
+	  no)
+	    eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  *)
+	    eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  esac
+	  echo >> "$output_objdir/$my_dlsyms" "\
+  {0, (void *) 0}
+};
+
+/* This works around a problem in FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+	} # !$opt_dry_run
+
+	pic_flag_for_symtable=
+	case "$compile_command " in
+	*" -static "*) ;;
+	*)
+	  case $host in
+	  # compiling the symbol table file with pic_flag works around
+	  # a FreeBSD bug that causes programs to crash when -lm is
+	  # linked before any other PIC object.  But we must not use
+	  # pic_flag when linking with -static.  The problem exists in
+	  # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+	  *-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+	    pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+	  *-*-hpux*)
+	    pic_flag_for_symtable=" $pic_flag"  ;;
+	  *)
+	    if test "X$my_pic_p" != Xno; then
+	      pic_flag_for_symtable=" $pic_flag"
+	    fi
+	    ;;
+	  esac
+	  ;;
+	esac
+	symtab_cflags=
+	for arg in $LTCFLAGS; do
+	  case $arg in
+	  -pie | -fpie | -fPIE) ;;
+	  *) func_append symtab_cflags " $arg" ;;
+	  esac
+	done
+
+	# Now compile the dynamic symbol file.
+	func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+	# Clean up the generated files.
+	func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
+
+	# Transform the symbol file into the correct name.
+	symfileobj="$output_objdir/${my_outputname}S.$objext"
+	case $host in
+	*cygwin* | *mingw* | *cegcc* )
+	  if test -f "$output_objdir/$my_outputname.def"; then
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	  else
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  fi
+	  ;;
+	*)
+	  compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  ;;
+	esac
+	;;
+      *)
+	func_fatal_error "unknown suffix for \`$my_dlsyms'"
+	;;
+      esac
+    else
+      # We keep going just in case the user didn't refer to
+      # lt_preloaded_symbols.  The linker will fail if global_symbol_pipe
+      # really was required.
+
+      # Nullify the symbol file.
+      compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+      finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+    fi
+}
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, also deal with 64 bit binaries.
+func_win32_libid ()
+{
+  $opt_debug
+  win32_libid_type="unknown"
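+  # Let file(1), following symlinks (-L), take a first guess; 'ar archive'
+  # results need a closer look below to tell import libraries from static ones.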
+  win32_fileres=`file -L $1 2>/dev/null`
+  case $win32_fileres in
+  *ar\ archive\ import\ library*) # definitely import
+    win32_libid_type="x86 archive import"
+    ;;
+  *ar\ archive*) # could be an import, or static
+    # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+    if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+       $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+      func_to_tool_file "$1" func_convert_file_msys_to_w32
+      win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+	$SED -n -e '
+	    1,100{
+		/ I /{
+		    s,.*,import,
+		    p
+		    q
+		}
+	    }'`
+      case $win32_nmres in
+      import*)  win32_libid_type="x86 archive import";;
+      *)        win32_libid_type="x86 archive static";;
+      esac
+    fi
+    ;;
+  *DLL*)
+    win32_libid_type="x86 DLL"
+    ;;
+  *executable*) # but shell scripts are "executable" too...
+    case $win32_fileres in
+    *MS\ Windows\ PE\ Intel*)
+      win32_libid_type="x86 DLL"
+      ;;
+    esac
+    ;;
+  esac
+  $ECHO "$win32_libid_type"
+}
+
+# func_cygming_dll_for_implib ARG
+#
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib ()
+{
+  $opt_debug
+  sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
+}
+
+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
+#
+# This is the core of a fallback implementation of a
+# platform-specific function to extract the name of the
+# DLL associated with the specified import library LIBNAME.
+#
+# SECTION_NAME is either .idata$6 or .idata$7, depending
+# on the platform and compiler that created the implib.
+#
+# Echoes the name of the DLL associated with the
+# specified import library.
+func_cygming_dll_for_implib_fallback_core ()
+{
+  $opt_debug
+  match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
+  $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
+    $SED '/^Contents of section '"$match_literal"':/{
+      # Place marker at beginning of archive member dllname section
+      s/.*/====MARK====/
+      p
+      d
+    }
+    # These lines can sometimes be longer than 43 characters, but
+    # are always uninteresting
+    /:[	 ]*file format pe[i]\{,1\}-/d
+    /^In archive [^:]*:/d
+    # Ensure marker is printed
+    /^====MARK====/p
+    # Remove all lines with less than 43 characters
+    /^.\{43\}/!d
+    # From remaining lines, remove first 43 characters
+    s/^.\{43\}//' |
+    $SED -n '
+      # Join marker and all lines until next marker into a single line
+      /^====MARK====/ b para
+      H
+      $ b para
+      b
+      :para
+      x
+      s/\n//g
+      # Remove the marker
+      s/^====MARK====//
+      # Remove trailing dots and whitespace
+      s/[\. \t]*$//
+      # Print
+      /./p' |
+    # we now have a list, one entry per line, of the stringified
+    # contents of the appropriate section of all members of the
+    # archive which possess that section. Heuristic: eliminate
+    # all those which have a first or second character that is
+    # a '.' (that is, objdump's representation of an unprintable
+    # character.) This should work for all archives with less than
+    # 0x302f exports -- but will fail for DLLs whose name actually
+    # begins with a literal '.' or a single character followed by
+    # a '.'.
+    #
+    # Of those that remain, print the first one.
+    $SED -e '/^\./d;/^.\./d;q'
+}
+
+# func_cygming_gnu_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is a GNU/binutils-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_gnu_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
+  test -n "$func_cygming_gnu_implib_tmp"
+}
+
+# func_cygming_ms_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is an MS-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_ms_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
+  test -n "$func_cygming_ms_implib_tmp"
+}
+
+# func_cygming_dll_for_implib_fallback ARG
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+#
+# This fallback implementation is for use when $DLLTOOL
+# does not support the --identify-strict option.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib_fallback ()
+{
+  $opt_debug
+  if func_cygming_gnu_implib_p "$1" ; then
+    # binutils import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
+  elif func_cygming_ms_implib_p "$1" ; then
+    # ms-generated import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
+  else
+    # unknown
+    sharedlib_from_linklib_result=""
+  fi
+}
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+    $opt_debug
+    f_ex_an_ar_dir="$1"; shift
+    f_ex_an_ar_oldlib="$1"
+    if test "$lock_old_archive_extraction" = yes; then
+      lockfile=$f_ex_an_ar_oldlib.lock
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    fi
+    func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+		   'stat=$?; rm -f "$lockfile"; exit $stat'
+    if test "$lock_old_archive_extraction" = yes; then
+      $opt_dry_run || rm -f "$lockfile"
+    fi
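+    # Refuse archives whose members share a basename: 'ar x' would silently
+    # overwrite one extracted object with another.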
+    if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+     :
+    else
+      func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+    fi
+}
+
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+    $opt_debug
+    my_gentop="$1"; shift
+    my_oldlibs=${1+"$@"}
+    my_oldobjs=""
+    my_xlib=""
+    my_xabs=""
+    my_xdir=""
+
+    for my_xlib in $my_oldlibs; do
+      # Extract the objects.
+      case $my_xlib in
+	[\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
+	*) my_xabs=`pwd`"/$my_xlib" ;;
+      esac
+      func_basename "$my_xlib"
+      my_xlib="$func_basename_result"
+      my_xlib_u=$my_xlib
+      while :; do
+        case " $extracted_archives " in
+	*" $my_xlib_u "*)
+	  func_arith $extracted_serial + 1
+	  extracted_serial=$func_arith_result
+	  my_xlib_u=lt$extracted_serial-$my_xlib ;;
+	*) break ;;
+	esac
+      done
+      extracted_archives="$extracted_archives $my_xlib_u"
+      my_xdir="$my_gentop/$my_xlib_u"
+
+      func_mkdir_p "$my_xdir"
+
+      case $host in
+      *-darwin*)
+	func_verbose "Extracting $my_xabs"
+	# Do not bother doing anything if just a dry run
+	$opt_dry_run || {
+	  darwin_orig_dir=`pwd`
+	  cd $my_xdir || exit $?
+	  darwin_archive=$my_xabs
+	  darwin_curdir=`pwd`
+	  darwin_base_archive=`basename "$darwin_archive"`
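+	  # If lipo reports multiple architectures, thin the fat archive per arch,
+	  # extract each thin archive, then re-merge the objects with 'lipo -create' below.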
+	  darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+	  if test -n "$darwin_arches"; then
+	    darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+	    darwin_arch=
+	    func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+	    for darwin_arch in  $darwin_arches ; do
+	      func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
+	      cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      func_extract_an_archive "`pwd`" "${darwin_base_archive}"
+	      cd "$darwin_curdir"
+	      $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
+	    done # $darwin_arches
+            ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+	    darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
+	    darwin_file=
+	    darwin_files=
+	    for darwin_file in $darwin_filelist; do
+	      darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+	      $LIPO -create -output "$darwin_file" $darwin_files
+	    done # $darwin_filelist
+	    $RM -rf unfat-$$
+	    cd "$darwin_orig_dir"
+	  else
+	    cd $darwin_orig_dir
+	    func_extract_an_archive "$my_xdir" "$my_xabs"
+	  fi # $darwin_arches
+	} # !$opt_dry_run
+	;;
+      *)
+        func_extract_an_archive "$my_xdir" "$my_xabs"
+	;;
+      esac
+      my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+    done
+
+    func_extract_archives_result="$my_oldobjs"
+}
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw
+# wrapper executable.  Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take.  If 'yes', then the emitted script
+# will assume that the directory in which it is stored is
+# the $objdir directory.  This is a cygwin/mingw-specific
+# behavior.
+func_emit_wrapper ()
+{
+	func_emit_wrapper_arg1=${1-no}
+
+	$ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+  # install mode needs the following variables:
+  generated_by_libtool_version='$macro_version'
+  notinst_deplibs='$notinst_deplibs'
+else
+  # When we are sourced in execute mode, \$file and \$ECHO are already set.
+  if test \"\$libtool_execute_magic\" != \"$magic\"; then
+    file=\"\$0\""
+
+    qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
+    $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+    ECHO=\"$qECHO\"
+  fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) are identical between the wrapper
+# /script/ and the wrapper /executable/ which is used only on
+# windows platforms, and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options which match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
+lt_option_debug=
+func_parse_lt_options ()
+{
+  lt_script_arg0=\$0
+  shift
+  for lt_opt
+  do
+    case \"\$lt_opt\" in
+    --lt-debug) lt_option_debug=1 ;;
+    --lt-dump-script)
+        lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+        test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+        lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+        cat \"\$lt_dump_D/\$lt_dump_F\"
+        exit 0
+      ;;
+    --lt-*)
+        \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+        exit 1
+      ;;
+    esac
+  done
+
+  # Print the debug banner immediately:
+  if test -n \"\$lt_option_debug\"; then
+    echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
+  fi
+}
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+  lt_dump_args_N=1;
+  for lt_arg
+  do
+    \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
+    lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+  done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+  case $host in
+  # Backslashes separate directories on plain windows
+  *-*-mingw | *-*-os2* | *-cegcc*)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+    ;;
+
+  *)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+    ;;
+  esac
+  $ECHO "\
+      \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+      exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+  case \" \$* \" in
+  *\\ --lt-*)
+    for lt_wr_arg
+    do
+      case \$lt_wr_arg in
+      --lt-*) ;;
+      *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+      esac
+      shift
+    done ;;
+  esac
+  func_exec_program_core \${1+\"\$@\"}
+}
+
+  # Parse options
+  func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+  # Find the directory that this script lives in.
+  thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+  test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+  # Follow symbolic links until we get to the real thisdir.
+  file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+  while test -n \"\$file\"; do
+    destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+    # If there was a directory component, then change thisdir.
+    if test \"x\$destdir\" != \"x\$file\"; then
+      case \"\$destdir\" in
+      [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+      *) thisdir=\"\$thisdir/\$destdir\" ;;
+      esac
+    fi
+
+    file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+    file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+  done
+
+  # Usually 'no', except on cygwin/mingw when embedded into
+  # the cwrapper.
+  WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+  if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+    # special case for '.'
+    if test \"\$thisdir\" = \".\"; then
+      thisdir=\`pwd\`
+    fi
+    # remove .libs from thisdir
+    case \"\$thisdir\" in
+    *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+    $objdir )   thisdir=. ;;
+    esac
+  fi
+
+  # Try to get the absolute directory name.
+  absdir=\`cd \"\$thisdir\" && pwd\`
+  test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+	if test "$fast_install" = yes; then
+	  $ECHO "\
+  program=lt-'$outputname'$exeext
+  progdir=\"\$thisdir/$objdir\"
+
+  if test ! -f \"\$progdir/\$program\" ||
+     { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
+       test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+    file=\"\$\$-\$program\"
+
+    if test ! -d \"\$progdir\"; then
+      $MKDIR \"\$progdir\"
+    else
+      $RM \"\$progdir/\$file\"
+    fi"
+
+	  $ECHO "\
+
+    # relink executable if necessary
+    if test -n \"\$relink_command\"; then
+      if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+      else
+	$ECHO \"\$relink_command_output\" >&2
+	$RM \"\$progdir/\$file\"
+	exit 1
+      fi
+    fi
+
+    $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+    { $RM \"\$progdir/\$program\";
+      $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+    $RM \"\$progdir/\$file\"
+  fi"
+	else
+	  $ECHO "\
+  program='$outputname'
+  progdir=\"\$thisdir/$objdir\"
+"
+	fi
+
+	$ECHO "\
+
+  if test -f \"\$progdir/\$program\"; then"
+
+	# Fix the DLL searchpath if we need to.  Do this before prepending
+	# to shlibpath, because on Windows, both are PATH and uninstalled
+	# libraries must come first.
+	if test -n "$dllsearchpath"; then
+	  $ECHO "\
+    # Add the dll search path components to the executable PATH
+    PATH=$dllsearchpath:\$PATH
+"
+	fi
+
+	# Export our shlibpath_var if we have one.
+	if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+	  $ECHO "\
+    # Add our own library path to $shlibpath_var
+    $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+    # Some systems cannot cope with colon-terminated $shlibpath_var
+    # The second colon is a workaround for a bug in BeOS R4 sed
+    $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+    export $shlibpath_var
+"
+	fi
+
+	$ECHO "\
+    if test \"\$libtool_execute_magic\" != \"$magic\"; then
+      # Run the actual program with our arguments.
+      func_exec_program \${1+\"\$@\"}
+    fi
+  else
+    # The program doesn't exist.
+    \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
+    \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+    \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+    exit 1
+  fi
+fi\
+"
+}
+
+
+# func_emit_cwrapperexe_src
+# emit the source code for a wrapper executable on stdout
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+	cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+   Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+
+   The $output program cannot be directly executed until all the libtool
+   libraries that it depends on are installed.
+
+   This wrapper executable should never be moved out of the build directory.
+   If it is, it will not operate correctly.
+*/
+EOF
+	    cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _MSC_VER
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+#  include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+/* declarations of non-ANSI functions */
+#if defined(__MINGW32__)
+# ifdef __STRICT_ANSI__
+int _putenv (const char *);
+# endif
+#elif defined(__CYGWIN__)
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined (other platforms) ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined(_MSC_VER)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+# define S_IXUSR _S_IEXEC
+# ifndef _INTPTR_T_DEFINED
+#  define _INTPTR_T_DEFINED
+#  define intptr_t int
+# endif
+#elif defined(__MINGW32__)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+#elif defined(__CYGWIN__)
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined (other platforms) ... */
+#endif
+
+#if defined(PATH_MAX)
+# define LT_PATHMAX PATH_MAX
+#elif defined(MAXPATHLEN)
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
+  defined (__OS2__)
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+#  define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+#  define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+	(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num)      ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+  if (stale) { free ((void *) stale); stale = 0; } \
+} while (0)
+
+#if defined(LT_DEBUGWRAPPER)
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+	    cat <<EOF
+volatile const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+	    if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+              func_to_host_path "$temp_rpath"
+	      cat <<EOF
+const char * LIB_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * LIB_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test -n "$dllsearchpath"; then
+              func_to_host_path "$dllsearchpath:"
+	      cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test "$fast_install" = yes; then
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+	    else
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+	    fi
+
+
+	    cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX         "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt       = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt            = LTWRAPPER_OPTION_PREFIX "debug";
+
+int
+main (int argc, char *argv[])
+{
+  char **newargz;
+  int  newargc;
+  char *tmp_pathspec;
+  char *actual_cwrapper_path;
+  char *actual_cwrapper_name;
+  char *target_name;
+  char *lt_argv_zero;
+  intptr_t rval = 127;
+
+  int i;
+
+  program_name = (char *) xstrdup (base_name (argv[0]));
+  newargz = XMALLOC (char *, argc + 1);
+
+  /* very simple arg parsing; don't want to rely on getopt
+   * also, copy all non cwrapper options to newargz, except
+   * argz[0], which is handled differently
+   */
+  newargc=0;
+  for (i = 1; i < argc; i++)
+    {
+      if (strcmp (argv[i], dumpscript_opt) == 0)
+	{
+EOF
+	    case "$host" in
+	      *mingw* | *cygwin* )
+		# make stdout use "unix" line endings
+		echo "          setmode(1,_O_BINARY);"
+		;;
+	      esac
+
+	    cat <<"EOF"
+	  lt_dump_script (stdout);
+	  return 0;
+	}
+      if (strcmp (argv[i], debug_opt) == 0)
+	{
+          lt_debug = 1;
+          continue;
+	}
+      if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
+        {
+          /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+             namespace, but it is not one of the ones we know about and
+             have already dealt with, above (including dump-script), then
+             report an error. Otherwise, targets might begin to believe
+             they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+             namespace. The first time any user complains about this, we'll
+             need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+             or a configure.ac-settable value.
+           */
+          lt_fatal (__FILE__, __LINE__,
+		    "unrecognized %s option: '%s'",
+                    ltwrapper_option_prefix, argv[i]);
+        }
+      /* otherwise ... */
+      newargz[++newargc] = xstrdup (argv[i]);
+    }
+  newargz[++newargc] = NULL;
+
+EOF
+	    cat <<EOF
+  /* The GNU banner must be the first non-error debug message */
+  lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
+EOF
+	    cat <<"EOF"
+  lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+  tmp_pathspec = find_executable (argv[0]);
+  if (tmp_pathspec == NULL)
+    lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (before symlink chase) at: %s\n",
+		  tmp_pathspec);
+
+  actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (after symlink chase) at: %s\n",
+		  actual_cwrapper_path);
+  XFREE (tmp_pathspec);
+
+  actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+  strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+  /* wrapper name transforms */
+  strendzap (actual_cwrapper_name, ".exe");
+  tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+  XFREE (actual_cwrapper_name);
+  actual_cwrapper_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  /* target_name transforms -- use actual target program name; might have lt- prefix */
+  target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+  strendzap (target_name, ".exe");
+  tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+  XFREE (target_name);
+  target_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(main) libtool target name: %s\n",
+		  target_name);
+EOF
+
+	    cat <<EOF
+  newargz[0] =
+    XMALLOC (char, (strlen (actual_cwrapper_path) +
+		    strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+  strcpy (newargz[0], actual_cwrapper_path);
+  strcat (newargz[0], "$objdir");
+  strcat (newargz[0], "/");
+EOF
+
+	    cat <<"EOF"
+  /* stop here, and copy so we don't have to do this twice */
+  tmp_pathspec = xstrdup (newargz[0]);
+
+  /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+  strcat (newargz[0], actual_cwrapper_name);
+
+  /* DO want the lt- prefix here if it exists, so use target_name */
+  lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+  XFREE (tmp_pathspec);
+  tmp_pathspec = NULL;
+EOF
+
+	    case $host_os in
+	      mingw*)
+	    cat <<"EOF"
+  {
+    char* p;
+    while ((p = strchr (newargz[0], '\\')) != NULL)
+      {
+	*p = '/';
+      }
+    while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+      {
+	*p = '/';
+      }
+  }
+EOF
+	    ;;
+	    esac
+
+	    cat <<"EOF"
+  XFREE (target_name);
+  XFREE (actual_cwrapper_path);
+  XFREE (actual_cwrapper_name);
+
+  lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+  lt_setenv ("DUALCASE", "1");  /* for MSK sh */
+  /* Update the DLL searchpath.  EXE_PATH_VALUE ($dllsearchpath) must
+     be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
+     because on Windows, both *_VARNAMEs are PATH but uninstalled
+     libraries must come first. */
+  lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+  lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+  lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+		  nonnull (lt_argv_zero));
+  for (i = 0; i < newargc; i++)
+    {
+      lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+		      i, nonnull (newargz[i]));
+    }
+
+EOF
+
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+  /* execv doesn't actually work on mingw as expected on unix */
+  newargz = prepare_spawn (newargz);
+  rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+  if (rval == -1)
+    {
+      /* failed to start process */
+      lt_debugprintf (__FILE__, __LINE__,
+		      "(main) failed to launch target \"%s\": %s\n",
+		      lt_argv_zero, nonnull (strerror (errno)));
+      return 127;
+    }
+  return rval;
+EOF
+		;;
+	      *)
+		cat <<"EOF"
+  execv (lt_argv_zero, newargz);
+  return rval; /* =127, but avoids unused variable warning */
+EOF
+		;;
+	    esac
+
+	    cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+  void *p = (void *) malloc (num);
+  if (!p)
+    lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+  return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+  return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+			  string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+  const char *base;
+
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  /* Skip over the disk name in MSDOS pathnames. */
+  if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+    name += 2;
+#endif
+
+  for (base = name; *name; name++)
+    if (IS_DIR_SEPARATOR (*name))
+      base = name + 1;
+  return base;
+}
+
+int
+check_executable (const char *path)
+{
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if ((stat (path, &st) >= 0)
+      && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+    return 1;
+  else
+    return 0;
+}
+
+int
+make_executable (const char *path)
+{
+  int rval = 0;
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if (stat (path, &st) >= 0)
+    {
+      rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+    }
+  return rval;
+}
+
+/* Searches for the full path of the wrapper.  Returns
+   newly allocated full path name if found, NULL otherwise.
+   Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+  int has_slash = 0;
+  const char *p;
+  const char *p_next;
+  /* static buffer for getcwd */
+  char tmp[LT_PATHMAX + 1];
+  int tmp_len;
+  char *concat_name;
+
+  lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+                  nonempty (wrapper));
+
+  if ((wrapper == NULL) || (*wrapper == '\0'))
+    return NULL;
+
+  /* Absolute path? */
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+    {
+      concat_name = xstrdup (wrapper);
+      if (check_executable (concat_name))
+	return concat_name;
+      XFREE (concat_name);
+    }
+  else
+    {
+#endif
+      if (IS_DIR_SEPARATOR (wrapper[0]))
+	{
+	  concat_name = xstrdup (wrapper);
+	  if (check_executable (concat_name))
+	    return concat_name;
+	  XFREE (concat_name);
+	}
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+    }
+#endif
+
+  for (p = wrapper; *p; p++)
+    if (*p == '/')
+      {
+	has_slash = 1;
+	break;
+      }
+  if (!has_slash)
+    {
+      /* no slashes; search PATH */
+      const char *path = getenv ("PATH");
+      if (path != NULL)
+	{
+	  for (p = path; *p; p = p_next)
+	    {
+	      const char *q;
+	      size_t p_len;
+	      for (q = p; *q; q++)
+		if (IS_PATH_SEPARATOR (*q))
+		  break;
+	      p_len = q - p;
+	      p_next = (*q == '\0' ? q : q + 1);
+	      if (p_len == 0)
+		{
+		  /* empty path: current directory */
+		  if (getcwd (tmp, LT_PATHMAX) == NULL)
+		    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+                              nonnull (strerror (errno)));
+		  tmp_len = strlen (tmp);
+		  concat_name =
+		    XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, tmp, tmp_len);
+		  concat_name[tmp_len] = '/';
+		  strcpy (concat_name + tmp_len + 1, wrapper);
+		}
+	      else
+		{
+		  concat_name =
+		    XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, p, p_len);
+		  concat_name[p_len] = '/';
+		  strcpy (concat_name + p_len + 1, wrapper);
+		}
+	      if (check_executable (concat_name))
+		return concat_name;
+	      XFREE (concat_name);
+	    }
+	}
+      /* not found in PATH; assume curdir */
+    }
+  /* Relative path | not found in path: prepend cwd */
+  if (getcwd (tmp, LT_PATHMAX) == NULL)
+    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+              nonnull (strerror (errno)));
+  tmp_len = strlen (tmp);
+  concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+  memcpy (concat_name, tmp, tmp_len);
+  concat_name[tmp_len] = '/';
+  strcpy (concat_name + tmp_len + 1, wrapper);
+
+  if (check_executable (concat_name))
+    return concat_name;
+  XFREE (concat_name);
+  return NULL;
+}
+
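+/* Resolves PATHSPEC to a symlink-free path.  The loop below lstat()s
+   the path and each successively shorter prefix of it; if any of them
+   is a symlink, the whole path is handed to realpath(), otherwise a
+   plain copy is returned.  On systems without S_ISLNK the copy is
+   returned unconditionally.  */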
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+  return xstrdup (pathspec);
+#else
+  char buf[LT_PATHMAX];
+  struct stat s;
+  char *tmp_pathspec = xstrdup (pathspec);
+  char *p;
+  int has_symlinks = 0;
+  while (strlen (tmp_pathspec) && !has_symlinks)
+    {
+      lt_debugprintf (__FILE__, __LINE__,
+		      "checking path component for symlinks: %s\n",
+		      tmp_pathspec);
+      if (lstat (tmp_pathspec, &s) == 0)
+	{
+	  if (S_ISLNK (s.st_mode) != 0)
+	    {
+	      has_symlinks = 1;
+	      break;
+	    }
+
+	  /* search backwards for last DIR_SEPARATOR */
+	  p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+	  while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    p--;
+	  if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    {
+	      /* no more DIR_SEPARATORS left */
+	      break;
+	    }
+	  *p = '\0';
+	}
+      else
+	{
+	  lt_fatal (__FILE__, __LINE__,
+		    "error accessing file \"%s\": %s",
+		    tmp_pathspec, nonnull (strerror (errno)));
+	}
+    }
+  XFREE (tmp_pathspec);
+
+  if (!has_symlinks)
+    {
+      return xstrdup (pathspec);
+    }
+
+  tmp_pathspec = realpath (pathspec, buf);
+  if (tmp_pathspec == 0)
+    {
+      lt_fatal (__FILE__, __LINE__,
+		"could not follow symlinks for %s", pathspec);
+    }
+  return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+  size_t len, patlen;
+
+  assert (str != NULL);
+  assert (pat != NULL);
+
+  len = strlen (str);
+  patlen = strlen (pat);
+
+  if (patlen <= len)
+    {
+      str += len - patlen;
+      if (strcmp (str, pat) == 0)
+	*str = '\0';
+    }
+  return str;
+}
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+  va_list args;
+  if (lt_debug)
+    {
+      (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+      va_start (args, fmt);
+      (void) vfprintf (stderr, fmt, args);
+      va_end (args);
+    }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+	       int line, const char *mode,
+	       const char *message, va_list ap)
+{
+  fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+  vfprintf (stderr, message, ap);
+  fprintf (stderr, ".\n");
+
+  if (exit_status >= 0)
+    exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+  va_list ap;
+  va_start (ap, message);
+  lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+  va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+  return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+  return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_setenv) setting '%s' to '%s'\n",
+                  nonnull (name), nonnull (value));
+  {
+#ifdef HAVE_SETENV
+    /* always make a copy, for consistency with !HAVE_SETENV */
+    char *str = xstrdup (value);
+    setenv (name, str, 1);
+#else
+    int len = strlen (name) + 1 + strlen (value) + 1;
+    char *str = XMALLOC (char, len);
+    sprintf (str, "%s=%s", name, value);
+    if (putenv (str) != EXIT_SUCCESS)
+      {
+        XFREE (str);
+      }
+#endif
+  }
+}
+
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+  char *new_value;
+  if (orig_value && *orig_value)
+    {
+      int orig_value_len = strlen (orig_value);
+      int add_len = strlen (add);
+      new_value = XMALLOC (char, add_len + orig_value_len + 1);
+      if (to_end)
+        {
+          strcpy (new_value, orig_value);
+          strcpy (new_value + orig_value_len, add);
+        }
+      else
+        {
+          strcpy (new_value, add);
+          strcpy (new_value + add_len, orig_value);
+        }
+    }
+  else
+    {
+      new_value = xstrdup (add);
+    }
+  return new_value;
+}
+
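+/* Note: lt_extend_str() above inserts no separator between ADD and
+   ORIG_VALUE, so the VALUE passed in here is presumably already
+   terminated with a path separator; the loop below then trims any
+   separator left dangling at the end of the combined result.  */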
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      /* some systems can't cope with a ':'-terminated path #' */
+      int len = strlen (new_value);
+      while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+        {
+          new_value[len-1] = '\0';
+        }
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+EOF
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+   Note that spawn() does not by itself call the command interpreter
+     (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+      ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+         GetVersionEx(&v);
+         v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+      }) ? "cmd.exe" : "command.com").
+   Instead it simply concatenates the arguments, separated by ' ', and calls
+   CreateProcess().  We must quote the arguments since Win32 CreateProcess()
+   interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+   special way:
+   - Space and tab are interpreted as delimiters. They are not treated as
+     delimiters if they are surrounded by double quotes: "...".
+   - Unescaped double quotes are removed from the input. Their only effect is
+     that within double quotes, space and tab are treated like normal
+     characters.
+   - Backslashes not followed by double quotes are not special.
+   - But 2*n+1 backslashes followed by a double quote become
+     n backslashes followed by a double quote (n >= 0):
+       \" -> "
+       \\\" -> \"
+       \\\\\" -> \\"
+ */
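+/* For example, under the rules above an argument such as
+     say "hi"
+   is rewritten by prepare_spawn() below as
+     "say \"hi\""
+   while backslashes that do not precede a double quote, as in
+     C:\dir\name
+   are passed through unchanged.  */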
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+  size_t argc;
+  char **new_argv;
+  size_t i;
+
+  /* Count number of arguments.  */
+  for (argc = 0; argv[argc] != NULL; argc++)
+    ;
+
+  /* Allocate new argument vector.  */
+  new_argv = XMALLOC (char *, argc + 1);
+
+  /* Put quoted arguments into the new argument vector.  */
+  for (i = 0; i < argc; i++)
+    {
+      const char *string = argv[i];
+
+      if (string[0] == '\0')
+	new_argv[i] = xstrdup ("\"\"");
+      else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+	{
+	  int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+	  size_t length;
+	  unsigned int backslashes;
+	  const char *s;
+	  char *quoted_string;
+	  char *p;
+
+	  length = 0;
+	  backslashes = 0;
+	  if (quote_around)
+	    length++;
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		length += backslashes + 1;
+	      length++;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    length += backslashes + 1;
+
+	  quoted_string = XMALLOC (char, length + 1);
+
+	  p = quoted_string;
+	  backslashes = 0;
+	  if (quote_around)
+	    *p++ = '"';
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		{
+		  unsigned int j;
+		  for (j = backslashes + 1; j > 0; j--)
+		    *p++ = '\\';
+		}
+	      *p++ = c;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    {
+	      unsigned int j;
+	      for (j = backslashes; j > 0; j--)
+		*p++ = '\\';
+	      *p++ = '"';
+	    }
+	  *p = '\0';
+
+	  new_argv[i] = quoted_string;
+	}
+      else
+	new_argv[i] = (char *) string;
+    }
+  new_argv[argc] = NULL;
+
+  return new_argv;
+}
+EOF
+		;;
+	    esac
+
+            cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
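+	    # The sed program below turns each line of the wrapper script
+	    # emitted by func_emit_wrapper into one or more C statements of
+	    # the form 'fputs ("...", f);': overly long lines (more than 80
+	    # characters) are split into chunks, backslashes and double
+	    # quotes are escaped, and a literal \n is appended to the final
+	    # chunk of each line.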
+	    func_emit_wrapper yes |
+	      $SED -n -e '
+s/^\(.\{79\}\)\(..*\)/\1\
+\2/
+h
+s/\([\\"]\)/\\\1/g
+s/$/\\n/
+s/\([^\n]*\).*/  fputs ("\1", f);/p
+g
+D'
+            cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+    $opt_debug
+    case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+    *import*) : ;;
+    *) false ;;
+    esac
+}
+
+# func_mode_link arg...
+func_mode_link ()
+{
+    $opt_debug
+    case $host in
+    *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+      # It is impossible to link a dll without this setting, and
+      # we shouldn't force the makefile maintainer to figure out
+      # which system we are compiling for in order to pass an extra
+      # flag for every libtool invocation.
+      # allow_undefined=no
+
+      # FIXME: Unfortunately, there are problems with the above when trying
+      # to make a dll which has undefined symbols, in which case not
+      # even a static library is built.  For now, we need to specify
+      # -no-undefined on the libtool link line when we can be certain
+      # that all symbols are satisfied, otherwise we get a static library.
+      allow_undefined=yes
+      ;;
+    *)
+      allow_undefined=yes
+      ;;
+    esac
+    libtool_args=$nonopt
+    base_compile="$nonopt $@"
+    compile_command=$nonopt
+    finalize_command=$nonopt
+
+    compile_rpath=
+    finalize_rpath=
+    compile_shlibpath=
+    finalize_shlibpath=
+    convenience=
+    old_convenience=
+    deplibs=
+    old_deplibs=
+    compiler_flags=
+    linker_flags=
+    dllsearchpath=
+    lib_search_path=`pwd`
+    inst_prefix_dir=
+    new_inherited_linker_flags=
+
+    avoid_version=no
+    bindir=
+    dlfiles=
+    dlprefiles=
+    dlself=no
+    export_dynamic=no
+    export_symbols=
+    export_symbols_regex=
+    generated=
+    libobjs=
+    ltlibs=
+    module=no
+    no_install=no
+    objs=
+    non_pic_objects=
+    precious_files_regex=
+    prefer_static_libs=no
+    preload=no
+    prev=
+    prevarg=
+    release=
+    rpath=
+    xrpath=
+    perm_rpath=
+    temp_rpath=
+    thread_safe=no
+    vinfo=
+    vinfo_number=no
+    weak_libs=
+    single_module="${wl}-single_module"
+    func_infer_tag $base_compile
+
+    # We need to know -static, to get the right output filenames.
+    for arg
+    do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	break
+	;;
+      -all-static | -static | -static-libtool-libs)
+	case $arg in
+	-all-static)
+	  if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
+	    func_warning "complete static linking is impossible in this configuration"
+	  fi
+	  if test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	-static)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=built
+	  ;;
+	-static-libtool-libs)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	esac
+	build_libtool_libs=no
+	build_old_libs=yes
+	break
+	;;
+      esac
+    done
+
+    # See if our shared archives depend on static archives.
+    test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+    # Go through the arguments, transforming them on the way.
+    while test "$#" -gt 0; do
+      arg="$1"
+      shift
+      func_quote_for_eval "$arg"
+      qarg=$func_quote_for_eval_unquoted_result
+      func_append libtool_args " $func_quote_for_eval_result"
+
+      # If the previous option needs an argument, assign it.
+      if test -n "$prev"; then
+	case $prev in
+	output)
+	  func_append compile_command " @OUTPUT@"
+	  func_append finalize_command " @OUTPUT@"
+	  ;;
+	esac
+
+	case $prev in
+	bindir)
+	  bindir="$arg"
+	  prev=
+	  continue
+	  ;;
+	dlfiles|dlprefiles)
+	  if test "$preload" = no; then
+	    # Add the symbol object into the linking commands.
+	    func_append compile_command " @SYMFILE@"
+	    func_append finalize_command " @SYMFILE@"
+	    preload=yes
+	  fi
+	  case $arg in
+	  *.la | *.lo) ;;  # We handle these cases below.
+	  force)
+	    if test "$dlself" = no; then
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  self)
+	    if test "$prev" = dlprefiles; then
+	      dlself=yes
+	    elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
+	      dlself=yes
+	    else
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  *)
+	    if test "$prev" = dlfiles; then
+	      func_append dlfiles " $arg"
+	    else
+	      func_append dlprefiles " $arg"
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  esac
+	  ;;
+	expsyms)
+	  export_symbols="$arg"
+	  test -f "$arg" \
+	    || func_fatal_error "symbol file \`$arg' does not exist"
+	  prev=
+	  continue
+	  ;;
+	expsyms_regex)
+	  export_symbols_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	framework)
+	  case $host in
+	    *-*-darwin*)
+	      case "$deplibs " in
+		*" $qarg.ltframework "*) ;;
+		*) func_append deplibs " $qarg.ltframework" # this is fixed later
+		   ;;
+	      esac
+	      ;;
+	  esac
+	  prev=
+	  continue
+	  ;;
+	inst_prefix)
+	  inst_prefix_dir="$arg"
+	  prev=
+	  continue
+	  ;;
+	objectlist)
+	  if test -f "$arg"; then
+	    save_arg=$arg
+	    moreargs=
+	    for fil in `cat "$save_arg"`
+	    do
+#	      func_append moreargs " $fil"
+	      arg=$fil
+	      # A libtool-controlled object.
+
+	      # Check to see that this really is a libtool object.
+	      if func_lalib_unsafe_p "$arg"; then
+		pic_object=
+		non_pic_object=
+
+		# Read the .lo file
+		func_source "$arg"
+
+		if test -z "$pic_object" ||
+		   test -z "$non_pic_object" ||
+		   test "$pic_object" = none &&
+		   test "$non_pic_object" = none; then
+		  func_fatal_error "cannot find name of object for \`$arg'"
+		fi
+
+		# Extract subdirectory from the argument.
+		func_dirname "$arg" "/" ""
+		xdir="$func_dirname_result"
+
+		if test "$pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  pic_object="$xdir$pic_object"
+
+		  if test "$prev" = dlfiles; then
+		    if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		      func_append dlfiles " $pic_object"
+		      prev=
+		      continue
+		    else
+		      # If libtool objects are unsupported, then we need to preload.
+		      prev=dlprefiles
+		    fi
+		  fi
+
+		  # CHECK ME:  I think I busted this.  -Ossama
+		  if test "$prev" = dlprefiles; then
+		    # Preload the old-style object.
+		    func_append dlprefiles " $pic_object"
+		    prev=
+		  fi
+
+		  # A PIC object.
+		  func_append libobjs " $pic_object"
+		  arg="$pic_object"
+		fi
+
+		# Non-PIC object.
+		if test "$non_pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  non_pic_object="$xdir$non_pic_object"
+
+		  # A standard non-PIC object
+		  func_append non_pic_objects " $non_pic_object"
+		  if test -z "$pic_object" || test "$pic_object" = none ; then
+		    arg="$non_pic_object"
+		  fi
+		else
+		  # If the PIC object exists, use it instead.
+		  # $xdir was prepended to $pic_object above.
+		  non_pic_object="$pic_object"
+		  func_append non_pic_objects " $non_pic_object"
+		fi
+	      else
+		# Only an error if not doing a dry-run.
+		if $opt_dry_run; then
+		  # Extract subdirectory from the argument.
+		  func_dirname "$arg" "/" ""
+		  xdir="$func_dirname_result"
+
+		  func_lo2o "$arg"
+		  pic_object=$xdir$objdir/$func_lo2o_result
+		  non_pic_object=$xdir$func_lo2o_result
+		  func_append libobjs " $pic_object"
+		  func_append non_pic_objects " $non_pic_object"
+	        else
+		  func_fatal_error "\`$arg' is not a valid libtool object"
+		fi
+	      fi
+	    done
+	  else
+	    func_fatal_error "link input file \`$arg' does not exist"
+	  fi
+	  arg=$save_arg
+	  prev=
+	  continue
+	  ;;
+	precious_regex)
+	  precious_files_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	release)
+	  release="-$arg"
+	  prev=
+	  continue
+	  ;;
+	rpath | xrpath)
+	  # We need an absolute path.
+	  case $arg in
+	  [\\/]* | [A-Za-z]:[\\/]*) ;;
+	  *)
+	    func_fatal_error "only absolute run-paths are allowed"
+	    ;;
+	  esac
+	  if test "$prev" = rpath; then
+	    case "$rpath " in
+	    *" $arg "*) ;;
+	    *) func_append rpath " $arg" ;;
+	    esac
+	  else
+	    case "$xrpath " in
+	    *" $arg "*) ;;
+	    *) func_append xrpath " $arg" ;;
+	    esac
+	  fi
+	  prev=
+	  continue
+	  ;;
+	shrext)
+	  shrext_cmds="$arg"
+	  prev=
+	  continue
+	  ;;
+	weak)
+	  func_append weak_libs " $arg"
+	  prev=
+	  continue
+	  ;;
+	xcclinker)
+	  func_append linker_flags " $qarg"
+	  func_append compiler_flags " $qarg"
+	  prev=
+	  func_append compile_command " $qarg"
+	  func_append finalize_command " $qarg"
+	  continue
+	  ;;
+	xcompiler)
+	  func_append compiler_flags " $qarg"
+	  prev=
+	  func_append compile_command " $qarg"
+	  func_append finalize_command " $qarg"
+	  continue
+	  ;;
+	xlinker)
+	  func_append linker_flags " $qarg"
+	  func_append compiler_flags " $wl$qarg"
+	  prev=
+	  func_append compile_command " $wl$qarg"
+	  func_append finalize_command " $wl$qarg"
+	  continue
+	  ;;
+	*)
+	  eval "$prev=\"\$arg\""
+	  prev=
+	  continue
+	  ;;
+	esac
+      fi # test -n "$prev"
+
+      prevarg="$arg"
+
+      case $arg in
+      -all-static)
+	if test -n "$link_static_flag"; then
+	  # See comment for -static flag below, for more details.
+	  func_append compile_command " $link_static_flag"
+	  func_append finalize_command " $link_static_flag"
+	fi
+	continue
+	;;
+
+      -allow-undefined)
+	# FIXME: remove this flag sometime in the future.
+	func_fatal_error "\`-allow-undefined' must not be used because it is the default"
+	;;
+
+      -avoid-version)
+	avoid_version=yes
+	continue
+	;;
+
+      -bindir)
+	prev=bindir
+	continue
+	;;
+
+      -dlopen)
+	prev=dlfiles
+	continue
+	;;
+
+      -dlpreopen)
+	prev=dlprefiles
+	continue
+	;;
+
+      -export-dynamic)
+	export_dynamic=yes
+	continue
+	;;
+
+      -export-symbols | -export-symbols-regex)
+	if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+	  func_fatal_error "more than one -export-symbols argument is not allowed"
+	fi
+	if test "X$arg" = "X-export-symbols"; then
+	  prev=expsyms
+	else
+	  prev=expsyms_regex
+	fi
+	continue
+	;;
+
+      -framework)
+	prev=framework
+	continue
+	;;
+
+      -inst-prefix-dir)
+	prev=inst_prefix
+	continue
+	;;
+
+      # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+      # so, if we see these flags, be careful not to treat them like -L
+      -L[A-Z][A-Z]*:*)
+	case $with_gcc/$host in
+	no/*-*-irix* | /*-*-irix*)
+	  func_append compile_command " $arg"
+	  func_append finalize_command " $arg"
+	  ;;
+	esac
+	continue
+	;;
+
+      -L*)
+	func_stripname "-L" '' "$arg"
+	if test -z "$func_stripname_result"; then
+	  if test "$#" -gt 0; then
+	    func_fatal_error "require no space between \`-L' and \`$1'"
+	  else
+	    func_fatal_error "need path for \`-L' option"
+	  fi
+	fi
+	func_resolve_sysroot "$func_stripname_result"
+	dir=$func_resolve_sysroot_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	*)
+	  absdir=`cd "$dir" && pwd`
+	  test -z "$absdir" && \
+	    func_fatal_error "cannot determine absolute directory name of \`$dir'"
+	  dir="$absdir"
+	  ;;
+	esac
+	case "$deplibs " in
+	*" -L$dir "* | *" $arg "*)
+	  # Will only happen for absolute or sysroot arguments
+	  ;;
+	*)
+	  # Preserve sysroot, but never include relative directories
+	  case $dir in
+	    [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
+	    *) func_append deplibs " -L$dir" ;;
+	  esac
+	  func_append lib_search_path " $dir"
+	  ;;
+	esac
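+	# On Windows-like hosts, also add the sibling "bin" directory of
+	# each "lib" directory to the DLL search path, since that is where
+	# the corresponding DLLs are conventionally installed.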
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$dir:"*) ;;
+	  ::) dllsearchpath=$dir;;
+	  *) func_append dllsearchpath ":$dir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) func_append dllsearchpath ":$testbindir";;
+	  esac
+	  ;;
+	esac
+	continue
+	;;
+
+      -l*)
+	if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # These systems don't actually have a C or math library (as such)
+	    continue
+	    ;;
+	  *-*-os2*)
+	    # These systems don't actually have a C library (as such)
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C and math libraries are in the System framework
+	    func_append deplibs " System.ltframework"
+	    continue
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  esac
+	elif test "X$arg" = "X-lc_r"; then
+	 case $host in
+	 *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	   # Do not include libc_r directly, use -pthread flag.
+	   continue
+	   ;;
+	 esac
+	fi
+	func_append deplibs " $arg"
+	continue
+	;;
+
+      -module)
+	module=yes
+	continue
+	;;
+
+      # Tru64 UNIX uses -model [arg] to determine the layout of C++
+      # classes, name mangling, and exception handling.
+      # Darwin uses the -arch flag to determine output architecture.
+      -model|-arch|-isysroot|--sysroot)
+	func_append compiler_flags " $arg"
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+	prev=xcompiler
+	continue
+	;;
+
+      -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+      |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	func_append compiler_flags " $arg"
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+	case "$new_inherited_linker_flags " in
+	    *" $arg "*) ;;
+	    * ) func_append new_inherited_linker_flags " $arg" ;;
+	esac
+	continue
+	;;
+
+      -multi_module)
+	single_module="${wl}-multi_module"
+	continue
+	;;
+
+      -no-fast-install)
+	fast_install=no
+	continue
+	;;
+
+      -no-install)
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+	  # The PATH hackery in wrapper scripts is required on Windows
+	  # and Darwin in order for the loader to find any dlls it needs.
+	  func_warning "\`-no-install' is ignored for $host"
+	  func_warning "assuming \`-no-fast-install' instead"
+	  fast_install=no
+	  ;;
+	*) no_install=yes ;;
+	esac
+	continue
+	;;
+
+      -no-undefined)
+	allow_undefined=no
+	continue
+	;;
+
+      -objectlist)
+	prev=objectlist
+	continue
+	;;
+
+      -o) prev=output ;;
+
+      -precious-files-regex)
+	prev=precious_regex
+	continue
+	;;
+
+      -release)
+	prev=release
+	continue
+	;;
+
+      -rpath)
+	prev=rpath
+	continue
+	;;
+
+      -R)
+	prev=xrpath
+	continue
+	;;
+
+      -R*)
+	func_stripname '-R' '' "$arg"
+	dir=$func_stripname_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	=*)
+	  func_stripname '=' '' "$dir"
+	  dir=$lt_sysroot$func_stripname_result
+	  ;;
+	*)
+	  func_fatal_error "only absolute run-paths are allowed"
+	  ;;
+	esac
+	case "$xrpath " in
+	*" $dir "*) ;;
+	*) func_append xrpath " $dir" ;;
+	esac
+	continue
+	;;
+
+      -shared)
+	# The effects of -shared are defined in a previous loop.
+	continue
+	;;
+
+      -shrext)
+	prev=shrext
+	continue
+	;;
+
+      -static | -static-libtool-libs)
+	# The effects of -static are defined in a previous loop.
+	# We used to do the same as -all-static on platforms that
+	# didn't have a PIC flag, but the assumption that the effects
+	# would be equivalent was wrong.  It would break on at least
+	# Digital Unix and AIX.
+	continue
+	;;
+
+      -thread-safe)
+	thread_safe=yes
+	continue
+	;;
+
+      -version-info)
+	prev=vinfo
+	continue
+	;;
+
+      -version-number)
+	prev=vinfo
+	vinfo_number=yes
+	continue
+	;;
+
+      -weak)
+        prev=weak
+	continue
+	;;
+
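+      # -Wc,LIST and -Wl,LIST are split on commas and the pieces are passed
+      # to the compiler or linker respectively; for example, -Wl,-rpath,/opt/lib
+      # contributes "-rpath /opt/lib" to linker_flags and
+      # "${wl}-rpath ${wl}/opt/lib" to compiler_flags.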
+      -Wc,*)
+	func_stripname '-Wc,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  func_append arg " $func_quote_for_eval_result"
+	  func_append compiler_flags " $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Wl,*)
+	func_stripname '-Wl,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  func_append arg " $wl$func_quote_for_eval_result"
+	  func_append compiler_flags " $wl$func_quote_for_eval_result"
+	  func_append linker_flags " $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Xcompiler)
+	prev=xcompiler
+	continue
+	;;
+
+      -Xlinker)
+	prev=xlinker
+	continue
+	;;
+
+      -XCClinker)
+	prev=xcclinker
+	continue
+	;;
+
+      # -msg_* for osf cc
+      -msg_*)
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      # Flags to be passed through unchanged, with rationale:
+      # -64, -mips[0-9]      enable 64-bit mode for the SGI compiler
+      # -r[0-9][0-9]*        specify processor for the SGI compiler
+      # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+      # +DA*, +DD*           enable 64-bit mode for the HP compiler
+      # -q*                  compiler args for the IBM compiler
+      # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+      # -F/path              path to uninstalled frameworks, gcc on darwin
+      # -p, -pg, --coverage, -fprofile-*  profiling flags for GCC
+      # @file                GCC response files
+      # -tp=*                Portland pgcc target processor selection
+      # --sysroot=*          for sysroot support
+      # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+      -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+      -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
+      -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+        func_append compile_command " $arg"
+        func_append finalize_command " $arg"
+        func_append compiler_flags " $arg"
+        continue
+        ;;
+
+      # Some other compiler flag.
+      -* | +*)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      *.$objext)
+	# A standard object.
+	func_append objs " $arg"
+	;;
+
+      *.lo)
+	# A libtool-controlled object.
+
+	# Check to see that this really is a libtool object.
+	if func_lalib_unsafe_p "$arg"; then
+	  pic_object=
+	  non_pic_object=
+
+	  # Read the .lo file
+	  func_source "$arg"
+
+	  if test -z "$pic_object" ||
+	     test -z "$non_pic_object" ||
+	     test "$pic_object" = none &&
+	     test "$non_pic_object" = none; then
+	    func_fatal_error "cannot find name of object for \`$arg'"
+	  fi
+
+	  # Extract subdirectory from the argument.
+	  func_dirname "$arg" "/" ""
+	  xdir="$func_dirname_result"
+
+	  if test "$pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    pic_object="$xdir$pic_object"
+
+	    if test "$prev" = dlfiles; then
+	      if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		func_append dlfiles " $pic_object"
+		prev=
+		continue
+	      else
+		# If libtool objects are unsupported, then we need to preload.
+		prev=dlprefiles
+	      fi
+	    fi
+
+	    # CHECK ME:  I think I busted this.  -Ossama
+	    if test "$prev" = dlprefiles; then
+	      # Preload the old-style object.
+	      func_append dlprefiles " $pic_object"
+	      prev=
+	    fi
+
+	    # A PIC object.
+	    func_append libobjs " $pic_object"
+	    arg="$pic_object"
+	  fi
+
+	  # Non-PIC object.
+	  if test "$non_pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    non_pic_object="$xdir$non_pic_object"
+
+	    # A standard non-PIC object
+	    func_append non_pic_objects " $non_pic_object"
+	    if test -z "$pic_object" || test "$pic_object" = none ; then
+	      arg="$non_pic_object"
+	    fi
+	  else
+	    # If the PIC object exists, use it instead.
+	    # $xdir was prepended to $pic_object above.
+	    non_pic_object="$pic_object"
+	    func_append non_pic_objects " $non_pic_object"
+	  fi
+	else
+	  # Only an error if not doing a dry-run.
+	  if $opt_dry_run; then
+	    # Extract subdirectory from the argument.
+	    func_dirname "$arg" "/" ""
+	    xdir="$func_dirname_result"
+
+	    func_lo2o "$arg"
+	    pic_object=$xdir$objdir/$func_lo2o_result
+	    non_pic_object=$xdir$func_lo2o_result
+	    func_append libobjs " $pic_object"
+	    func_append non_pic_objects " $non_pic_object"
+	  else
+	    func_fatal_error "\`$arg' is not a valid libtool object"
+	  fi
+	fi
+	;;
+
+      *.$libext)
+	# An archive.
+	func_append deplibs " $arg"
+	func_append old_deplibs " $arg"
+	continue
+	;;
+
+      *.la)
+	# A libtool-controlled library.
+
+	func_resolve_sysroot "$arg"
+	if test "$prev" = dlfiles; then
+	  # This library was specified with -dlopen.
+	  func_append dlfiles " $func_resolve_sysroot_result"
+	  prev=
+	elif test "$prev" = dlprefiles; then
+	  # The library was specified with -dlpreopen.
+	  func_append dlprefiles " $func_resolve_sysroot_result"
+	  prev=
+	else
+	  func_append deplibs " $func_resolve_sysroot_result"
+	fi
+	continue
+	;;
+
+      # Some other compiler argument.
+      *)
+	# Unknown arguments in both finalize_command and compile_command need
+	# to be aesthetically quoted because they are evaled later.
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+      esac # arg
+
+      # Now actually substitute the argument into the commands.
+      if test -n "$arg"; then
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+      fi
+    done # argument parsing loop
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prevarg' option requires an argument"
+
+    if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+      eval arg=\"$export_dynamic_flag_spec\"
+      func_append compile_command " $arg"
+      func_append finalize_command " $arg"
+    fi
+
+    oldlibs=
+    # calculate the name of the file, without its directory
+    func_basename "$output"
+    outputname="$func_basename_result"
+    libobjs_save="$libobjs"
+
+    if test -n "$shlibpath_var"; then
+      # get the directories listed in $shlibpath_var
+      eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\`
+    else
+      shlib_search_path=
+    fi
+    eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+    eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+    func_dirname "$output" "/" ""
+    output_objdir="$func_dirname_result$objdir"
+    func_to_tool_file "$output_objdir/"
+    tool_output_objdir=$func_to_tool_file_result
+    # Create the object directory.
+    func_mkdir_p "$output_objdir"
+
+    # Determine the type of output
+    case $output in
+    "")
+      func_fatal_help "you must specify an output file"
+      ;;
+    *.$libext) linkmode=oldlib ;;
+    *.lo | *.$objext) linkmode=obj ;;
+    *.la) linkmode=lib ;;
+    *) linkmode=prog ;; # Anything else should be a program.
+    esac
+
+    specialdeplibs=
+
+    libs=
+    # Find all interdependent deplibs by searching for libraries
+    # that are linked more than once (e.g. -la -lb -la)
+    for deplib in $deplibs; do
+      if $opt_preserve_dup_deps ; then
+	case "$libs " in
+	*" $deplib "*) func_append specialdeplibs " $deplib" ;;
+	esac
+      fi
+      func_append libs " $deplib"
+    done
+
+    if test "$linkmode" = lib; then
+      libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+      # Compute libraries that are listed more than once in $predeps
+      # $postdeps and mark them as special (i.e., whose duplicates are
+      # not to be eliminated).
+      pre_post_deps=
+      if $opt_duplicate_compiler_generated_deps; then
+	for pre_post_dep in $predeps $postdeps; do
+	  case "$pre_post_deps " in
+	  *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_dep" ;;
+	  esac
+	  func_append pre_post_deps " $pre_post_dep"
+	done
+      fi
+      pre_post_deps=
+    fi
+
+    deplibs=
+    newdependency_libs=
+    newlib_search_path=
+    need_relink=no # whether we're linking any uninstalled libtool libraries
+    notinst_deplibs= # not-installed libtool libraries
+    notinst_path= # paths that contain not-installed libtool libraries
+
+    case $linkmode in
+    lib)
+	passes="conv dlpreopen link"
+	for file in $dlfiles $dlprefiles; do
+	  case $file in
+	  *.la) ;;
+	  *)
+	    func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
+	    ;;
+	  esac
+	done
+	;;
+    prog)
+	compile_deplibs=
+	finalize_deplibs=
+	alldeplibs=no
+	newdlfiles=
+	newdlprefiles=
+	passes="conv scan dlopen dlpreopen link"
+	;;
+    *)  passes="conv"
+	;;
+    esac
+
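+    # Rough meaning of the passes: "conv" collects convenience libraries
+    # and expands .la dependencies, "scan" (programs only) gathers library
+    # search paths, "dlopen"/"dlpreopen" handle the files named with
+    # -dlopen and -dlpreopen, and "link" builds the actual link commands.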
+    for pass in $passes; do
+      # The preopen pass in lib mode reverses $deplibs; put it back here
+      # so that, for instance, -L comes before libs that need it...
+      if test "$linkmode,$pass" = "lib,link"; then
+	## FIXME: Find the place where the list is rebuilt in the wrong
+	##        order, and fix it there properly
+        tmp_deplibs=
+	for deplib in $deplibs; do
+	  tmp_deplibs="$deplib $tmp_deplibs"
+	done
+	deplibs="$tmp_deplibs"
+      fi
+
+      if test "$linkmode,$pass" = "lib,link" ||
+	 test "$linkmode,$pass" = "prog,scan"; then
+	libs="$deplibs"
+	deplibs=
+      fi
+      if test "$linkmode" = prog; then
+	case $pass in
+	dlopen) libs="$dlfiles" ;;
+	dlpreopen) libs="$dlprefiles" ;;
+	link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+	esac
+      fi
+      if test "$linkmode,$pass" = "lib,dlpreopen"; then
+	# Collect and forward deplibs of preopened libtool libs
+	for lib in $dlprefiles; do
+	  # Ignore non-libtool-libs
+	  dependency_libs=
+	  func_resolve_sysroot "$lib"
+	  case $lib in
+	  *.la)	func_source "$func_resolve_sysroot_result" ;;
+	  esac
+
+	  # Collect preopened libtool deplibs, except any this library
+	  # has declared as weak libs
+	  for deplib in $dependency_libs; do
+	    func_basename "$deplib"
+            deplib_base=$func_basename_result
+	    case " $weak_libs " in
+	    *" $deplib_base "*) ;;
+	    *) func_append deplibs " $deplib" ;;
+	    esac
+	  done
+	done
+	libs="$dlprefiles"
+      fi
+      if test "$pass" = dlopen; then
+	# Collect dlpreopened libraries
+	save_deplibs="$deplibs"
+	deplibs=
+      fi
+
+      for deplib in $libs; do
+	lib=
+	found=no
+	case $deplib in
+	-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+        |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    func_append compiler_flags " $deplib"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) func_append new_inherited_linker_flags " $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-l*)
+	  if test "$linkmode" != lib && test "$linkmode" != prog; then
+	    func_warning "\`-l' is ignored for archives/objects"
+	    continue
+	  fi
+	  func_stripname '-l' '' "$deplib"
+	  name=$func_stripname_result
+	  if test "$linkmode" = lib; then
+	    searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+	  else
+	    searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+	  fi
+	  for searchdir in $searchdirs; do
+	    for search_ext in .la $std_shrext .so .a; do
+	      # Search the libtool library
+	      lib="$searchdir/lib${name}${search_ext}"
+	      if test -f "$lib"; then
+		if test "$search_ext" = ".la"; then
+		  found=yes
+		else
+		  found=no
+		fi
+		break 2
+	      fi
+	    done
+	  done
+	  if test "$found" != yes; then
+	    # deplib doesn't seem to be a libtool library
+	    if test "$linkmode,$pass" = "prog,link"; then
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      deplibs="$deplib $deplibs"
+	      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    continue
+	  else # deplib is a libtool library
+	    # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
+	    # We need to do some special things here, and not later.
+	    if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	      case " $predeps $postdeps " in
+	      *" $deplib "*)
+		if func_lalib_p "$lib"; then
+		  library_names=
+		  old_library=
+		  func_source "$lib"
+		  for l in $old_library $library_names; do
+		    ll="$l"
+		  done
+		  if test "X$ll" = "X$old_library" ; then # only static version available
+		    found=no
+		    func_dirname "$lib" "" "."
+		    ladir="$func_dirname_result"
+		    lib=$ladir/$old_library
+		    if test "$linkmode,$pass" = "prog,link"; then
+		      compile_deplibs="$deplib $compile_deplibs"
+		      finalize_deplibs="$deplib $finalize_deplibs"
+		    else
+		      deplibs="$deplib $deplibs"
+		      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+		    fi
+		    continue
+		  fi
+		fi
+		;;
+	      *) ;;
+	      esac
+	    fi
+	  fi
+	  ;; # -l
+	*.ltframework)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    deplibs="$deplib $deplibs"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) func_append new_inherited_linker_flags " $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-L*)
+	  case $linkmode in
+	  lib)
+	    deplibs="$deplib $deplibs"
+	    test "$pass" = conv && continue
+	    newdependency_libs="$deplib $newdependency_libs"
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    func_append newlib_search_path " $func_resolve_sysroot_result"
+	    ;;
+	  prog)
+	    if test "$pass" = conv; then
+	      deplibs="$deplib $deplibs"
+	      continue
+	    fi
+	    if test "$pass" = scan; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    func_append newlib_search_path " $func_resolve_sysroot_result"
+	    ;;
+	  *)
+	    func_warning "\`-L' is ignored for archives/objects"
+	    ;;
+	  esac # linkmode
+	  continue
+	  ;; # -L
+	-R*)
+	  if test "$pass" = link; then
+	    func_stripname '-R' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    dir=$func_resolve_sysroot_result
+	    # Make sure the xrpath contains only unique directories.
+	    case "$xrpath " in
+	    *" $dir "*) ;;
+	    *) func_append xrpath " $dir" ;;
+	    esac
+	  fi
+	  deplibs="$deplib $deplibs"
+	  continue
+	  ;;
+	*.la)
+	  func_resolve_sysroot "$deplib"
+	  lib=$func_resolve_sysroot_result
+	  ;;
+	*.$libext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	    continue
+	  fi
+	  case $linkmode in
+	  lib)
+	    # Linking convenience modules into shared libraries is allowed,
+	    # but linking other static libraries is non-portable.
+	    case " $dlpreconveniencelibs " in
+	    *" $deplib "*) ;;
+	    *)
+	      valid_a_lib=no
+	      case $deplibs_check_method in
+		match_pattern*)
+		  set dummy $deplibs_check_method; shift
+		  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+		  if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+		    | $EGREP "$match_pattern_regex" > /dev/null; then
+		    valid_a_lib=yes
+		  fi
+		;;
+		pass_all)
+		  valid_a_lib=yes
+		;;
+	      esac
+	      if test "$valid_a_lib" != yes; then
+		echo
+		$ECHO "*** Warning: Trying to link with static lib archive $deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because the file extension .$libext of this argument makes me believe"
+		echo "*** that it is just a static archive that I should not use here."
+	      else
+		echo
+		$ECHO "*** Warning: Linking the shared library $output against the"
+		$ECHO "*** static library $deplib is not portable!"
+		deplibs="$deplib $deplibs"
+	      fi
+	      ;;
+	    esac
+	    continue
+	    ;;
+	  prog)
+	    if test "$pass" != link; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    continue
+	    ;;
+	  esac # linkmode
+	  ;; # *.$libext
+	*.lo | *.$objext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	  elif test "$linkmode" = prog; then
+	    if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+	      # If there is no dlopen support or we're linking statically,
+	      # we need to preload.
+	      func_append newdlprefiles " $deplib"
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      func_append newdlfiles " $deplib"
+	    fi
+	  fi
+	  continue
+	  ;;
+	%DEPLIBS%)
+	  alldeplibs=yes
+	  continue
+	  ;;
+	esac # case $deplib
+
+	if test "$found" = yes || test -f "$lib"; then :
+	else
+	  func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
+	fi
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$lib" \
+	  || func_fatal_error "\`$lib' is not a valid libtool archive"
+
+	func_dirname "$lib" "" "."
+	ladir="$func_dirname_result"
+
+	dlname=
+	dlopen=
+	dlpreopen=
+	libdir=
+	library_names=
+	old_library=
+	inherited_linker_flags=
+	# If the library was installed with an old release of libtool,
+	# it will not redefine variables installed, or shouldnotlink
+	installed=yes
+	shouldnotlink=no
+	avoidtemprpath=
+
+
+	# Read the .la file
+	func_source "$lib"
+
+	# Convert "-framework foo" to "foo.ltframework"
+	if test -n "$inherited_linker_flags"; then
+	  tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+	  for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+	    case " $new_inherited_linker_flags " in
+	      *" $tmp_inherited_linker_flag "*) ;;
+	      *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
+	    esac
+	  done
+	fi
+	dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	if test "$linkmode,$pass" = "lib,link" ||
+	   test "$linkmode,$pass" = "prog,scan" ||
+	   { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+	  test -n "$dlopen" && func_append dlfiles " $dlopen"
+	  test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
+	fi
+
+	if test "$pass" = conv; then
+	  # Only check for convenience libraries
+	  deplibs="$lib $deplibs"
+	  if test -z "$libdir"; then
+	    if test -z "$old_library"; then
+	      func_fatal_error "cannot find name of link library for \`$lib'"
+	    fi
+	    # It is a libtool convenience library, so add in its objects.
+	    func_append convenience " $ladir/$objdir/$old_library"
+	    func_append old_convenience " $ladir/$objdir/$old_library"
+	  elif test "$linkmode" != prog && test "$linkmode" != lib; then
+	    func_fatal_error "\`$lib' is not a convenience library"
+	  fi
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    deplibs="$deplib $deplibs"
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+	      esac
+	    fi
+	    func_append tmp_libs " $deplib"
+	  done
+	  continue
+	fi # $pass = conv
+
+
+	# Get the name of the library we link against.
+	linklib=
+	if test -n "$old_library" &&
+	   { test "$prefer_static_libs" = yes ||
+	     test "$prefer_static_libs,$installed" = "built,no"; }; then
+	  linklib=$old_library
+	else
+	  for l in $old_library $library_names; do
+	    linklib="$l"
+	  done
+	fi
+	if test -z "$linklib"; then
+	  func_fatal_error "cannot find name of link library for \`$lib'"
+	fi
+
+	# This library was specified with -dlopen.
+	if test "$pass" = dlopen; then
+	  if test -z "$libdir"; then
+	    func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
+	  fi
+	  if test -z "$dlname" ||
+	     test "$dlopen_support" != yes ||
+	     test "$build_libtool_libs" = no; then
+	    # If there is no dlname, no dlopen support or we're linking
+	    # statically, we need to preload.  We also need to preload any
+	    # dependent libraries so libltdl's deplib preloader doesn't
+	    # bomb out in the load deplibs phase.
+	    func_append dlprefiles " $lib $dependency_libs"
+	  else
+	    func_append newdlfiles " $lib"
+	  fi
+	  continue
+	fi # $pass = dlopen
+
+	# We need an absolute path.
+	case $ladir in
+	[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
+	*)
+	  abs_ladir=`cd "$ladir" && pwd`
+	  if test -z "$abs_ladir"; then
+	    func_warning "cannot determine absolute directory name of \`$ladir'"
+	    func_warning "passing it literally to the linker, although it might fail"
+	    abs_ladir="$ladir"
+	  fi
+	  ;;
+	esac
+	func_basename "$lib"
+	laname="$func_basename_result"
+
+	# Find the relevant object directory and library name.
+	if test "X$installed" = Xyes; then
+	  if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    func_warning "library \`$lib' was moved."
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    libdir="$abs_ladir"
+	  else
+	    dir="$lt_sysroot$libdir"
+	    absdir="$lt_sysroot$libdir"
+	  fi
+	  test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+	else
+	  if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    # Remove this search path later
+	    func_append notinst_path " $abs_ladir"
+	  else
+	    dir="$ladir/$objdir"
+	    absdir="$abs_ladir/$objdir"
+	    # Remove this search path later
+	    func_append notinst_path " $abs_ladir"
+	  fi
+	fi # $installed = yes
+	func_stripname 'lib' '.la' "$laname"
+	name=$func_stripname_result
+
+	# This library was specified with -dlpreopen.
+	if test "$pass" = dlpreopen; then
+	  if test -z "$libdir" && test "$linkmode" = prog; then
+	    func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+	  fi
+	  case "$host" in
+	    # special handling for platforms with PE-DLLs.
+	    *cygwin* | *mingw* | *cegcc* )
+	      # Linker will automatically link against shared library if both
+	      # static and shared are present.  Therefore, ensure we extract
+	      # symbols from the import library if a shared library is present
+	      # (otherwise, the dlopen module name will be incorrect).  We do
+	      # this by putting the import library name into $newdlprefiles.
+	      # We recover the dlopen module name by 'saving' the la file
+	      # name in a special purpose variable, and (later) extracting the
+	      # dlname from the la file.
+	      if test -n "$dlname"; then
+	        func_tr_sh "$dir/$linklib"
+	        eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
+	        func_append newdlprefiles " $dir/$linklib"
+	      else
+	        func_append newdlprefiles " $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          func_append dlpreconveniencelibs " $dir/$old_library"
+	      fi
+	    ;;
+	    * )
+	      # Prefer using a static library (so that no silly _DYNAMIC symbols
+	      # are required to link).
+	      if test -n "$old_library"; then
+	        func_append newdlprefiles " $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          func_append dlpreconveniencelibs " $dir/$old_library"
+	      # Otherwise, use the dlname, so that lt_dlopen finds it.
+	      elif test -n "$dlname"; then
+	        func_append newdlprefiles " $dir/$dlname"
+	      else
+	        func_append newdlprefiles " $dir/$linklib"
+	      fi
+	    ;;
+	  esac
+	fi # $pass = dlpreopen
+
+	if test -z "$libdir"; then
+	  # Link the convenience library
+	  if test "$linkmode" = lib; then
+	    deplibs="$dir/$old_library $deplibs"
+	  elif test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$dir/$old_library $compile_deplibs"
+	    finalize_deplibs="$dir/$old_library $finalize_deplibs"
+	  else
+	    deplibs="$lib $deplibs" # used for prog,scan pass
+	  fi
+	  continue
+	fi
+
+
+	if test "$linkmode" = prog && test "$pass" != link; then
+	  func_append newlib_search_path " $ladir"
+	  deplibs="$lib $deplibs"
+
+	  linkalldeplibs=no
+	  if test "$link_all_deplibs" != no || test -z "$library_names" ||
+	     test "$build_libtool_libs" = no; then
+	    linkalldeplibs=yes
+	  fi
+
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    case $deplib in
+	    -L*) func_stripname '-L' '' "$deplib"
+	         func_resolve_sysroot "$func_stripname_result"
+	         func_append newlib_search_path " $func_resolve_sysroot_result"
+		 ;;
+	    esac
+	    # Need to link against all dependency_libs?
+	    if test "$linkalldeplibs" = yes; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      # Need to hardcode shared library paths
+	      # or/and link against static libraries
+	      newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+	      esac
+	    fi
+	    func_append tmp_libs " $deplib"
+	  done # for deplib
+	  continue
+	fi # $linkmode = prog...
+
+	if test "$linkmode,$pass" = "prog,link"; then
+	  if test -n "$library_names" &&
+	     { { test "$prefer_static_libs" = no ||
+	         test "$prefer_static_libs,$installed" = "built,yes"; } ||
+	       test -z "$old_library"; }; then
+	    # We need to hardcode the library path
+	    if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
+	      # Make sure the rpath contains only unique directories.
+	      case "$temp_rpath:" in
+	      *"$absdir:"*) ;;
+	      *) func_append temp_rpath "$absdir:" ;;
+	      esac
+	    fi
+
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) func_append compile_rpath " $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) func_append finalize_rpath " $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi # $linkmode,$pass = prog,link...
+
+	  if test "$alldeplibs" = yes &&
+	     { test "$deplibs_check_method" = pass_all ||
+	       { test "$build_libtool_libs" = yes &&
+		 test -n "$library_names"; }; }; then
+	    # We only need to search for static libraries
+	    continue
+	  fi
+	fi
+
+	link_static=no # Whether the deplib will be linked statically
+	use_static_libs=$prefer_static_libs
+	if test "$use_static_libs" = built && test "$installed" = yes; then
+	  use_static_libs=no
+	fi
+	if test -n "$library_names" &&
+	   { test "$use_static_libs" = no || test -z "$old_library"; }; then
+	  case $host in
+	  *cygwin* | *mingw* | *cegcc*)
+	      # No point in relinking DLLs because paths are not encoded
+	      func_append notinst_deplibs " $lib"
+	      need_relink=no
+	    ;;
+	  *)
+	    if test "$installed" = no; then
+	      func_append notinst_deplibs " $lib"
+	      need_relink=yes
+	    fi
+	    ;;
+	  esac
+	  # This is a shared library
+
+	  # Warn about portability, can't link against -module's on some
+	  # systems (darwin).  Don't bleat about dlopened modules though!
+	  dlopenmodule=""
+	  for dlpremoduletest in $dlprefiles; do
+	    if test "X$dlpremoduletest" = "X$lib"; then
+	      dlopenmodule="$dlpremoduletest"
+	      break
+	    fi
+	  done
+	  if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
+	    echo
+	    if test "$linkmode" = prog; then
+	      $ECHO "*** Warning: Linking the executable $output against the loadable module"
+	    else
+	      $ECHO "*** Warning: Linking the shared library $output against the loadable module"
+	    fi
+	    $ECHO "*** $linklib is not portable!"
+	  fi
+	  if test "$linkmode" = lib &&
+	     test "$hardcode_into_libs" = yes; then
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) func_append compile_rpath " $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) func_append finalize_rpath " $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi
+
+	  if test -n "$old_archive_from_expsyms_cmds"; then
+	    # figure out the soname
+	    set dummy $library_names
+	    shift
+	    realname="$1"
+	    shift
+	    libname=`eval "\\$ECHO \"$libname_spec\""`
+	    # use dlname if we got it. it's perfectly good, no?
+	    if test -n "$dlname"; then
+	      soname="$dlname"
+	    elif test -n "$soname_spec"; then
+	      # bleh windows
+	      case $host in
+	      *cygwin* | mingw* | *cegcc*)
+	        func_arith $current - $age
+		major=$func_arith_result
+		versuffix="-$major"
+		;;
+	      esac
+	      eval soname=\"$soname_spec\"
+	    else
+	      soname="$realname"
+	    fi
+
+	    # Make a new name for the extract_expsyms_cmds to use
+	    soroot="$soname"
+	    func_basename "$soroot"
+	    soname="$func_basename_result"
+	    func_stripname 'lib' '.dll' "$soname"
+	    newlib=libimp-$func_stripname_result.a
+
+	    # If the library has no export list, then create one now
+	    if test -f "$output_objdir/$soname-def"; then :
+	    else
+	      func_verbose "extracting exported symbol list from \`$soname'"
+	      func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+	    fi
+
+	    # Create $newlib
+	    if test -f "$output_objdir/$newlib"; then :; else
+	      func_verbose "generating import library for \`$soname'"
+	      func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+	    fi
+	    # make sure the library variables are pointing to the new library
+	    dir=$output_objdir
+	    linklib=$newlib
+	  fi # test -n "$old_archive_from_expsyms_cmds"
+
+	  if test "$linkmode" = prog || test "$opt_mode" != relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    lib_linked=yes
+	    case $hardcode_action in
+	    immediate | unsupported)
+	      if test "$hardcode_direct" = no; then
+		add="$dir/$linklib"
+		case $host in
+		  *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
+		  *-*-sysv4*uw2*) add_dir="-L$dir" ;;
+		  *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+		    *-*-unixware7*) add_dir="-L$dir" ;;
+		  *-*-darwin* )
+		    # if the lib is a (non-dlopened) module then we cannot
+		    # link against it; someone is ignoring the earlier warnings
+		    if /usr/bin/file -L $add 2> /dev/null |
+			 $GREP ": [^:]* bundle" >/dev/null ; then
+		      if test "X$dlopenmodule" != "X$lib"; then
+			$ECHO "*** Warning: lib $linklib is a module, not a shared library"
+			if test -z "$old_library" ; then
+			  echo
+			  echo "*** And there doesn't seem to be a static archive available"
+			  echo "*** The link will probably fail, sorry"
+			else
+			  add="$dir/$old_library"
+			fi
+		      elif test -n "$old_library"; then
+			add="$dir/$old_library"
+		      fi
+		    fi
+		esac
+	      elif test "$hardcode_minus_L" = no; then
+		case $host in
+		*-*-sunos*) add_shlibpath="$dir" ;;
+		esac
+		add_dir="-L$dir"
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = no; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    relink)
+	      if test "$hardcode_direct" = yes &&
+	         test "$hardcode_direct_absolute" = no; then
+		add="$dir/$linklib"
+	      elif test "$hardcode_minus_L" = yes; then
+		add_dir="-L$absdir"
+		# Try looking first in the location we're being installed to.
+		if test -n "$inst_prefix_dir"; then
+		  case $libdir in
+		    [\\/]*)
+		      func_append add_dir " -L$inst_prefix_dir$libdir"
+		      ;;
+		  esac
+		fi
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = yes; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    *) lib_linked=no ;;
+	    esac
+
+	    if test "$lib_linked" != yes; then
+	      func_fatal_configuration "unsupported hardcode properties"
+	    fi
+
+	    if test -n "$add_shlibpath"; then
+	      case :$compile_shlibpath: in
+	      *":$add_shlibpath:"*) ;;
+	      *) func_append compile_shlibpath "$add_shlibpath:" ;;
+	      esac
+	    fi
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+	      test -n "$add" && compile_deplibs="$add $compile_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	      if test "$hardcode_direct" != yes &&
+		 test "$hardcode_minus_L" != yes &&
+		 test "$hardcode_shlibpath_var" = yes; then
+		case :$finalize_shlibpath: in
+		*":$libdir:"*) ;;
+		*) func_append finalize_shlibpath "$libdir:" ;;
+		esac
+	      fi
+	    fi
+	  fi
+
+	  if test "$linkmode" = prog || test "$opt_mode" = relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    # Finalize command for both is simple: just hardcode it.
+	    if test "$hardcode_direct" = yes &&
+	       test "$hardcode_direct_absolute" = no; then
+	      add="$libdir/$linklib"
+	    elif test "$hardcode_minus_L" = yes; then
+	      add_dir="-L$libdir"
+	      add="-l$name"
+	    elif test "$hardcode_shlibpath_var" = yes; then
+	      case :$finalize_shlibpath: in
+	      *":$libdir:"*) ;;
+	      *) func_append finalize_shlibpath "$libdir:" ;;
+	      esac
+	      add="-l$name"
+	    elif test "$hardcode_automatic" = yes; then
+	      if test -n "$inst_prefix_dir" &&
+		 test -f "$inst_prefix_dir$libdir/$linklib" ; then
+		add="$inst_prefix_dir$libdir/$linklib"
+	      else
+		add="$libdir/$linklib"
+	      fi
+	    else
+	      # We cannot seem to hardcode it, guess we'll fake it.
+	      add_dir="-L$libdir"
+	      # Try looking first in the location we're being installed to.
+	      if test -n "$inst_prefix_dir"; then
+		case $libdir in
+		  [\\/]*)
+		    func_append add_dir " -L$inst_prefix_dir$libdir"
+		    ;;
+		esac
+	      fi
+	      add="-l$name"
+	    fi
+
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+	      test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	    fi
+	  fi
+	elif test "$linkmode" = prog; then
+	  # Here we assume that one of hardcode_direct or hardcode_minus_L
+	  # is not unsupported.  This is valid on all known static and
+	  # shared platforms.
+	  if test "$hardcode_direct" != unsupported; then
+	    test -n "$old_library" && linklib="$old_library"
+	    compile_deplibs="$dir/$linklib $compile_deplibs"
+	    finalize_deplibs="$dir/$linklib $finalize_deplibs"
+	  else
+	    compile_deplibs="-l$name -L$dir $compile_deplibs"
+	    finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+	  fi
+	elif test "$build_libtool_libs" = yes; then
+	  # Not a shared library
+	  if test "$deplibs_check_method" != pass_all; then
+	    # We're trying to link a shared library against a static one
+	    # but the system doesn't support it.
+
+	    # Just print a warning and add the library to dependency_libs so
+	    # that the program can be linked against the static library.
+	    echo
+	    $ECHO "*** Warning: This system can not link to static lib archive $lib."
+	    echo "*** I have the capability to make that library automatically link in when"
+	    echo "*** you link to this library.  But I can only do this if you have a"
+	    echo "*** shared version of the library, which you do not appear to have."
+	    if test "$module" = yes; then
+	      echo "*** But as you try to build a module library, libtool will still create "
+	      echo "*** a static module, that should work as long as the dlopening application"
+	      echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
+	      if test -z "$global_symbol_pipe"; then
+		echo
+		echo "*** However, this would only work if libtool was able to extract symbol"
+		echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+		echo "*** not find such a program.  So, this module is probably useless."
+		echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	      fi
+	      if test "$build_old_libs" = no; then
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  else
+	    deplibs="$dir/$old_library $deplibs"
+	    link_static=yes
+	  fi
+	fi # link shared/static library?
+
+	if test "$linkmode" = lib; then
+	  if test -n "$dependency_libs" &&
+	     { test "$hardcode_into_libs" != yes ||
+	       test "$build_old_libs" = yes ||
+	       test "$link_static" = yes; }; then
+	    # Extract -R from dependency_libs
+	    temp_deplibs=
+	    for libdir in $dependency_libs; do
+	      case $libdir in
+	      -R*) func_stripname '-R' '' "$libdir"
+	           temp_xrpath=$func_stripname_result
+		   case " $xrpath " in
+		   *" $temp_xrpath "*) ;;
+		   *) func_append xrpath " $temp_xrpath";;
+		   esac;;
+	      *) func_append temp_deplibs " $libdir";;
+	      esac
+	    done
+	    dependency_libs="$temp_deplibs"
+	  fi
+
+	  func_append newlib_search_path " $absdir"
+	  # Link against this library
+	  test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+	  # ... and its dependency_libs
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    newdependency_libs="$deplib $newdependency_libs"
+	    case $deplib in
+              -L*) func_stripname '-L' '' "$deplib"
+                   func_resolve_sysroot "$func_stripname_result";;
+              *) func_resolve_sysroot "$deplib" ;;
+            esac
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $func_resolve_sysroot_result "*)
+                func_append specialdeplibs " $func_resolve_sysroot_result" ;;
+	      esac
+	    fi
+	    func_append tmp_libs " $func_resolve_sysroot_result"
+	  done
+
+	  if test "$link_all_deplibs" != no; then
+	    # Add the search paths of all dependency libraries
+	    for deplib in $dependency_libs; do
+	      path=
+	      case $deplib in
+	      -L*) path="$deplib" ;;
+	      *.la)
+	        func_resolve_sysroot "$deplib"
+	        deplib=$func_resolve_sysroot_result
+	        func_dirname "$deplib" "" "."
+		dir=$func_dirname_result
+		# We need an absolute path.
+		case $dir in
+		[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+		*)
+		  absdir=`cd "$dir" && pwd`
+		  if test -z "$absdir"; then
+		    func_warning "cannot determine absolute directory name of \`$dir'"
+		    absdir="$dir"
+		  fi
+		  ;;
+		esac
+		if $GREP "^installed=no" $deplib > /dev/null; then
+		case $host in
+		*-*-darwin*)
+		  depdepl=
+		  eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+		  if test -n "$deplibrary_names" ; then
+		    for tmp in $deplibrary_names ; do
+		      depdepl=$tmp
+		    done
+		    if test -f "$absdir/$objdir/$depdepl" ; then
+		      depdepl="$absdir/$objdir/$depdepl"
+		      darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+                      if test -z "$darwin_install_name"; then
+                          darwin_install_name=`${OTOOL64} -L $depdepl  | awk '{if (NR == 2) {print $1;exit}}'`
+                      fi
+		      func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+		      func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}"
+		      path=
+		    fi
+		  fi
+		  ;;
+		*)
+		  path="-L$absdir/$objdir"
+		  ;;
+		esac
+		else
+		  eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+		  test -z "$libdir" && \
+		    func_fatal_error "\`$deplib' is not a valid libtool archive"
+		  test "$absdir" != "$libdir" && \
+		    func_warning "\`$deplib' seems to be moved"
+
+		  path="-L$absdir"
+		fi
+		;;
+	      esac
+	      case " $deplibs " in
+	      *" $path "*) ;;
+	      *) deplibs="$path $deplibs" ;;
+	      esac
+	    done
+	  fi # link_all_deplibs != no
+	fi # linkmode = lib
+      done # for deplib in $libs
+      if test "$pass" = link; then
+	if test "$linkmode" = "prog"; then
+	  compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+	  finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+	else
+	  compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	fi
+      fi
+      dependency_libs="$newdependency_libs"
+      if test "$pass" = dlpreopen; then
+	# Link the dlpreopened libraries before other libraries
+	for deplib in $save_deplibs; do
+	  deplibs="$deplib $deplibs"
+	done
+      fi
+      if test "$pass" != dlopen; then
+	if test "$pass" != conv; then
+	  # Make sure lib_search_path contains only unique directories.
+	  lib_search_path=
+	  for dir in $newlib_search_path; do
+	    case "$lib_search_path " in
+	    *" $dir "*) ;;
+	    *) func_append lib_search_path " $dir" ;;
+	    esac
+	  done
+	  newlib_search_path=
+	fi
+
+	if test "$linkmode,$pass" != "prog,link"; then
+	  vars="deplibs"
+	else
+	  vars="compile_deplibs finalize_deplibs"
+	fi
+	for var in $vars dependency_libs; do
+	  # Add libraries to $var in reverse order
+	  eval tmp_libs=\"\$$var\"
+	  new_libs=
+	  for deplib in $tmp_libs; do
+	    # FIXME: Pedantically, this is the right thing to do, so
+	    #        that some nasty dependency loop isn't accidentally
+	    #        broken:
+	    #new_libs="$deplib $new_libs"
+	    # Pragmatically, this seems to cause very few problems in
+	    # practice:
+	    case $deplib in
+	    -L*) new_libs="$deplib $new_libs" ;;
+	    -R*) ;;
+	    *)
+	      # And here is the reason: when a library appears more
+	      # than once as an explicit dependence of a library, or
+	      # is implicitly linked in more than once by the
+	      # compiler, it is considered special, and multiple
+	      # occurrences thereof are not removed.  Compare this
+	      # with having the same library being listed as a
+	      # dependency of multiple other libraries: in this case,
+	      # we know (pedantically, we assume) the library does not
+	      # need to be listed more than once, so we keep only the
+	      # last copy.  This is not always right, but it is rare
+	      # enough that we require users that really mean to play
+	      # such unportable linking tricks to link the library
+	      # using -Wl,-lname, so that libtool does not consider it
+	      # for duplicate removal.
+	      case " $specialdeplibs " in
+	      *" $deplib "*) new_libs="$deplib $new_libs" ;;
+	      *)
+		case " $new_libs " in
+		*" $deplib "*) ;;
+		*) new_libs="$deplib $new_libs" ;;
+		esac
+		;;
+	      esac
+	      ;;
+	    esac
+	  done
+	  tmp_libs=
+	  for deplib in $new_libs; do
+	    case $deplib in
+	    -L*)
+	      case " $tmp_libs " in
+	      *" $deplib "*) ;;
+	      *) func_append tmp_libs " $deplib" ;;
+	      esac
+	      ;;
+	    *) func_append tmp_libs " $deplib" ;;
+	    esac
+	  done
+	  eval $var=\"$tmp_libs\"
+	done # for var
+      fi
+      # Last step: remove runtime libs from dependency_libs
+      # (they stay in deplibs)
+      tmp_libs=
+      for i in $dependency_libs ; do
+	case " $predeps $postdeps $compiler_lib_search_path " in
+	*" $i "*)
+	  i=""
+	  ;;
+	esac
+	if test -n "$i" ; then
+	  func_append tmp_libs " $i"
+	fi
+      done
+      dependency_libs=$tmp_libs
+    done # for pass
+    if test "$linkmode" = prog; then
+      dlfiles="$newdlfiles"
+    fi
+    if test "$linkmode" = prog || test "$linkmode" = lib; then
+      dlprefiles="$newdlprefiles"
+    fi
+
+    case $linkmode in
+    oldlib)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for archives"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for archives" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for archives"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for archives"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info/-version-number' is ignored for archives"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for archives"
+
+      test -n "$export_symbols$export_symbols_regex" && \
+	func_warning "\`-export-symbols' is ignored for archives"
+
+      # Now set the variables for building old libraries.
+      build_libtool_libs=no
+      oldlibs="$output"
+      func_append objs "$old_deplibs"
+      ;;
+
+    lib)
+      # Make sure we only generate libraries of the form `libNAME.la'.
+      case $outputname in
+      lib*)
+	func_stripname 'lib' '.la' "$outputname"
+	name=$func_stripname_result
+	eval shared_ext=\"$shrext_cmds\"
+	eval libname=\"$libname_spec\"
+	;;
+      *)
+	test "$module" = no && \
+	  func_fatal_help "libtool library \`$output' must begin with \`lib'"
+
+	if test "$need_lib_prefix" != no; then
+	  # Add the "lib" prefix for modules if required
+	  func_stripname '' '.la' "$outputname"
+	  name=$func_stripname_result
+	  eval shared_ext=\"$shrext_cmds\"
+	  eval libname=\"$libname_spec\"
+	else
+	  func_stripname '' '.la' "$outputname"
+	  libname=$func_stripname_result
+	fi
+	;;
+      esac
+
+      if test -n "$objs"; then
+	if test "$deplibs_check_method" != pass_all; then
+	  func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
+	else
+	  echo
+	  $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+	  $ECHO "*** objects $objs is not portable!"
+	  func_append libobjs " $objs"
+	fi
+      fi
+
+      test "$dlself" != no && \
+	func_warning "\`-dlopen self' is ignored for libtool libraries"
+
+      set dummy $rpath
+      shift
+      test "$#" -gt 1 && \
+	func_warning "ignoring multiple \`-rpath's for a libtool library"
+
+      install_libdir="$1"
+
+      oldlibs=
+      if test -z "$rpath"; then
+	if test "$build_libtool_libs" = yes; then
+	  # Building a libtool convenience library.
+	  # Some compilers have problems with a `.al' extension so
+	  # convenience libraries should have the same extension that an
+	  # archive normally would.
+	  oldlibs="$output_objdir/$libname.$libext $oldlibs"
+	  build_libtool_libs=convenience
+	  build_old_libs=yes
+	fi
+
+	test -n "$vinfo" && \
+	  func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
+
+	test -n "$release" && \
+	  func_warning "\`-release' is ignored for convenience libraries"
+      else
+
+	# Parse the version information argument.
+	save_ifs="$IFS"; IFS=':'
+	set dummy $vinfo 0 0 0
+	shift
+	IFS="$save_ifs"
+
+	test -n "$7" && \
+	  func_fatal_help "too many parameters to \`-version-info'"
+
+	# convert absolute version numbers to libtool ages
+	# this retains compatibility with .la files and attempts
+	# to make the code below a bit more comprehensible
+
+	case $vinfo_number in
+	yes)
+	  number_major="$1"
+	  number_minor="$2"
+	  number_revision="$3"
+	  #
+	  # There are really only two kinds -- those that
+	  # use the current revision as the major version
+	  # and those that subtract age and use age as
+	  # a minor version.  But, then there is irix
+	  # which has an extra 1 added just for fun
+	  #
+	  case $version_type in
+	  # correct linux to gnu/linux during the next big refactor
+	  darwin|linux|osf|windows|none)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_revision"
+	    ;;
+	  freebsd-aout|freebsd-elf|qnx|sunos)
+	    current="$number_major"
+	    revision="$number_minor"
+	    age="0"
+	    ;;
+	  irix|nonstopux)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_minor"
+	    lt_irix_increment=no
+	    ;;
+	  esac
+	  ;;
+	no)
+	  current="$1"
+	  revision="$2"
+	  age="$3"
+	  ;;
+	esac
+
+	# Check that each of the things are valid numbers.
+	case $current in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "CURRENT \`$current' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $revision in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "REVISION \`$revision' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $age in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "AGE \`$age' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	if test "$age" -gt "$current"; then
+	  func_error "AGE \`$age' is greater than the current interface number \`$current'"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	fi
+
+	# Calculate the version variables.
+	major=
+	versuffix=
+	verstring=
+	case $version_type in
+	none) ;;
+
+	darwin)
+	  # Like Linux, but with the current version available in
+	  # verstring for coding it into the library header
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  # Darwin ld doesn't like 0 for these options...
+	  func_arith $current + 1
+	  minor_current=$func_arith_result
+	  xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
+	  verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+	  ;;
+
+	freebsd-aout)
+	  major=".$current"
+	  versuffix=".$current.$revision";
+	  ;;
+
+	freebsd-elf)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	irix | nonstopux)
+	  if test "X$lt_irix_increment" = "Xno"; then
+	    func_arith $current - $age
+	  else
+	    func_arith $current - $age + 1
+	  fi
+	  major=$func_arith_result
+
+	  case $version_type in
+	    nonstopux) verstring_prefix=nonstopux ;;
+	    *)         verstring_prefix=sgi ;;
+	  esac
+	  verstring="$verstring_prefix$major.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$revision
+	  while test "$loop" -ne 0; do
+	    func_arith $revision - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring_prefix$major.$iface:$verstring"
+	  done
+
+	  # Before this point, $major must not contain `.'.
+	  major=.$major
+	  versuffix="$major.$revision"
+	  ;;
+
+	linux) # correct to gnu/linux during the next big refactor
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  ;;
+
+	osf)
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix=".$current.$age.$revision"
+	  verstring="$current.$age.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$age
+	  while test "$loop" -ne 0; do
+	    func_arith $current - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring:${iface}.0"
+	  done
+
+	  # Make executables depend on our current version.
+	  func_append verstring ":${current}.0"
+	  ;;
+
+	qnx)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	sunos)
+	  major=".$current"
+	  versuffix=".$current.$revision"
+	  ;;
+
+	windows)
+	  # Use '-' rather than '.', since we only want one
+	  # extension on DOS 8.3 filesystems.
+	  func_arith $current - $age
+	  major=$func_arith_result
+	  versuffix="-$major"
+	  ;;
+
+	*)
+	  func_fatal_configuration "unknown library version type \`$version_type'"
+	  ;;
+	esac
+
+	# Clear the version info if we defaulted, and they specified a release.
+	if test -z "$vinfo" && test -n "$release"; then
+	  major=
+	  case $version_type in
+	  darwin)
+	    # we can't check for "0.0" in archive_cmds due to quoting
+	    # problems, so we reset it completely
+	    verstring=
+	    ;;
+	  *)
+	    verstring="0.0"
+	    ;;
+	  esac
+	  if test "$need_version" = no; then
+	    versuffix=
+	  else
+	    versuffix=".0.0"
+	  fi
+	fi
+
+	# Remove version info from name if versioning should be avoided
+	if test "$avoid_version" = yes && test "$need_version" = no; then
+	  major=
+	  versuffix=
+	  verstring=""
+	fi
+
+	# Check to see if the archive will have undefined symbols.
+	if test "$allow_undefined" = yes; then
+	  if test "$allow_undefined_flag" = unsupported; then
+	    func_warning "undefined symbols not allowed in $host shared libraries"
+	    build_libtool_libs=no
+	    build_old_libs=yes
+	  fi
+	else
+	  # Don't allow undefined symbols.
+	  allow_undefined_flag="$no_undefined_flag"
+	fi
+
+      fi
+
+      func_generate_dlsyms "$libname" "$libname" "yes"
+      func_append libobjs " $symfileobj"
+      test "X$libobjs" = "X " && libobjs=
+
+      if test "$opt_mode" != relink; then
+	# Remove our outputs, but don't remove object files since they
+	# may have been created when compiling PIC objects.
+	removelist=
+	tempremovelist=`$ECHO "$output_objdir/*"`
+	for p in $tempremovelist; do
+	  case $p in
+	    *.$objext | *.gcno)
+	       ;;
+	    $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
+	       if test "X$precious_files_regex" != "X"; then
+		 if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+		 then
+		   continue
+		 fi
+	       fi
+	       func_append removelist " $p"
+	       ;;
+	    *) ;;
+	  esac
+	done
+	test -n "$removelist" && \
+	  func_show_eval "${RM}r \$removelist"
+      fi
+
+      # Now set the variables for building old libraries.
+      if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+	func_append oldlibs " $output_objdir/$libname.$libext"
+
+	# Transform .lo files to .o files.
+	oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+      fi
+
+      # Eliminate all temporary directories.
+      #for path in $notinst_path; do
+      #	lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+      #	deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+      #	dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+      #done
+
+      if test -n "$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	temp_xrpath=
+	for libdir in $xrpath; do
+	  func_replace_sysroot "$libdir"
+	  func_append temp_xrpath " -R$func_replace_sysroot_result"
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append finalize_rpath " $libdir" ;;
+	  esac
+	done
+	if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+	  dependency_libs="$temp_xrpath $dependency_libs"
+	fi
+      fi
+
+      # Make sure dlfiles contains only unique files that won't be dlpreopened
+      old_dlfiles="$dlfiles"
+      dlfiles=
+      for lib in $old_dlfiles; do
+	case " $dlprefiles $dlfiles " in
+	*" $lib "*) ;;
+	*) func_append dlfiles " $lib" ;;
+	esac
+      done
+
+      # Make sure dlprefiles contains only unique files
+      old_dlprefiles="$dlprefiles"
+      dlprefiles=
+      for lib in $old_dlprefiles; do
+	case "$dlprefiles " in
+	*" $lib "*) ;;
+	*) func_append dlprefiles " $lib" ;;
+	esac
+      done
+
+      if test "$build_libtool_libs" = yes; then
+	if test -n "$rpath"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # these systems don't actually have a c library (as such)!
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C library is in the System framework
+	    func_append deplibs " System.ltframework"
+	    ;;
+	  *-*-netbsd*)
+	    # Don't link with libc until the a.out ld.so is fixed.
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    ;;
+	  *)
+	    # Add libc to deplibs on all other systems if necessary.
+	    if test "$build_libtool_need_lc" = "yes"; then
+	      func_append deplibs " -lc"
+	    fi
+	    ;;
+	  esac
+	fi
+
+	# Transform deplibs into only deplibs that can be linked in shared.
+	name_save=$name
+	libname_save=$libname
+	release_save=$release
+	versuffix_save=$versuffix
+	major_save=$major
+	# I'm not sure if I'm treating the release correctly.  I think
+	# release should show up in the -l (ie -lgmp5) so we don't want to
+	# add it in twice.  Is that correct?
+	release=""
+	versuffix=""
+	major=""
+	newdeplibs=
+	droppeddeps=no
+	case $deplibs_check_method in
+	pass_all)
+	  # Don't check for shared/static.  Everything works.
+	  # This might be a little naive.  We might want to check
+	  # whether the library exists or not.  But this is on
+	  # osf3 & osf4 and I'm not really sure... Just
+	  # implementing what was already the behavior.
+	  newdeplibs=$deplibs
+	  ;;
+	test_compile)
+	  # This code stresses the "libraries are programs" paradigm to its
+	  # limits. Maybe even breaks it.  We compile a program, linking it
+	  # against the deplibs as a proxy for the library.  Then we can check
+	  # whether they linked in statically or dynamically with ldd.
+	  $opt_dry_run || $RM conftest.c
+	  cat > conftest.c <<EOF
+	  int main() { return 0; }
+EOF
+	  $opt_dry_run || $RM conftest
+	  if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
+	    ldd_output=`ldd conftest`
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		  case " $predeps $postdeps " in
+		  *" $i "*)
+		    func_append newdeplibs " $i"
+		    i=""
+		    ;;
+		  esac
+		fi
+		if test -n "$i" ; then
+		  libname=`eval "\\$ECHO \"$libname_spec\""`
+		  deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		  set dummy $deplib_matches; shift
+		  deplib_match=$1
+		  if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		    func_append newdeplibs " $i"
+		  else
+		    droppeddeps=yes
+		    echo
+		    $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		    echo "*** I have the capability to make that library automatically link in when"
+		    echo "*** you link to this library.  But I can only do this if you have a"
+		    echo "*** shared version of the library, which I believe you do not have"
+		    echo "*** because a test_compile did reveal that the linker did not use it for"
+		    echo "*** its dynamic dependency list that programs get resolved with at runtime."
+		  fi
+		fi
+		;;
+	      *)
+		func_append newdeplibs " $i"
+		;;
+	      esac
+	    done
+	  else
+	    # Error occurred in the first compile.  Let's try to salvage
+	    # the situation: Compile a separate program for each library.
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		$opt_dry_run || $RM conftest
+		if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
+		  ldd_output=`ldd conftest`
+		  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		    case " $predeps $postdeps " in
+		    *" $i "*)
+		      func_append newdeplibs " $i"
+		      i=""
+		      ;;
+		    esac
+		  fi
+		  if test -n "$i" ; then
+		    libname=`eval "\\$ECHO \"$libname_spec\""`
+		    deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		    set dummy $deplib_matches; shift
+		    deplib_match=$1
+		    if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		      func_append newdeplibs " $i"
+		    else
+		      droppeddeps=yes
+		      echo
+		      $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		      echo "*** I have the capability to make that library automatically link in when"
+		      echo "*** you link to this library.  But I can only do this if you have a"
+		      echo "*** shared version of the library, which you do not appear to have"
+		      echo "*** because a test_compile did reveal that the linker did not use this one"
+		      echo "*** as a dynamic dependency that programs can get resolved with at runtime."
+		    fi
+		  fi
+		else
+		  droppeddeps=yes
+		  echo
+		  $ECHO "*** Warning!  Library $i is needed by this library but I was not able to"
+		  echo "*** make it link in!  You will probably need to install it or some"
+		  echo "*** library that it depends on before this library will be fully"
+		  echo "*** functional.  Installing it before continuing would be even better."
+		fi
+		;;
+	      *)
+		func_append newdeplibs " $i"
+		;;
+	      esac
+	    done
+	  fi
+	  ;;
+	file_magic*)
+	  set dummy $deplibs_check_method; shift
+	  file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  func_append newdeplibs " $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		if test -n "$file_magic_glob"; then
+		  libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
+		else
+		  libnameglob=$libname
+		fi
+		test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  if test "$want_nocaseglob" = yes; then
+		    shopt -s nocaseglob
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		    $nocaseglob
+		  else
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		  fi
+		  for potent_lib in $potential_libs; do
+		      # Follow soft links.
+		      if ls -lLd "$potent_lib" 2>/dev/null |
+			 $GREP " -> " >/dev/null; then
+			continue
+		      fi
+		      # The statement above tries to avoid entering an
+		      # endless loop below, in case of cyclic links.
+		      # We might still enter an endless loop, since a link
+		      # loop can be closed while we follow links,
+		      # but so what?
+		      potlib="$potent_lib"
+		      while test -h "$potlib" 2>/dev/null; do
+			potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
+			case $potliblink in
+			[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
+			*) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+			esac
+		      done
+		      if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+			 $SED -e 10q |
+			 $EGREP "$file_magic_regex" > /dev/null; then
+			func_append newdeplibs " $a_deplib"
+			a_deplib=""
+			break 2
+		      fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a file magic. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      func_append newdeplibs " $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	match_pattern*)
+	  set dummy $deplibs_check_method; shift
+	  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  func_append newdeplibs " $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+		  for potent_lib in $potential_libs; do
+		    potlib="$potent_lib" # see symlink-check above in file_magic test
+		    if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+		       $EGREP "$match_pattern_regex" > /dev/null; then
+		      func_append newdeplibs " $a_deplib"
+		      a_deplib=""
+		      break 2
+		    fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a regex pattern. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      func_append newdeplibs " $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	none | unknown | *)
+	  newdeplibs=""
+	  tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+	  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	    for i in $predeps $postdeps ; do
+	      # can't use Xsed below, because $i might contain '/'
+	      tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"`
+	    done
+	  fi
+	  case $tmp_deplibs in
+	  *[!\	\ ]*)
+	    echo
+	    if test "X$deplibs_check_method" = "Xnone"; then
+	      echo "*** Warning: inter-library dependencies are not supported in this platform."
+	    else
+	      echo "*** Warning: inter-library dependencies are not known to be supported."
+	    fi
+	    echo "*** All declared inter-library dependencies are being dropped."
+	    droppeddeps=yes
+	    ;;
+	  esac
+	  ;;
+	esac
+	versuffix=$versuffix_save
+	major=$major_save
+	release=$release_save
+	libname=$libname_save
+	name=$name_save
+
+	case $host in
+	*-*-rhapsody* | *-*-darwin1.[012])
+	  # On Rhapsody replace the C library with the System framework
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+	  ;;
+	esac
+
+	if test "$droppeddeps" = yes; then
+	  if test "$module" = yes; then
+	    echo
+	    echo "*** Warning: libtool could not satisfy all declared inter-library"
+	    $ECHO "*** dependencies of module $libname.  Therefore, libtool will create"
+	    echo "*** a static module, that should work as long as the dlopening"
+	    echo "*** application is linked with the -dlopen flag."
+	    if test -z "$global_symbol_pipe"; then
+	      echo
+	      echo "*** However, this would only work if libtool was able to extract symbol"
+	      echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+	      echo "*** not find such a program.  So, this module is probably useless."
+	      echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	    fi
+	    if test "$build_old_libs" = no; then
+	      oldlibs="$output_objdir/$libname.$libext"
+	      build_libtool_libs=module
+	      build_old_libs=yes
+	    else
+	      build_libtool_libs=no
+	    fi
+	  else
+	    echo "*** The inter-library dependencies that have been dropped here will be"
+	    echo "*** automatically added whenever a program is linked with this library"
+	    echo "*** or is declared to -dlopen it."
+
+	    if test "$allow_undefined" = no; then
+	      echo
+	      echo "*** Since this library must not contain undefined symbols,"
+	      echo "*** because either the platform does not support them or"
+	      echo "*** it was explicitly requested with -no-undefined,"
+	      echo "*** libtool will only create a static version of it."
+	      if test "$build_old_libs" = no; then
+		oldlibs="$output_objdir/$libname.$libext"
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  fi
+	fi
+	# Done checking deplibs!
+	deplibs=$newdeplibs
+      fi
+      # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+      case $host in
+	*-*-darwin*)
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  ;;
+      esac
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $deplibs " in
+	  *" -L$path/$objdir "*)
+	    func_append new_libs " -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) func_append new_libs " $deplib" ;;
+	  esac
+	  ;;
+	*) func_append new_libs " $deplib" ;;
+	esac
+      done
+      deplibs="$new_libs"
+
+      # All the library-specific variables (install_libdir is set above).
+      library_names=
+      old_library=
+      dlname=
+
+      # Test again, we may have decided not to build it any more
+      if test "$build_libtool_libs" = yes; then
+	# Remove ${wl} instances when linking with ld.
+	# FIXME: should test the right _cmds variable.
+	case $archive_cmds in
+	  *\$LD\ *) wl= ;;
+        esac
+	if test "$hardcode_into_libs" = yes; then
+	  # Hardcode the library paths
+	  hardcode_libdirs=
+	  dep_rpath=
+	  rpath="$finalize_rpath"
+	  test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+	  for libdir in $rpath; do
+	    if test -n "$hardcode_libdir_flag_spec"; then
+	      if test -n "$hardcode_libdir_separator"; then
+		func_replace_sysroot "$libdir"
+		libdir=$func_replace_sysroot_result
+		if test -z "$hardcode_libdirs"; then
+		  hardcode_libdirs="$libdir"
+		else
+		  # Just accumulate the unique libdirs.
+		  case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+		  *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		    ;;
+		  *)
+		    func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+		    ;;
+		  esac
+		fi
+	      else
+		eval flag=\"$hardcode_libdir_flag_spec\"
+		func_append dep_rpath " $flag"
+	      fi
+	    elif test -n "$runpath_var"; then
+	      case "$perm_rpath " in
+	      *" $libdir "*) ;;
+	      *) func_append perm_rpath " $libdir" ;;
+	      esac
+	    fi
+	  done
+	  # Substitute the hardcoded libdirs into the rpath.
+	  if test -n "$hardcode_libdir_separator" &&
+	     test -n "$hardcode_libdirs"; then
+	    libdir="$hardcode_libdirs"
+	    eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
+	  fi
+	  if test -n "$runpath_var" && test -n "$perm_rpath"; then
+	    # We should set the runpath_var.
+	    rpath=
+	    for dir in $perm_rpath; do
+	      func_append rpath "$dir:"
+	    done
+	    eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+	  fi
+	  test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+	fi
+
+	shlibpath="$finalize_shlibpath"
+	test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+	if test -n "$shlibpath"; then
+	  eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+	fi
+
+	# Get the real and link names of the library.
+	eval shared_ext=\"$shrext_cmds\"
+	eval library_names=\"$library_names_spec\"
+	set dummy $library_names
+	shift
+	realname="$1"
+	shift
+
+	if test -n "$soname_spec"; then
+	  eval soname=\"$soname_spec\"
+	else
+	  soname="$realname"
+	fi
+	if test -z "$dlname"; then
+	  dlname=$soname
+	fi
+
+	lib="$output_objdir/$realname"
+	linknames=
+	for link
+	do
+	  func_append linknames " $link"
+	done
+
+	# Use standard objects if they are pic
+	test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	test "X$libobjs" = "X " && libobjs=
+
+	delfiles=
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+	  export_symbols="$output_objdir/$libname.uexp"
+	  func_append delfiles " $export_symbols"
+	fi
+
+	orig_export_symbols=
+	case $host_os in
+	cygwin* | mingw* | cegcc*)
+	  if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+	    # exporting using user supplied symfile
+	    if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
+	      # and it's NOT already a .def file. Must figure out
+	      # which of the given symbols are data symbols and tag
+	      # them as such. So, trigger use of export_symbols_cmds.
+	      # export_symbols gets reassigned inside the "prepare
+	      # the list of exported symbols" if statement, so the
+	      # include_expsyms logic still works.
+	      orig_export_symbols="$export_symbols"
+	      export_symbols=
+	      always_export_symbols=yes
+	    fi
+	  fi
+	  ;;
+	esac
+
+	# Prepare the list of exported symbols
+	if test -z "$export_symbols"; then
+	  if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
+	    func_verbose "generating symbol list for \`$libname.la'"
+	    export_symbols="$output_objdir/$libname.exp"
+	    $opt_dry_run || $RM $export_symbols
+	    cmds=$export_symbols_cmds
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd1 in $cmds; do
+	      IFS="$save_ifs"
+	      # Take the normal branch if the nm_file_list_spec branch
+	      # doesn't work or if tool conversion is not needed.
+	      case $nm_file_list_spec~$to_tool_file_cmd in
+		*~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
+		  try_normal_branch=yes
+		  eval cmd=\"$cmd1\"
+		  func_len " $cmd"
+		  len=$func_len_result
+		  ;;
+		*)
+		  try_normal_branch=no
+		  ;;
+	      esac
+	      if test "$try_normal_branch" = yes \
+		 && { test "$len" -lt "$max_cmd_len" \
+		      || test "$max_cmd_len" -le -1; }
+	      then
+		func_show_eval "$cmd" 'exit $?'
+		skipped_export=false
+	      elif test -n "$nm_file_list_spec"; then
+		func_basename "$output"
+		output_la=$func_basename_result
+		save_libobjs=$libobjs
+		save_output=$output
+		output=${output_objdir}/${output_la}.nm
+		func_to_tool_file "$output"
+		libobjs=$nm_file_list_spec$func_to_tool_file_result
+		func_append delfiles " $output"
+		func_verbose "creating $NM input file list: $output"
+		for obj in $save_libobjs; do
+		  func_to_tool_file "$obj"
+		  $ECHO "$func_to_tool_file_result"
+		done > "$output"
+		eval cmd=\"$cmd1\"
+		func_show_eval "$cmd" 'exit $?'
+		output=$save_output
+		libobjs=$save_libobjs
+		skipped_export=false
+	      else
+		# The command line is too long to execute in one step.
+		func_verbose "using reloadable object file for export list..."
+		skipped_export=:
+		# Break out early, otherwise skipped_export may be
+		# set to false by a later but shorter cmd.
+		break
+	      fi
+	    done
+	    IFS="$save_ifs"
+	    if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+	fi
+
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  tmp_export_symbols="$export_symbols"
+	  test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	  $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	fi
+
+	if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+	  # The given exports_symbols file has to be filtered, so filter it.
+	  func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	  # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	  # 's' commands which not all seds can handle. GNU sed should be fine
+	  # though. Also, the filter scales superlinearly with the number of
+	  # global variables. join(1) would be nice here, but unfortunately
+	  # isn't a blessed tool.
+	  $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	  func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+	  export_symbols=$output_objdir/$libname.def
+	  $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	fi
+
+	tmp_deplibs=
+	for test_deplib in $deplibs; do
+	  case " $convenience " in
+	  *" $test_deplib "*) ;;
+	  *)
+	    func_append tmp_deplibs " $test_deplib"
+	    ;;
+	  esac
+	done
+	deplibs="$tmp_deplibs"
+
+	if test -n "$convenience"; then
+	  if test -n "$whole_archive_flag_spec" &&
+	    test "$compiler_needs_object" = yes &&
+	    test -z "$libobjs"; then
+	    # extract the archives, so we have objects to list.
+	    # TODO: could optimize this to just extract one archive.
+	    whole_archive_flag_spec=
+	  fi
+	  if test -n "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  else
+	    gentop="$output_objdir/${outputname}x"
+	    func_append generated " $gentop"
+
+	    func_extract_archives $gentop $convenience
+	    func_append libobjs " $func_extract_archives_result"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	fi
+
+	if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+	  eval flag=\"$thread_safe_flag_spec\"
+	  func_append linker_flags " $flag"
+	fi
+
+	# Make a backup of the uninstalled library when relinking
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+	fi
+
+	# Do each of the archive commands.
+	if test "$module" = yes && test -n "$module_cmds" ; then
+	  if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	    eval test_cmds=\"$module_expsym_cmds\"
+	    cmds=$module_expsym_cmds
+	  else
+	    eval test_cmds=\"$module_cmds\"
+	    cmds=$module_cmds
+	  fi
+	else
+	  if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	    eval test_cmds=\"$archive_expsym_cmds\"
+	    cmds=$archive_expsym_cmds
+	  else
+	    eval test_cmds=\"$archive_cmds\"
+	    cmds=$archive_cmds
+	  fi
+	fi
+
+	if test "X$skipped_export" != "X:" &&
+	   func_len " $test_cmds" &&
+	   len=$func_len_result &&
+	   test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  :
+	else
+	  # The command line is too long to link in one step, link piecewise
+	  # or, if using GNU ld and skipped_export is not :, use a linker
+	  # script.
+
+	  # Save the value of $output and $libobjs because we want to
+	  # use them later.  If we have whole_archive_flag_spec, we
+	  # want to use save_libobjs as it was before
+	  # whole_archive_flag_spec was expanded, because we can't
+	  # assume the linker understands whole_archive_flag_spec.
+	  # This may have to be revisited, in case too many
+	  # convenience libraries get linked in and end up exceeding
+	  # the spec.
+	  if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	  fi
+	  save_output=$output
+	  func_basename "$output"
+	  output_la=$func_basename_result
+
+	  # Clear the reloadable object creation command queue and
+	  # initialize k to one.
+	  test_cmds=
+	  concat_cmds=
+	  objlist=
+	  last_robj=
+	  k=1
+
+	  if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
+	    output=${output_objdir}/${output_la}.lnkscript
+	    func_verbose "creating GNU ld script: $output"
+	    echo 'INPUT (' > $output
+	    for obj in $save_libobjs
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    echo ')' >> $output
+	    func_append delfiles " $output"
+	    func_to_tool_file "$output"
+	    output=$func_to_tool_file_result
+	  elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+	    output=${output_objdir}/${output_la}.lnk
+	    func_verbose "creating linker input file list: $output"
+	    : > $output
+	    set x $save_libobjs
+	    shift
+	    firstobj=
+	    if test "$compiler_needs_object" = yes; then
+	      firstobj="$1 "
+	      shift
+	    fi
+	    for obj
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    func_append delfiles " $output"
+	    func_to_tool_file "$output"
+	    output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+	  else
+	    if test -n "$save_libobjs"; then
+	      func_verbose "creating reloadable object files..."
+	      output=$output_objdir/$output_la-${k}.$objext
+	      eval test_cmds=\"$reload_cmds\"
+	      func_len " $test_cmds"
+	      len0=$func_len_result
+	      len=$len0
+
+	      # Loop over the list of objects to be linked.
+	      for obj in $save_libobjs
+	      do
+		func_len " $obj"
+		func_arith $len + $func_len_result
+		len=$func_arith_result
+		if test "X$objlist" = X ||
+		   test "$len" -lt "$max_cmd_len"; then
+		  func_append objlist " $obj"
+		else
+		  # The command $test_cmds is almost too long, add a
+		  # command to the queue.
+		  if test "$k" -eq 1 ; then
+		    # The first file doesn't have a previous command to add.
+		    reload_objs=$objlist
+		    eval concat_cmds=\"$reload_cmds\"
+		  else
+		    # All subsequent reloadable object files will link in
+		    # the last one created.
+		    reload_objs="$objlist $last_robj"
+		    eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+		  fi
+		  last_robj=$output_objdir/$output_la-${k}.$objext
+		  func_arith $k + 1
+		  k=$func_arith_result
+		  output=$output_objdir/$output_la-${k}.$objext
+		  objlist=" $obj"
+		  func_len " $last_robj"
+		  func_arith $len0 + $func_len_result
+		  len=$func_arith_result
+		fi
+	      done
+	      # Handle the remaining objects by creating one last
+	      # reloadable object file.  All subsequent reloadable object
+	      # files will link in the last one created.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      reload_objs="$objlist $last_robj"
+	      eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+	      if test -n "$last_robj"; then
+	        eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+	      fi
+	      func_append delfiles " $output"
+
+	    else
+	      output=
+	    fi
+
+	    if ${skipped_export-false}; then
+	      func_verbose "generating symbol list for \`$libname.la'"
+	      export_symbols="$output_objdir/$libname.exp"
+	      $opt_dry_run || $RM $export_symbols
+	      libobjs=$output
+	      # Append the command to create the export file.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+	      if test -n "$last_robj"; then
+		eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+	      fi
+	    fi
+
+	    test -n "$save_libobjs" &&
+	      func_verbose "creating a temporary reloadable object file: $output"
+
+	    # Loop through the commands generated above and execute them.
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd in $concat_cmds; do
+	      IFS="$save_ifs"
+	      $opt_silent || {
+		  func_quote_for_expand "$cmd"
+		  eval "func_echo $func_quote_for_expand_result"
+	      }
+	      $opt_dry_run || eval "$cmd" || {
+		lt_exit=$?
+
+		# Restore the uninstalled library and exit
+		if test "$opt_mode" = relink; then
+		  ( cd "$output_objdir" && \
+		    $RM "${realname}T" && \
+		    $MV "${realname}U" "$realname" )
+		fi
+
+		exit $lt_exit
+	      }
+	    done
+	    IFS="$save_ifs"
+
+	    if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+
+          if ${skipped_export-false}; then
+	    if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	      tmp_export_symbols="$export_symbols"
+	      test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	      $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	    fi
+
+	    if test -n "$orig_export_symbols"; then
+	      # The given exports_symbols file has to be filtered, so filter it.
+	      func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	      # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	      # 's' commands which not all seds can handle. GNU sed should be fine
+	      # though. Also, the filter scales superlinearly with the number of
+	      # global variables. join(1) would be nice here, but unfortunately
+	      # isn't a blessed tool.
+	      $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	      func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+	      export_symbols=$output_objdir/$libname.def
+	      $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	    fi
+	  fi
+
+	  libobjs=$output
+	  # Restore the value of output.
+	  output=$save_output
+
+	  if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	  # Expand the library linking commands again to reset the
+	  # value of $libobjs for piecewise linking.
+
+	  # Do each of the archive commands.
+	  if test "$module" = yes && test -n "$module_cmds" ; then
+	    if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	      cmds=$module_expsym_cmds
+	    else
+	      cmds=$module_cmds
+	    fi
+	  else
+	    if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	      cmds=$archive_expsym_cmds
+	    else
+	      cmds=$archive_cmds
+	    fi
+	  fi
+	fi
+
+	if test -n "$delfiles"; then
+	  # Append the command to remove temporary files to $cmds.
+	  eval cmds=\"\$cmds~\$RM $delfiles\"
+	fi
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  func_append generated " $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  func_append libobjs " $func_extract_archives_result"
+	  test "X$libobjs" = "X " && libobjs=
+	fi
+
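+	# Run the archive (or module) commands assembled above, echoing each
+	# command unless --silent; if a relink fails, restore the saved
+	# library before exiting.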
+	save_ifs="$IFS"; IFS='~'
+	for cmd in $cmds; do
+	  IFS="$save_ifs"
+	  eval cmd=\"$cmd\"
+	  $opt_silent || {
+	    func_quote_for_expand "$cmd"
+	    eval "func_echo $func_quote_for_expand_result"
+	  }
+	  $opt_dry_run || eval "$cmd" || {
+	    lt_exit=$?
+
+	    # Restore the uninstalled library and exit
+	    if test "$opt_mode" = relink; then
+	      ( cd "$output_objdir" && \
+	        $RM "${realname}T" && \
+		$MV "${realname}U" "$realname" )
+	    fi
+
+	    exit $lt_exit
+	  }
+	done
+	IFS="$save_ifs"
+
+	# Restore the uninstalled library and exit
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+	  if test -n "$convenience"; then
+	    if test -z "$whole_archive_flag_spec"; then
+	      func_show_eval '${RM}r "$gentop"'
+	    fi
+	  fi
+
+	  exit $EXIT_SUCCESS
+	fi
+
+	# Create links to the real library.
+	for linkname in $linknames; do
+	  if test "$realname" != "$linkname"; then
+	    func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+	  fi
+	done
+
+	# If -module or -export-dynamic was specified, set the dlname.
+	if test "$module" = yes || test "$export_dynamic" = yes; then
+	  # On all known operating systems, these are identical.
+	  dlname="$soname"
+	fi
+      fi
+      ;;
+
+    obj)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for objects"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for objects" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for objects"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for objects"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for objects"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for objects"
+
+      case $output in
+      *.lo)
+	test -n "$objs$old_deplibs" && \
+	  func_fatal_error "cannot build library object \`$output' from non-libtool objects"
+
+	libobj=$output
+	func_lo2o "$libobj"
+	obj=$func_lo2o_result
+	;;
+      *)
+	libobj=
+	obj="$output"
+	;;
+      esac
+
+      # Delete the old objects.
+      $opt_dry_run || $RM $obj $libobj
+
+      # Objects from convenience libraries.  This assumes
+      # single-version convenience libraries.  Whenever we create
+      # different ones for PIC/non-PIC, we'll have to duplicate
+      # the extraction.
+      reload_conv_objs=
+      gentop=
+      # reload_cmds runs $LD directly, so let us get rid of
+      # -Wl from whole_archive_flag_spec and hope we can get by with
+      # turning commas into spaces.
+      wl=
+
+      if test -n "$convenience"; then
+	if test -n "$whole_archive_flag_spec"; then
+	  eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+	  reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+	else
+	  gentop="$output_objdir/${obj}x"
+	  func_append generated " $gentop"
+
+	  func_extract_archives $gentop $convenience
+	  reload_conv_objs="$reload_objs $func_extract_archives_result"
+	fi
+      fi
+
+      # If we're not building shared, we need to use non_pic_objects
+      test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
+
+      # Create the old-style object.
+      reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+      output="$obj"
+      func_execute_cmds "$reload_cmds" 'exit $?'
+
+      # Exit if we aren't doing a library object file.
+      if test -z "$libobj"; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$build_libtool_libs" != yes; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	# Create an invalid libtool object if no PIC, so that we don't
+	# accidentally link it into a program.
+	# $show "echo timestamp > $libobj"
+	# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+	exit $EXIT_SUCCESS
+      fi
+
+      if test -n "$pic_flag" || test "$pic_mode" != default; then
+	# Only do commands if we really have different PIC objects.
+	reload_objs="$libobjs $reload_conv_objs"
+	output="$libobj"
+	func_execute_cmds "$reload_cmds" 'exit $?'
+      fi
+
+      if test -n "$gentop"; then
+	func_show_eval '${RM}r "$gentop"'
+      fi
+
+      exit $EXIT_SUCCESS
+      ;;
+
+    prog)
+      case $host in
+	*cygwin*) func_stripname '' '.exe' "$output"
+	          output=$func_stripname_result.exe;;
+      esac
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for programs"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for programs"
+
+      test "$preload" = yes \
+        && test "$dlopen_support" = unknown \
+	&& test "$dlopen_self" = unknown \
+	&& test "$dlopen_self_static" = unknown && \
+	  func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+      case $host in
+      *-*-rhapsody* | *-*-darwin1.[012])
+	# On Rhapsody, replace the C library with the System framework
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	;;
+      esac
+
+      case $host in
+      *-*-darwin*)
+	# Don't allow lazy linking; it breaks C++ global constructors,
+	# but it is supposedly fixed on 10.4 or later (yay!).
+	if test "$tagname" = CXX ; then
+	  case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+	    10.[0123])
+	      func_append compile_command " ${wl}-bind_at_load"
+	      func_append finalize_command " ${wl}-bind_at_load"
+	    ;;
+	  esac
+	fi
+	# Time to change all our "foo.ltframework" stuff back to "-framework foo"
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	;;
+      esac
+
+
+      # Move library search paths that coincide with paths to not-yet-installed
+      # libraries to the beginning of the library search list.
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $compile_deplibs " in
+	  *" -L$path/$objdir "*)
+	    func_append new_libs " -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $compile_deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) func_append new_libs " $deplib" ;;
+	  esac
+	  ;;
+	*) func_append new_libs " $deplib" ;;
+	esac
+      done
+      compile_deplibs="$new_libs"
+
+
+      func_append compile_command " $compile_deplibs"
+      func_append finalize_command " $finalize_deplibs"
+
+      if test -n "$rpath$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	for libdir in $rpath $xrpath; do
+	  # This is the magic to use -rpath.
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append finalize_rpath " $libdir" ;;
+	  esac
+	done
+      fi
+
+      # Now hardcode the library paths
+      rpath=
+      hardcode_libdirs=
+      for libdir in $compile_rpath $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    func_append rpath " $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append perm_rpath " $libdir" ;;
+	  esac
+	fi
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$libdir:"*) ;;
+	  ::) dllsearchpath=$libdir;;
+	  *) func_append dllsearchpath ":$libdir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) func_append dllsearchpath ":$testbindir";;
+	  esac
+	  ;;
+	esac
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      compile_rpath="$rpath"
+
+      rpath=
+      hardcode_libdirs=
+      for libdir in $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    func_append rpath " $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$finalize_perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) func_append finalize_perm_rpath " $libdir" ;;
+	  esac
+	fi
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      finalize_rpath="$rpath"
+
+      if test -n "$libobjs" && test "$build_old_libs" = yes; then
+	# Transform all the library objects into standard objects.
+	compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+      fi
+
+      func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
+
+      # template prelinking step
+      if test -n "$prelink_cmds"; then
+	func_execute_cmds "$prelink_cmds" 'exit $?'
+      fi
+
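+      # Decide whether the program needs a libtool wrapper to run before
+      # installation; if not, link it directly and finish now.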
+      wrappers_required=yes
+      case $host in
+      *cegcc* | *mingw32ce*)
+        # Disable wrappers for cegcc and mingw32ce hosts; we are cross-compiling anyway.
+        wrappers_required=no
+        ;;
+      *cygwin* | *mingw* )
+        if test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      *)
+        if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      esac
+      if test "$wrappers_required" = no; then
+	# Replace the output file specification.
+	compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	link_command="$compile_command$compile_rpath"
+
+	# We have no uninstalled library dependencies, so finalize right now.
+	exit_status=0
+	func_show_eval "$link_command" 'exit_status=$?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	# Delete the generated files.
+	if test -f "$output_objdir/${outputname}S.${objext}"; then
+	  func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+	fi
+
+	exit $exit_status
+      fi
+
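+      # Prepend the shared library search paths needed to find uninstalled
+      # libraries when the freshly built program is run.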
+      if test -n "$compile_shlibpath$finalize_shlibpath"; then
+	compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+      fi
+      if test -n "$finalize_shlibpath"; then
+	finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+      fi
+
+      compile_var=
+      finalize_var=
+      if test -n "$runpath_var"; then
+	if test -n "$perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $perm_rpath; do
+	    func_append rpath "$dir:"
+	  done
+	  compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+	if test -n "$finalize_perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $finalize_perm_rpath; do
+	    func_append rpath "$dir:"
+	  done
+	  finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+      fi
+
+      if test "$no_install" = yes; then
+	# We don't need to create a wrapper script.
+	link_command="$compile_var$compile_command$compile_rpath"
+	# Replace the output file specification.
+	link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	# Delete the old output file.
+	$opt_dry_run || $RM $output
+	# Link the executable and exit
+	func_show_eval "$link_command" 'exit $?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$hardcode_action" = relink; then
+	# Fast installation is not supported
+	link_command="$compile_var$compile_command$compile_rpath"
+	relink_command="$finalize_var$finalize_command$finalize_rpath"
+
+	func_warning "this platform does not like uninstalled shared libraries"
+	func_warning "\`$output' will be relinked during installation"
+      else
+	if test "$fast_install" != no; then
+	  link_command="$finalize_var$compile_command$finalize_rpath"
+	  if test "$fast_install" = yes; then
+	    relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+	  else
+	    # fast_install is set to needless
+	    relink_command=
+	  fi
+	else
+	  link_command="$compile_var$compile_command$compile_rpath"
+	  relink_command="$finalize_var$finalize_command$finalize_rpath"
+	fi
+      fi
+
+      # Replace the output file specification.
+      link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+      # Delete the old output files.
+      $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+      func_show_eval "$link_command" 'exit $?'
+
+      if test -n "$postlink_cmds"; then
+	func_to_tool_file "$output_objdir/$outputname"
+	postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	func_execute_cmds "$postlink_cmds" 'exit $?'
+      fi
+
+      # Now create the wrapper script.
+      func_verbose "creating $output"
+
+      # Quote the relink command for shipping.
+      if test -n "$relink_command"; then
+	# Preserve any variables that may affect compiler behavior
+	for var in $variables_saved_for_relink; do
+	  if eval test -z \"\${$var+set}\"; then
+	    relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	  elif eval var_value=\$$var; test -z "$var_value"; then
+	    relink_command="$var=; export $var; $relink_command"
+	  else
+	    func_quote_for_eval "$var_value"
+	    relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	  fi
+	done
+	relink_command="(cd `pwd`; $relink_command)"
+	relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      fi
+
+      # Only actually do things if not in dry run mode.
+      $opt_dry_run || {
+	# win32 will think the script is a binary if it has
+	# a .exe suffix, so we strip it off here.
+	case $output in
+	  *.exe) func_stripname '' '.exe' "$output"
+	         output=$func_stripname_result ;;
+	esac
+	# test for cygwin because mv fails w/o .exe extensions
+	case $host in
+	  *cygwin*)
+	    exeext=.exe
+	    func_stripname '' '.exe' "$outputname"
+	    outputname=$func_stripname_result ;;
+	  *) exeext= ;;
+	esac
+	case $host in
+	  *cygwin* | *mingw* )
+	    func_dirname_and_basename "$output" "" "."
+	    output_name=$func_basename_result
+	    output_path=$func_dirname_result
+	    cwrappersource="$output_path/$objdir/lt-$output_name.c"
+	    cwrapper="$output_path/$output_name.exe"
+	    $RM $cwrappersource $cwrapper
+	    trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_cwrapperexe_src > $cwrappersource
+
+	    # The wrapper executable is built using the $host compiler,
+	    # because it contains $host paths and files. If cross-
+	    # compiling, it, like the target executable, must be
+	    # executed on the $host or under an emulation environment.
+	    $opt_dry_run || {
+	      $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+	      $STRIP $cwrapper
+	    }
+
+	    # Now, create the wrapper script for func_source use:
+	    func_ltwrapper_scriptname $cwrapper
+	    $RM $func_ltwrapper_scriptname_result
+	    trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+	    $opt_dry_run || {
+	      # note: this script will not be executed, so do not chmod.
+	      if test "x$build" = "x$host" ; then
+		$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+	      else
+		func_emit_wrapper no > $func_ltwrapper_scriptname_result
+	      fi
+	    }
+	  ;;
+	  * )
+	    $RM $output
+	    trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_wrapper no > $output
+	    chmod +x $output
+	  ;;
+	esac
+      }
+      exit $EXIT_SUCCESS
+      ;;
+    esac
+
+    # See if we need to build an old-fashioned archive.
+    for oldlib in $oldlibs; do
+
+      if test "$build_libtool_libs" = convenience; then
+	oldobjs="$libobjs_save $symfileobj"
+	addlibs="$convenience"
+	build_libtool_libs=no
+      else
+	if test "$build_libtool_libs" = module; then
+	  oldobjs="$libobjs_save"
+	  build_libtool_libs=no
+	else
+	  oldobjs="$old_deplibs $non_pic_objects"
+	  if test "$preload" = yes && test -f "$symfileobj"; then
+	    func_append oldobjs " $symfileobj"
+	  fi
+	fi
+	addlibs="$old_convenience"
+      fi
+
+      if test -n "$addlibs"; then
+	gentop="$output_objdir/${outputname}x"
+	func_append generated " $gentop"
+
+	func_extract_archives $gentop $addlibs
+	func_append oldobjs " $func_extract_archives_result"
+      fi
+
+      # Do each command in the archive commands.
+      if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
+	cmds=$old_archive_from_new_cmds
+      else
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  func_append generated " $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  func_append oldobjs " $func_extract_archives_result"
+	fi
+
+	# POSIX demands no paths to be encoded in archives.  We have
+	# to avoid creating archives with duplicate basenames if we
+	# might have to extract them afterwards, e.g., when creating a
+	# static archive out of a convenience library, or when linking
+	# the entirety of a libtool archive into another (currently
+	# not supported by libtool).
+	if (for obj in $oldobjs
+	    do
+	      func_basename "$obj"
+	      $ECHO "$func_basename_result"
+	    done | sort | sort -uc >/dev/null 2>&1); then
+	  :
+	else
+	  echo "copying selected object files to avoid basename conflicts..."
+	  gentop="$output_objdir/${outputname}x"
+	  func_append generated " $gentop"
+	  func_mkdir_p "$gentop"
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  counter=1
+	  for obj in $save_oldobjs
+	  do
+	    func_basename "$obj"
+	    objbase="$func_basename_result"
+	    case " $oldobjs " in
+	    " ") oldobjs=$obj ;;
+	    *[\ /]"$objbase "*)
+	      while :; do
+		# Make sure we don't pick an alternate name that also
+		# overlaps.
+		newobj=lt$counter-$objbase
+		func_arith $counter + 1
+		counter=$func_arith_result
+		case " $oldobjs " in
+		*[\ /]"$newobj "*) ;;
+		*) if test ! -f "$gentop/$newobj"; then break; fi ;;
+		esac
+	      done
+	      func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+	      func_append oldobjs " $gentop/$newobj"
+	      ;;
+	    *) func_append oldobjs " $obj" ;;
+	    esac
+	  done
+	fi
+	func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+	tool_oldlib=$func_to_tool_file_result
+	eval cmds=\"$old_archive_cmds\"
+
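+	# If the whole archive command fits within the maximum command-line
+	# length, run it as is; otherwise fall back to a command file or to
+	# piecewise archive linking below.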
+	func_len " $cmds"
+	len=$func_len_result
+	if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  cmds=$old_archive_cmds
+	elif test -n "$archiver_list_spec"; then
+	  func_verbose "using command file archive linking..."
+	  for obj in $oldobjs
+	  do
+	    func_to_tool_file "$obj"
+	    $ECHO "$func_to_tool_file_result"
+	  done > $output_objdir/$libname.libcmd
+	  func_to_tool_file "$output_objdir/$libname.libcmd"
+	  oldobjs=" $archiver_list_spec$func_to_tool_file_result"
+	  cmds=$old_archive_cmds
+	else
+	  # The command line is too long to link in one step, so link in parts.
+	  func_verbose "using piecewise archive linking..."
+	  save_RANLIB=$RANLIB
+	  RANLIB=:
+	  objlist=
+	  concat_cmds=
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  # Is there a better way of finding the last object in the list?
+	  for obj in $save_oldobjs
+	  do
+	    last_oldobj=$obj
+	  done
+	  eval test_cmds=\"$old_archive_cmds\"
+	  func_len " $test_cmds"
+	  len0=$func_len_result
+	  len=$len0
+	  for obj in $save_oldobjs
+	  do
+	    func_len " $obj"
+	    func_arith $len + $func_len_result
+	    len=$func_arith_result
+	    func_append objlist " $obj"
+	    if test "$len" -lt "$max_cmd_len"; then
+	      :
+	    else
+	      # The command has grown too long; flush the objects accumulated so far.
+	      oldobjs=$objlist
+	      if test "$obj" = "$last_oldobj" ; then
+		RANLIB=$save_RANLIB
+	      fi
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+	      objlist=
+	      len=$len0
+	    fi
+	  done
+	  RANLIB=$save_RANLIB
+	  oldobjs=$objlist
+	  if test "X$oldobjs" = "X" ; then
+	    eval cmds=\"\$concat_cmds\"
+	  else
+	    eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+	  fi
+	fi
+      fi
+      func_execute_cmds "$cmds" 'exit $?'
+    done
+
+    test -n "$generated" && \
+      func_show_eval "${RM}r$generated"
+
+    # Now create the libtool archive.
+    case $output in
+    *.la)
+      old_library=
+      test "$build_old_libs" = yes && old_library="$libname.$libext"
+      func_verbose "creating $output"
+
+      # Preserve any variables that may affect compiler behavior
+      for var in $variables_saved_for_relink; do
+	if eval test -z \"\${$var+set}\"; then
+	  relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	elif eval var_value=\$$var; test -z "$var_value"; then
+	  relink_command="$var=; export $var; $relink_command"
+	else
+	  func_quote_for_eval "$var_value"
+	  relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	fi
+      done
+      # Quote the link command for shipping.
+      relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+      relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      if test "$hardcode_automatic" = yes ; then
+	relink_command=
+      fi
+
+      # Only create the output if not a dry run.
+      $opt_dry_run || {
+	for installed in no yes; do
+	  if test "$installed" = yes; then
+	    if test -z "$install_libdir"; then
+	      break
+	    fi
+	    output="$output_objdir/$outputname"i
+	    # Replace all uninstalled libtool libraries with the installed ones
+	    newdependency_libs=
+	    for deplib in $dependency_libs; do
+	      case $deplib in
+	      *.la)
+		func_basename "$deplib"
+		name="$func_basename_result"
+		func_resolve_sysroot "$deplib"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$deplib' is not a valid libtool archive"
+		func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      -L*)
+		func_stripname -L '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		func_append newdependency_libs " -L$func_replace_sysroot_result"
+		;;
+	      -R*)
+		func_stripname -R '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		func_append newdependency_libs " -R$func_replace_sysroot_result"
+		;;
+	      *) func_append newdependency_libs " $deplib" ;;
+	      esac
+	    done
+	    dependency_libs="$newdependency_libs"
+	    newdlfiles=
+
+	    for lib in $dlfiles; do
+	      case $lib in
+	      *.la)
+	        func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      *) func_append newdlfiles " $lib" ;;
+	      esac
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+	      *.la)
+		# Only pass preopened files to the pseudo-archive (for
+		# eventual linking with the application that links it) if we
+		# didn't already link the preopened objects directly into
+		# the library:
+		func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      esac
+	    done
+	    dlprefiles="$newdlprefiles"
+	  else
+	    newdlfiles=
+	    for lib in $dlfiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      func_append newdlfiles " $abs"
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      func_append newdlprefiles " $abs"
+	    done
+	    dlprefiles="$newdlprefiles"
+	  fi
+	  $RM $output
+	  # place dlname in correct position for cygwin
+	  # In fact, it would be nice if we could use this code for all target
+	  # systems that can't hard-code library paths into their executables
+	  # and that have no shared library path variable independent of PATH,
+	  # but it turns out we can't easily determine that from inspecting
+	  # libtool variables, so we have to hard-code the OSs to which it
+	  # applies here; at the moment, that means platforms that use the PE
+	  # object format with DLL files.  See the long comment at the top of
+	  # tests/bindir.at for full details.
+	  tdlname=$dlname
+	  case $host,$output,$installed,$module,$dlname in
+	    *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+	      # If a -bindir argument was supplied, place the dll there.
+	      if test "x$bindir" != x ;
+	      then
+		func_relative_path "$install_libdir" "$bindir"
+		tdlname=$func_relative_path_result$dlname
+	      else
+		# Otherwise fall back on heuristic.
+		tdlname=../bin/$dlname
+	      fi
+	      ;;
+	  esac
+	  $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that can not go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+	  if test "$installed" = no && test "$need_relink" = yes; then
+	    $ECHO >> $output "\
+relink_command=\"$relink_command\""
+	  fi
+	done
+      }
+
+      # Do a symbolic link so that the libtool archive can be found in
+      # LD_LIBRARY_PATH before the program is installed.
+      func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+      ;;
+    esac
+    exit $EXIT_SUCCESS
+}
+
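+# Dispatch to link mode; relink requests share the same code path.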
+{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
+    func_mode_link ${1+"$@"}
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+    $opt_debug
+    RM="$nonopt"
+    files=
+    rmforce=
+    exit_status=0
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
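+    # Separate RM flags from the list of files to remove; -f additionally
+    # makes missing files non-fatal.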
+    for arg
+    do
+      case $arg in
+      -f) func_append RM " $arg"; rmforce=yes ;;
+      -*) func_append RM " $arg" ;;
+      *) func_append files " $arg" ;;
+      esac
+    done
+
+    test -z "$RM" && \
+      func_fatal_help "you must specify an RM program"
+
+    rmdirs=
+
+    for file in $files; do
+      func_dirname "$file" "" "."
+      dir="$func_dirname_result"
+      if test "X$dir" = X.; then
+	odir="$objdir"
+      else
+	odir="$dir/$objdir"
+      fi
+      func_basename "$file"
+      name="$func_basename_result"
+      test "$opt_mode" = uninstall && odir="$dir"
+
+      # Remember odir for removal later, being careful to avoid duplicates
+      if test "$opt_mode" = clean; then
+	case " $rmdirs " in
+	  *" $odir "*) ;;
+	  *) func_append rmdirs " $odir" ;;
+	esac
+      fi
+
+      # Don't error if the file doesn't exist and rm -f was used.
+      if { test -L "$file"; } >/dev/null 2>&1 ||
+	 { test -h "$file"; } >/dev/null 2>&1 ||
+	 test -f "$file"; then
+	:
+      elif test -d "$file"; then
+	exit_status=1
+	continue
+      elif test "$rmforce" = yes; then
+	continue
+      fi
+
+      rmfiles="$file"
+
+      case $name in
+      *.la)
+	# Possibly a libtool archive, so verify it.
+	if func_lalib_p "$file"; then
+	  func_source $dir/$name
+
+	  # Delete the libtool libraries and symlinks.
+	  for n in $library_names; do
+	    func_append rmfiles " $odir/$n"
+	  done
+	  test -n "$old_library" && func_append rmfiles " $odir/$old_library"
+
+	  case "$opt_mode" in
+	  clean)
+	    case " $library_names " in
+	    *" $dlname "*) ;;
+	    *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
+	    esac
+	    test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
+	    ;;
+	  uninstall)
+	    if test -n "$library_names"; then
+	      # Do each command in the postuninstall commands.
+	      func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+
+	    if test -n "$old_library"; then
+	      # Do each command in the old_postuninstall commands.
+	      func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+	    # FIXME: should reinstall the best remaining shared library.
+	    ;;
+	  esac
+	fi
+	;;
+
+      *.lo)
+	# Possibly a libtool object, so verify it.
+	if func_lalib_p "$file"; then
+
+	  # Read the .lo file
+	  func_source $dir/$name
+
+	  # Add PIC object to the list of files to remove.
+	  if test -n "$pic_object" &&
+	     test "$pic_object" != none; then
+	    func_append rmfiles " $dir/$pic_object"
+	  fi
+
+	  # Add non-PIC object to the list of files to remove.
+	  if test -n "$non_pic_object" &&
+	     test "$non_pic_object" != none; then
+	    func_append rmfiles " $dir/$non_pic_object"
+	  fi
+	fi
+	;;
+
+      *)
+	if test "$opt_mode" = clean ; then
+	  noexename=$name
+	  case $file in
+	  *.exe)
+	    func_stripname '' '.exe' "$file"
+	    file=$func_stripname_result
+	    func_stripname '' '.exe' "$name"
+	    noexename=$func_stripname_result
+	    # $file with .exe has already been added to rmfiles,
+	    # add $file without .exe
+	    func_append rmfiles " $file"
+	    ;;
+	  esac
+	  # Do a test to see if this is a libtool program.
+	  if func_ltwrapper_p "$file"; then
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      relink_command=
+	      func_source $func_ltwrapper_scriptname_result
+	      func_append rmfiles " $func_ltwrapper_scriptname_result"
+	    else
+	      relink_command=
+	      func_source $dir/$noexename
+	    fi
+
+	    # note $name still contains .exe if it was in $file originally
+	    # as does the version of $file that was added into $rmfiles
+	    func_append rmfiles " $odir/$name $odir/${name}S.${objext}"
+	    if test "$fast_install" = yes && test -n "$relink_command"; then
+	      func_append rmfiles " $odir/lt-$name"
+	    fi
+	    if test "X$noexename" != "X$name" ; then
+	      func_append rmfiles " $odir/lt-${noexename}.c"
+	    fi
+	  fi
+	fi
+	;;
+      esac
+      func_show_eval "$RM $rmfiles" 'exit_status=1'
+    done
+
+    # Try to remove the ${objdir}s in the directories where we deleted files
+    for dir in $rmdirs; do
+      if test -d "$dir"; then
+	func_show_eval "rmdir $dir >/dev/null 2>&1"
+      fi
+    done
+
+    exit $exit_status
+}
+
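+# Uninstall and clean requests are both handled by func_mode_uninstall.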
+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
+    func_mode_uninstall ${1+"$@"}
+
+test -z "$opt_mode" && {
+  help="$generic_help"
+  func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+  func_fatal_help "invalid operation mode \`$opt_mode'"
+
+if test -n "$exec_cmd"; then
+  eval exec "$exec_cmd"
+  exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# in which we disable both kinds of libraries.  Given conflicting
+# choices, we go for a static library, which is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them.  This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fall back to a static-only
+# configuration.  But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+# vi:sw=2
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/missing b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/missing
new file mode 100755
index 0000000..28055d2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/config/missing
@@ -0,0 +1,376 @@
+#! /bin/sh
+# Common stub for a few missing GNU programs while installing.
+
+scriptversion=2009-04-28.21; # UTC
+
+# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006,
+# 2008, 2009 Free Software Foundation, Inc.
+# Originally by François Pinard <pinard@iro.umontreal.ca>, 1996.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+if test $# -eq 0; then
+  echo 1>&2 "Try \`$0 --help' for more information"
+  exit 1
+fi
+
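+# By default only emulate the missing tool (run=:); --run clears this so the
+# real program is tried first.  The sed expressions below extract the file
+# named by --output or -o from the argument list.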
+run=:
+sed_output='s/.* --output[ =]\([^ ]*\).*/\1/p'
+sed_minuso='s/.* -o \([^ ]*\).*/\1/p'
+
+# In the cases where this matters, `missing' is being run in the
+# srcdir already.
+if test -f configure.ac; then
+  configure_ac=configure.ac
+else
+  configure_ac=configure.in
+fi
+
+msg="missing on your system"
+
+case $1 in
+--run)
+  # Try to run requested program, and just exit if it succeeds.
+  run=
+  shift
+  "$@" && exit 0
+  # Exit code 63 means version mismatch.  This often happens
+# when the user tries to use an ancient version of a tool on
+# a file that requires a minimum version.  In this case we
+# should proceed as if the program had been absent, or as if
+# --run hadn't been passed.
+  if test $? = 63; then
+    run=:
+    msg="probably too old"
+  fi
+  ;;
+
+  -h|--h|--he|--hel|--help)
+    echo "\
+$0 [OPTION]... PROGRAM [ARGUMENT]...
+
+Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an
+error status if there is no known handling for PROGRAM.
+
+Options:
+  -h, --help      display this help and exit
+  -v, --version   output version information and exit
+  --run           try to run the given command, and emulate it if it fails
+
+Supported PROGRAM values:
+  aclocal      touch file \`aclocal.m4'
+  autoconf     touch file \`configure'
+  autoheader   touch file \`config.h.in'
+  autom4te     touch the output file, or create a stub one
+  automake     touch all \`Makefile.in' files
+  bison        create \`y.tab.[ch]', if possible, from existing .[ch]
+  flex         create \`lex.yy.c', if possible, from existing .c
+  help2man     touch the output file
+  lex          create \`lex.yy.c', if possible, from existing .c
+  makeinfo     touch the output file
+  tar          try tar, gnutar, gtar, then tar without non-portable flags
+  yacc         create \`y.tab.[ch]', if possible, from existing .[ch]
+
+Version suffixes to PROGRAM as well as the prefixes \`gnu-', \`gnu', and
+\`g' are ignored when checking the name.
+
+Send bug reports to <bug-automake@gnu.org>."
+    exit $?
+    ;;
+
+  -v|--v|--ve|--ver|--vers|--versi|--versio|--version)
+    echo "missing $scriptversion (GNU Automake)"
+    exit $?
+    ;;
+
+  -*)
+    echo 1>&2 "$0: Unknown \`$1' option"
+    echo 1>&2 "Try \`$0 --help' for more information"
+    exit 1
+    ;;
+
+esac
+
+# normalize program name to check for.
+program=`echo "$1" | sed '
+  s/^gnu-//; t
+  s/^gnu//; t
+  s/^g//; t'`
+
+# Now exit if we have it, but it failed.  Also exit now if we
+# don't have it and --version was passed (most likely to detect
+# the program).  This is about non-GNU programs, so use $1 not
+# $program.
+case $1 in
+  lex*|yacc*)
+    # Not GNU programs, they don't have --version.
+    ;;
+
+  tar*)
+    if test -n "$run"; then
+       echo 1>&2 "ERROR: \`tar' requires --run"
+       exit 1
+    elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
+       exit 1
+    fi
+    ;;
+
+  *)
+    if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
+       # We have it, but it failed.
+       exit 1
+    elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
+       # Could not run --version or --help.  This is probably someone
+       # running `$TOOL --version' or `$TOOL --help' to check whether
+       # $TOOL exists and not knowing $TOOL uses missing.
+       exit 1
+    fi
+    ;;
+esac
+
+# If it does not exist, or fails to run (possibly an outdated version),
+# try to emulate it.
+case $program in
+  aclocal*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`acinclude.m4' or \`${configure_ac}'.  You might want
+         to install the \`Automake' and \`Perl' packages.  Grab them from
+         any GNU archive site."
+    touch aclocal.m4
+    ;;
+
+  autoconf*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`${configure_ac}'.  You might want to install the
+         \`Autoconf' and \`GNU m4' packages.  Grab them from any GNU
+         archive site."
+    touch configure
+    ;;
+
+  autoheader*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`acconfig.h' or \`${configure_ac}'.  You might want
+         to install the \`Autoconf' and \`GNU m4' packages.  Grab them
+         from any GNU archive site."
+    files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}`
+    test -z "$files" && files="config.h"
+    touch_files=
+    for f in $files; do
+      case $f in
+      *:*) touch_files="$touch_files "`echo "$f" |
+				       sed -e 's/^[^:]*://' -e 's/:.*//'`;;
+      *) touch_files="$touch_files $f.in";;
+      esac
+    done
+    touch $touch_files
+    ;;
+
+  automake*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'.
+         You might want to install the \`Automake' and \`Perl' packages.
+         Grab them from any GNU archive site."
+    find . -type f -name Makefile.am -print |
+	   sed 's/\.am$/.in/' |
+	   while read f; do touch "$f"; done
+    ;;
+
+  autom4te*)
+    echo 1>&2 "\
+WARNING: \`$1' is needed, but is $msg.
+         You might have modified some files without having the
+         proper tools for further handling them.
+         You can get \`$1' as part of \`Autoconf' from any GNU
+         archive site."
+
+    file=`echo "$*" | sed -n "$sed_output"`
+    test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
+    if test -f "$file"; then
+	touch $file
+    else
+	test -z "$file" || exec >$file
+	echo "#! /bin/sh"
+	echo "# Created by GNU Automake missing as a replacement of"
+	echo "#  $ $@"
+	echo "exit 0"
+	chmod +x $file
+	exit 1
+    fi
+    ;;
+
+  bison*|yacc*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified a \`.y' file.  You may need the \`Bison' package
+         in order for those modifications to take effect.  You can get
+         \`Bison' from any GNU archive site."
+    rm -f y.tab.c y.tab.h
+    if test $# -ne 1; then
+        eval LASTARG="\${$#}"
+	case $LASTARG in
+	*.y)
+	    SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'`
+	    if test -f "$SRCFILE"; then
+	         cp "$SRCFILE" y.tab.c
+	    fi
+	    SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'`
+	    if test -f "$SRCFILE"; then
+	         cp "$SRCFILE" y.tab.h
+	    fi
+	  ;;
+	esac
+    fi
+    if test ! -f y.tab.h; then
+	echo >y.tab.h
+    fi
+    if test ! -f y.tab.c; then
+	echo 'main() { return 0; }' >y.tab.c
+    fi
+    ;;
+
+  lex*|flex*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified a \`.l' file.  You may need the \`Flex' package
+         in order for those modifications to take effect.  You can get
+         \`Flex' from any GNU archive site."
+    rm -f lex.yy.c
+    if test $# -ne 1; then
+        eval LASTARG="\${$#}"
+	case $LASTARG in
+	*.l)
+	    SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'`
+	    if test -f "$SRCFILE"; then
+	         cp "$SRCFILE" lex.yy.c
+	    fi
+	  ;;
+	esac
+    fi
+    if test ! -f lex.yy.c; then
+	echo 'main() { return 0; }' >lex.yy.c
+    fi
+    ;;
+
+  help2man*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+	 you modified a dependency of a manual page.  You may need the
+	 \`Help2man' package in order for those modifications to take
+	 effect.  You can get \`Help2man' from any GNU archive site."
+
+    file=`echo "$*" | sed -n "$sed_output"`
+    test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
+    if test -f "$file"; then
+	touch $file
+    else
+	test -z "$file" || exec >$file
+	echo ".ab help2man is required to generate this page"
+	exit $?
+    fi
+    ;;
+
+  makeinfo*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified a \`.texi' or \`.texinfo' file, or any other file
+         indirectly affecting the content of the manual.  The spurious
+         call might also be the consequence of using a buggy \`make' (AIX,
+         DU, IRIX).  You might want to install the \`Texinfo' package or
+         the \`GNU make' package.  Grab either from any GNU archive site."
+    # The file to touch is that specified with -o ...
+    file=`echo "$*" | sed -n "$sed_output"`
+    test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
+    if test -z "$file"; then
+      # ... or it is the one specified with @setfilename ...
+      infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'`
+      file=`sed -n '
+	/^@setfilename/{
+	  s/.* \([^ ]*\) *$/\1/
+	  p
+	  q
+	}' $infile`
+      # ... or it is derived from the source name (dir/f.texi becomes f.info)
+      test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info
+    fi
+    # If the file does not exist, the user really needs makeinfo;
+    # let's fail without touching anything.
+    test -f $file || exit 1
+    touch $file
+    ;;
+
+  tar*)
+    shift
+
+    # We have already tried tar in the generic part.
+    # Look for gnutar/gtar before invocation to avoid ugly error
+    # messages.
+    if (gnutar --version > /dev/null 2>&1); then
+       gnutar "$@" && exit 0
+    fi
+    if (gtar --version > /dev/null 2>&1); then
+       gtar "$@" && exit 0
+    fi
+    firstarg="$1"
+    if shift; then
+	case $firstarg in
+	*o*)
+	    firstarg=`echo "$firstarg" | sed s/o//`
+	    tar "$firstarg" "$@" && exit 0
+	    ;;
+	esac
+	case $firstarg in
+	*h*)
+	    firstarg=`echo "$firstarg" | sed s/h//`
+	    tar "$firstarg" "$@" && exit 0
+	    ;;
+	esac
+    fi
+
+    echo 1>&2 "\
+WARNING: I can't seem to run \`tar' with the given arguments.
+         You may want to install GNU tar or Free paxutils, or check the
+         command line arguments."
+    exit 1
+    ;;
+
+  *)
+    echo 1>&2 "\
+WARNING: \`$1' is needed, and is $msg.
+         You might have modified some files without having the
+         proper tools for further handling them.  Check the \`README' file,
+         it often tells you about the needed prerequisites for installing
+         this package.  You may also peek at any GNU archive site, in case
+         some other package would contain this missing \`$1' program."
+    exit 1
+    ;;
+esac
+
+exit 0
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/COPYING.LIB b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/COPYING.LIB
new file mode 100644
index 0000000..ba2be48
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/COPYING.LIB
@@ -0,0 +1,515 @@
+
+                  GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+     51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+                            Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+  This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it.  You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations
+below.
+
+  When we speak of free software, we are referring to freedom of use,
+not price.  Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+  To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights.  These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+  For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you.  You must make sure that they, too, receive or can get the source
+code.  If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it.  And you must show them these terms so they know their rights.
+
+  We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+  To protect each distributor, we want to make it very clear that
+there is no warranty for the free library.  Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+^L
+  Finally, software patents pose a constant threat to the existence of
+any free program.  We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder.  Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+  Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License.  This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License.  We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+  When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library.  The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom.  The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+  We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License.  It also provides other free software developers Less
+of an advantage over competing non-free programs.  These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries.  However, the Lesser license provides advantages in certain
+special circumstances.
+
+  For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it
+becomes
+a de-facto standard.  To achieve this, non-free programs must be
+allowed to use the library.  A more frequent case is that a free
+library does the same job as widely used non-free libraries.  In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+  In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software.  For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+  Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.  Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library".  The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+^L
+                  GNU LESSER GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+  A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+  The "Library", below, refers to any such software library or work
+which has been distributed under these terms.  A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language.  (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+  "Source code" for a work means the preferred form of the work for
+making modifications to it.  For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control
+compilation
+and installation of the library.
+
+  Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it).  Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+  1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+  You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+  2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+
+    b) You must cause the files modified to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    c) You must cause the whole of the work to be licensed at no
+    charge to all third parties under the terms of this License.
+
+    d) If a facility in the modified Library refers to a function or a
+    table of data to be supplied by an application program that uses
+    the facility, other than as an argument passed when the facility
+    is invoked, then you must make a good faith effort to ensure that,
+    in the event an application does not supply such function or
+    table, the facility still operates, and performs whatever part of
+    its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has
+    a purpose that is entirely well-defined independent of the
+    application.  Therefore, Subsection 2d requires that any
+    application-supplied function or table used by this function must
+    be optional: if the application does not supply it, the square
+    root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library.  To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License.  (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.)  Do not make any other change in
+these notices.
+^L
+  Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+  This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+  4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+  If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library".  Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+  However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library".  The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+  When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library.  The
+threshold for this to be true is not precisely defined by law.
+
+  If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work.  (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+  Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+^L
+  6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+  You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License.  You must supply a copy of this License.  If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License.  Also, you must do one
+of these things:
+
+    a) Accompany the work with the complete corresponding
+    machine-readable source code for the Library including whatever
+    changes were used in the work (which must be distributed under
+    Sections 1 and 2 above); and, if the work is an executable linked
+    with the Library, with the complete machine-readable "work that
+    uses the Library", as object code and/or source code, so that the
+    user can modify the Library and then relink to produce a modified
+    executable containing the modified Library.  (It is understood
+    that the user who changes the contents of definitions files in the
+    Library will not necessarily be able to recompile the application
+    to use the modified definitions.)
+
+    b) Use a suitable shared library mechanism for linking with the
+    Library.  A suitable mechanism is one that (1) uses at run time a
+    copy of the library already present on the user's computer system,
+    rather than copying library functions into the executable, and (2)
+    will operate properly with a modified version of the library, if
+    the user installs one, as long as the modified version is
+    interface-compatible with the version that the work was made with.
+
+    c) Accompany the work with a written offer, valid for at
+    least three years, to give the same user the materials
+    specified in Subsection 6a, above, for a charge no more
+    than the cost of performing this distribution.
+
+    d) If distribution of the work is made by offering access to copy
+    from a designated place, offer equivalent access to copy the above
+    specified materials from the same place.
+
+    e) Verify that the user has already received a copy of these
+    materials or that you have already sent this user a copy.
+
+  For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it.  However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+  It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system.  Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+^L
+  7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work
+    based on the Library, uncombined with any other library
+    facilities.  This must be distributed under the terms of the
+    Sections above.
+
+    b) Give prominent notice with the combined library of the fact
+    that part of it is a work based on the Library, and explaining
+    where to find the accompanying uncombined form of the same work.
+
+  8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License.  Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License.  However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+  9. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Library or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+  10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+^L
+  11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply, and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License
+may add an explicit geographical distribution limitation excluding those
+countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation.  If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+^L
+  14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission.  For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this.  Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+                            NO WARRANTY
+
+  15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+^L
+           How to Apply These Terms to Your New Libraries
+
+  If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change.  You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms
+of the ordinary General Public License).
+
+  To apply these terms, attach the following notices to the library.
+It is safest to attach them to the start of each source file to most
+effectively convey the exclusion of warranty; and each file should
+have at least the "copyright" line and a pointer to where the full
+notice is found.
+
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+
+Also add information on how to contact you by electronic and paper
+mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the
+  library `Frob' (a library for tweaking knobs) written by James
+  Random Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+  Ty Coon, President of Vice
+
+That's all there is to it!
+
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.am b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.am
new file mode 100644
index 0000000..4a5e945
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.am
@@ -0,0 +1,164 @@
+## Makefile.am -- Process this file with automake to produce Makefile.in
+##
+##   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
+##   Written by Gary V. Vaughan, 2003
+##
+##   NOTE: The canonical source of this file is maintained with the
+##   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+##
+## GNU Libltdl is free software; you can redistribute it and/or
+## modify it under the terms of the GNU Lesser General Public
+## License as published by the Free Software Foundation; either
+## version 2 of the License, or (at your option) any later version.
+##
+## As a special exception to the GNU Lesser General Public License,
+## if you distribute this file as part of a program or library that
+## is built using GNU libtool, you may include this file under the
+## same distribution terms that you use for the rest of that program.
+##
+## GNU Libltdl is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU Lesser General Public License for more details.
+##
+## You should have received a copy of the GNU Lesser General Public
+## License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+## copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+## or obtained by writing to the Free Software Foundation, Inc.,
+## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#####
+
+ACLOCAL_AMFLAGS = -I m4
+AUTOMAKE_OPTIONS = foreign
+AM_CPPFLAGS =
+AM_LDFLAGS =
+BUILT_SOURCES =
+include_HEADERS =
+noinst_LTLIBRARIES =
+lib_LTLIBRARIES =
+EXTRA_LTLIBRARIES =
+EXTRA_DIST =
+CLEANFILES =
+MOSTLYCLEANFILES =
+
+# -I$(srcdir) is needed for user that built libltdl with a sub-Automake
+# (not as a sub-package!) using 'nostdinc':
+AM_CPPFLAGS	       += -DLT_CONFIG_H='<$(LT_CONFIG_H)>' \
+			  -DLTDL -I. -I$(srcdir) -Ilibltdl \
+			  -I$(srcdir)/libltdl -I$(srcdir)/libltdl
+AM_LDFLAGS	       += -no-undefined
+LTDL_VERSION_INFO	= -version-info 10:0:3
+
+noinst_LTLIBRARIES	+= $(LT_DLLOADERS)
+
+if INSTALL_LTDL
+ltdlincludedir		= $(includedir)/libltdl
+ltdlinclude_HEADERS	= libltdl/lt_system.h \
+			  libltdl/lt_error.h \
+			  libltdl/lt_dlloader.h
+include_HEADERS	       += ltdl.h
+lib_LTLIBRARIES	       += libltdl.la
+endif
+
+if CONVENIENCE_LTDL
+noinst_LTLIBRARIES     += libltdlc.la
+endif
+
+libltdl_la_SOURCES = libltdl/lt__alloc.h \
+			  libltdl/lt__dirent.h \
+			  libltdl/lt__glibc.h \
+			  libltdl/lt__private.h \
+			  libltdl/lt__strl.h \
+			  libltdl/lt_dlloader.h \
+			  libltdl/lt_error.h \
+	                  libltdl/lt_system.h \
+			  libltdl/slist.h \
+			  loaders/preopen.c \
+			  lt__alloc.c \
+			  lt_dlloader.c \
+			  lt_error.c \
+			  ltdl.c \
+			  ltdl.h \
+			  slist.c
+
+EXTRA_DIST	       += lt__dirent.c \
+			  lt__strl.c
+
+libltdl_la_CPPFLAGS	= -DLTDLOPEN=$(LTDLOPEN) $(AM_CPPFLAGS)
+libltdl_la_LDFLAGS	= $(AM_LDFLAGS) $(LTDL_VERSION_INFO) $(LT_DLPREOPEN)
+libltdl_la_LIBADD	= $(LTLIBOBJS)
+libltdl_la_DEPENDENCIES	= $(LT_DLLOADERS) $(LTLIBOBJS)
+
+libltdlc_la_SOURCES	= $(libltdl_la_SOURCES)
+libltdlc_la_CPPFLAGS	= -DLTDLOPEN=$(LTDLOPEN)c $(AM_CPPFLAGS)
+libltdlc_la_LDFLAGS	= $(AM_LDFLAGS) $(LT_DLPREOPEN)
+libltdlc_la_LIBADD	= $(libltdl_la_LIBADD)
+libltdlc_la_DEPENDENCIES= $(libltdl_la_DEPENDENCIES)
+
+## The loaders are preopened by libltdl, itself always built from
+## pic-objects (either as a shared library, or a convenience library),
+## so the loaders themselves must be made from pic-objects too.  We
+## use convenience libraries for that purpose:
+EXTRA_LTLIBRARIES	       += dlopen.la \
+				  dld_link.la \
+				  dyld.la \
+				  load_add_on.la \
+				  loadlibrary.la \
+				  shl_load.la
+
+dlopen_la_SOURCES	= loaders/dlopen.c
+dlopen_la_LDFLAGS	= -module -avoid-version
+dlopen_la_LIBADD 	= $(LIBADD_DLOPEN)
+
+dld_link_la_SOURCES	= loaders/dld_link.c
+dld_link_la_LDFLAGS	= -module -avoid-version
+dld_link_la_LIBADD	= -ldld
+
+dyld_la_SOURCES		= loaders/dyld.c
+dyld_la_LDFLAGS		= -module -avoid-version
+
+load_add_on_la_SOURCES	= loaders/load_add_on.c
+load_add_on_la_LDFLAGS	= -module -avoid-version
+
+loadlibrary_la_SOURCES	= loaders/loadlibrary.c
+loadlibrary_la_LDFLAGS	= -module -avoid-version
+
+shl_load_la_SOURCES	= loaders/shl_load.c
+shl_load_la_LDFLAGS	= -module -avoid-version
+shl_load_la_LIBADD	= $(LIBADD_SHL_LOAD)
+
+## Make sure these will be cleaned even when they're not built by default:
+CLEANFILES		       += libltdl.la \
+				  libltdlc.la \
+				  libdlloader.la
+
+## Automake-1.9.6 doesn't clean subdir AC_LIBOBJ compiled objects
+## automatically:
+CLEANFILES	       += $(LIBOBJS) $(LTLIBOBJS)
+
+EXTRA_DIST	       += COPYING.LIB \
+			  configure.ac \
+			  Makefile.am \
+			  aclocal.m4 \
+			  Makefile.in \
+			  configure \
+			  config-h.in \
+			  README
+
+## --------------------------- ##
+## Gnulib Makefile.am snippets ##
+## --------------------------- ##
+
+BUILT_SOURCES	+= $(ARGZ_H)
+EXTRA_DIST	+= argz_.h \
+		   argz.c
+
+# We need the following in order to create an <argz.h> when the system
+# doesn't have one that works with the given compiler.
+all-local $(lib_OBJECTS): $(ARGZ_H)
+argz.h: argz_.h
+	$(mkinstalldirs) . 
+	cp $(srcdir)/argz_.h $@-t
+	mv $@-t $@
+MOSTLYCLEANFILES += argz.h \
+		    argz.h-t
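The Makefile.am above builds GNU libltdl, the portable dlopen wrapper bundled with libtool. For readers unfamiliar with the library these rules produce, here is a minimal sketch of how an application typically consumes libltdl once it is installed (link with -lltdl); the module name "plugin" and the "plugin_init" symbol are hypothetical placeholders for illustration only and are not defined anywhere in this import.

    /* Minimal libltdl usage sketch; "plugin" and "plugin_init" are
     * hypothetical names, not part of this import. */
    #include <stdio.h>
    #include <ltdl.h>

    int main(void)
    {
        if (lt_dlinit() != 0) {                      /* initialize ltdl */
            fprintf(stderr, "lt_dlinit: %s\n", lt_dlerror());
            return 1;
        }

        lt_dlhandle handle = lt_dlopenext("plugin"); /* tries .la, .so, etc. */
        if (handle == NULL) {
            fprintf(stderr, "lt_dlopenext: %s\n", lt_dlerror());
            lt_dlexit();
            return 1;
        }

        /* Look up a symbol exported by the module and call it if present. */
        void (*init)(void) = (void (*)(void)) lt_dlsym(handle, "plugin_init");
        if (init != NULL)
            init();

        lt_dlclose(handle);
        lt_dlexit();                                 /* balance lt_dlinit() */
        return 0;
    }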
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.in b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.in
new file mode 100644
index 0000000..daff330
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.in
@@ -0,0 +1,1052 @@
+# Makefile.in generated by automake 1.11.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009  Free Software Foundation,
+# Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#####
+
+
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+@INSTALL_LTDL_TRUE@am__append_1 = ltdl.h
+@INSTALL_LTDL_TRUE@am__append_2 = libltdl.la
+@CONVENIENCE_LTDL_TRUE@am__append_3 = libltdlc.la
+subdir = .
+DIST_COMMON = README $(am__configure_deps) $(am__include_HEADERS_DIST) \
+	$(am__ltdlinclude_HEADERS_DIST) $(srcdir)/Makefile.am \
+	$(srcdir)/Makefile.in $(srcdir)/config-h.in \
+	$(top_srcdir)/configure COPYING.LIB argz.c config/compile \
+	config/config.guess config/config.sub config/depcomp \
+	config/install-sh config/ltmain.sh  \
+	config/missing  lt__dirent.c lt__strl.c
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/m4/argz.m4 \
+	$(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltdl.m4 \
+	$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
+	$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
+	$(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
+ configure.lineno config.status.lineno
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+  for p in $$list; do echo "$$p $$p"; done | \
+  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+    if (++n[$$2] == $(am__install_max)) \
+      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+    END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)" \
+	"$(DESTDIR)$(ltdlincludedir)"
+LTLIBRARIES = $(lib_LTLIBRARIES) $(noinst_LTLIBRARIES)
+dld_link_la_DEPENDENCIES =
+am_dld_link_la_OBJECTS = dld_link.lo
+dld_link_la_OBJECTS = $(am_dld_link_la_OBJECTS)
+dld_link_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(dld_link_la_LDFLAGS) $(LDFLAGS) -o $@
+am__DEPENDENCIES_1 =
+dlopen_la_DEPENDENCIES = $(am__DEPENDENCIES_1)
+am_dlopen_la_OBJECTS = dlopen.lo
+dlopen_la_OBJECTS = $(am_dlopen_la_OBJECTS)
+dlopen_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(dlopen_la_LDFLAGS) $(LDFLAGS) -o $@
+dyld_la_LIBADD =
+am_dyld_la_OBJECTS = dyld.lo
+dyld_la_OBJECTS = $(am_dyld_la_OBJECTS)
+dyld_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(dyld_la_LDFLAGS) \
+	$(LDFLAGS) -o $@
+am_libltdl_la_OBJECTS = libltdl_la-preopen.lo libltdl_la-lt__alloc.lo \
+	libltdl_la-lt_dlloader.lo libltdl_la-lt_error.lo \
+	libltdl_la-ltdl.lo libltdl_la-slist.lo
+libltdl_la_OBJECTS = $(am_libltdl_la_OBJECTS)
+libltdl_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(libltdl_la_LDFLAGS) $(LDFLAGS) -o $@
+@INSTALL_LTDL_TRUE@am_libltdl_la_rpath = -rpath $(libdir)
+am__objects_1 = libltdlc_la-preopen.lo libltdlc_la-lt__alloc.lo \
+	libltdlc_la-lt_dlloader.lo libltdlc_la-lt_error.lo \
+	libltdlc_la-ltdl.lo libltdlc_la-slist.lo
+am_libltdlc_la_OBJECTS = $(am__objects_1)
+libltdlc_la_OBJECTS = $(am_libltdlc_la_OBJECTS)
+libltdlc_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(libltdlc_la_LDFLAGS) $(LDFLAGS) -o $@
+@CONVENIENCE_LTDL_TRUE@am_libltdlc_la_rpath =
+load_add_on_la_LIBADD =
+am_load_add_on_la_OBJECTS = load_add_on.lo
+load_add_on_la_OBJECTS = $(am_load_add_on_la_OBJECTS)
+load_add_on_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(load_add_on_la_LDFLAGS) $(LDFLAGS) -o $@
+loadlibrary_la_LIBADD =
+am_loadlibrary_la_OBJECTS = loadlibrary.lo
+loadlibrary_la_OBJECTS = $(am_loadlibrary_la_OBJECTS)
+loadlibrary_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(loadlibrary_la_LDFLAGS) $(LDFLAGS) -o $@
+shl_load_la_DEPENDENCIES = $(am__DEPENDENCIES_1)
+am_shl_load_la_OBJECTS = shl_load.lo
+shl_load_la_OBJECTS = $(am_shl_load_la_OBJECTS)
+shl_load_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(shl_load_la_LDFLAGS) $(LDFLAGS) -o $@
+DEFAULT_INCLUDES = -I.@am__isrc@
+depcomp = $(SHELL) $(top_srcdir)/config/depcomp
+am__depfiles_maybe = depfiles
+am__mv = mv -f
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+	$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \
+	$(LDFLAGS) -o $@
+SOURCES = $(dld_link_la_SOURCES) $(dlopen_la_SOURCES) \
+	$(dyld_la_SOURCES) $(libltdl_la_SOURCES) \
+	$(libltdlc_la_SOURCES) $(load_add_on_la_SOURCES) \
+	$(loadlibrary_la_SOURCES) $(shl_load_la_SOURCES)
+DIST_SOURCES = $(dld_link_la_SOURCES) $(dlopen_la_SOURCES) \
+	$(dyld_la_SOURCES) $(libltdl_la_SOURCES) \
+	$(libltdlc_la_SOURCES) $(load_add_on_la_SOURCES) \
+	$(loadlibrary_la_SOURCES) $(shl_load_la_SOURCES)
+am__include_HEADERS_DIST = ltdl.h
+am__ltdlinclude_HEADERS_DIST = libltdl/lt_system.h libltdl/lt_error.h \
+	libltdl/lt_dlloader.h
+HEADERS = $(include_HEADERS) $(ltdlinclude_HEADERS)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+distdir = $(PACKAGE)-$(VERSION)
+top_distdir = $(distdir)
+am__remove_distdir = \
+  { test ! -d "$(distdir)" \
+    || { find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \
+         && rm -fr "$(distdir)"; }; }
+DIST_ARCHIVES = $(distdir).tar.gz
+GZIP_ENV = --best
+distuninstallcheck_listfiles = find . -type f -print
+distcleancheck_listfiles = find . -type f -print
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+ARGZ_H = @ARGZ_H@
+AS = @AS@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBADD_DL = @LIBADD_DL@
+LIBADD_DLD_LINK = @LIBADD_DLD_LINK@
+LIBADD_DLOPEN = @LIBADD_DLOPEN@
+LIBADD_SHL_LOAD = @LIBADD_SHL_LOAD@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTDLOPEN = @LTDLOPEN@
+LTLIBOBJS = @LTLIBOBJS@
+LT_CONFIG_H = @LT_CONFIG_H@
+LT_DLLOADERS = @LT_DLLOADERS@
+LT_DLPREOPEN = @LT_DLPREOPEN@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sys_symbol_underscore = @sys_symbol_underscore@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+ACLOCAL_AMFLAGS = -I m4
+AUTOMAKE_OPTIONS = foreign
+
+# -I$(srcdir) is needed for user that built libltdl with a sub-Automake
+# (not as a sub-package!) using 'nostdinc':
+AM_CPPFLAGS = -DLT_CONFIG_H='<$(LT_CONFIG_H)>' -DLTDL -I. -I$(srcdir) \
+	-Ilibltdl -I$(srcdir)/libltdl -I$(srcdir)/libltdl
+AM_LDFLAGS = -no-undefined
+BUILT_SOURCES = $(ARGZ_H)
+include_HEADERS = $(am__append_1)
+noinst_LTLIBRARIES = $(LT_DLLOADERS) $(am__append_3)
+lib_LTLIBRARIES = $(am__append_2)
+EXTRA_LTLIBRARIES = dlopen.la dld_link.la dyld.la load_add_on.la \
+	loadlibrary.la shl_load.la
+EXTRA_DIST = lt__dirent.c lt__strl.c COPYING.LIB configure.ac \
+	Makefile.am aclocal.m4 Makefile.in configure config-h.in \
+	README argz_.h argz.c
+CLEANFILES = libltdl.la libltdlc.la libdlloader.la $(LIBOBJS) \
+	$(LTLIBOBJS)
+MOSTLYCLEANFILES = argz.h argz.h-t
+LTDL_VERSION_INFO = -version-info 10:0:3
+@INSTALL_LTDL_TRUE@ltdlincludedir = $(includedir)/libltdl
+@INSTALL_LTDL_TRUE@ltdlinclude_HEADERS = libltdl/lt_system.h \
+@INSTALL_LTDL_TRUE@			  libltdl/lt_error.h \
+@INSTALL_LTDL_TRUE@			  libltdl/lt_dlloader.h
+
+libltdl_la_SOURCES = libltdl/lt__alloc.h \
+			  libltdl/lt__dirent.h \
+			  libltdl/lt__glibc.h \
+			  libltdl/lt__private.h \
+			  libltdl/lt__strl.h \
+			  libltdl/lt_dlloader.h \
+			  libltdl/lt_error.h \
+	                  libltdl/lt_system.h \
+			  libltdl/slist.h \
+			  loaders/preopen.c \
+			  lt__alloc.c \
+			  lt_dlloader.c \
+			  lt_error.c \
+			  ltdl.c \
+			  ltdl.h \
+			  slist.c
+
+libltdl_la_CPPFLAGS = -DLTDLOPEN=$(LTDLOPEN) $(AM_CPPFLAGS)
+libltdl_la_LDFLAGS = $(AM_LDFLAGS) $(LTDL_VERSION_INFO) $(LT_DLPREOPEN)
+libltdl_la_LIBADD = $(LTLIBOBJS)
+libltdl_la_DEPENDENCIES = $(LT_DLLOADERS) $(LTLIBOBJS)
+libltdlc_la_SOURCES = $(libltdl_la_SOURCES)
+libltdlc_la_CPPFLAGS = -DLTDLOPEN=$(LTDLOPEN)c $(AM_CPPFLAGS)
+libltdlc_la_LDFLAGS = $(AM_LDFLAGS) $(LT_DLPREOPEN)
+libltdlc_la_LIBADD = $(libltdl_la_LIBADD)
+libltdlc_la_DEPENDENCIES = $(libltdl_la_DEPENDENCIES)
+dlopen_la_SOURCES = loaders/dlopen.c
+dlopen_la_LDFLAGS = -module -avoid-version
+dlopen_la_LIBADD = $(LIBADD_DLOPEN)
+dld_link_la_SOURCES = loaders/dld_link.c
+dld_link_la_LDFLAGS = -module -avoid-version
+dld_link_la_LIBADD = -ldld
+dyld_la_SOURCES = loaders/dyld.c
+dyld_la_LDFLAGS = -module -avoid-version
+load_add_on_la_SOURCES = loaders/load_add_on.c
+load_add_on_la_LDFLAGS = -module -avoid-version
+loadlibrary_la_SOURCES = loaders/loadlibrary.c
+loadlibrary_la_LDFLAGS = -module -avoid-version
+shl_load_la_SOURCES = loaders/shl_load.c
+shl_load_la_LDFLAGS = -module -avoid-version
+shl_load_la_LIBADD = $(LIBADD_SHL_LOAD)
+all: $(BUILT_SOURCES) config.h
+	$(MAKE) $(AM_MAKEFLAGS) all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+am--refresh:
+	@:
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \
+	      $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \
+	$(am__cd) $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    echo ' $(SHELL) ./config.status'; \
+	    $(SHELL) ./config.status;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	$(SHELL) ./config.status --recheck
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	$(am__cd) $(srcdir) && $(AUTOCONF)
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	$(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
+$(am__aclocal_m4_deps):
+
+config.h: stamp-h1
+	@if test ! -f $@; then \
+	  rm -f stamp-h1; \
+	  $(MAKE) $(AM_MAKEFLAGS) stamp-h1; \
+	else :; fi
+
+stamp-h1: $(srcdir)/config-h.in $(top_builddir)/config.status
+	@rm -f stamp-h1
+	cd $(top_builddir) && $(SHELL) ./config.status config.h
+$(srcdir)/config-h.in:  $(am__configure_deps) 
+	($(am__cd) $(top_srcdir) && $(AUTOHEADER))
+	rm -f stamp-h1
+	touch $@
+
+distclean-hdr:
+	-rm -f config.h stamp-h1
+install-libLTLIBRARIES: $(lib_LTLIBRARIES)
+	@$(NORMAL_INSTALL)
+	test -z "$(libdir)" || $(MKDIR_P) "$(DESTDIR)$(libdir)"
+	@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
+	list2=; for p in $$list; do \
+	  if test -f $$p; then \
+	    list2="$$list2 $$p"; \
+	  else :; fi; \
+	done; \
+	test -z "$$list2" || { \
+	  echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \
+	  $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \
+	}
+
+uninstall-libLTLIBRARIES:
+	@$(NORMAL_UNINSTALL)
+	@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
+	for p in $$list; do \
+	  $(am__strip_dir) \
+	  echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \
+	  $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \
+	done
+
+clean-libLTLIBRARIES:
+	-test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES)
+	@list='$(lib_LTLIBRARIES)'; for p in $$list; do \
+	  dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
+	  test "$$dir" != "$$p" || dir=.; \
+	  echo "rm -f \"$${dir}/so_locations\""; \
+	  rm -f "$${dir}/so_locations"; \
+	done
+
+clean-noinstLTLIBRARIES:
+	-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
+	@list='$(noinst_LTLIBRARIES)'; for p in $$list; do \
+	  dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
+	  test "$$dir" != "$$p" || dir=.; \
+	  echo "rm -f \"$${dir}/so_locations\""; \
+	  rm -f "$${dir}/so_locations"; \
+	done
+dld_link.la: $(dld_link_la_OBJECTS) $(dld_link_la_DEPENDENCIES) 
+	$(dld_link_la_LINK)  $(dld_link_la_OBJECTS) $(dld_link_la_LIBADD) $(LIBS)
+dlopen.la: $(dlopen_la_OBJECTS) $(dlopen_la_DEPENDENCIES) 
+	$(dlopen_la_LINK)  $(dlopen_la_OBJECTS) $(dlopen_la_LIBADD) $(LIBS)
+dyld.la: $(dyld_la_OBJECTS) $(dyld_la_DEPENDENCIES) 
+	$(dyld_la_LINK)  $(dyld_la_OBJECTS) $(dyld_la_LIBADD) $(LIBS)
+libltdl.la: $(libltdl_la_OBJECTS) $(libltdl_la_DEPENDENCIES) 
+	$(libltdl_la_LINK) $(am_libltdl_la_rpath) $(libltdl_la_OBJECTS) $(libltdl_la_LIBADD) $(LIBS)
+libltdlc.la: $(libltdlc_la_OBJECTS) $(libltdlc_la_DEPENDENCIES) 
+	$(libltdlc_la_LINK) $(am_libltdlc_la_rpath) $(libltdlc_la_OBJECTS) $(libltdlc_la_LIBADD) $(LIBS)
+load_add_on.la: $(load_add_on_la_OBJECTS) $(load_add_on_la_DEPENDENCIES) 
+	$(load_add_on_la_LINK)  $(load_add_on_la_OBJECTS) $(load_add_on_la_LIBADD) $(LIBS)
+loadlibrary.la: $(loadlibrary_la_OBJECTS) $(loadlibrary_la_DEPENDENCIES) 
+	$(loadlibrary_la_LINK)  $(loadlibrary_la_OBJECTS) $(loadlibrary_la_LIBADD) $(LIBS)
+shl_load.la: $(shl_load_la_OBJECTS) $(shl_load_la_DEPENDENCIES) 
+	$(shl_load_la_LINK)  $(shl_load_la_OBJECTS) $(shl_load_la_LIBADD) $(LIBS)
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@$(DEPDIR)/argz.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@$(DEPDIR)/lt__dirent.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@$(DEPDIR)/lt__strl.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dld_link.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dlopen.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dyld.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdl_la-lt__alloc.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdl_la-lt_dlloader.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdl_la-lt_error.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdl_la-ltdl.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdl_la-preopen.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdl_la-slist.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdlc_la-lt__alloc.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdlc_la-lt_dlloader.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdlc_la-lt_error.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdlc_la-ltdl.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdlc_la-preopen.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libltdlc_la-slist.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/load_add_on.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/loadlibrary.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/shl_load.Plo@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@	$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LTCOMPILE) -c -o $@ $<
+
+dld_link.lo: loaders/dld_link.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT dld_link.lo -MD -MP -MF $(DEPDIR)/dld_link.Tpo -c -o dld_link.lo `test -f 'loaders/dld_link.c' || echo '$(srcdir)/'`loaders/dld_link.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/dld_link.Tpo $(DEPDIR)/dld_link.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/dld_link.c' object='dld_link.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o dld_link.lo `test -f 'loaders/dld_link.c' || echo '$(srcdir)/'`loaders/dld_link.c
+
+dlopen.lo: loaders/dlopen.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT dlopen.lo -MD -MP -MF $(DEPDIR)/dlopen.Tpo -c -o dlopen.lo `test -f 'loaders/dlopen.c' || echo '$(srcdir)/'`loaders/dlopen.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/dlopen.Tpo $(DEPDIR)/dlopen.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/dlopen.c' object='dlopen.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o dlopen.lo `test -f 'loaders/dlopen.c' || echo '$(srcdir)/'`loaders/dlopen.c
+
+dyld.lo: loaders/dyld.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT dyld.lo -MD -MP -MF $(DEPDIR)/dyld.Tpo -c -o dyld.lo `test -f 'loaders/dyld.c' || echo '$(srcdir)/'`loaders/dyld.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/dyld.Tpo $(DEPDIR)/dyld.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/dyld.c' object='dyld.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o dyld.lo `test -f 'loaders/dyld.c' || echo '$(srcdir)/'`loaders/dyld.c
+
+libltdl_la-preopen.lo: loaders/preopen.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdl_la-preopen.lo -MD -MP -MF $(DEPDIR)/libltdl_la-preopen.Tpo -c -o libltdl_la-preopen.lo `test -f 'loaders/preopen.c' || echo '$(srcdir)/'`loaders/preopen.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdl_la-preopen.Tpo $(DEPDIR)/libltdl_la-preopen.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/preopen.c' object='libltdl_la-preopen.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdl_la-preopen.lo `test -f 'loaders/preopen.c' || echo '$(srcdir)/'`loaders/preopen.c
+
+libltdl_la-lt__alloc.lo: lt__alloc.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdl_la-lt__alloc.lo -MD -MP -MF $(DEPDIR)/libltdl_la-lt__alloc.Tpo -c -o libltdl_la-lt__alloc.lo `test -f 'lt__alloc.c' || echo '$(srcdir)/'`lt__alloc.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdl_la-lt__alloc.Tpo $(DEPDIR)/libltdl_la-lt__alloc.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='lt__alloc.c' object='libltdl_la-lt__alloc.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdl_la-lt__alloc.lo `test -f 'lt__alloc.c' || echo '$(srcdir)/'`lt__alloc.c
+
+libltdl_la-lt_dlloader.lo: lt_dlloader.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdl_la-lt_dlloader.lo -MD -MP -MF $(DEPDIR)/libltdl_la-lt_dlloader.Tpo -c -o libltdl_la-lt_dlloader.lo `test -f 'lt_dlloader.c' || echo '$(srcdir)/'`lt_dlloader.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdl_la-lt_dlloader.Tpo $(DEPDIR)/libltdl_la-lt_dlloader.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='lt_dlloader.c' object='libltdl_la-lt_dlloader.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdl_la-lt_dlloader.lo `test -f 'lt_dlloader.c' || echo '$(srcdir)/'`lt_dlloader.c
+
+libltdl_la-lt_error.lo: lt_error.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdl_la-lt_error.lo -MD -MP -MF $(DEPDIR)/libltdl_la-lt_error.Tpo -c -o libltdl_la-lt_error.lo `test -f 'lt_error.c' || echo '$(srcdir)/'`lt_error.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdl_la-lt_error.Tpo $(DEPDIR)/libltdl_la-lt_error.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='lt_error.c' object='libltdl_la-lt_error.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdl_la-lt_error.lo `test -f 'lt_error.c' || echo '$(srcdir)/'`lt_error.c
+
+libltdl_la-ltdl.lo: ltdl.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdl_la-ltdl.lo -MD -MP -MF $(DEPDIR)/libltdl_la-ltdl.Tpo -c -o libltdl_la-ltdl.lo `test -f 'ltdl.c' || echo '$(srcdir)/'`ltdl.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdl_la-ltdl.Tpo $(DEPDIR)/libltdl_la-ltdl.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='ltdl.c' object='libltdl_la-ltdl.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdl_la-ltdl.lo `test -f 'ltdl.c' || echo '$(srcdir)/'`ltdl.c
+
+libltdl_la-slist.lo: slist.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdl_la-slist.lo -MD -MP -MF $(DEPDIR)/libltdl_la-slist.Tpo -c -o libltdl_la-slist.lo `test -f 'slist.c' || echo '$(srcdir)/'`slist.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdl_la-slist.Tpo $(DEPDIR)/libltdl_la-slist.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='slist.c' object='libltdl_la-slist.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdl_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdl_la-slist.lo `test -f 'slist.c' || echo '$(srcdir)/'`slist.c
+
+libltdlc_la-preopen.lo: loaders/preopen.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdlc_la-preopen.lo -MD -MP -MF $(DEPDIR)/libltdlc_la-preopen.Tpo -c -o libltdlc_la-preopen.lo `test -f 'loaders/preopen.c' || echo '$(srcdir)/'`loaders/preopen.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdlc_la-preopen.Tpo $(DEPDIR)/libltdlc_la-preopen.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/preopen.c' object='libltdlc_la-preopen.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdlc_la-preopen.lo `test -f 'loaders/preopen.c' || echo '$(srcdir)/'`loaders/preopen.c
+
+libltdlc_la-lt__alloc.lo: lt__alloc.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdlc_la-lt__alloc.lo -MD -MP -MF $(DEPDIR)/libltdlc_la-lt__alloc.Tpo -c -o libltdlc_la-lt__alloc.lo `test -f 'lt__alloc.c' || echo '$(srcdir)/'`lt__alloc.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdlc_la-lt__alloc.Tpo $(DEPDIR)/libltdlc_la-lt__alloc.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='lt__alloc.c' object='libltdlc_la-lt__alloc.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdlc_la-lt__alloc.lo `test -f 'lt__alloc.c' || echo '$(srcdir)/'`lt__alloc.c
+
+libltdlc_la-lt_dlloader.lo: lt_dlloader.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdlc_la-lt_dlloader.lo -MD -MP -MF $(DEPDIR)/libltdlc_la-lt_dlloader.Tpo -c -o libltdlc_la-lt_dlloader.lo `test -f 'lt_dlloader.c' || echo '$(srcdir)/'`lt_dlloader.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdlc_la-lt_dlloader.Tpo $(DEPDIR)/libltdlc_la-lt_dlloader.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='lt_dlloader.c' object='libltdlc_la-lt_dlloader.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdlc_la-lt_dlloader.lo `test -f 'lt_dlloader.c' || echo '$(srcdir)/'`lt_dlloader.c
+
+libltdlc_la-lt_error.lo: lt_error.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdlc_la-lt_error.lo -MD -MP -MF $(DEPDIR)/libltdlc_la-lt_error.Tpo -c -o libltdlc_la-lt_error.lo `test -f 'lt_error.c' || echo '$(srcdir)/'`lt_error.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdlc_la-lt_error.Tpo $(DEPDIR)/libltdlc_la-lt_error.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='lt_error.c' object='libltdlc_la-lt_error.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdlc_la-lt_error.lo `test -f 'lt_error.c' || echo '$(srcdir)/'`lt_error.c
+
+libltdlc_la-ltdl.lo: ltdl.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdlc_la-ltdl.lo -MD -MP -MF $(DEPDIR)/libltdlc_la-ltdl.Tpo -c -o libltdlc_la-ltdl.lo `test -f 'ltdl.c' || echo '$(srcdir)/'`ltdl.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdlc_la-ltdl.Tpo $(DEPDIR)/libltdlc_la-ltdl.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='ltdl.c' object='libltdlc_la-ltdl.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdlc_la-ltdl.lo `test -f 'ltdl.c' || echo '$(srcdir)/'`ltdl.c
+
+libltdlc_la-slist.lo: slist.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libltdlc_la-slist.lo -MD -MP -MF $(DEPDIR)/libltdlc_la-slist.Tpo -c -o libltdlc_la-slist.lo `test -f 'slist.c' || echo '$(srcdir)/'`slist.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/libltdlc_la-slist.Tpo $(DEPDIR)/libltdlc_la-slist.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='slist.c' object='libltdlc_la-slist.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libltdlc_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libltdlc_la-slist.lo `test -f 'slist.c' || echo '$(srcdir)/'`slist.c
+
+load_add_on.lo: loaders/load_add_on.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT load_add_on.lo -MD -MP -MF $(DEPDIR)/load_add_on.Tpo -c -o load_add_on.lo `test -f 'loaders/load_add_on.c' || echo '$(srcdir)/'`loaders/load_add_on.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/load_add_on.Tpo $(DEPDIR)/load_add_on.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/load_add_on.c' object='load_add_on.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o load_add_on.lo `test -f 'loaders/load_add_on.c' || echo '$(srcdir)/'`loaders/load_add_on.c
+
+loadlibrary.lo: loaders/loadlibrary.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT loadlibrary.lo -MD -MP -MF $(DEPDIR)/loadlibrary.Tpo -c -o loadlibrary.lo `test -f 'loaders/loadlibrary.c' || echo '$(srcdir)/'`loaders/loadlibrary.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/loadlibrary.Tpo $(DEPDIR)/loadlibrary.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/loadlibrary.c' object='loadlibrary.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o loadlibrary.lo `test -f 'loaders/loadlibrary.c' || echo '$(srcdir)/'`loaders/loadlibrary.c
+
+shl_load.lo: loaders/shl_load.c
+@am__fastdepCC_TRUE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT shl_load.lo -MD -MP -MF $(DEPDIR)/shl_load.Tpo -c -o shl_load.lo `test -f 'loaders/shl_load.c' || echo '$(srcdir)/'`loaders/shl_load.c
+@am__fastdepCC_TRUE@	$(am__mv) $(DEPDIR)/shl_load.Tpo $(DEPDIR)/shl_load.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='loaders/shl_load.c' object='shl_load.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL)  --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o shl_load.lo `test -f 'loaders/shl_load.c' || echo '$(srcdir)/'`loaders/shl_load.c
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+distclean-libtool:
+	-rm -f libtool config.lt
+install-includeHEADERS: $(include_HEADERS)
+	@$(NORMAL_INSTALL)
+	test -z "$(includedir)" || $(MKDIR_P) "$(DESTDIR)$(includedir)"
+	@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
+	for p in $$list; do \
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
+	  $(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
+	done
+
+uninstall-includeHEADERS:
+	@$(NORMAL_UNINSTALL)
+	@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
+	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+	test -n "$$files" || exit 0; \
+	echo " ( cd '$(DESTDIR)$(includedir)' && rm -f" $$files ")"; \
+	cd "$(DESTDIR)$(includedir)" && rm -f $$files
+install-ltdlincludeHEADERS: $(ltdlinclude_HEADERS)
+	@$(NORMAL_INSTALL)
+	test -z "$(ltdlincludedir)" || $(MKDIR_P) "$(DESTDIR)$(ltdlincludedir)"
+	@list='$(ltdlinclude_HEADERS)'; test -n "$(ltdlincludedir)" || list=; \
+	for p in $$list; do \
+	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+	  echo "$$d$$p"; \
+	done | $(am__base_list) | \
+	while read files; do \
+	  echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(ltdlincludedir)'"; \
+	  $(INSTALL_HEADER) $$files "$(DESTDIR)$(ltdlincludedir)" || exit $$?; \
+	done
+
+uninstall-ltdlincludeHEADERS:
+	@$(NORMAL_UNINSTALL)
+	@list='$(ltdlinclude_HEADERS)'; test -n "$(ltdlincludedir)" || list=; \
+	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+	test -n "$$files" || exit 0; \
+	echo " ( cd '$(DESTDIR)$(ltdlincludedir)' && rm -f" $$files ")"; \
+	cd "$(DESTDIR)$(ltdlincludedir)" && rm -f $$files
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+	      END { if (nonempty) { for (i in files) print i; }; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS:  $(HEADERS) $(SOURCES) config-h.in $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	set x; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS) config-h.in $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+	      END { if (nonempty) { for (i in files) print i; }; }'`; \
+	shift; \
+	if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  if test $$# -gt 0; then \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      "$$@" $$unique; \
+	  else \
+	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	      $$unique; \
+	  fi; \
+	fi
+ctags: CTAGS
+CTAGS:  $(HEADERS) $(SOURCES) config-h.in $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	list='$(SOURCES) $(HEADERS) config-h.in $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+	      END { if (nonempty) { for (i in files) print i; }; }'`; \
+	test -z "$(CTAGS_ARGS)$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && $(am__cd) $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) "$$here"
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	$(am__remove_distdir)
+	test -d "$(distdir)" || mkdir "$(distdir)"
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d "$(distdir)/$$file"; then \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+	    fi; \
+	    cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+	  else \
+	    test -f "$(distdir)/$$file" \
+	    || cp -p $$d/$$file "$(distdir)/$$file" \
+	    || exit 1; \
+	  fi; \
+	done
+	-test -n "$(am__skip_mode_fix)" \
+	|| find "$(distdir)" -type d ! -perm -755 \
+		-exec chmod u+rwx,go+rx {} \; -o \
+	  ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -400 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
+	|| chmod -R a+r "$(distdir)"
+dist-gzip: distdir
+	tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+	$(am__remove_distdir)
+
+dist-bzip2: distdir
+	tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2
+	$(am__remove_distdir)
+
+dist-lzma: distdir
+	tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma
+	$(am__remove_distdir)
+
+dist-xz: distdir
+	tardir=$(distdir) && $(am__tar) | xz -c >$(distdir).tar.xz
+	$(am__remove_distdir)
+
+dist-tarZ: distdir
+	tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
+	$(am__remove_distdir)
+
+dist-shar: distdir
+	shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
+	$(am__remove_distdir)
+
+dist-zip: distdir
+	-rm -f $(distdir).zip
+	zip -rq $(distdir).zip $(distdir)
+	$(am__remove_distdir)
+
+dist dist-all: distdir
+	tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+	$(am__remove_distdir)
+
+# This target untars the dist file and tries a VPATH configuration.  Then
+# it guarantees that the distribution is self-contained by making another
+# tarfile.
+distcheck: dist
+	case '$(DIST_ARCHIVES)' in \
+	*.tar.gz*) \
+	  GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\
+	*.tar.bz2*) \
+	  bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\
+	*.tar.lzma*) \
+	  lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\
+	*.tar.xz*) \
+	  xz -dc $(distdir).tar.xz | $(am__untar) ;;\
+	*.tar.Z*) \
+	  uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
+	*.shar.gz*) \
+	  GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\
+	*.zip*) \
+	  unzip $(distdir).zip ;;\
+	esac
+	chmod -R a-w $(distdir); chmod a+w $(distdir)
+	mkdir $(distdir)/_build
+	mkdir $(distdir)/_inst
+	chmod a-w $(distdir)
+	test -d $(distdir)/_build || exit 0; \
+	dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
+	  && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
+	  && am__cwd=`pwd` \
+	  && $(am__cd) $(distdir)/_build \
+	  && ../configure --srcdir=.. --prefix="$$dc_install_base" \
+	    $(DISTCHECK_CONFIGURE_FLAGS) \
+	  && $(MAKE) $(AM_MAKEFLAGS) \
+	  && $(MAKE) $(AM_MAKEFLAGS) dvi \
+	  && $(MAKE) $(AM_MAKEFLAGS) check \
+	  && $(MAKE) $(AM_MAKEFLAGS) install \
+	  && $(MAKE) $(AM_MAKEFLAGS) installcheck \
+	  && $(MAKE) $(AM_MAKEFLAGS) uninstall \
+	  && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
+	        distuninstallcheck \
+	  && chmod -R a-w "$$dc_install_base" \
+	  && ({ \
+	       (cd ../.. && umask 077 && mkdir "$$dc_destdir") \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
+	            distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
+	      } || { rm -rf "$$dc_destdir"; exit 1; }) \
+	  && rm -rf "$$dc_destdir" \
+	  && $(MAKE) $(AM_MAKEFLAGS) dist \
+	  && rm -rf $(DIST_ARCHIVES) \
+	  && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \
+	  && cd "$$am__cwd" \
+	  || exit 1
+	$(am__remove_distdir)
+	@(echo "$(distdir) archives ready for distribution: "; \
+	  list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
+	  sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
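A minimal usage sketch for the distcheck rule above (the configure flag shown is hypothetical, not something this Makefile sets): from an already configured tree one would typically run

    make distcheck DISTCHECK_CONFIGURE_FLAGS='--enable-shared'

which unpacks the freshly rolled tarball, configures and builds it from a separate _build directory against the read-only unpacked sources, runs the check, install, installcheck, uninstall and DESTDIR-install passes above, and finally re-creates the distribution to confirm it is self-contained.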
+distuninstallcheck:
+	@$(am__cd) '$(distuninstallcheck_dir)' \
+	&& test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \
+	   || { echo "ERROR: files left after uninstall:" ; \
+	        if test -n "$(DESTDIR)"; then \
+	          echo "  (check DESTDIR support)"; \
+	        fi ; \
+	        $(distuninstallcheck_listfiles) ; \
+	        exit 1; } >&2
+distcleancheck: distclean
+	@if test '$(srcdir)' = . ; then \
+	  echo "ERROR: distcleancheck can only run from a VPATH build" ; \
+	  exit 1 ; \
+	fi
+	@test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
+	  || { echo "ERROR: files left in build directory after distclean:" ; \
+	       $(distcleancheck_listfiles) ; \
+	       exit 1; } >&2
+check-am: all-am
+check: $(BUILT_SOURCES)
+	$(MAKE) $(AM_MAKEFLAGS) check-am
+all-am: Makefile $(LTLIBRARIES) $(HEADERS) config.h
+installdirs:
+	for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)" "$(DESTDIR)$(ltdlincludedir)"; do \
+	  test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+	done
+install: $(BUILT_SOURCES)
+	$(MAKE) $(AM_MAKEFLAGS) install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
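As a rough illustration of the install-strip indirection above (tool name and staging path are hypothetical), a cross build can point STRIP at the target toolchain's strip so install-sh strips what it installs:

    make install-strip STRIP=arm-linux-gnueabi-strip DESTDIR=/tmp/stage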
+mostlyclean-generic:
+	-test -z "$(MOSTLYCLEANFILES)" || rm -f $(MOSTLYCLEANFILES)
+
+clean-generic:
+	-test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+	-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use;"
+	@echo "it deletes files that may require special tools to rebuild."
+	-test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES)
+clean: clean-am
+
+clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \
+	clean-noinstLTLIBRARIES mostlyclean-am
+
+distclean: distclean-am
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+	-rm -rf $(DEPDIR) ./$(DEPDIR)
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-hdr distclean-libtool distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am:
+
+info: info-am
+
+info-am:
+
+install-data-am: install-includeHEADERS install-ltdlincludeHEADERS
+
+install-dvi: install-dvi-am
+
+install-dvi-am:
+
+install-exec-am: install-libLTLIBRARIES
+
+install-html: install-html-am
+
+install-html-am:
+
+install-info: install-info-am
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-pdf-am:
+
+install-ps: install-ps-am
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+	-rm -rf $(top_srcdir)/autom4te.cache
+	-rm -rf $(DEPDIR) ./$(DEPDIR)
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-includeHEADERS uninstall-libLTLIBRARIES \
+	uninstall-ltdlincludeHEADERS
+
+.MAKE: all check install install-am install-strip
+
+.PHONY: CTAGS GTAGS all all-am am--refresh check check-am clean \
+	clean-generic clean-libLTLIBRARIES clean-libtool \
+	clean-noinstLTLIBRARIES ctags dist dist-all dist-bzip2 \
+	dist-gzip dist-lzma dist-shar dist-tarZ dist-xz dist-zip \
+	distcheck distclean distclean-compile distclean-generic \
+	distclean-hdr distclean-libtool distclean-tags distcleancheck \
+	distdir distuninstallcheck dvi dvi-am html html-am info \
+	info-am install install-am install-data install-data-am \
+	install-dvi install-dvi-am install-exec install-exec-am \
+	install-html install-html-am install-includeHEADERS \
+	install-info install-info-am install-libLTLIBRARIES \
+	install-ltdlincludeHEADERS install-man install-pdf \
+	install-pdf-am install-ps install-ps-am install-strip \
+	installcheck installcheck-am installdirs maintainer-clean \
+	maintainer-clean-generic mostlyclean mostlyclean-compile \
+	mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+	tags uninstall uninstall-am uninstall-includeHEADERS \
+	uninstall-libLTLIBRARIES uninstall-ltdlincludeHEADERS
+
+
+# We need the following in order to create an <argz.h> when the system
+# doesn't have one that works with the given compiler.
+all-local $(lib_OBJECTS): $(ARGZ_H)
+argz.h: argz_.h
+	$(mkinstalldirs) . 
+	cp $(srcdir)/argz_.h $@-t
+	mv $@-t $@
+
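The argz.h rule above uses the usual copy-to-temporary-then-rename idiom, so an interrupted build never leaves a truncated argz.h behind for a later run to trust. A generic sketch of the same pattern (names purely illustrative; recipe lines must be tab-indented in a real Makefile):

    gen.h: gen.h.in
            cp $(srcdir)/gen.h.in $@-t   # write a temporary first
            mv $@-t $@                   # then rename into place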
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.inc b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.inc
new file mode 100644
index 0000000..adf2aad
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/Makefile.inc
@@ -0,0 +1,153 @@
+## Makefile.inc -- includable Makefile.am snippet
+##
+##   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
+##   Written by Gary V. Vaughan, 2003
+##
+##   NOTE: The canonical source of this file is maintained with the
+##   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+##
+## GNU Libltdl is free software; you can redistribute it and/or
+## modify it under the terms of the GNU Lesser General Public
+## License as published by the Free Software Foundation; either
+## version 2 of the License, or (at your option) any later version.
+##
+## As a special exception to the GNU Lesser General Public License,
+## if you distribute this file as part of a program or library that
+## is built using GNU libtool, you may include this file under the
+## same distribution terms that you use for the rest of that program.
+##
+## GNU Libltdl is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU Lesser General Public License for more details.
+##
+## You should have received a copy of the GNU Lesser General Public
+## License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+## copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+## or obtained by writing to the Free Software Foundation, Inc.,
+## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#####
+
+## DO NOT REMOVE THIS LINE -- make depends on it
+
+# -I$(srcdir) is needed for user that built libltdl with a sub-Automake
+# (not as a sub-package!) using 'nostdinc':
+AM_CPPFLAGS	       += -DLT_CONFIG_H='<$(LT_CONFIG_H)>' \
+			  -DLTDL -I. -I$(srcdir) -Ilibltdl \
+			  -I$(srcdir)/libltdl -I$(srcdir)/libltdl/libltdl
+AM_LDFLAGS	       += -no-undefined
+LTDL_VERSION_INFO	= -version-info 10:0:3
+
+noinst_LTLIBRARIES	+= $(LT_DLLOADERS)
+
+if INSTALL_LTDL
+ltdlincludedir		= $(includedir)/libltdl
+ltdlinclude_HEADERS	= libltdl/libltdl/lt_system.h \
+			  libltdl/libltdl/lt_error.h \
+			  libltdl/libltdl/lt_dlloader.h
+include_HEADERS	       += libltdl/ltdl.h
+lib_LTLIBRARIES	       += libltdl/libltdl.la
+endif
+
+if CONVENIENCE_LTDL
+noinst_LTLIBRARIES     += libltdl/libltdlc.la
+endif
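For context only (a sketch assuming libtool 2.x conventions, not taken from this tree): a package embedding this snippet usually selects between the two conditionals from configure.ac with something like

    LT_CONFIG_LTDL_DIR([libltdl])
    LTDL_INIT([convenience])

where LTDL_INIT([installable]) would instead enable INSTALL_LTDL so the headers and libltdl.la declared above get installed, while [convenience] enables CONVENIENCE_LTDL and only builds the uninstalled libltdlc.la for linking into the package itself.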
+
+libltdl_libltdl_la_SOURCES = libltdl/libltdl/lt__alloc.h \
+			  libltdl/libltdl/lt__dirent.h \
+			  libltdl/libltdl/lt__glibc.h \
+			  libltdl/libltdl/lt__private.h \
+			  libltdl/libltdl/lt__strl.h \
+			  libltdl/libltdl/lt_dlloader.h \
+			  libltdl/libltdl/lt_error.h \
+	                  libltdl/libltdl/lt_system.h \
+			  libltdl/libltdl/slist.h \
+			  libltdl/loaders/preopen.c \
+			  libltdl/lt__alloc.c \
+			  libltdl/lt_dlloader.c \
+			  libltdl/lt_error.c \
+			  libltdl/ltdl.c \
+			  libltdl/ltdl.h \
+			  libltdl/slist.c
+
+EXTRA_DIST	       += libltdl/lt__dirent.c \
+			  libltdl/lt__strl.c
+
+libltdl_libltdl_la_CPPFLAGS	= -DLTDLOPEN=$(LTDLOPEN) $(AM_CPPFLAGS)
+libltdl_libltdl_la_LDFLAGS	= $(AM_LDFLAGS) $(LTDL_VERSION_INFO) $(LT_DLPREOPEN)
+libltdl_libltdl_la_LIBADD	= $(LTLIBOBJS)
+libltdl_libltdl_la_DEPENDENCIES	= $(LT_DLLOADERS) $(LTLIBOBJS)
+
+libltdl_libltdlc_la_SOURCES	= $(libltdl_libltdl_la_SOURCES)
+libltdl_libltdlc_la_CPPFLAGS	= -DLTDLOPEN=$(LTDLOPEN)c $(AM_CPPFLAGS)
+libltdl_libltdlc_la_LDFLAGS	= $(AM_LDFLAGS) $(LT_DLPREOPEN)
+libltdl_libltdlc_la_LIBADD	= $(libltdl_libltdl_la_LIBADD)
+libltdl_libltdlc_la_DEPENDENCIES= $(libltdl_libltdl_la_DEPENDENCIES)
+
+## The loaders are preopened by libltdl, itself always built from
+## pic-objects (either as a shared library, or a convenience library),
+## so the loaders themselves must be made from pic-objects too.  We
+## use convenience libraries for that purpose:
+EXTRA_LTLIBRARIES	       += libltdl/dlopen.la \
+				  libltdl/dld_link.la \
+				  libltdl/dyld.la \
+				  libltdl/load_add_on.la \
+				  libltdl/loadlibrary.la \
+				  libltdl/shl_load.la
+
+libltdl_dlopen_la_SOURCES	= libltdl/loaders/dlopen.c
+libltdl_dlopen_la_LDFLAGS	= -module -avoid-version
+libltdl_dlopen_la_LIBADD 	= $(LIBADD_DLOPEN)
+
+libltdl_dld_link_la_SOURCES	= libltdl/loaders/dld_link.c
+libltdl_dld_link_la_LDFLAGS	= -module -avoid-version
+libltdl_dld_link_la_LIBADD	= -ldld
+
+libltdl_dyld_la_SOURCES		= libltdl/loaders/dyld.c
+libltdl_dyld_la_LDFLAGS		= -module -avoid-version
+
+libltdl_load_add_on_la_SOURCES	= libltdl/loaders/load_add_on.c
+libltdl_load_add_on_la_LDFLAGS	= -module -avoid-version
+
+libltdl_loadlibrary_la_SOURCES	= libltdl/loaders/loadlibrary.c
+libltdl_loadlibrary_la_LDFLAGS	= -module -avoid-version
+
+libltdl_shl_load_la_SOURCES	= libltdl/loaders/shl_load.c
+libltdl_shl_load_la_LDFLAGS	= -module -avoid-version
+libltdl_shl_load_la_LIBADD	= $(LIBADD_SHL_LOAD)
+
+## Make sure these will be cleaned even when they're not built by default:
+CLEANFILES		       += libltdl/libltdl.la \
+				  libltdl/libltdlc.la \
+				  libltdl/libdlloader.la
+
+## Automake-1.9.6 doesn't clean subdir AC_LIBOBJ compiled objects
+## automatically:
+CLEANFILES	       += $(LIBOBJS) $(LTLIBOBJS)
+
+EXTRA_DIST	       += libltdl/COPYING.LIB \
+			  libltdl/configure.ac \
+			  libltdl/Makefile.am \
+			  libltdl/aclocal.m4 \
+			  libltdl/Makefile.in \
+			  libltdl/configure \
+			  libltdl/config-h.in \
+			  libltdl/README
+
+## --------------------------- ##
+## Gnulib Makefile.am snippets ##
+## --------------------------- ##
+
+BUILT_SOURCES	+= libltdl/$(ARGZ_H)
+EXTRA_DIST	+= libltdl/argz_.h \
+		   libltdl/argz.c
+
+# We need the following in order to create an <argz.h> when the system
+# doesn't have one that works with the given compiler.
+all-local $(lib_OBJECTS): libltdl/$(ARGZ_H)
+libltdl/argz.h: libltdl/argz_.h
+	$(mkinstalldirs) . libltdl/
+	cp $(srcdir)/libltdl/argz_.h $@-t
+	mv $@-t $@
+MOSTLYCLEANFILES += libltdl/argz.h \
+		    libltdl/argz.h-t
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/README b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/README
new file mode 100644
index 0000000..90a0aef
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/README
@@ -0,0 +1,20 @@
+This is GNU libltdl, a system independent dlopen wrapper for GNU libtool.
+
+It supports the following dlopen interfaces:
+* dlopen (POSIX)
+* shl_load (HP-UX)
+* LoadLibrary (Win16 and Win32)
+* load_add_on (BeOS)
+* GNU DLD (emulates dynamic linking for static libraries)
+* dyld (darwin/Mac OS X)
+* libtool's dlpreopen
+--
+   Copyright (C) 1999, 2003, 2011 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1999
+
+   This file is part of GNU Libtool.
+
+Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved.  This file is offered as-is,
+without warranty of any kind.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/aclocal.m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/aclocal.m4
new file mode 100644
index 0000000..ca56e93
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/aclocal.m4
@@ -0,0 +1,958 @@
+# generated automatically by aclocal 1.11.1 -*- Autoconf -*-
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+# 2005, 2006, 2007, 2008, 2009  Free Software Foundation, Inc.
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.68],,
+[m4_warning([this file was generated for autoconf 2.68.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically `autoreconf'.])])
+
+# Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_AUTOMAKE_VERSION(VERSION)
+# ----------------------------
+# Automake X.Y traces this macro to ensure aclocal.m4 has been
+# generated from the m4 files accompanying Automake X.Y.
+# (This private macro should not be called outside this file.)
+AC_DEFUN([AM_AUTOMAKE_VERSION],
+[am__api_version='1.11'
+dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
+dnl require some minimum version.  Point them to the right macro.
+m4_if([$1], [1.11.1], [],
+      [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
+])
+
+# _AM_AUTOCONF_VERSION(VERSION)
+# -----------------------------
+# aclocal traces this macro to find the Autoconf version.
+# This is a private macro too.  Using m4_define simplifies
+# the logic in aclocal, which can simply ignore this definition.
+m4_define([_AM_AUTOCONF_VERSION], [])
+
+# AM_SET_CURRENT_AUTOMAKE_VERSION
+# -------------------------------
+# Call AM_AUTOMAKE_VERSION and _AM_AUTOCONF_VERSION so they can be traced.
+# This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
+AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
+[AM_AUTOMAKE_VERSION([1.11.1])dnl
+m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
+
+# AM_AUX_DIR_EXPAND                                         -*- Autoconf -*-
+
+# Copyright (C) 2001, 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets
+# $ac_aux_dir to `$srcdir/foo'.  In other projects, it is set to
+# `$srcdir', `$srcdir/..', or `$srcdir/../..'.
+#
+# Of course, Automake must honor this variable whenever it calls a
+# tool from the auxiliary directory.  The problem is that $srcdir (and
+# therefore $ac_aux_dir as well) can be either absolute or relative,
+# depending on how configure is run.  This is pretty annoying, since
+# it makes $ac_aux_dir quite unusable in subdirectories: in the top
+# source directory, any form will work fine, but in subdirectories a
+# relative path needs to be adjusted first.
+#
+# $ac_aux_dir/missing
+#    fails when called from a subdirectory if $ac_aux_dir is relative
+# $top_srcdir/$ac_aux_dir/missing
+#    fails if $ac_aux_dir is absolute,
+#    fails when called from a subdirectory in a VPATH build with
+#          a relative $ac_aux_dir
+#
+# The reason of the latter failure is that $top_srcdir and $ac_aux_dir
+# are both prefixed by $srcdir.  In an in-source build this is usually
+# harmless because $srcdir is `.', but things will break when you
+# start a VPATH build or use an absolute $srcdir.
+#
+# So we could use something similar to $top_srcdir/$ac_aux_dir/missing,
+# iff we strip the leading $srcdir from $ac_aux_dir.  That would be:
+#   am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"`
+# and then we would define $MISSING as
+#   MISSING="\${SHELL} $am_aux_dir/missing"
+# This will work as long as MISSING is not called from configure, because
+# unfortunately $(top_srcdir) has no meaning in configure.
+# However there are other variables, like CC, which are often used in
+# configure, and could therefore not use this "fixed" $ac_aux_dir.
+#
+# Another solution, used here, is to always expand $ac_aux_dir to an
+# absolute PATH.  The drawback is that using absolute paths prevent a
+# configured tree to be moved without reconfiguration.
+
+AC_DEFUN([AM_AUX_DIR_EXPAND],
+[dnl Rely on autoconf to set up CDPATH properly.
+AC_PREREQ([2.50])dnl
+# expand $ac_aux_dir to an absolute path
+am_aux_dir=`cd $ac_aux_dir && pwd`
+])
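To make the effect described above concrete (paths purely illustrative): with AC_CONFIG_AUX_DIR([build-aux]) in configure.ac, $ac_aux_dir may come out as the relative "build-aux", while AM_AUX_DIR_EXPAND leaves it absolute, e.g.

    am_aux_dir=/home/user/project/build-aux

so expansions such as "$am_aux_dir/missing" keep working no matter which directory configure-time code runs from, at the cost that a configured tree cannot be moved without re-running configure.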
+
+# AM_CONDITIONAL                                            -*- Autoconf -*-
+
+# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 9
+
+# AM_CONDITIONAL(NAME, SHELL-CONDITION)
+# -------------------------------------
+# Define a conditional.
+AC_DEFUN([AM_CONDITIONAL],
+[AC_PREREQ(2.52)dnl
+ ifelse([$1], [TRUE],  [AC_FATAL([$0: invalid condition: $1])],
+	[$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl
+AC_SUBST([$1_TRUE])dnl
+AC_SUBST([$1_FALSE])dnl
+_AM_SUBST_NOTMAKE([$1_TRUE])dnl
+_AM_SUBST_NOTMAKE([$1_FALSE])dnl
+m4_define([_AM_COND_VALUE_$1], [$2])dnl
+if $2; then
+  $1_TRUE=
+  $1_FALSE='#'
+else
+  $1_TRUE='#'
+  $1_FALSE=
+fi
+AC_CONFIG_COMMANDS_PRE(
+[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then
+  AC_MSG_ERROR([[conditional "$1" was never defined.
+Usually this means the macro was only invoked conditionally.]])
+fi])])
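A brief usage sketch for AM_CONDITIONAL (the WITH_FOO names are hypothetical): configure.ac defines the condition once,

    AM_CONDITIONAL([WITH_FOO], [test "x$enable_foo" = xyes])

and any Makefile.am can then branch on it:

    if WITH_FOO
    bin_PROGRAMS += foo        # extends a bin_PROGRAMS set earlier
    endif

When the shell condition holds, WITH_FOO_TRUE is substituted as empty and WITH_FOO_FALSE as '#', so the unused branch is commented out of the generated Makefile, exactly as the $1_TRUE/$1_FALSE assignments above arrange.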
+
+# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 10
+
+# There are a few dirty hacks below to avoid letting `AC_PROG_CC' be
+# written in clear, in which case automake, when reading aclocal.m4,
+# will think it sees a *use*, and therefore will trigger all its
+# C support machinery.  Also note that it means that autoscan, seeing
+# CC etc. in the Makefile, will ask for an AC_PROG_CC use...
+
+
+# _AM_DEPENDENCIES(NAME)
+# ----------------------
+# See how the compiler implements dependency checking.
+# NAME is "CC", "CXX", "GCJ", or "OBJC".
+# We try a few techniques and use that to set a single cache variable.
+#
+# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was
+# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular
+# dependency, and given that the user is not expected to run this macro,
+# just rely on AC_PROG_CC.
+AC_DEFUN([_AM_DEPENDENCIES],
+[AC_REQUIRE([AM_SET_DEPDIR])dnl
+AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl
+AC_REQUIRE([AM_MAKE_INCLUDE])dnl
+AC_REQUIRE([AM_DEP_TRACK])dnl
+
+ifelse([$1], CC,   [depcc="$CC"   am_compiler_list=],
+       [$1], CXX,  [depcc="$CXX"  am_compiler_list=],
+       [$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'],
+       [$1], UPC,  [depcc="$UPC"  am_compiler_list=],
+       [$1], GCJ,  [depcc="$GCJ"  am_compiler_list='gcc3 gcc'],
+                   [depcc="$$1"   am_compiler_list=])
+
+AC_CACHE_CHECK([dependency style of $depcc],
+               [am_cv_$1_dependencies_compiler_type],
+[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named `D' -- because `-MD' means `put the output
+  # in D'.
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_$1_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp`
+  fi
+  am__universal=false
+  m4_case([$1], [CC],
+    [case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac],
+    [CXX],
+    [case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac])
+
+  for depmode in $am_compiler_list; do
+    # Setup a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
+      # Solaris 8's {/usr,}/bin/sh.
+      touch sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    # We check with `-c' and `-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle `-M -o', and we need to detect this.  Also, some Intel
+    # versions had trouble with output in subdirs
+    am__obj=sub/conftest.${OBJEXT-o}
+    am__minus_obj="-o $am__obj"
+    case $depmode in
+    gcc)
+      # This depmode causes a compiler race in universal mode.
+      test "$am__universal" = false || continue
+      ;;
+    nosideeffect)
+      # after this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    msvisualcpp | msvcmsys)
+      # This compiler won't grok `-c -o', but also, the minuso test has
+      # not run yet.  These depmodes are late enough in the game, and
+      # so weak that their functioning should not be impacted.
+      am__obj=conftest.${OBJEXT-o}
+      am__minus_obj=
+      ;;
+    none) break ;;
+    esac
+    if depmode=$depmode \
+       source=sub/conftest.c object=$am__obj \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options, it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_$1_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_$1_dependencies_compiler_type=none
+fi
+])
+AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type])
+AM_CONDITIONAL([am__fastdep$1], [
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_$1_dependencies_compiler_type" = gcc3])
+])
+
+
+# AM_SET_DEPDIR
+# -------------
+# Choose a directory name for dependency files.
+# This macro is AC_REQUIREd in _AM_DEPENDENCIES
+AC_DEFUN([AM_SET_DEPDIR],
+[AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl
+])
+
+
+# AM_DEP_TRACK
+# ------------
+AC_DEFUN([AM_DEP_TRACK],
+[AC_ARG_ENABLE(dependency-tracking,
+[  --disable-dependency-tracking  speeds up one-time build
+  --enable-dependency-tracking   do not reject slow dependency extractors])
+if test "x$enable_dependency_tracking" != xno; then
+  am_depcomp="$ac_aux_dir/depcomp"
+  AMDEPBACKSLASH='\'
+fi
+AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno])
+AC_SUBST([AMDEPBACKSLASH])dnl
+_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl
+])
+
+# Generate code to set up dependency tracking.              -*- Autoconf -*-
+
+# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2008
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+#serial 5
+
+# _AM_OUTPUT_DEPENDENCY_COMMANDS
+# ------------------------------
+AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
+[{
+  # Autoconf 2.62 quotes --file arguments for eval, but not when files
+  # are listed without --file.  Let's play safe and only enable the eval
+  # if we detect the quoting.
+  case $CONFIG_FILES in
+  *\'*) eval set x "$CONFIG_FILES" ;;
+  *)   set x $CONFIG_FILES ;;
+  esac
+  shift
+  for mf
+  do
+    # Strip MF so we end up with the name of the file.
+    mf=`echo "$mf" | sed -e 's/:.*$//'`
+    # Check whether this is an Automake generated Makefile or not.
+    # We used to match only the files named `Makefile.in', but
+    # some people rename them; so instead we look at the file content.
+    # Grep'ing the first line is not enough: some people post-process
+    # each Makefile.in and add a new line on top of each file to say so.
+    # Grep'ing the whole file is not good either: AIX grep has a line
+    # limit of 2048, but all seds we know understand at least 4000.
+    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
+      dirpart=`AS_DIRNAME("$mf")`
+    else
+      continue
+    fi
+    # Extract the definition of DEPDIR, am__include, and am__quote
+    # from the Makefile without running `make'.
+    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+    test -z "$DEPDIR" && continue
+    am__include=`sed -n 's/^am__include = //p' < "$mf"`
+    test -z "$am__include" && continue
+    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+    # When using ansi2knr, U may be empty or an underscore; expand it
+    U=`sed -n 's/^U = //p' < "$mf"`
+    # Find all dependency output files, they are included files with
+    # $(DEPDIR) in their names.  We invoke sed twice because it is the
+    # simplest approach to changing $(DEPDIR) to its actual value in the
+    # expansion.
+    for file in `sed -n "
+      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
+      # Make sure the directory exists.
+      test -f "$dirpart/$file" && continue
+      fdir=`AS_DIRNAME(["$file"])`
+      AS_MKDIR_P([$dirpart/$fdir])
+      # echo "creating $dirpart/$file"
+      echo '# dummy' > "$dirpart/$file"
+    done
+  done
+}
+])# _AM_OUTPUT_DEPENDENCY_COMMANDS
+
+
+# AM_OUTPUT_DEPENDENCY_COMMANDS
+# -----------------------------
+# This macro should only be invoked once -- use via AC_REQUIRE.
+#
+# This code is only required when automatic dependency tracking
+# is enabled.  FIXME.  This creates each `.P' file that we will
+# need in order to bootstrap the dependency handling code.
+AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
+[AC_CONFIG_COMMANDS([depfiles],
+     [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
+     [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
+])
+
+# Do all the work for Automake.                             -*- Autoconf -*-
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+# 2005, 2006, 2008, 2009 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 16
+
+# This macro actually does too much.  Some checks are only needed if
+# your package does certain things.  But this isn't really a big deal.
+
+# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
+# AM_INIT_AUTOMAKE([OPTIONS])
+# -----------------------------------------------
+# The call with PACKAGE and VERSION arguments is the old style
+# call (pre autoconf-2.50), which is being phased out.  PACKAGE
+# and VERSION should now be passed to AC_INIT and removed from
+# the call to AM_INIT_AUTOMAKE.
+# We support both call styles for the transition.  After
+# the next Automake release, Autoconf can make the AC_INIT
+# arguments mandatory, and then we can depend on a new Autoconf
+# release and drop the old call support.
+AC_DEFUN([AM_INIT_AUTOMAKE],
+[AC_PREREQ([2.62])dnl
+dnl Autoconf wants to disallow AM_ names.  We explicitly allow
+dnl the ones we care about.
+m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
+AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
+AC_REQUIRE([AC_PROG_INSTALL])dnl
+if test "`cd $srcdir && pwd`" != "`pwd`"; then
+  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
+  # is not polluted with repeated "-I."
+  AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
+  # test to see if srcdir already configured
+  if test -f $srcdir/config.status; then
+    AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
+  fi
+fi
+
+# test whether we have cygpath
+if test -z "$CYGPATH_W"; then
+  if (cygpath --version) >/dev/null 2>/dev/null; then
+    CYGPATH_W='cygpath -w'
+  else
+    CYGPATH_W=echo
+  fi
+fi
+AC_SUBST([CYGPATH_W])
+
+# Define the identity of the package.
+dnl Distinguish between old-style and new-style calls.
+m4_ifval([$2],
+[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
+ AC_SUBST([PACKAGE], [$1])dnl
+ AC_SUBST([VERSION], [$2])],
+[_AM_SET_OPTIONS([$1])dnl
+dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
+m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,,
+  [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
+ AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
+ AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl
+
+_AM_IF_OPTION([no-define],,
+[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package])
+ AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl
+
+# Some tools Automake needs.
+AC_REQUIRE([AM_SANITY_CHECK])dnl
+AC_REQUIRE([AC_ARG_PROGRAM])dnl
+AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version})
+AM_MISSING_PROG(AUTOCONF, autoconf)
+AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version})
+AM_MISSING_PROG(AUTOHEADER, autoheader)
+AM_MISSING_PROG(MAKEINFO, makeinfo)
+AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
+AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
+AC_REQUIRE([AM_PROG_MKDIR_P])dnl
+# We need awk for the "check" target.  The system "awk" is bad on
+# some platforms.
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([AC_PROG_MAKE_SET])dnl
+AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
+	      [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
+			     [_AM_PROG_TAR([v7])])])
+_AM_IF_OPTION([no-dependencies],,
+[AC_PROVIDE_IFELSE([AC_PROG_CC],
+		  [_AM_DEPENDENCIES(CC)],
+		  [define([AC_PROG_CC],
+			  defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_CXX],
+		  [_AM_DEPENDENCIES(CXX)],
+		  [define([AC_PROG_CXX],
+			  defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_OBJC],
+		  [_AM_DEPENDENCIES(OBJC)],
+		  [define([AC_PROG_OBJC],
+			  defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl
+])
+_AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl
+dnl The `parallel-tests' driver may need to know about EXEEXT, so add the
+dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen.  This macro
+dnl is hooked onto _AC_COMPILER_EXEEXT early, see below.
+AC_CONFIG_COMMANDS_PRE(dnl
+[m4_provide_if([_AM_COMPILER_EXEEXT],
+  [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl
+])
+
+dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion.  Do not
+dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further
+dnl mangled by Autoconf and run in a shell conditional statement.
+m4_define([_AC_COMPILER_EXEEXT],
+m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])])
+
+
+# When config.status generates a header, we must update the stamp-h file.
+# This file resides in the same directory as the config header
+# that is generated.  The stamp files are numbered to have different names.
+
+# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
+# loop where config.status creates the headers, so we can generate
+# our stamp files there.
+AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
+[# Compute $1's index in $config_headers.
+_am_arg=$1
+_am_stamp_count=1
+for _am_header in $config_headers :; do
+  case $_am_header in
+    $_am_arg | $_am_arg:* )
+      break ;;
+    * )
+      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
+  esac
+done
+echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])
+
+# Copyright (C) 2001, 2003, 2005, 2008  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_SH
+# ------------------
+# Define $install_sh.
+AC_DEFUN([AM_PROG_INSTALL_SH],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+if test x"${install_sh+set}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
+  *)
+    install_sh="\${SHELL} $am_aux_dir/install-sh"
+  esac
+fi
+AC_SUBST(install_sh)])
+
+# Copyright (C) 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 2
+
+# Check whether the underlying file-system supports filenames
+# with a leading dot.  For instance MS-DOS doesn't.
+AC_DEFUN([AM_SET_LEADING_DOT],
+[rm -rf .tst 2>/dev/null
+mkdir .tst 2>/dev/null
+if test -d .tst; then
+  am__leading_dot=.
+else
+  am__leading_dot=_
+fi
+rmdir .tst 2>/dev/null
+AC_SUBST([am__leading_dot])])
+
+# Check to see how 'make' treats includes.	            -*- Autoconf -*-
+
+# Copyright (C) 2001, 2002, 2003, 2005, 2009  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 4
+
+# AM_MAKE_INCLUDE()
+# -----------------
+# Check to see how make treats includes.
+AC_DEFUN([AM_MAKE_INCLUDE],
+[am_make=${MAKE-make}
+cat > confinc << 'END'
+am__doit:
+	@echo this is the am__doit target
+.PHONY: am__doit
+END
+# If we don't find an include directive, just comment out the code.
+AC_MSG_CHECKING([for style of include used by $am_make])
+am__include="#"
+am__quote=
+_am_result=none
+# First try GNU make style include.
+echo "include confinc" > confmf
+# Ignore all kinds of additional output from `make'.
+case `$am_make -s -f confmf 2> /dev/null` in #(
+*the\ am__doit\ target*)
+  am__include=include
+  am__quote=
+  _am_result=GNU
+  ;;
+esac
+# Now try BSD make style include.
+if test "$am__include" = "#"; then
+   echo '.include "confinc"' > confmf
+   case `$am_make -s -f confmf 2> /dev/null` in #(
+   *the\ am__doit\ target*)
+     am__include=.include
+     am__quote="\""
+     _am_result=BSD
+     ;;
+   esac
+fi
+AC_SUBST([am__include])
+AC_SUBST([am__quote])
+AC_MSG_RESULT([$_am_result])
+rm -f confinc confmf
+])
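The probe above decides how generated Makefiles pull in the per-object dependency fragments seen earlier in this diff. As a sketch of the two outcomes, using a fragment name that does appear in this tree:

    # GNU make style (am__include=include, am__quote empty)
    include ./$(DEPDIR)/libltdl_la-ltdl.Plo
    # BSD make style (am__include=.include, am__quote=")
    .include "./$(DEPDIR)/libltdl_la-ltdl.Plo"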
+
+# Fake the existence of programs that GNU maintainers use.  -*- Autoconf -*-
+
+# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 6
+
+# AM_MISSING_PROG(NAME, PROGRAM)
+# ------------------------------
+AC_DEFUN([AM_MISSING_PROG],
+[AC_REQUIRE([AM_MISSING_HAS_RUN])
+$1=${$1-"${am_missing_run}$2"}
+AC_SUBST($1)])
+
+
+# AM_MISSING_HAS_RUN
+# ------------------
+# Define MISSING if not defined so far and test if it supports --run.
+# If it does, set am_missing_run to use it, otherwise, to nothing.
+AC_DEFUN([AM_MISSING_HAS_RUN],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+AC_REQUIRE_AUX_FILE([missing])dnl
+if test x"${MISSING+set}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+  *)
+    MISSING="\${SHELL} $am_aux_dir/missing" ;;
+  esac
+fi
+# Use eval to expand $SHELL
+if eval "$MISSING --run true"; then
+  am_missing_run="$MISSING --run "
+else
+  am_missing_run=
+  AC_MSG_WARN([`missing' script is too old or missing])
+fi
+])
+
+# Copyright (C) 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_MKDIR_P
+# ---------------
+# Check for `mkdir -p'.
+AC_DEFUN([AM_PROG_MKDIR_P],
+[AC_PREREQ([2.60])dnl
+AC_REQUIRE([AC_PROG_MKDIR_P])dnl
+dnl Automake 1.8 to 1.9.6 used to define mkdir_p.  We now use MKDIR_P,
+dnl while keeping a definition of mkdir_p for backward compatibility.
+dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile.
+dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of
+dnl Makefile.ins that do not define MKDIR_P, so we do our own
+dnl adjustment using top_builddir (which is defined more often than
+dnl MKDIR_P).
+AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl
+case $mkdir_p in
+  [[\\/$]]* | ?:[[\\/]]*) ;;
+  */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
+esac
+])
+
+# Helper functions for option handling.                     -*- Autoconf -*-
+
+# Copyright (C) 2001, 2002, 2003, 2005, 2008  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 4
+
+# _AM_MANGLE_OPTION(NAME)
+# -----------------------
+AC_DEFUN([_AM_MANGLE_OPTION],
+[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])])
+
+# _AM_SET_OPTION(NAME)
+# ------------------------------
+# Set option NAME.  Presently that only means defining a flag for this option.
+AC_DEFUN([_AM_SET_OPTION],
+[m4_define(_AM_MANGLE_OPTION([$1]), 1)])
+
+# _AM_SET_OPTIONS(OPTIONS)
+# ----------------------------------
+# OPTIONS is a space-separated list of Automake options.
+AC_DEFUN([_AM_SET_OPTIONS],
+[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])])
+
+# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET])
+# -------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+AC_DEFUN([_AM_IF_OPTION],
+[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
+
+# Check to make sure that the build environment is sane.    -*- Autoconf -*-
+
+# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 5
+
+# AM_SANITY_CHECK
+# ---------------
+AC_DEFUN([AM_SANITY_CHECK],
+[AC_MSG_CHECKING([whether build environment is sane])
+# Just in case
+sleep 1
+echo timestamp > conftest.file
+# Reject unsafe characters in $srcdir or the absolute working directory
+# name.  Accept space and tab only in the latter.
+am_lf='
+'
+case `pwd` in
+  *[[\\\"\#\$\&\'\`$am_lf]]*)
+    AC_MSG_ERROR([unsafe absolute working directory name]);;
+esac
+case $srcdir in
+  *[[\\\"\#\$\&\'\`$am_lf\ \	]]*)
+    AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);;
+esac
+
+# Do `set' in a subshell so we don't clobber the current shell's
+# arguments.  Must try -L first in case configure is actually a
+# symlink; some systems play weird games with the mod time of symlinks
+# (eg FreeBSD returns the mod time of the symlink's containing
+# directory).
+if (
+   set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
+   if test "$[*]" = "X"; then
+      # -L didn't work.
+      set X `ls -t "$srcdir/configure" conftest.file`
+   fi
+   rm -f conftest.file
+   if test "$[*]" != "X $srcdir/configure conftest.file" \
+      && test "$[*]" != "X conftest.file $srcdir/configure"; then
+
+      # If neither matched, then we have a broken ls.  This can happen
+      # if, for instance, CONFIG_SHELL is bash and it inherits a
+      # broken ls alias from the environment.  This has actually
+      # happened.  Such a system could not be considered "sane".
+      AC_MSG_ERROR([ls -t appears to fail.  Make sure there is not a broken
+alias in your environment])
+   fi
+
+   test "$[2]" = conftest.file
+   )
+then
+   # Ok.
+   :
+else
+   AC_MSG_ERROR([newly created file is older than distributed files!
+Check your system clock])
+fi
+AC_MSG_RESULT(yes)])
+
+# Copyright (C) 2001, 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_STRIP
+# ---------------------
+# One issue with vendor `install' (even GNU) is that you can't
+# specify the program used to strip binaries.  This is especially
+# annoying in cross-compiling environments, where the build's strip
+# is unlikely to handle the host's binaries.
+# Fortunately install-sh will honor a STRIPPROG variable, so we
+# always use install-sh in `make install-strip', and initialize
+# STRIPPROG with the value of the STRIP variable (set by the user).
+AC_DEFUN([AM_PROG_INSTALL_STRIP],
+[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
+# Installed binaries are usually stripped using `strip' when the user
+# runs `make install-strip'.  However `strip' might not be the right
+# tool to use in cross-compilation environments, therefore Automake
+# will honor the `STRIP' environment variable to overrule this program.
+dnl Don't test for $cross_compiling = yes, because it might be `maybe'.
+if test "$cross_compiling" != no; then
+  AC_CHECK_TOOL([STRIP], [strip], :)
+fi
+INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+AC_SUBST([INSTALL_STRIP_PROGRAM])])
+
+# Copyright (C) 2006, 2008  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 2
+
+# _AM_SUBST_NOTMAKE(VARIABLE)
+# ---------------------------
+# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in.
+# This macro is traced by Automake.
+AC_DEFUN([_AM_SUBST_NOTMAKE])
+
+# AM_SUBST_NOTMAKE(VARIABLE)
+# ---------------------------
+# Public sister of _AM_SUBST_NOTMAKE.
+AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])
+
+# Check how to create a tarball.                            -*- Autoconf -*-
+
+# Copyright (C) 2004, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 2
+
+# _AM_PROG_TAR(FORMAT)
+# --------------------
+# Check how to create a tarball in format FORMAT.
+# FORMAT should be one of `v7', `ustar', or `pax'.
+#
+# Substitute a variable $(am__tar) that is a command
+# writing to stdout a FORMAT-tarball containing the directory
+# $tardir.
+#     tardir=directory && $(am__tar) > result.tar
+#
+# Substitute a variable $(am__untar) that extracts such
+# a tarball read from stdin.
+#     $(am__untar) < result.tar
+AC_DEFUN([_AM_PROG_TAR],
+[# Always define AMTAR for backward compatibility.
+AM_MISSING_PROG([AMTAR], [tar])
+m4_if([$1], [v7],
+     [am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'],
+     [m4_case([$1], [ustar],, [pax],,
+              [m4_fatal([Unknown tar format])])
+AC_MSG_CHECKING([how to create a $1 tar archive])
+# Loop over all known methods to create a tar archive until one works.
+_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none'
+_am_tools=${am_cv_prog_tar_$1-$_am_tools}
+# Do not fold the above two lines into one, because Tru64 sh and
+# Solaris sh will not grok spaces in the rhs of `-'.
+for _am_tool in $_am_tools
+do
+  case $_am_tool in
+  gnutar)
+    for _am_tar in tar gnutar gtar;
+    do
+      AM_RUN_LOG([$_am_tar --version]) && break
+    done
+    am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"'
+    am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"'
+    am__untar="$_am_tar -xf -"
+    ;;
+  plaintar)
+    # Must skip GNU tar: if it does not support --format= it doesn't create a
+    # ustar tarball either.
+    (tar --version) >/dev/null 2>&1 && continue
+    am__tar='tar chf - "$$tardir"'
+    am__tar_='tar chf - "$tardir"'
+    am__untar='tar xf -'
+    ;;
+  pax)
+    am__tar='pax -L -x $1 -w "$$tardir"'
+    am__tar_='pax -L -x $1 -w "$tardir"'
+    am__untar='pax -r'
+    ;;
+  cpio)
+    am__tar='find "$$tardir" -print | cpio -o -H $1 -L'
+    am__tar_='find "$tardir" -print | cpio -o -H $1 -L'
+    am__untar='cpio -i -H $1 -d'
+    ;;
+  none)
+    am__tar=false
+    am__tar_=false
+    am__untar=false
+    ;;
+  esac
+
+  # If the value was cached, stop now.  We just wanted to have am__tar
+  # and am__untar set.
+  test -n "${am_cv_prog_tar_$1}" && break
+
+  # tar/untar a dummy directory, and stop if the command works
+  rm -rf conftest.dir
+  mkdir conftest.dir
+  echo GrepMe > conftest.dir/file
+  AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar])
+  rm -rf conftest.dir
+  if test -s conftest.tar; then
+    AM_RUN_LOG([$am__untar <conftest.tar])
+    grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
+  fi
+done
+rm -rf conftest.dir
+
+AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool])
+AC_MSG_RESULT([$am_cv_prog_tar_$1])])
+AC_SUBST([am__tar])
+AC_SUBST([am__untar])
+]) # _AM_PROG_TAR
+
+m4_include([m4/argz.m4])
+m4_include([m4/libtool.m4])
+m4_include([m4/ltdl.m4])
+m4_include([m4/ltoptions.m4])
+m4_include([m4/ltsugar.m4])
+m4_include([m4/ltversion.m4])
+m4_include([m4/lt~obsolete.m4])
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/argz.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/argz.c
new file mode 100644
index 0000000..8567723
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/argz.c
@@ -0,0 +1,226 @@
+/* argz.c -- argz implementation for non-glibc systems
+
+   Copyright (C) 2004, 2006, 2007, 2008 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#if defined(LTDL) && defined LT_CONFIG_H
+#  include LT_CONFIG_H
+#else
+#  include <config.h>
+#endif
+
+#include <argz.h>
+
+#include <assert.h>
+#include <stddef.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <errno.h>
+#include <string.h>
+
+#define EOS_CHAR '\0'
+
+error_t
+argz_append (char **pargz, size_t *pargz_len, const char *buf, size_t buf_len)
+{
+  size_t argz_len;
+  char  *argz;
+
+  assert (pargz);
+  assert (pargz_len);
+  assert ((*pargz && *pargz_len) || (!*pargz && !*pargz_len));
+
+  /* If nothing needs to be appended, no more work is required.  */
+  if (buf_len == 0)
+    return 0;
+
+  /* Ensure there is enough room to append BUF_LEN.  */
+  argz_len = *pargz_len + buf_len;
+  argz = (char *) realloc (*pargz, argz_len);
+  if (!argz)
+    return ENOMEM;
+
+  /* Copy characters from BUF after terminating '\0' in ARGZ.  */
+  memcpy (argz + *pargz_len, buf, buf_len);
+
+  /* Assign new values.  */
+  *pargz = argz;
+  *pargz_len = argz_len;
+
+  return 0;
+}
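
A minimal usage sketch follows (illustrative only, not part of the imported argz.c); it assumes just the argz_append prototype shown in this package, which on glibc systems is also available from <argz.h>. Each entry is handed over together with its trailing '\0', which is what makes it a separate element of the vector.

#include <argz.h>      /* on glibc; otherwise the declarations bundled here */
#include <stdlib.h>
#include <string.h>

int
main (void)
{
  char *argz = 0;
  size_t argz_len = 0;

  /* Append two entries, each together with its terminating '\0'.  */
  if (argz_append (&argz, &argz_len, "-I.", strlen ("-I.") + 1) != 0
      || argz_append (&argz, &argz_len, "-DNDEBUG", strlen ("-DNDEBUG") + 1) != 0)
    return 1;

  /* argz now holds "-I.\0-DNDEBUG\0" and argz_len is 13.  */
  free (argz);
  return 0;
}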
+
+
+error_t
+argz_create_sep (const char *str, int delim, char **pargz, size_t *pargz_len)
+{
+  size_t argz_len;
+  char *argz = 0;
+
+  assert (str);
+  assert (pargz);
+  assert (pargz_len);
+
+  /* Make a copy of STR, but replacing each occurrence of
+     DELIM with '\0'.  */
+  argz_len = 1+ strlen (str);
+  if (argz_len)
+    {
+      const char *p;
+      char *q;
+
+      argz = (char *) malloc (argz_len);
+      if (!argz)
+	return ENOMEM;
+
+      for (p = str, q = argz; *p != EOS_CHAR; ++p)
+	{
+	  if (*p == delim)
+	    {
+	      /* Ignore leading delimiters, and fold consecutive
+		 delimiters in STR into a single '\0' in ARGZ.  */
+	      if ((q > argz) && (q[-1] != EOS_CHAR))
+		*q++ = EOS_CHAR;
+	      else
+		--argz_len;
+	    }
+	  else
+	    *q++ = *p;
+	}
+      /* Copy terminating EOS_CHAR.  */
+      *q = *p;
+    }
+
+  /* If ARGZ_LEN has shrunk to nothing, release ARGZ's memory.  */
+  if (!argz_len)
+    argz = (free (argz), (char *) 0);
+
+  /* Assign new values.  */
+  *pargz = argz;
+  *pargz_len = argz_len;
+
+  return 0;
+}
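
As an illustrative sketch (not part of the imported file), argz_create_sep is typically used to split a PATH-like string; the leading/consecutive-delimiter folding described in the comment above means empty fields simply disappear.

#include <argz.h>      /* on glibc; otherwise the declarations bundled here */
#include <stdlib.h>

int
main (void)
{
  char *argz = 0;
  size_t argz_len = 0;

  /* The doubled ':' is folded away, leaving three entries back to back:
     "/usr/lib\0/usr/local/lib\0/opt/lib\0", with argz_len == 33.  */
  if (argz_create_sep ("/usr/lib::/usr/local/lib:/opt/lib", ':',
                       &argz, &argz_len) != 0)
    return 1;

  free (argz);
  return 0;
}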
+
+
+error_t
+argz_insert (char **pargz, size_t *pargz_len, char *before, const char *entry)
+{
+  assert (pargz);
+  assert (pargz_len);
+  assert (entry && *entry);
+
+  /* No BEFORE address indicates ENTRY should be inserted after the
+     current last element.  */
+  if (!before)
+    return argz_append (pargz, pargz_len, entry, 1+ strlen (entry));
+
+  /* This probably indicates a programmer error, but to preserve
+     semantics, scan back to the start of an entry if BEFORE points
+     into the middle of it.  */
+  while ((before > *pargz) && (before[-1] != EOS_CHAR))
+    --before;
+
+  {
+    size_t entry_len	= 1+ strlen (entry);
+    size_t argz_len	= *pargz_len + entry_len;
+    size_t offset	= before - *pargz;
+    char   *argz	= (char *) realloc (*pargz, argz_len);
+
+    if (!argz)
+      return ENOMEM;
+
+    /* Make BEFORE point to the equivalent offset in ARGZ that it
+       used to have in *PARGZ in case realloc() moved the block.  */
+    before = argz + offset;
+
+    /* Move the ARGZ entries starting at BEFORE up into the new
+       space at the end -- making room to copy ENTRY into the
+       resulting gap.  */
+    memmove (before + entry_len, before, *pargz_len - offset);
+    memcpy  (before, entry, entry_len);
+
+    /* Assign new values.  */
+    *pargz = argz;
+    *pargz_len = argz_len;
+  }
+
+  return 0;
+}
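
An illustrative sketch (not part of the imported file) of the two insertion modes handled above: BEFORE pointing at an existing entry, and BEFORE == NULL, which appends after the last entry.

#include <argz.h>      /* on glibc; otherwise the declarations bundled here */
#include <stdlib.h>

int
main (void)
{
  char *argz = 0;
  size_t argz_len = 0;

  /* Start from the two-entry vector "b\0c\0".  */
  if (argz_append (&argz, &argz_len, "b\0c", sizeof "b\0c") != 0)
    return 1;

  /* Insert before the first entry ...  */
  if (argz_insert (&argz, &argz_len, argz, "a") != 0)
    return 1;

  /* ... and, with BEFORE == NULL, after the last one.  */
  if (argz_insert (&argz, &argz_len, NULL, "d") != 0)
    return 1;

  /* argz is now "a\0b\0c\0d\0" and argz_len is 8.  */
  free (argz);
  return 0;
}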
+
+
+char *
+argz_next (char *argz, size_t argz_len, const char *entry)
+{
+  assert ((argz && argz_len) || (!argz && !argz_len));
+
+  if (entry)
+    {
+      /* Either ARGZ/ARGZ_LEN is empty, or ENTRY points to an address
+	 within the ARGZ vector.  */
+      assert ((!argz && !argz_len)
+	      || ((argz <= entry) && (entry < (argz + argz_len))));
+
+      /* Move to the char immediately after the terminating
+	 '\0' of ENTRY.  */
+      entry = 1+ strchr (entry, EOS_CHAR);
+
+      /* Return either the new ENTRY, or else NULL if ARGZ is
+	 exhausted.  */
+      return (entry >= argz + argz_len) ? 0 : (char *) entry;
+    }
+  else
+    {
+      /* This should probably be flagged as a programmer error,
+	 since starting an argz_next loop with the iterator set
+	 to ARGZ is safer.  To preserve semantics, handle the NULL
+	 case by returning the start of ARGZ (if any).  */
+      if (argz_len > 0)
+	return argz;
+      else
+	return 0;
+    }
+}
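
The iteration idiom this function is written for, as an illustrative sketch (not part of the imported file): start with a null iterator and feed each return value back in until argz_next returns NULL.

#include <argz.h>      /* on glibc; otherwise the declarations bundled here */
#include <stdio.h>

int
main (void)
{
  static char argz[] = "red\0green\0blue";  /* three entries back to back */
  size_t argz_len = sizeof argz;            /* 15, counting every '\0'    */
  const char *entry = 0;                    /* start with a null iterator */

  while ((entry = argz_next (argz, argz_len, entry)) != 0)
    puts (entry);                           /* prints red, green, blue */

  return 0;
}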
+
+
+void
+argz_stringify (char *argz, size_t argz_len, int sep)
+{
+  assert ((argz && argz_len) || (!argz && !argz_len));
+
+  if (sep)
+    {
+      --argz_len;		/* don't stringify the terminating EOS */
+      while (--argz_len > 0)
+	{
+	  if (argz[argz_len] == EOS_CHAR)
+	    argz[argz_len] = sep;
+	}
+    }
+}
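
Finally, an illustrative sketch (not part of the imported file): argz_stringify flattens a vector back into an ordinary C string by replacing every inner '\0' with the separator while leaving the final terminator in place.

#include <argz.h>      /* on glibc; otherwise the declarations bundled here */
#include <stdio.h>

int
main (void)
{
  static char argz[] = "-I.\0-DNDEBUG\0-Wall";  /* three entries */
  size_t argz_len = sizeof argz;                /* 19 */

  argz_stringify (argz, argz_len, ' ');
  puts (argz);                                  /* prints "-I. -DNDEBUG -Wall" */

  return 0;
}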
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/argz_.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/argz_.h
new file mode 100644
index 0000000..0557575
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/argz_.h
@@ -0,0 +1,68 @@
+/* lt__argz.h -- internal argz interface for non-glibc systems
+
+   Copyright (C) 2004, 2007, 2008 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+*/
+
+#if !defined(LT__ARGZ_H)
+#define LT__ARGZ_H 1
+
+#include <stdlib.h>
+#define __need_error_t
+#include <errno.h>
+#include <sys/types.h>
+
+#if defined(LTDL)
+#  include "lt__glibc.h"
+#  include "lt_system.h"
+#else
+#  define LT_SCOPE
+#endif
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+LT_SCOPE error_t argz_append	(char **pargz, size_t *pargz_len,
+				 const char *buf, size_t buf_len);
+LT_SCOPE error_t argz_create_sep(const char *str, int delim,
+				 char **pargz, size_t *pargz_len);
+LT_SCOPE error_t argz_insert	(char **pargz, size_t *pargz_len,
+				 char *before, const char *entry);
+LT_SCOPE char *	 argz_next	(char *argz, size_t argz_len,
+				 const char *entry);
+LT_SCOPE void	 argz_stringify	(char *argz, size_t argz_len, int sep);
+
+#if defined(__cplusplus)
+}
+#endif
+
+#if !defined(LTDL)
+#  undef LT_SCOPE
+#endif
+
+#endif /*!defined(LT__ARGZ_H)*/
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/config-h.in b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/config-h.in
new file mode 100644
index 0000000..23eab00
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/config-h.in
@@ -0,0 +1,178 @@
+/* config-h.in.  Generated from configure.ac by autoheader.  */
+
+/* Define to 1 if you have the `argz_add' function. */
+#undef HAVE_ARGZ_ADD
+
+/* Define to 1 if you have the `argz_append' function. */
+#undef HAVE_ARGZ_APPEND
+
+/* Define to 1 if you have the `argz_count' function. */
+#undef HAVE_ARGZ_COUNT
+
+/* Define to 1 if you have the `argz_create_sep' function. */
+#undef HAVE_ARGZ_CREATE_SEP
+
+/* Define to 1 if you have the <argz.h> header file. */
+#undef HAVE_ARGZ_H
+
+/* Define to 1 if you have the `argz_insert' function. */
+#undef HAVE_ARGZ_INSERT
+
+/* Define to 1 if you have the `argz_next' function. */
+#undef HAVE_ARGZ_NEXT
+
+/* Define to 1 if you have the `argz_stringify' function. */
+#undef HAVE_ARGZ_STRINGIFY
+
+/* Define to 1 if you have the `closedir' function. */
+#undef HAVE_CLOSEDIR
+
+/* Define to 1 if you have the declaration of `cygwin_conv_path', and to 0 if
+   you don't. */
+#undef HAVE_DECL_CYGWIN_CONV_PATH
+
+/* Define to 1 if you have the <dirent.h> header file. */
+#undef HAVE_DIRENT_H
+
+/* Define if you have the GNU dld library. */
+#undef HAVE_DLD
+
+/* Define to 1 if you have the <dld.h> header file. */
+#undef HAVE_DLD_H
+
+/* Define to 1 if you have the `dlerror' function. */
+#undef HAVE_DLERROR
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#undef HAVE_DLFCN_H
+
+/* Define to 1 if you have the <dl.h> header file. */
+#undef HAVE_DL_H
+
+/* Define if you have the _dyld_func_lookup function. */
+#undef HAVE_DYLD
+
+/* Define to 1 if the system has the type `error_t'. */
+#undef HAVE_ERROR_T
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define if you have the libdl library or equivalent. */
+#undef HAVE_LIBDL
+
+/* Define if libdlloader will be built on this platform */
+#undef HAVE_LIBDLLOADER
+
+/* Define to 1 if you have the <mach-o/dyld.h> header file. */
+#undef HAVE_MACH_O_DYLD_H
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* Define to 1 if you have the `opendir' function. */
+#undef HAVE_OPENDIR
+
+/* Define if libtool can extract symbol lists from object files. */
+#undef HAVE_PRELOADED_SYMBOLS
+
+/* Define to 1 if you have the `readdir' function. */
+#undef HAVE_READDIR
+
+/* Define if you have the shl_load function. */
+#undef HAVE_SHL_LOAD
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#undef HAVE_STDLIB_H
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#undef HAVE_STRING_H
+
+/* Define to 1 if you have the `strlcat' function. */
+#undef HAVE_STRLCAT
+
+/* Define to 1 if you have the `strlcpy' function. */
+#undef HAVE_STRLCPY
+
+/* Define to 1 if you have the <sys/dl.h> header file. */
+#undef HAVE_SYS_DL_H
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#undef HAVE_SYS_STAT_H
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#undef HAVE_SYS_TYPES_H
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* This value is set to 1 to indicate that the system argz facility works */
+#undef HAVE_WORKING_ARGZ
+
+/* Define if the OS needs help to load dependent libraries for dlopen(). */
+#undef LTDL_DLOPEN_DEPLIBS
+
+/* Define to the system default library search path. */
+#undef LT_DLSEARCH_PATH
+
+/* The archive extension */
+#undef LT_LIBEXT
+
+/* The archive prefix */
+#undef LT_LIBPREFIX
+
+/* Define to the extension used for runtime loadable modules, say, ".so". */
+#undef LT_MODULE_EXT
+
+/* Define to the name of the environment variable that determines the run-time
+   module search path. */
+#undef LT_MODULE_PATH_VAR
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#undef LT_OBJDIR
+
+/* Define to the shared library suffix, say, ".dylib". */
+#undef LT_SHARED_EXT
+
+/* Define if dlsym() requires a leading underscore in symbol names. */
+#undef NEED_USCORE
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* Define to 1 if you have the ANSI C header files. */
+#undef STDC_HEADERS
+
+/* Version number of package */
+#undef VERSION
+
+/* Define so that glibc/gnulib argp.h does not typedef error_t. */
+#undef __error_t_defined
+
+/* Define to a type to use for `error_t' if it is not otherwise available. */
+#undef error_t
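
Once configure rewrites this template into config.h, consumers branch on these defines. The following is a hypothetical sketch only (the fallback header name is illustrative, not taken from this package) of how the argz-related defines are typically consumed:

#include <config.h>     /* generated from this template by configure */

#if defined(HAVE_ARGZ_H) && defined(HAVE_WORKING_ARGZ)
#  include <argz.h>     /* the system argz facility exists and works */
#else
#  include "argz_.h"    /* illustrative: fall back to the bundled argz */
#endif
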
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/configure b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/configure
new file mode 100755
index 0000000..f18f272
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/configure
@@ -0,0 +1,14879 @@
+#! /bin/sh
+# Guess values for system-dependent variables and create Makefiles.
+# Generated by GNU Autoconf 2.68 for libltdl 2.4.2.
+#
+# Report bugs to <bug-libtool@gnu.org>.
+#
+#
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+#
+#
+# This configure script is free software; the Free Software Foundation
+# gives unlimited permission to copy, distribute and modify it.
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested="  as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
+  as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
+  eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
+  test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1
+
+  test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
+    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\
+      || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1
+test \$(( 1 + 1 )) = 2 || exit 1"
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  break 2
+fi
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org and
+$0: bug-libtool@gnu.org about your system, including any
+$0: error possibly output before this message. Then install
+$0: a modern shell, or manually run the script under such a
+$0: shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+  case $as_dir in #(
+  -*) as_dir=./$as_dir;;
+  esac
+  test -d "$as_dir" || eval $as_mkdir_p || {
+    as_dirs=
+    while :; do
+      case $as_dir in #(
+      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+      *) as_qdir=$as_dir;;
+      esac
+      as_dirs="'$as_qdir' $as_dirs"
+      as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_dir" : 'X\(//\)[^/]' \| \
+	 X"$as_dir" : 'X\(//\)$' \| \
+	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      test -d "$as_dir" && break
+    done
+    test -z "$as_dirs" || eval "mkdir $as_dirs"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+  eval 'as_fn_append ()
+  {
+    eval $1+=\$2
+  }'
+else
+  as_fn_append ()
+  {
+    eval $1=\$$1\$2
+  }
+fi # as_fn_append
+
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+  eval 'as_fn_arith ()
+  {
+    as_val=$(( $* ))
+  }'
+else
+  as_fn_arith ()
+  {
+    as_val=`expr "$@" || test $? -eq 1`
+  }
+fi # as_fn_arith
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  fi
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+  as_dirname=dirname
+else
+  as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+
+  as_lineno_1=$LINENO as_lineno_1a=$LINENO
+  as_lineno_2=$LINENO as_lineno_2a=$LINENO
+  eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
+  test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
+  # Blame Lee E. McMahon (1931-1989) for sed's syntax.  :-)
+  sed -n '
+    p
+    /[$]LINENO/=
+  ' <$as_myself |
+    sed '
+      s/[$]LINENO.*/&-/
+      t lineno
+      b
+      :lineno
+      N
+      :loop
+      s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
+      t loop
+      s/-\n.*//
+    ' >$as_me.lineno &&
+  chmod +x "$as_me.lineno" ||
+    { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
+
+  # Don't try to exec as it changes $[0], causing all sorts of problems
+  # (the dirname of $[0] is not the place where we might find the
+  # original and so on.  Autoconf is especially sensitive to this).
+  . "./$as_me.lineno"
+  # Exit status is that of the last command.
+  exit
+}
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+  case `echo 'xy\c'` in
+  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
+  xy)  ECHO_C='\c';;
+  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
+       ECHO_T='	';;
+  esac;;
+*)
+  ECHO_N='-n';;
+esac
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+  rm -f conf$$.dir/conf$$.file
+else
+  rm -f conf$$.dir
+  mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+  if ln -s conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s='ln -s'
+    # ... but there are two gotchas:
+    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+    # In both cases, we have to default to `cp -p'.
+    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+      as_ln_s='cp -p'
+  elif ln conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s=ln
+  else
+    as_ln_s='cp -p'
+  fi
+else
+  as_ln_s='cp -p'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+
+if mkdir -p . 2>/dev/null; then
+  as_mkdir_p='mkdir -p "$as_dir"'
+else
+  test -d ./-p && rmdir ./-p
+  as_mkdir_p=false
+fi
+
+if test -x / >/dev/null 2>&1; then
+  as_test_x='test -x'
+else
+  if ls -dL / >/dev/null 2>&1; then
+    as_ls_L_option=L
+  else
+    as_ls_L_option=
+  fi
+  as_test_x='
+    eval sh -c '\''
+      if test -d "$1"; then
+	test -d "$1/.";
+      else
+	case $1 in #(
+	-*)set "./$1";;
+	esac;
+	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
+	???[sx]*):;;*)false;;esac;fi
+    '\'' sh
+  '
+fi
+as_executable_p=$as_test_x
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
+
+SHELL=${CONFIG_SHELL-/bin/sh}
+
+
+test -n "$DJDIR" || exec 7<&0 </dev/null
+exec 6>&1
+
+# Name of the host.
+# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
+# so uname gets run too.
+ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
+
+#
+# Initializations.
+#
+ac_default_prefix=/usr/local
+ac_clean_files=
+ac_config_libobj_dir=.
+LIBOBJS=
+cross_compiling=no
+subdirs=
+MFLAGS=
+MAKEFLAGS=
+
+# Identity of this package.
+PACKAGE_NAME='libltdl'
+PACKAGE_TARNAME='libltdl'
+PACKAGE_VERSION='2.4.2'
+PACKAGE_STRING='libltdl 2.4.2'
+PACKAGE_BUGREPORT='bug-libtool@gnu.org'
+PACKAGE_URL=''
+
+ac_unique_file="ltdl.c"
+# Factoring default headers for most tests.
+ac_includes_default="\
+#include <stdio.h>
+#ifdef HAVE_SYS_TYPES_H
+# include <sys/types.h>
+#endif
+#ifdef HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+#ifdef STDC_HEADERS
+# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
+#endif
+#ifdef HAVE_STRING_H
+# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
+#  include <memory.h>
+# endif
+# include <string.h>
+#endif
+#ifdef HAVE_STRINGS_H
+# include <strings.h>
+#endif
+#ifdef HAVE_INTTYPES_H
+# include <inttypes.h>
+#endif
+#ifdef HAVE_STDINT_H
+# include <stdint.h>
+#endif
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif"
+
+ac_subst_vars='am__EXEEXT_FALSE
+am__EXEEXT_TRUE
+LTLIBOBJS
+LTDLOPEN
+LT_CONFIG_H
+CONVENIENCE_LTDL_FALSE
+CONVENIENCE_LTDL_TRUE
+INSTALL_LTDL_FALSE
+INSTALL_LTDL_TRUE
+ARGZ_H
+LIBOBJS
+sys_symbol_underscore
+LIBADD_DL
+LT_DLPREOPEN
+LIBADD_DLD_LINK
+LIBADD_SHL_LOAD
+LIBADD_DLOPEN
+LT_DLLOADERS
+CPP
+OTOOL64
+OTOOL
+LIPO
+NMEDIT
+DSYMUTIL
+MANIFEST_TOOL
+RANLIB
+ac_ct_AR
+AR
+LN_S
+NM
+ac_ct_DUMPBIN
+DUMPBIN
+LD
+FGREP
+EGREP
+GREP
+SED
+am__fastdepCC_FALSE
+am__fastdepCC_TRUE
+CCDEPMODE
+AMDEPBACKSLASH
+AMDEP_FALSE
+AMDEP_TRUE
+am__quote
+am__include
+DEPDIR
+OBJEXT
+EXEEXT
+ac_ct_CC
+CPPFLAGS
+LDFLAGS
+CFLAGS
+CC
+host_os
+host_vendor
+host_cpu
+host
+build_os
+build_vendor
+build_cpu
+build
+LIBTOOL
+OBJDUMP
+DLLTOOL
+AS
+am__untar
+am__tar
+AMTAR
+am__leading_dot
+SET_MAKE
+AWK
+mkdir_p
+MKDIR_P
+INSTALL_STRIP_PROGRAM
+STRIP
+install_sh
+MAKEINFO
+AUTOHEADER
+AUTOMAKE
+AUTOCONF
+ACLOCAL
+VERSION
+PACKAGE
+CYGPATH_W
+am__isrc
+INSTALL_DATA
+INSTALL_SCRIPT
+INSTALL_PROGRAM
+target_alias
+host_alias
+build_alias
+LIBS
+ECHO_T
+ECHO_N
+ECHO_C
+DEFS
+mandir
+localedir
+libdir
+psdir
+pdfdir
+dvidir
+htmldir
+infodir
+docdir
+oldincludedir
+includedir
+localstatedir
+sharedstatedir
+sysconfdir
+datadir
+datarootdir
+libexecdir
+sbindir
+bindir
+program_transform_name
+prefix
+exec_prefix
+PACKAGE_URL
+PACKAGE_BUGREPORT
+PACKAGE_STRING
+PACKAGE_VERSION
+PACKAGE_TARNAME
+PACKAGE_NAME
+PATH_SEPARATOR
+SHELL'
+ac_subst_files=''
+ac_user_opts='
+enable_option_checking
+enable_shared
+enable_static
+with_pic
+enable_fast_install
+enable_dependency_tracking
+with_gnu_ld
+with_sysroot
+enable_libtool_lock
+enable_ltdl_install
+'
+      ac_precious_vars='build_alias
+host_alias
+target_alias
+CC
+CFLAGS
+LDFLAGS
+LIBS
+CPPFLAGS
+CPP'
+
+
+# Initialize some variables set by options.
+ac_init_help=
+ac_init_version=false
+ac_unrecognized_opts=
+ac_unrecognized_sep=
+# The variables have the same names as the options, with
+# dashes changed to underlines.
+cache_file=/dev/null
+exec_prefix=NONE
+no_create=
+no_recursion=
+prefix=NONE
+program_prefix=NONE
+program_suffix=NONE
+program_transform_name=s,x,x,
+silent=
+site=
+srcdir=
+verbose=
+x_includes=NONE
+x_libraries=NONE
+
+# Installation directory options.
+# These are left unexpanded so users can "make install exec_prefix=/foo"
+# and all the variables that are supposed to be based on exec_prefix
+# by default will actually change.
+# Use braces instead of parens because sh, perl, etc. also accept them.
+# (The list follows the same order as the GNU Coding Standards.)
+bindir='${exec_prefix}/bin'
+sbindir='${exec_prefix}/sbin'
+libexecdir='${exec_prefix}/libexec'
+datarootdir='${prefix}/share'
+datadir='${datarootdir}'
+sysconfdir='${prefix}/etc'
+sharedstatedir='${prefix}/com'
+localstatedir='${prefix}/var'
+includedir='${prefix}/include'
+oldincludedir='/usr/include'
+docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
+infodir='${datarootdir}/info'
+htmldir='${docdir}'
+dvidir='${docdir}'
+pdfdir='${docdir}'
+psdir='${docdir}'
+libdir='${exec_prefix}/lib'
+localedir='${datarootdir}/locale'
+mandir='${datarootdir}/man'
+
+ac_prev=
+ac_dashdash=
+for ac_option
+do
+  # If the previous option needs an argument, assign it.
+  if test -n "$ac_prev"; then
+    eval $ac_prev=\$ac_option
+    ac_prev=
+    continue
+  fi
+
+  case $ac_option in
+  *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
+  *=)   ac_optarg= ;;
+  *)    ac_optarg=yes ;;
+  esac
+
+  # Accept the important Cygnus configure options, so we can diagnose typos.
+
+  case $ac_dashdash$ac_option in
+  --)
+    ac_dashdash=yes ;;
+
+  -bindir | --bindir | --bindi | --bind | --bin | --bi)
+    ac_prev=bindir ;;
+  -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
+    bindir=$ac_optarg ;;
+
+  -build | --build | --buil | --bui | --bu)
+    ac_prev=build_alias ;;
+  -build=* | --build=* | --buil=* | --bui=* | --bu=*)
+    build_alias=$ac_optarg ;;
+
+  -cache-file | --cache-file | --cache-fil | --cache-fi \
+  | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
+    ac_prev=cache_file ;;
+  -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
+  | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
+    cache_file=$ac_optarg ;;
+
+  --config-cache | -C)
+    cache_file=config.cache ;;
+
+  -datadir | --datadir | --datadi | --datad)
+    ac_prev=datadir ;;
+  -datadir=* | --datadir=* | --datadi=* | --datad=*)
+    datadir=$ac_optarg ;;
+
+  -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
+  | --dataroo | --dataro | --datar)
+    ac_prev=datarootdir ;;
+  -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
+  | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
+    datarootdir=$ac_optarg ;;
+
+  -disable-* | --disable-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid feature name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"enable_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval enable_$ac_useropt=no ;;
+
+  -docdir | --docdir | --docdi | --doc | --do)
+    ac_prev=docdir ;;
+  -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
+    docdir=$ac_optarg ;;
+
+  -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
+    ac_prev=dvidir ;;
+  -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
+    dvidir=$ac_optarg ;;
+
+  -enable-* | --enable-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid feature name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"enable_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval enable_$ac_useropt=\$ac_optarg ;;
+
+  -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
+  | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
+  | --exec | --exe | --ex)
+    ac_prev=exec_prefix ;;
+  -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
+  | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
+  | --exec=* | --exe=* | --ex=*)
+    exec_prefix=$ac_optarg ;;
+
+  -gas | --gas | --ga | --g)
+    # Obsolete; use --with-gas.
+    with_gas=yes ;;
+
+  -help | --help | --hel | --he | -h)
+    ac_init_help=long ;;
+  -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
+    ac_init_help=recursive ;;
+  -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
+    ac_init_help=short ;;
+
+  -host | --host | --hos | --ho)
+    ac_prev=host_alias ;;
+  -host=* | --host=* | --hos=* | --ho=*)
+    host_alias=$ac_optarg ;;
+
+  -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
+    ac_prev=htmldir ;;
+  -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
+  | --ht=*)
+    htmldir=$ac_optarg ;;
+
+  -includedir | --includedir | --includedi | --included | --include \
+  | --includ | --inclu | --incl | --inc)
+    ac_prev=includedir ;;
+  -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
+  | --includ=* | --inclu=* | --incl=* | --inc=*)
+    includedir=$ac_optarg ;;
+
+  -infodir | --infodir | --infodi | --infod | --info | --inf)
+    ac_prev=infodir ;;
+  -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
+    infodir=$ac_optarg ;;
+
+  -libdir | --libdir | --libdi | --libd)
+    ac_prev=libdir ;;
+  -libdir=* | --libdir=* | --libdi=* | --libd=*)
+    libdir=$ac_optarg ;;
+
+  -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
+  | --libexe | --libex | --libe)
+    ac_prev=libexecdir ;;
+  -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
+  | --libexe=* | --libex=* | --libe=*)
+    libexecdir=$ac_optarg ;;
+
+  -localedir | --localedir | --localedi | --localed | --locale)
+    ac_prev=localedir ;;
+  -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
+    localedir=$ac_optarg ;;
+
+  -localstatedir | --localstatedir | --localstatedi | --localstated \
+  | --localstate | --localstat | --localsta | --localst | --locals)
+    ac_prev=localstatedir ;;
+  -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
+  | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
+    localstatedir=$ac_optarg ;;
+
+  -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
+    ac_prev=mandir ;;
+  -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
+    mandir=$ac_optarg ;;
+
+  -nfp | --nfp | --nf)
+    # Obsolete; use --without-fp.
+    with_fp=no ;;
+
+  -no-create | --no-create | --no-creat | --no-crea | --no-cre \
+  | --no-cr | --no-c | -n)
+    no_create=yes ;;
+
+  -no-recursion | --no-recursion | --no-recursio | --no-recursi \
+  | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
+    no_recursion=yes ;;
+
+  -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
+  | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
+  | --oldin | --oldi | --old | --ol | --o)
+    ac_prev=oldincludedir ;;
+  -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
+  | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
+  | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
+    oldincludedir=$ac_optarg ;;
+
+  -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
+    ac_prev=prefix ;;
+  -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
+    prefix=$ac_optarg ;;
+
+  -program-prefix | --program-prefix | --program-prefi | --program-pref \
+  | --program-pre | --program-pr | --program-p)
+    ac_prev=program_prefix ;;
+  -program-prefix=* | --program-prefix=* | --program-prefi=* \
+  | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
+    program_prefix=$ac_optarg ;;
+
+  -program-suffix | --program-suffix | --program-suffi | --program-suff \
+  | --program-suf | --program-su | --program-s)
+    ac_prev=program_suffix ;;
+  -program-suffix=* | --program-suffix=* | --program-suffi=* \
+  | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
+    program_suffix=$ac_optarg ;;
+
+  -program-transform-name | --program-transform-name \
+  | --program-transform-nam | --program-transform-na \
+  | --program-transform-n | --program-transform- \
+  | --program-transform | --program-transfor \
+  | --program-transfo | --program-transf \
+  | --program-trans | --program-tran \
+  | --progr-tra | --program-tr | --program-t)
+    ac_prev=program_transform_name ;;
+  -program-transform-name=* | --program-transform-name=* \
+  | --program-transform-nam=* | --program-transform-na=* \
+  | --program-transform-n=* | --program-transform-=* \
+  | --program-transform=* | --program-transfor=* \
+  | --program-transfo=* | --program-transf=* \
+  | --program-trans=* | --program-tran=* \
+  | --progr-tra=* | --program-tr=* | --program-t=*)
+    program_transform_name=$ac_optarg ;;
+
+  -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
+    ac_prev=pdfdir ;;
+  -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
+    pdfdir=$ac_optarg ;;
+
+  -psdir | --psdir | --psdi | --psd | --ps)
+    ac_prev=psdir ;;
+  -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
+    psdir=$ac_optarg ;;
+
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil)
+    silent=yes ;;
+
+  -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
+    ac_prev=sbindir ;;
+  -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
+  | --sbi=* | --sb=*)
+    sbindir=$ac_optarg ;;
+
+  -sharedstatedir | --sharedstatedir | --sharedstatedi \
+  | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
+  | --sharedst | --shareds | --shared | --share | --shar \
+  | --sha | --sh)
+    ac_prev=sharedstatedir ;;
+  -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
+  | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
+  | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
+  | --sha=* | --sh=*)
+    sharedstatedir=$ac_optarg ;;
+
+  -site | --site | --sit)
+    ac_prev=site ;;
+  -site=* | --site=* | --sit=*)
+    site=$ac_optarg ;;
+
+  -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
+    ac_prev=srcdir ;;
+  -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
+    srcdir=$ac_optarg ;;
+
+  -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
+  | --syscon | --sysco | --sysc | --sys | --sy)
+    ac_prev=sysconfdir ;;
+  -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
+  | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
+    sysconfdir=$ac_optarg ;;
+
+  -target | --target | --targe | --targ | --tar | --ta | --t)
+    ac_prev=target_alias ;;
+  -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
+    target_alias=$ac_optarg ;;
+
+  -v | -verbose | --verbose | --verbos | --verbo | --verb)
+    verbose=yes ;;
+
+  -version | --version | --versio | --versi | --vers | -V)
+    ac_init_version=: ;;
+
+  -with-* | --with-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid package name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"with_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval with_$ac_useropt=\$ac_optarg ;;
+
+  -without-* | --without-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid package name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"with_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval with_$ac_useropt=no ;;
+
+  --x)
+    # Obsolete; use --with-x.
+    with_x=yes ;;
+
+  -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
+  | --x-incl | --x-inc | --x-in | --x-i)
+    ac_prev=x_includes ;;
+  -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
+  | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
+    x_includes=$ac_optarg ;;
+
+  -x-libraries | --x-libraries | --x-librarie | --x-librari \
+  | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
+    ac_prev=x_libraries ;;
+  -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
+  | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
+    x_libraries=$ac_optarg ;;
+
+  -*) as_fn_error $? "unrecognized option: \`$ac_option'
+Try \`$0 --help' for more information"
+    ;;
+
+  *=*)
+    ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
+    # Reject names that are not valid shell variable names.
+    case $ac_envvar in #(
+      '' | [0-9]* | *[!_$as_cr_alnum]* )
+      as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
+    esac
+    eval $ac_envvar=\$ac_optarg
+    export $ac_envvar ;;
+
+  *)
+    # FIXME: should be removed in autoconf 3.0.
+    $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
+    expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
+      $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
+    : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
+    ;;
+
+  esac
+done
+
+if test -n "$ac_prev"; then
+  ac_option=--`echo $ac_prev | sed 's/_/-/g'`
+  as_fn_error $? "missing argument to $ac_option"
+fi
+
+if test -n "$ac_unrecognized_opts"; then
+  case $enable_option_checking in
+    no) ;;
+    fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
+    *)     $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
+  esac
+fi
+
+# Check all directory arguments for consistency.
+for ac_var in	exec_prefix prefix bindir sbindir libexecdir datarootdir \
+		datadir sysconfdir sharedstatedir localstatedir includedir \
+		oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
+		libdir localedir mandir
+do
+  eval ac_val=\$$ac_var
+  # Remove trailing slashes.
+  case $ac_val in
+    */ )
+      ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
+      eval $ac_var=\$ac_val;;
+  esac
+  # Be sure to have absolute directory names.
+  case $ac_val in
+    [\\/$]* | ?:[\\/]* )  continue;;
+    NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
+  esac
+  as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
+done
+
+# There might be people who depend on the old broken behavior: `$host'
+# used to hold the argument of --host etc.
+# FIXME: To remove some day.
+build=$build_alias
+host=$host_alias
+target=$target_alias
+
+# FIXME: To remove some day.
+if test "x$host_alias" != x; then
+  if test "x$build_alias" = x; then
+    cross_compiling=maybe
+    $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
+    If a cross compiler is detected then cross compile mode will be used" >&2
+  elif test "x$build_alias" != "x$host_alias"; then
+    cross_compiling=yes
+  fi
+fi
+
+ac_tool_prefix=
+test -n "$host_alias" && ac_tool_prefix=$host_alias-
+
+test "$silent" = yes && exec 6>/dev/null
+
+
+ac_pwd=`pwd` && test -n "$ac_pwd" &&
+ac_ls_di=`ls -di .` &&
+ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
+  as_fn_error $? "working directory cannot be determined"
+test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
+  as_fn_error $? "pwd does not report name of working directory"
+
+
+# Find the source files, if location was not specified.
+if test -z "$srcdir"; then
+  ac_srcdir_defaulted=yes
+  # Try the directory containing this script, then the parent directory.
+  ac_confdir=`$as_dirname -- "$as_myself" ||
+$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_myself" : 'X\(//\)[^/]' \| \
+	 X"$as_myself" : 'X\(//\)$' \| \
+	 X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_myself" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+  srcdir=$ac_confdir
+  if test ! -r "$srcdir/$ac_unique_file"; then
+    srcdir=..
+  fi
+else
+  ac_srcdir_defaulted=no
+fi
+if test ! -r "$srcdir/$ac_unique_file"; then
+  test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
+  as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
+fi
+ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
+ac_abs_confdir=`(
+	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
+	pwd)`
+# When building in place, set srcdir=.
+if test "$ac_abs_confdir" = "$ac_pwd"; then
+  srcdir=.
+fi
+# Remove unnecessary trailing slashes from srcdir.
+# Double slashes in file names in object file debugging info
+# mess up M-x gdb in Emacs.
+case $srcdir in
+*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
+esac
+for ac_var in $ac_precious_vars; do
+  eval ac_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_env_${ac_var}_value=\$${ac_var}
+  eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_cv_env_${ac_var}_value=\$${ac_var}
+done
+
+#
+# Report the --help message.
+#
+if test "$ac_init_help" = "long"; then
+  # Omit some internal or obsolete options to make the list less imposing.
+  # This message is too long to be a string in the A/UX 3.1 sh.
+  cat <<_ACEOF
+\`configure' configures libltdl 2.4.2 to adapt to many kinds of systems.
+
+Usage: $0 [OPTION]... [VAR=VALUE]...
+
+To assign environment variables (e.g., CC, CFLAGS...), specify them as
+VAR=VALUE.  See below for descriptions of some of the useful variables.
+
+Defaults for the options are specified in brackets.
+
+Configuration:
+  -h, --help              display this help and exit
+      --help=short        display options specific to this package
+      --help=recursive    display the short help of all the included packages
+  -V, --version           display version information and exit
+  -q, --quiet, --silent   do not print \`checking ...' messages
+      --cache-file=FILE   cache test results in FILE [disabled]
+  -C, --config-cache      alias for \`--cache-file=config.cache'
+  -n, --no-create         do not create output files
+      --srcdir=DIR        find the sources in DIR [configure dir or \`..']
+
+Installation directories:
+  --prefix=PREFIX         install architecture-independent files in PREFIX
+                          [$ac_default_prefix]
+  --exec-prefix=EPREFIX   install architecture-dependent files in EPREFIX
+                          [PREFIX]
+
+By default, \`make install' will install all the files in
+\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc.  You can specify
+an installation prefix other than \`$ac_default_prefix' using \`--prefix',
+for instance \`--prefix=\$HOME'.
+
+For better control, use the options below.
+
+Fine tuning of the installation directories:
+  --bindir=DIR            user executables [EPREFIX/bin]
+  --sbindir=DIR           system admin executables [EPREFIX/sbin]
+  --libexecdir=DIR        program executables [EPREFIX/libexec]
+  --sysconfdir=DIR        read-only single-machine data [PREFIX/etc]
+  --sharedstatedir=DIR    modifiable architecture-independent data [PREFIX/com]
+  --localstatedir=DIR     modifiable single-machine data [PREFIX/var]
+  --libdir=DIR            object code libraries [EPREFIX/lib]
+  --includedir=DIR        C header files [PREFIX/include]
+  --oldincludedir=DIR     C header files for non-gcc [/usr/include]
+  --datarootdir=DIR       read-only arch.-independent data root [PREFIX/share]
+  --datadir=DIR           read-only architecture-independent data [DATAROOTDIR]
+  --infodir=DIR           info documentation [DATAROOTDIR/info]
+  --localedir=DIR         locale-dependent data [DATAROOTDIR/locale]
+  --mandir=DIR            man documentation [DATAROOTDIR/man]
+  --docdir=DIR            documentation root [DATAROOTDIR/doc/libltdl]
+  --htmldir=DIR           html documentation [DOCDIR]
+  --dvidir=DIR            dvi documentation [DOCDIR]
+  --pdfdir=DIR            pdf documentation [DOCDIR]
+  --psdir=DIR             ps documentation [DOCDIR]
+_ACEOF
+
+  cat <<\_ACEOF
+
+Program names:
+  --program-prefix=PREFIX            prepend PREFIX to installed program names
+  --program-suffix=SUFFIX            append SUFFIX to installed program names
+  --program-transform-name=PROGRAM   run sed PROGRAM on installed program names
+
+System types:
+  --build=BUILD     configure for building on BUILD [guessed]
+  --host=HOST       cross-compile to build programs to run on HOST [BUILD]
+_ACEOF
+fi
+
+if test -n "$ac_init_help"; then
+  case $ac_init_help in
+     short | recursive ) echo "Configuration of libltdl 2.4.2:";;
+   esac
+  cat <<\_ACEOF
+
+Optional Features:
+  --disable-option-checking  ignore unrecognized --enable/--with options
+  --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
+  --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]
+  --enable-shared[=PKGS]  build shared libraries [default=yes]
+  --enable-static[=PKGS]  build static libraries [default=yes]
+  --enable-fast-install[=PKGS]
+                          optimize for fast installation [default=yes]
+  --disable-dependency-tracking  speeds up one-time build
+  --enable-dependency-tracking   do not reject slow dependency extractors
+  --disable-libtool-lock  avoid locking (might break parallel builds)
+  --enable-ltdl-install   install libltdl
+
+Optional Packages:
+  --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
+  --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)
+  --with-pic[=PKGS]       try to use only PIC/non-PIC objects [default=use
+                          both]
+  --with-gnu-ld           assume the C compiler uses GNU ld [default=no]
+  --with-sysroot=DIR Search for dependent libraries within DIR
+                        (or the compiler's sysroot if not specified).
+
+Some influential environment variables:
+  CC          C compiler command
+  CFLAGS      C compiler flags
+  LDFLAGS     linker flags, e.g. -L<lib dir> if you have libraries in a
+              nonstandard directory <lib dir>
+  LIBS        libraries to pass to the linker, e.g. -l<library>
+  CPPFLAGS    (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
+              you have headers in a nonstandard directory <include dir>
+  CPP         C preprocessor
+
+Use these variables to override the choices made by `configure' or to help
+it to find libraries and programs with nonstandard names/locations.
+
+Report bugs to <bug-libtool@gnu.org>.
+_ACEOF
+ac_status=$?
+fi
+
+if test "$ac_init_help" = "recursive"; then
+  # If there are subdirs, report their specific --help.
+  for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
+    test -d "$ac_dir" ||
+      { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
+      continue
+    ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+  # A ".." for each directory in $ac_dir_suffix.
+  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+  case $ac_top_builddir_sub in
+  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+  esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+  .)  # We are building in place.
+    ac_srcdir=.
+    ac_top_srcdir=$ac_top_builddir_sub
+    ac_abs_top_srcdir=$ac_pwd ;;
+  [\\/]* | ?:[\\/]* )  # Absolute name.
+    ac_srcdir=$srcdir$ac_dir_suffix;
+    ac_top_srcdir=$srcdir
+    ac_abs_top_srcdir=$srcdir ;;
+  *) # Relative name.
+    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+    ac_top_srcdir=$ac_top_build_prefix$srcdir
+    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+    cd "$ac_dir" || { ac_status=$?; continue; }
+    # Check for a guest configure script (configure.gnu).
+    if test -f "$ac_srcdir/configure.gnu"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure.gnu" --help=recursive
+    elif test -f "$ac_srcdir/configure"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure" --help=recursive
+    else
+      $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
+    fi || ac_status=$?
+    cd "$ac_pwd" || { ac_status=$?; break; }
+  done
+fi
+
+test -n "$ac_init_help" && exit $ac_status
+if $ac_init_version; then
+  cat <<\_ACEOF
+libltdl configure 2.4.2
+generated by GNU Autoconf 2.68
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This configure script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it.
+_ACEOF
+  exit
+fi
+
+## ------------------------ ##
+## Autoconf initialization. ##
+## ------------------------ ##
+
+# ac_fn_c_try_compile LINENO
+# --------------------------
+# Try to compile conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_compile ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  rm -f conftest.$ac_objext
+  if { { ac_try="$ac_compile"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compile") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && {
+	 test -z "$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest.$ac_objext; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+	ac_retval=1
+fi
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_compile
+
+# ac_fn_c_try_link LINENO
+# -----------------------
+# Try to link conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_link ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  rm -f conftest.$ac_objext conftest$ac_exeext
+  if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && {
+	 test -z "$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest$ac_exeext && {
+	 test "$cross_compiling" = yes ||
+	 $as_test_x conftest$ac_exeext
+       }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+	ac_retval=1
+fi
+  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
+  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
+  # interfere with the next link command; also delete a directory that is
+  # left behind by Apple's compiler.  We do this before executing the actions.
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_link
+
+# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
+# -------------------------------------------------------
+# Tests whether HEADER exists and can be compiled using the include files in
+# INCLUDES, setting the cache variable VAR accordingly.
+ac_fn_c_check_header_compile ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+#include <$2>
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  eval "$3=yes"
+else
+  eval "$3=no"
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_header_compile
+
+# ac_fn_c_try_cpp LINENO
+# ----------------------
+# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_cpp ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  if { { ac_try="$ac_cpp conftest.$ac_ext"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } > conftest.i && {
+	 test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+    ac_retval=1
+fi
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_cpp
+
+# ac_fn_c_try_run LINENO
+# ----------------------
+# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes
+# that executables *can* be run.
+ac_fn_c_try_run ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && { ac_try='./conftest$ac_exeext'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: program exited with status $ac_status" >&5
+       $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+       ac_retval=$ac_status
+fi
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_run
+
+# ac_fn_c_check_func LINENO FUNC VAR
+# ----------------------------------
+# Tests whether FUNC exists, setting the cache variable VAR accordingly
+ac_fn_c_check_func ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+/* Define $2 to an innocuous variant, in case <limits.h> declares $2.
+   For example, HP-UX 11i <limits.h> declares gettimeofday.  */
+#define $2 innocuous_$2
+
+/* System header to define __stub macros and hopefully few prototypes,
+    which can conflict with char $2 (); below.
+    Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+    <limits.h> exists even on freestanding compilers.  */
+
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+
+#undef $2
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char $2 ();
+/* The GNU C library defines this for functions which it implements
+    to always fail with ENOSYS.  Some functions are actually named
+    something starting with __ and the normal name is an alias.  */
+#if defined __stub_$2 || defined __stub___$2
+choke me
+#endif
+
+int
+main ()
+{
+return $2 ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  eval "$3=yes"
+else
+  eval "$3=no"
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_func
+
+# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES
+# ---------------------------------------------
+# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR
+# accordingly.
+ac_fn_c_check_decl ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  as_decl_name=`echo $2|sed 's/ *(.*//'`
+  as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'`
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5
+$as_echo_n "checking whether $as_decl_name is declared... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+int
+main ()
+{
+#ifndef $as_decl_name
+#ifdef __cplusplus
+  (void) $as_decl_use;
+#else
+  (void) $as_decl_name;
+#endif
+#endif
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  eval "$3=yes"
+else
+  eval "$3=no"
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_decl
+
+# ac_fn_c_check_type LINENO TYPE VAR INCLUDES
+# -------------------------------------------
+# Tests whether TYPE exists after having included INCLUDES, setting cache
+# variable VAR accordingly.
+ac_fn_c_check_type ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  eval "$3=no"
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+int
+main ()
+{
+if (sizeof ($2))
+	 return 0;
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+int
+main ()
+{
+if (sizeof (($2)))
+	    return 0;
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+  eval "$3=yes"
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_type
+cat >config.log <<_ACEOF
+This file contains any messages produced by compilers while
+running configure, to aid debugging if configure makes a mistake.
+
+It was created by libltdl $as_me 2.4.2, which was
+generated by GNU Autoconf 2.68.  Invocation command line was
+
+  $ $0 $@
+
+_ACEOF
+exec 5>>config.log
+{
+cat <<_ASUNAME
+## --------- ##
+## Platform. ##
+## --------- ##
+
+hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null     || echo unknown`
+
+/bin/arch              = `(/bin/arch) 2>/dev/null              || echo unknown`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null       || echo unknown`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
+/usr/bin/hostinfo      = `(/usr/bin/hostinfo) 2>/dev/null      || echo unknown`
+/bin/machine           = `(/bin/machine) 2>/dev/null           || echo unknown`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null       || echo unknown`
+/bin/universe          = `(/bin/universe) 2>/dev/null          || echo unknown`
+
+_ASUNAME
+
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    $as_echo "PATH: $as_dir"
+  done
+IFS=$as_save_IFS
+
+} >&5
+
+cat >&5 <<_ACEOF
+
+
+## ----------- ##
+## Core tests. ##
+## ----------- ##
+
+_ACEOF
+
+
+# Keep a trace of the command line.
+# Strip out --no-create and --no-recursion so they do not pile up.
+# Strip out --silent because we don't want to record it for future runs.
+# Also quote any args containing shell meta-characters.
+# Make two passes to allow for proper duplicate-argument suppression.
+ac_configure_args=
+ac_configure_args0=
+ac_configure_args1=
+ac_must_keep_next=false
+for ac_pass in 1 2
+do
+  for ac_arg
+  do
+    case $ac_arg in
+    -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
+    -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+    | -silent | --silent | --silen | --sile | --sil)
+      continue ;;
+    *\'*)
+      ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    case $ac_pass in
+    1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
+    2)
+      as_fn_append ac_configure_args1 " '$ac_arg'"
+      if test $ac_must_keep_next = true; then
+	ac_must_keep_next=false # Got value, back to normal.
+      else
+	case $ac_arg in
+	  *=* | --config-cache | -C | -disable-* | --disable-* \
+	  | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
+	  | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
+	  | -with-* | --with-* | -without-* | --without-* | --x)
+	    case "$ac_configure_args0 " in
+	      "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
+	    esac
+	    ;;
+	  -* ) ac_must_keep_next=true ;;
+	esac
+      fi
+      as_fn_append ac_configure_args " '$ac_arg'"
+      ;;
+    esac
+  done
+done
+{ ac_configure_args0=; unset ac_configure_args0;}
+{ ac_configure_args1=; unset ac_configure_args1;}
+
+# When interrupted or exit'd, cleanup temporary files, and complete
+# config.log.  We remove comments because anyway the quotes in there
+# would cause problems or look ugly.
+# WARNING: Use '\'' to represent an apostrophe within the trap.
+# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
+trap 'exit_status=$?
+  # Save into config.log some information that might help in debugging.
+  {
+    echo
+
+    $as_echo "## ---------------- ##
+## Cache variables. ##
+## ---------------- ##"
+    echo
+    # The following way of writing the cache mishandles newlines in values,
+(
+  for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
+    eval ac_val=\$$ac_var
+    case $ac_val in #(
+    *${as_nl}*)
+      case $ac_var in #(
+      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+      esac
+      case $ac_var in #(
+      _ | IFS | as_nl) ;; #(
+      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+      *) { eval $ac_var=; unset $ac_var;} ;;
+      esac ;;
+    esac
+  done
+  (set) 2>&1 |
+    case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
+    *${as_nl}ac_space=\ *)
+      sed -n \
+	"s/'\''/'\''\\\\'\'''\''/g;
+	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
+      ;; #(
+    *)
+      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+      ;;
+    esac |
+    sort
+)
+    echo
+
+    $as_echo "## ----------------- ##
+## Output variables. ##
+## ----------------- ##"
+    echo
+    for ac_var in $ac_subst_vars
+    do
+      eval ac_val=\$$ac_var
+      case $ac_val in
+      *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+      esac
+      $as_echo "$ac_var='\''$ac_val'\''"
+    done | sort
+    echo
+
+    if test -n "$ac_subst_files"; then
+      $as_echo "## ------------------- ##
+## File substitutions. ##
+## ------------------- ##"
+      echo
+      for ac_var in $ac_subst_files
+      do
+	eval ac_val=\$$ac_var
+	case $ac_val in
+	*\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+	esac
+	$as_echo "$ac_var='\''$ac_val'\''"
+      done | sort
+      echo
+    fi
+
+    if test -s confdefs.h; then
+      $as_echo "## ----------- ##
+## confdefs.h. ##
+## ----------- ##"
+      echo
+      cat confdefs.h
+      echo
+    fi
+    test "$ac_signal" != 0 &&
+      $as_echo "$as_me: caught signal $ac_signal"
+    $as_echo "$as_me: exit $exit_status"
+  } >&5
+  rm -f core *.core core.conftest.* &&
+    rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
+    exit $exit_status
+' 0
+for ac_signal in 1 2 13 15; do
+  trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
+done
+ac_signal=0
+
+# confdefs.h avoids OS command line length limits that DEFS can exceed.
+rm -f -r conftest* confdefs.h
+
+$as_echo "/* confdefs.h */" > confdefs.h
+
+# Predefined preprocessor variables.
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_NAME "$PACKAGE_NAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_VERSION "$PACKAGE_VERSION"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_STRING "$PACKAGE_STRING"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_URL "$PACKAGE_URL"
+_ACEOF
+
+
+# Let the site file select an alternate cache file if it wants to.
+# Prefer an explicitly selected file to automatically selected ones.
+ac_site_file1=NONE
+ac_site_file2=NONE
+if test -n "$CONFIG_SITE"; then
+  # We do not want a PATH search for config.site.
+  case $CONFIG_SITE in #((
+    -*)  ac_site_file1=./$CONFIG_SITE;;
+    */*) ac_site_file1=$CONFIG_SITE;;
+    *)   ac_site_file1=./$CONFIG_SITE;;
+  esac
+elif test "x$prefix" != xNONE; then
+  ac_site_file1=$prefix/share/config.site
+  ac_site_file2=$prefix/etc/config.site
+else
+  ac_site_file1=$ac_default_prefix/share/config.site
+  ac_site_file2=$ac_default_prefix/etc/config.site
+fi
+for ac_site_file in "$ac_site_file1" "$ac_site_file2"
+do
+  test "x$ac_site_file" = xNONE && continue
+  if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
+$as_echo "$as_me: loading site script $ac_site_file" >&6;}
+    sed 's/^/| /' "$ac_site_file" >&5
+    . "$ac_site_file" \
+      || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "failed to load site script $ac_site_file
+See \`config.log' for more details" "$LINENO" 5; }
+  fi
+done
+
+if test -r "$cache_file"; then
+  # Some versions of bash will fail to source /dev/null (special files
+  # actually), so we avoid doing that.  DJGPP emulates it as a regular file.
+  if test /dev/null != "$cache_file" && test -f "$cache_file"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
+$as_echo "$as_me: loading cache $cache_file" >&6;}
+    case $cache_file in
+      [\\/]* | ?:[\\/]* ) . "$cache_file";;
+      *)                      . "./$cache_file";;
+    esac
+  fi
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
+$as_echo "$as_me: creating cache $cache_file" >&6;}
+  >$cache_file
+fi
+
+# Check that the precious variables saved in the cache have kept the same
+# value.
+ac_cache_corrupted=false
+for ac_var in $ac_precious_vars; do
+  eval ac_old_set=\$ac_cv_env_${ac_var}_set
+  eval ac_new_set=\$ac_env_${ac_var}_set
+  eval ac_old_val=\$ac_cv_env_${ac_var}_value
+  eval ac_new_val=\$ac_env_${ac_var}_value
+  case $ac_old_set,$ac_new_set in
+    set,)
+      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
+      ac_cache_corrupted=: ;;
+    ,set)
+      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
+      ac_cache_corrupted=: ;;
+    ,);;
+    *)
+      if test "x$ac_old_val" != "x$ac_new_val"; then
+	# differences in whitespace do not lead to failure.
+	ac_old_val_w=`echo x $ac_old_val`
+	ac_new_val_w=`echo x $ac_new_val`
+	if test "$ac_old_val_w" != "$ac_new_val_w"; then
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
+$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
+	  ac_cache_corrupted=:
+	else
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
+$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
+	  eval $ac_var=\$ac_old_val
+	fi
+	{ $as_echo "$as_me:${as_lineno-$LINENO}:   former value:  \`$ac_old_val'" >&5
+$as_echo "$as_me:   former value:  \`$ac_old_val'" >&2;}
+	{ $as_echo "$as_me:${as_lineno-$LINENO}:   current value: \`$ac_new_val'" >&5
+$as_echo "$as_me:   current value: \`$ac_new_val'" >&2;}
+      fi;;
+  esac
+  # Pass precious variables to config.status.
+  if test "$ac_new_set" = set; then
+    case $ac_new_val in
+    *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
+    *) ac_arg=$ac_var=$ac_new_val ;;
+    esac
+    case " $ac_configure_args " in
+      *" '$ac_arg' "*) ;; # Avoid dups.  Use of quotes ensures accuracy.
+      *) as_fn_append ac_configure_args " '$ac_arg'" ;;
+    esac
+  fi
+done
+if $ac_cache_corrupted; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+  { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
+$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
+  as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
+fi
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+ac_config_headers="$ac_config_headers config.h:config-h.in"
+
+
+ac_aux_dir=
+for ac_dir in config "$srcdir"/config; do
+  if test -f "$ac_dir/install-sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install-sh -c"
+    break
+  elif test -f "$ac_dir/install.sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install.sh -c"
+    break
+  elif test -f "$ac_dir/shtool"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/shtool install -c"
+    break
+  fi
+done
+if test -z "$ac_aux_dir"; then
+  as_fn_error $? "cannot find install-sh, install.sh, or shtool in config \"$srcdir\"/config" "$LINENO" 5
+fi
+
+# These three variables are undocumented and unsupported,
+# and are intended to be withdrawn in a future Autoconf release.
+# They can cause serious problems if a builder's source tree is in a directory
+# whose full name contains unusual characters.
+ac_config_guess="$SHELL $ac_aux_dir/config.guess"  # Please don't use this var.
+ac_config_sub="$SHELL $ac_aux_dir/config.sub"  # Please don't use this var.
+ac_configure="$SHELL $ac_aux_dir/configure"  # Please don't use this var.
+
+
+
+
+
+
+
+
+ # I am me!
+
+
+## ------------------------ ##
+## Automake Initialisation. ##
+## ------------------------ ##
+
+am__api_version='1.11'
+
+# Find a good install program.  We prefer a C program (faster),
+# so one script is as good as another.  But avoid the broken or
+# incompatible versions:
+# SysV /etc/install, /usr/sbin/install
+# SunOS /usr/etc/install
+# IRIX /sbin/install
+# AIX /bin/install
+# AmigaOS /C/install, which installs bootblocks on floppy discs
+# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
+# AFS /usr/afsws/bin/install, which mishandles nonexistent args
+# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
+# OS/2's system install, which has a completely different semantic
+# ./install, which can be erroneously created by make from ./install.sh.
+# Reject install programs that cannot install multiple files.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
+$as_echo_n "checking for a BSD-compatible install... " >&6; }
+if test -z "$INSTALL"; then
+if ${ac_cv_path_install+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    # Account for people who put trailing slashes in PATH elements.
+case $as_dir/ in #((
+  ./ | .// | /[cC]/* | \
+  /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
+  ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
+  /usr/ucb/* ) ;;
+  *)
+    # OSF1 and SCO ODT 3.0 have their own names for install.
+    # Don't use installbsd from OSF since it installs stuff as root
+    # by default.
+    for ac_prog in ginstall scoinst install; do
+      for ac_exec_ext in '' $ac_executable_extensions; do
+	if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then
+	  if test $ac_prog = install &&
+	    grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # AIX install.  It has an incompatible calling convention.
+	    :
+	  elif test $ac_prog = install &&
+	    grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # program-specific install script used by HP pwplus--don't use.
+	    :
+	  else
+	    rm -rf conftest.one conftest.two conftest.dir
+	    echo one > conftest.one
+	    echo two > conftest.two
+	    mkdir conftest.dir
+	    if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
+	      test -s conftest.one && test -s conftest.two &&
+	      test -s conftest.dir/conftest.one &&
+	      test -s conftest.dir/conftest.two
+	    then
+	      ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
+	      break 3
+	    fi
+	  fi
+	fi
+      done
+    done
+    ;;
+esac
+
+  done
+IFS=$as_save_IFS
+
+rm -rf conftest.one conftest.two conftest.dir
+
+fi
+  if test "${ac_cv_path_install+set}" = set; then
+    INSTALL=$ac_cv_path_install
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for INSTALL within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    INSTALL=$ac_install_sh
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
+$as_echo "$INSTALL" >&6; }
+
+# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
+# It thinks the first close brace ends the variable substitution.
+test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
+
+test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
+
+test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5
+$as_echo_n "checking whether build environment is sane... " >&6; }
+# Just in case
+sleep 1
+echo timestamp > conftest.file
+# Reject unsafe characters in $srcdir or the absolute working directory
+# name.  Accept space and tab only in the latter.
+am_lf='
+'
+case `pwd` in
+  *[\\\"\#\$\&\'\`$am_lf]*)
+    as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;;
+esac
+case $srcdir in
+  *[\\\"\#\$\&\'\`$am_lf\ \	]*)
+    as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;;
+esac
+
+# Do `set' in a subshell so we don't clobber the current shell's
+# arguments.  Must try -L first in case configure is actually a
+# symlink; some systems play weird games with the mod time of symlinks
+# (eg FreeBSD returns the mod time of the symlink's containing
+# directory).
+if (
+   set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
+   if test "$*" = "X"; then
+      # -L didn't work.
+      set X `ls -t "$srcdir/configure" conftest.file`
+   fi
+   rm -f conftest.file
+   if test "$*" != "X $srcdir/configure conftest.file" \
+      && test "$*" != "X conftest.file $srcdir/configure"; then
+
+      # If neither matched, then we have a broken ls.  This can happen
+      # if, for instance, CONFIG_SHELL is bash and it inherits a
+      # broken ls alias from the environment.  This has actually
+      # happened.  Such a system could not be considered "sane".
+      as_fn_error $? "ls -t appears to fail.  Make sure there is not a broken
+alias in your environment" "$LINENO" 5
+   fi
+
+   test "$2" = conftest.file
+   )
+then
+   # Ok.
+   :
+else
+   as_fn_error $? "newly created file is older than distributed files!
+Check your system clock" "$LINENO" 5
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+test "$program_prefix" != NONE &&
+  program_transform_name="s&^&$program_prefix&;$program_transform_name"
+# Use a double $ so make ignores it.
+test "$program_suffix" != NONE &&
+  program_transform_name="s&\$&$program_suffix&;$program_transform_name"
+# Double any \ or $.
+# By default was `s,x,x', remove it if useless.
+ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
+program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
+
+# expand $ac_aux_dir to an absolute path
+am_aux_dir=`cd $ac_aux_dir && pwd`
+
+if test x"${MISSING+set}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+  *)
+    MISSING="\${SHELL} $am_aux_dir/missing" ;;
+  esac
+fi
+# Use eval to expand $SHELL
+if eval "$MISSING --run true"; then
+  am_missing_run="$MISSING --run "
+else
+  am_missing_run=
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5
+$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;}
+fi
+
+if test x"${install_sh}" != xset; then
+  case $am_aux_dir in
+  *\ * | *\	*)
+    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
+  *)
+    install_sh="\${SHELL} $am_aux_dir/install-sh"
+  esac
+fi
+
+# Installed binaries are usually stripped using `strip' when the user
+# runs `make install-strip'.  However `strip' might not be the right
+# tool to use in cross-compilation environments, therefore Automake
+# will honor the `STRIP' environment variable to overrule this program.
+if test "$cross_compiling" != no; then
+  if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+set dummy ${ac_tool_prefix}strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$STRIP"; then
+  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+STRIP=$ac_cv_prog_STRIP
+if test -n "$STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
+$as_echo "$STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_STRIP"; then
+  ac_ct_STRIP=$STRIP
+  # Extract the first word of "strip", so it can be a program name with args.
+set dummy strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_STRIP"; then
+  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_STRIP="strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
+if test -n "$ac_ct_STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
+$as_echo "$ac_ct_STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_STRIP" = x; then
+    STRIP=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    STRIP=$ac_ct_STRIP
+  fi
+else
+  STRIP="$ac_cv_prog_STRIP"
+fi
+
+fi
+INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5
+$as_echo_n "checking for a thread-safe mkdir -p... " >&6; }
+if test -z "$MKDIR_P"; then
+  if ${ac_cv_path_mkdir+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in mkdir gmkdir; do
+	 for ac_exec_ext in '' $ac_executable_extensions; do
+	   { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue
+	   case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
+	     'mkdir (GNU coreutils) '* | \
+	     'mkdir (coreutils) '* | \
+	     'mkdir (fileutils) '4.1*)
+	       ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
+	       break 3;;
+	   esac
+	 done
+       done
+  done
+IFS=$as_save_IFS
+
+fi
+
+  test -d ./--version && rmdir ./--version
+  if test "${ac_cv_path_mkdir+set}" = set; then
+    MKDIR_P="$ac_cv_path_mkdir -p"
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for MKDIR_P within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    MKDIR_P="$ac_install_sh -d"
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5
+$as_echo "$MKDIR_P" >&6; }
+
+mkdir_p="$MKDIR_P"
+case $mkdir_p in
+  [\\/$]* | ?:[\\/]*) ;;
+  */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
+esac
+
+for ac_prog in gawk mawk nawk awk
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AWK+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AWK"; then
+  ac_cv_prog_AWK="$AWK" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AWK="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AWK=$ac_cv_prog_AWK
+if test -n "$AWK"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5
+$as_echo "$AWK" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$AWK" && break
+done
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
+$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
+set x ${MAKE-make}
+ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
+if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat >conftest.make <<\_ACEOF
+SHELL = /bin/sh
+all:
+	@echo '@@@%%%=$(MAKE)=@@@%%%'
+_ACEOF
+# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
+case `${MAKE-make} -f conftest.make 2>/dev/null` in
+  *@@@%%%=?*=@@@%%%*)
+    eval ac_cv_prog_make_${ac_make}_set=yes;;
+  *)
+    eval ac_cv_prog_make_${ac_make}_set=no;;
+esac
+rm -f conftest.make
+fi
+if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+  SET_MAKE=
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+  SET_MAKE="MAKE=${MAKE-make}"
+fi
+
+rm -rf .tst 2>/dev/null
+mkdir .tst 2>/dev/null
+if test -d .tst; then
+  am__leading_dot=.
+else
+  am__leading_dot=_
+fi
+rmdir .tst 2>/dev/null
+
+if test "`cd $srcdir && pwd`" != "`pwd`"; then
+  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
+  # is not polluted with repeated "-I."
+  am__isrc=' -I$(srcdir)'
+  # test to see if srcdir already configured
+  if test -f $srcdir/config.status; then
+    as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5
+  fi
+fi
+
+# test whether we have cygpath
+if test -z "$CYGPATH_W"; then
+  if (cygpath --version) >/dev/null 2>/dev/null; then
+    CYGPATH_W='cygpath -w'
+  else
+    CYGPATH_W=echo
+  fi
+fi
+
+
+# Define the identity of the package.
+ PACKAGE='libltdl'
+ VERSION='2.4.2'
+
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE "$PACKAGE"
+_ACEOF
+
+
+cat >>confdefs.h <<_ACEOF
+#define VERSION "$VERSION"
+_ACEOF
+
+# Some tools Automake needs.
+
+ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"}
+
+
+AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"}
+
+
+AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"}
+
+
+AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"}
+
+
+MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
+
+# We need awk for the "check" target.  The system "awk" is bad on
+# some platforms.
+# Always define AMTAR for backward compatibility.
+
+AMTAR=${AMTAR-"${am_missing_run}tar"}
+
+am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'
+
+
+
+
+
+
+
+## ------------------------------- ##
+## Libtool specific configuration. ##
+## ------------------------------- ##
+pkgdatadir='${datadir}'"/${PACKAGE}"
+
+
+## ----------------------- ##
+## Libtool initialisation. ##
+## ----------------------- ##
+case `pwd` in
+  *\ * | *\	*)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5
+$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;;
+esac
+
+
+
+macro_version='2.4.2'
+macro_revision='1.3337'
+
+
+
+
+
+
+
+
+
+
+
+
+
+ltmain="$ac_aux_dir/ltmain.sh"
+
+# Make sure we can run config.sub.
+$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
+  as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
+$as_echo_n "checking build system type... " >&6; }
+if ${ac_cv_build+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_build_alias=$build_alias
+test "x$ac_build_alias" = x &&
+  ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
+test "x$ac_build_alias" = x &&
+  as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
+ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
+  as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
+$as_echo "$ac_cv_build" >&6; }
+case $ac_cv_build in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
+esac
+build=$ac_cv_build
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_build
+shift
+build_cpu=$1
+build_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+build_os=$*
+IFS=$ac_save_IFS
+case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
+$as_echo_n "checking host system type... " >&6; }
+if ${ac_cv_host+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test "x$host_alias" = x; then
+  ac_cv_host=$ac_cv_build
+else
+  ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
+    as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
+$as_echo "$ac_cv_host" >&6; }
+case $ac_cv_host in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
+esac
+host=$ac_cv_host
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_host
+shift
+host_cpu=$1
+host_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+host_os=$*
+IFS=$ac_save_IFS
+case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
+
+
+# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+
+ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+$as_echo_n "checking how to print strings... " >&6; }
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO ""
+}
+
+case "$ECHO" in
+  printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5
+$as_echo "printf" >&6; } ;;
+  print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5
+$as_echo "print -r" >&6; } ;;
+  *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5
+$as_echo "cat" >&6; } ;;
+esac
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+DEPDIR="${am__leading_dot}deps"
+
+ac_config_commands="$ac_config_commands depfiles"
+
+
+am_make=${MAKE-make}
+cat > confinc << 'END'
+am__doit:
+	@echo this is the am__doit target
+.PHONY: am__doit
+END
+# If we don't find an include directive, just comment out the code.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
+$as_echo_n "checking for style of include used by $am_make... " >&6; }
+am__include="#"
+am__quote=
+_am_result=none
+# First try GNU make style include.
+echo "include confinc" > confmf
+# Ignore all kinds of additional output from `make'.
+case `$am_make -s -f confmf 2> /dev/null` in #(
+*the\ am__doit\ target*)
+  am__include=include
+  am__quote=
+  _am_result=GNU
+  ;;
+esac
+# Now try BSD make style include.
+if test "$am__include" = "#"; then
+   echo '.include "confinc"' > confmf
+   case `$am_make -s -f confmf 2> /dev/null` in #(
+   *the\ am__doit\ target*)
+     am__include=.include
+     am__quote="\""
+     _am_result=BSD
+     ;;
+   esac
+fi
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
+$as_echo "$_am_result" >&6; }
+rm -f confinc confmf
+
+# Check whether --enable-dependency-tracking was given.
+if test "${enable_dependency_tracking+set}" = set; then :
+  enableval=$enable_dependency_tracking;
+fi
+
+if test "x$enable_dependency_tracking" != xno; then
+  am_depcomp="$ac_aux_dir/depcomp"
+  AMDEPBACKSLASH='\'
+fi
+ if test "x$enable_dependency_tracking" != xno; then
+  AMDEP_TRUE=
+  AMDEP_FALSE='#'
+else
+  AMDEP_TRUE='#'
+  AMDEP_FALSE=
+fi
+
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_CC"; then
+  ac_ct_CC=$CC
+  # Extract the first word of "gcc", so it can be a program name with args.
+set dummy gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+else
+  CC="$ac_cv_prog_CC"
+fi
+
+if test -z "$CC"; then
+          if test -n "$ac_tool_prefix"; then
+    # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  fi
+fi
+if test -z "$CC"; then
+  # Extract the first word of "cc", so it can be a program name with args.
+set dummy cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+  ac_prog_rejected=no
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
+       ac_prog_rejected=yes
+       continue
+     fi
+    ac_cv_prog_CC="cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+if test $ac_prog_rejected = yes; then
+  # We found a bogon in the path, so make sure we never use it.
+  set dummy $ac_cv_prog_CC
+  shift
+  if test $# != 0; then
+    # We chose a different compiler from the bogus one.
+    # However, it has the same basename, so the bogon will be chosen
+    # first if we set CC to just the basename; use the full file name.
+    shift
+    ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
+  fi
+fi
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$CC"; then
+  if test -n "$ac_tool_prefix"; then
+  for ac_prog in cl.exe
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$CC" && break
+  done
+fi
+if test -z "$CC"; then
+  ac_ct_CC=$CC
+  for ac_prog in cl.exe
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_CC" && break
+done
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+fi
+
+fi
+
+
+test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "no acceptable C compiler found in \$PATH
+See \`config.log' for more details" "$LINENO" 5; }
+
+# Provide some information about the compiler.
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
+set X $ac_compile
+ac_compiler=$2
+for ac_option in --version -v -V -qversion; do
+  { { ac_try="$ac_compiler $ac_option >&5"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    sed '10a\
+... rest of stderr output deleted ...
+         10q' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+  fi
+  rm -f conftest.er1 conftest.err
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+done
+
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
+# Try to create an executable without -o first, disregard a.out.
+# It helps us diagnose broken compilers and gives a first guess at the
+# executable extension (exeext).
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5
+$as_echo_n "checking whether the C compiler works... " >&6; }
+ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
+
+# The possible output files:
+ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
+
+ac_rmfiles=
+for ac_file in $ac_files
+do
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+    * ) ac_rmfiles="$ac_rmfiles $ac_file";;
+  esac
+done
+rm -f $ac_rmfiles
+
+if { { ac_try="$ac_link_default"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link_default") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
+# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
+# in a Makefile.  We should not override ac_cv_exeext if it was cached,
+# so that the user can short-circuit this test for compilers unknown to
+# Autoconf.
+for ac_file in $ac_files ''
+do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
+	;;
+    [ab].out )
+	# We found the default executable, but exeext='' is most
+	# certainly right.
+	break;;
+    *.* )
+	if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
+	then :; else
+	   ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+	fi
+	# We set ac_cv_exeext here because the later test for it is not
+	# safe: cross compilers may not add the suffix if given an `-o'
+	# argument, so we may need to know it at that point already.
+	# Even if this section looks crufty: it has the advantage of
+	# actually working.
+	break;;
+    * )
+	break;;
+  esac
+done
+test "$ac_cv_exeext" = no && ac_cv_exeext=
+
+else
+  ac_file=''
+fi
+if test -z "$ac_file"; then :
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+$as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error 77 "C compiler cannot create executables
+See \`config.log' for more details" "$LINENO" 5; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5
+$as_echo_n "checking for C compiler default output file name... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
+$as_echo "$ac_file" >&6; }
+ac_exeext=$ac_cv_exeext
+
+rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
+ac_clean_files=$ac_clean_files_save
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
+$as_echo_n "checking for suffix of executables... " >&6; }
+if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  # If both `conftest.exe' and `conftest' are `present' (well, observable)
+# catch `conftest.exe'.  For instance with Cygwin, `ls conftest' will
+# work properly (i.e., refer to `conftest.exe'), while it won't with
+# `rm'.
+for ac_file in conftest.exe conftest conftest.*; do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+    *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+	  break;;
+    * ) break;;
+  esac
+done
+else
+  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of executables: cannot compile and link
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest conftest$ac_cv_exeext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
+$as_echo "$ac_cv_exeext" >&6; }
+
+rm -f conftest.$ac_ext
+EXEEXT=$ac_cv_exeext
+ac_exeext=$EXEEXT
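The executable-suffix logic above can be summarized as: compile a trivial program without -o, then look at which output files appear; the default names a.out/b.out mean there is no suffix, while any other dotted name yields the suffix. A small sketch under the assumption that a C compiler is reachable as cc:

# Sketch of the exeext detection; assumes "cc" exists.
cat > conftest.c <<'EOF'
int main (void) { return 0; }
EOF
rm -f a.out b.out a.exe conftest.exe
cc conftest.c >/dev/null 2>&1
exeext=
for f in a.out b.out a.exe conftest.exe; do
  test -f "$f" || continue
  case $f in
    a.out | b.out) exeext= ; break ;;                      # default name, no suffix
    *.*) exeext=`expr "$f" : '[^.]*\(\..*\)'`; break ;;    # e.g. ".exe"
  esac
done
echo "executable suffix: '$exeext'"
rm -f conftest.c a.out b.out a.exe conftest.exe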
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdio.h>
+int
+main ()
+{
+FILE *f = fopen ("conftest.out", "w");
+ return ferror (f) || fclose (f) != 0;
+
+  ;
+  return 0;
+}
+_ACEOF
+ac_clean_files="$ac_clean_files conftest.out"
+# Check that the compiler produces executables we can run.  If not, either
+# the compiler is broken, or we cross compile.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
+$as_echo_n "checking whether we are cross compiling... " >&6; }
+if test "$cross_compiling" != yes; then
+  { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+  if { ac_try='./conftest$ac_cv_exeext'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then
+    cross_compiling=no
+  else
+    if test "$cross_compiling" = maybe; then
+	cross_compiling=yes
+    else
+	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot run C compiled programs.
+If you meant to cross compile, use \`--host'.
+See \`config.log' for more details" "$LINENO" 5; }
+    fi
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
+$as_echo "$cross_compiling" >&6; }
+
+rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
+ac_clean_files=$ac_clean_files_save
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
+$as_echo_n "checking for suffix of object files... " >&6; }
+if ${ac_cv_objext+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+rm -f conftest.o conftest.obj
+if { { ac_try="$ac_compile"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compile") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  for ac_file in conftest.o conftest.obj conftest.*; do
+  test -f "$ac_file" || continue;
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
+    *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
+       break;;
+  esac
+done
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of object files: cannot compile
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest.$ac_cv_objext conftest.$ac_ext
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
+$as_echo "$ac_cv_objext" >&6; }
+OBJEXT=$ac_cv_objext
+ac_objext=$OBJEXT
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
+$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
+if ${ac_cv_c_compiler_gnu+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+#ifndef __GNUC__
+       choke me
+#endif
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_compiler_gnu=yes
+else
+  ac_compiler_gnu=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ac_cv_c_compiler_gnu=$ac_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
+$as_echo "$ac_cv_c_compiler_gnu" >&6; }
+if test $ac_compiler_gnu = yes; then
+  GCC=yes
+else
+  GCC=
+fi
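The GNU-compiler test relies on the preprocessor rather than on version strings: a translation unit that fails unless __GNUC__ is defined compiles only under GCC-compatible compilers. A standalone sketch, assuming CC names a C compiler (defaulting to cc here):

# Sketch of the __GNUC__ probe.
: ${CC=cc}
cat > conftest.c <<'EOF'
#ifndef __GNUC__
# error "not a GNU-compatible C compiler"
#endif
int main (void) { return 0; }
EOF
if $CC -c conftest.c >/dev/null 2>&1; then gnu=yes; else gnu=no; fi
echo "GNU C compiler: $gnu"
rm -f conftest.c conftest.o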
+ac_test_CFLAGS=${CFLAGS+set}
+ac_save_CFLAGS=$CFLAGS
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
+$as_echo_n "checking whether $CC accepts -g... " >&6; }
+if ${ac_cv_prog_cc_g+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_save_c_werror_flag=$ac_c_werror_flag
+   ac_c_werror_flag=yes
+   ac_cv_prog_cc_g=no
+   CFLAGS="-g"
+   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+else
+  CFLAGS=""
+      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+  ac_c_werror_flag=$ac_save_c_werror_flag
+	 CFLAGS="-g"
+	 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+   ac_c_werror_flag=$ac_save_c_werror_flag
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
+$as_echo "$ac_cv_prog_cc_g" >&6; }
+if test "$ac_test_CFLAGS" = set; then
+  CFLAGS=$ac_save_CFLAGS
+elif test $ac_cv_prog_cc_g = yes; then
+  if test "$GCC" = yes; then
+    CFLAGS="-g -O2"
+  else
+    CFLAGS="-g"
+  fi
+else
+  if test "$GCC" = yes; then
+    CFLAGS="-O2"
+  else
+    CFLAGS=
+  fi
+fi
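The -g check compiles an empty program with CFLAGS=-g while temporarily treating warnings as errors, so compilers that merely warn about -g are not misdetected; the final CFLAGS choice then depends on whether the user had preset CFLAGS. A much simplified sketch that skips the warnings-as-errors refinement (CC defaults to cc here):

# Simplified sketch of the "-g" acceptance probe.
: ${CC=cc}
echo 'int main (void) { return 0; }' > conftest.c
if $CC -g -c conftest.c >/dev/null 2>&1; then
  echo "$CC accepts -g"
else
  echo "$CC does not accept -g"
fi
rm -f conftest.c conftest.o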
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
+$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
+if ${ac_cv_prog_cc_c89+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_cv_prog_cc_c89=no
+ac_save_CC=$CC
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdarg.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+     char **p;
+     int i;
+{
+  return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+  char *s;
+  va_list v;
+  va_start (v,p);
+  s = g (p, va_arg (v,int));
+  va_end (v);
+  return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
+   function prototypes and stuff, but not '\xHH' hex character constants.
+   These don't provoke an error unfortunately, instead are silently treated
+   as 'x'.  The following induces an error, until -std is added to get
+   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
+   array size at least.  It's necessary to write '\x00'==0 to get something
+   that's true only with -std.  */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+   inside strings and character constants.  */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;
+int
+main ()
+{
+return f (e, argv, 0) != argv[0]  ||  f (e, argv, 1) != argv[1];
+  ;
+  return 0;
+}
+_ACEOF
+for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
+	-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
+do
+  CC="$ac_save_CC $ac_arg"
+  if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_c89=$ac_arg
+fi
+rm -f core conftest.err conftest.$ac_objext
+  test "x$ac_cv_prog_cc_c89" != "xno" && break
+done
+rm -f conftest.$ac_ext
+CC=$ac_save_CC
+
+fi
+# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_c89" in
+  x)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
+$as_echo "none needed" >&6; } ;;
+  xno)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
+$as_echo "unsupported" >&6; } ;;
+  *)
+    CC="$CC $ac_cv_prog_cc_c89"
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
+$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
+esac
+if test "x$ac_cv_prog_cc_c89" != xno; then :
+
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
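The C89 loop compiles one program that exercises prototypes, varargs and other C89-isms with each candidate option in turn ('' first, then vendor-specific flags) and keeps the first option that compiles. A sketch with a much smaller test program and an abbreviated, purely illustrative flag list:

# Sketch of the "option to accept ISO C89" search; flag list is illustrative.
: ${CC=cc}
cat > conftest.c <<'EOF'
#include <stdarg.h>
static int add (int a, int b) { return a + b; }
int main (void) { return add (1, 2) == 3 ? 0 : 1; }
EOF
found=unsupported
for flag in '' -std=c89 -qlanglvl=extc89 -Ae; do
  if $CC $flag -c conftest.c >/dev/null 2>&1; then
    found=$flag
    test -n "$found" || found='none needed'
    break
  fi
done
echo "C89 option: $found"
rm -f conftest.c conftest.o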
+
+depcc="$CC"   am_compiler_list=
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
+$as_echo_n "checking dependency style of $depcc... " >&6; }
+if ${am_cv_CC_dependencies_compiler_type+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named `D' -- because `-MD' means `put the output
+  # in D'.
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_CC_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
+  fi
+  am__universal=false
+  case " $depcc " in #(
+     *\ -arch\ *\ -arch\ *) am__universal=true ;;
+     esac
+
+  for depmode in $am_compiler_list; do
+    # Setup a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
+      # Solaris 8's {/usr,}/bin/sh.
+      touch sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    # We check with `-c' and `-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle `-M -o', and we need to detect this.  Also, some Intel
+    # versions had trouble with output in subdirs
+    am__obj=sub/conftest.${OBJEXT-o}
+    am__minus_obj="-o $am__obj"
+    case $depmode in
+    gcc)
+      # This depmode causes a compiler race in universal mode.
+      test "$am__universal" = false || continue
+      ;;
+    nosideeffect)
+      # after this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    msvisualcpp | msvcmsys)
+      # This compiler won't grok `-c -o', but also, the minuso test has
+      # not run yet.  These depmodes are late enough in the game, and
+      # so weak that their functioning should not be impacted.
+      am__obj=conftest.${OBJEXT-o}
+      am__minus_obj=
+      ;;
+    none) break ;;
+    esac
+    if depmode=$depmode \
+       source=sub/conftest.c object=$am__obj \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options, it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_CC_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_CC_dependencies_compiler_type=none
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
+$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
+CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
+
+ if
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
+  am__fastdepCC_TRUE=
+  am__fastdepCC_FALSE='#'
+else
+  am__fastdepCC_TRUE='#'
+  am__fastdepCC_FALSE=
+fi
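The dependency-style probe above asks, for each depmode, whether depcomp can produce a makefile fragment naming every header the test source includes. The core question reduces to whether the compiler can emit such a fragment as a side effect of compilation; a sketch of that reduced question for gcc-compatible compilers only (-MD/-MF are gcc-style options, and the real depcomp handles many more styles):

# Sketch: can the compiler write a dependency file naming included headers?
: ${CC=cc}
mkdir conftest.dir && cd conftest.dir || exit 1
echo '#define ONE 1' > conftst1.h
printf '#include "conftst1.h"\nint main (void) { return ONE - 1; }\n' > conftest.c
if $CC -MD -MF conftest.d -c conftest.c >/dev/null 2>&1 &&
   grep conftst1.h conftest.d >/dev/null 2>&1; then
  echo "gcc-style dependency tracking works"
else
  echo "no gcc-style dependency tracking detected by this sketch"
fi
cd .. && rm -rf conftest.dir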
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5
+$as_echo_n "checking for a sed that does not truncate output... " >&6; }
+if ${ac_cv_path_SED+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+            ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
+     for ac_i in 1 2 3 4 5 6 7; do
+       ac_script="$ac_script$as_nl$ac_script"
+     done
+     echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed
+     { ac_script=; unset ac_script;}
+     if test -z "$SED"; then
+  ac_path_SED_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_SED="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue
+# Check for GNU ac_path_SED and select it if it is found.
+  # Check for GNU $ac_path_SED
+case `"$ac_path_SED" --version 2>&1` in
+*GNU*)
+  ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo '' >> "conftest.nl"
+    "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_SED_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_SED="$ac_path_SED"
+      ac_path_SED_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_SED_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_SED"; then
+    as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5
+  fi
+else
+  ac_cv_path_SED=$SED
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5
+$as_echo "$ac_cv_path_SED" >&6; }
+ SED="$ac_cv_path_SED"
+  rm -f conftest.sed
+
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
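The sed check builds an ever longer input line by repeated doubling and keeps the candidate that survives the most doublings without corrupting its output (a GNU sed wins immediately via --version). A sketch of the doubling loop against a single sed, capped at ten doublings like the check above:

# Sketch of the "sed that does not truncate output" doubling test.
SED=${SED-sed}
printf 0123456789 > conftest.in
count=0
while test $count -lt 10; do
  cat conftest.in conftest.in > conftest.tmp && mv conftest.tmp conftest.in
  cp conftest.in conftest.nl && echo >> conftest.nl
  "$SED" -e 's/a/b/' < conftest.nl > conftest.out 2>/dev/null || break
  diff conftest.out conftest.nl >/dev/null 2>&1 || break       # truncated?
  count=`expr $count + 1`
done
echo "$SED survived $count doublings"
rm -f conftest.in conftest.tmp conftest.nl conftest.out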
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
+$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
+if ${ac_cv_path_GREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$GREP"; then
+  ac_path_GREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in grep ggrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue
+# Check for GNU ac_path_GREP and select it if it is found.
+  # Check for GNU $ac_path_GREP
+case `"$ac_path_GREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'GREP' >> "conftest.nl"
+    "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_GREP_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_GREP="$ac_path_GREP"
+      ac_path_GREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_GREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_GREP"; then
+    as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_GREP=$GREP
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5
+$as_echo "$ac_cv_path_GREP" >&6; }
+ GREP="$ac_cv_path_GREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
+$as_echo_n "checking for egrep... " >&6; }
+if ${ac_cv_path_EGREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
+   then ac_cv_path_EGREP="$GREP -E"
+   else
+     if test -z "$EGREP"; then
+  ac_path_EGREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in egrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue
+# Check for GNU ac_path_EGREP and select it if it is found.
+  # Check for GNU $ac_path_EGREP
+case `"$ac_path_EGREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'EGREP' >> "conftest.nl"
+    "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_EGREP_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_EGREP="$ac_path_EGREP"
+      ac_path_EGREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_EGREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_EGREP"; then
+    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_EGREP=$EGREP
+fi
+
+   fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
+$as_echo "$ac_cv_path_EGREP" >&6; }
+ EGREP="$ac_cv_path_EGREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5
+$as_echo_n "checking for fgrep... " >&6; }
+if ${ac_cv_path_FGREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
+   then ac_cv_path_FGREP="$GREP -F"
+   else
+     if test -z "$FGREP"; then
+  ac_path_FGREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in fgrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue
+# Check for GNU ac_path_FGREP and select it if it is found.
+  # Check for GNU $ac_path_FGREP
+case `"$ac_path_FGREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'FGREP' >> "conftest.nl"
+    "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_FGREP_max-0}; then
+      # Best one so far, save it but keep looking for a better one
+      ac_cv_path_FGREP="$ac_path_FGREP"
+      ac_path_FGREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_FGREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_FGREP"; then
+    as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_FGREP=$FGREP
+fi
+
+   fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5
+$as_echo "$ac_cv_path_FGREP" >&6; }
+ FGREP="$ac_cv_path_FGREP"
+
+
+test -z "$GREP" && GREP=grep
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# Check whether --with-gnu-ld was given.
+if test "${with_gnu_ld+set}" = set; then :
+  withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes
+else
+  with_gnu_ld=no
+fi
+
+ac_prog=ld
+if test "$GCC" = yes; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5
+$as_echo_n "checking for ld used by $CC... " >&6; }
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [\\/]* | ?:[\\/]*)
+      re_direlt='/[^/][^/]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
+      test -z "$LD" && LD="$ac_prog"
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test "$with_gnu_ld" = yes; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5
+$as_echo_n "checking for GNU ld... " >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5
+$as_echo_n "checking for non-GNU ld... " >&6; }
+fi
+if ${lt_cv_path_LD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$LD"; then
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD="$ac_dir/$ac_prog"
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+	test "$with_gnu_ld" != no && break
+	;;
+      *)
+	test "$with_gnu_ld" != yes && break
+	;;
+      esac
+    fi
+  done
+  IFS="$lt_save_ifs"
+else
+  lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi
+fi
+
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5
+$as_echo "$LD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5
+$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; }
+if ${lt_cv_prog_gnu_ld+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  # I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5
+$as_echo "$lt_cv_prog_gnu_ld" >&6; }
+with_gnu_ld=$lt_cv_prog_gnu_ld
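When GCC is in use, the linker search asks the compiler which ld it would invoke (-print-prog-name=ld), canonicalizes the path, and then classifies the result as GNU or non-GNU by whether its -v output mentions GNU or BFD. A compressed sketch, assuming a gcc-compatible compiler named gcc and an absolute printed path:

# Sketch of the GNU ld classification.
: ${CC=gcc}
ld_prog=`$CC -print-prog-name=ld 2>/dev/null`
case `"$ld_prog" -v 2>&1 </dev/null` in
  *GNU* | *'with BFD'*) echo "$ld_prog is GNU ld" ;;
  *)                    echo "$ld_prog is not GNU ld" ;;
esac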
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5
+$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; }
+if ${lt_cv_path_NM+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM="$NM"
+else
+  lt_nm_to_check="${ac_tool_prefix}nm"
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS="$lt_save_ifs"
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm="$ac_dir/$lt_tmp_nm"
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+	# Check to see if the nm accepts a BSD-compat flag.
+	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
+	#   nm: unknown option "B" ignored
+	# Tru64's nm complains that /dev/null is an invalid object file
+	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+	*/dev/null* | *'Invalid file or object type'*)
+	  lt_cv_path_NM="$tmp_nm -B"
+	  break
+	  ;;
+	*)
+	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+	  */dev/null*)
+	    lt_cv_path_NM="$tmp_nm -p"
+	    break
+	    ;;
+	  *)
+	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+	    continue # so that we can try to find one that supports BSD flags
+	    ;;
+	  esac
+	  ;;
+	esac
+      fi
+    done
+    IFS="$lt_save_ifs"
+  done
+  : ${lt_cv_path_NM=no}
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5
+$as_echo "$lt_cv_path_NM" >&6; }
+if test "$lt_cv_path_NM" != "no"; then
+  NM="$lt_cv_path_NM"
+else
+  # Didn't find any BSD compatible name lister, look for dumpbin.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    if test -n "$ac_tool_prefix"; then
+  for ac_prog in dumpbin "link -dump"
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DUMPBIN+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DUMPBIN"; then
+  ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DUMPBIN=$ac_cv_prog_DUMPBIN
+if test -n "$DUMPBIN"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5
+$as_echo "$DUMPBIN" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$DUMPBIN" && break
+  done
+fi
+if test -z "$DUMPBIN"; then
+  ac_ct_DUMPBIN=$DUMPBIN
+  for ac_prog in dumpbin "link -dump"
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DUMPBIN"; then
+  ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DUMPBIN="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN
+if test -n "$ac_ct_DUMPBIN"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5
+$as_echo "$ac_ct_DUMPBIN" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_DUMPBIN" && break
+done
+
+  if test "x$ac_ct_DUMPBIN" = x; then
+    DUMPBIN=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DUMPBIN=$ac_ct_DUMPBIN
+  fi
+fi
+
+    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+
+  if test "$DUMPBIN" != ":"; then
+    NM="$DUMPBIN"
+  fi
+fi
+test -z "$NM" && NM=nm
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5
+$as_echo_n "checking the name lister ($NM) interface... " >&6; }
+if ${lt_cv_nm_interface+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&5
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&5
+  (eval echo "\"\$as_me:$LINENO: output\"" >&5)
+  cat conftest.out >&5
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -f conftest*
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5
+$as_echo "$lt_cv_nm_interface" >&6; }
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5
+$as_echo_n "checking whether ln -s works... " >&6; }
+LN_S=$as_ln_s
+if test "$LN_S" = "ln -s"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5
+$as_echo "no, using $LN_S" >&6; }
+fi
+
+# find the maximum length of command line arguments
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5
+$as_echo_n "checking the maximum length of command line arguments... " >&6; }
+if ${lt_cv_sys_max_cmd_len+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+    i=0
+  teststring="ABCD"
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum linelength reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+    # This has been around since 386BSD, at least.  Likely further.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  os2*)
+    # The test takes a long time on OS/2.
+    lt_cv_sys_max_cmd_len=8192
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+    # nice to cause kernel panics so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[	 ]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # a 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8 ; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \
+	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+	      test $i != 17 # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+
+fi
+
+if test -n "$lt_cv_sys_max_cmd_len" ; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5
+$as_echo "$lt_cv_sys_max_cmd_len" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5
+$as_echo "none" >&6; }
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
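On platforms without a trusted getconf ARG_MAX, the fallback measures the limit empirically: keep doubling a test string until spawning "env echo" with it stops round-tripping (or a cap of roughly half a megabyte is reached), then halve the measured length as a safety factor for linker command lines. The doubling loop on its own:

# Sketch of the empirical command-line length measurement.
teststring=ABCD
i=0
while test $i -lt 17; do                      # cap near 1/2 MB, as above
  out=`env echo "$teststring$teststring" 2>/dev/null`
  test "X$out" = "X$teststring$teststring" || break
  teststring=$teststring$teststring
  i=`expr $i + 1`
done
len=`expr "X$teststring" : ".*" 2>/dev/null`
echo "usable command length (with 1/2 safety factor): `expr $len / 2`"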
+
+
+
+
+
+
+: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5
+$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; }
+# Try some XSI features
+xsi_shell=no
+( _lt_dummy="a/b/c"
+  test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
+      = c,a/b,b/c, \
+    && eval 'test $(( 1 + 1 )) -eq 2 \
+    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+  && xsi_shell=yes
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5
+$as_echo "$xsi_shell" >&6; }
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5
+$as_echo_n "checking whether the shell understands \"+=\"... " >&6; }
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \
+    >/dev/null 2>&1 \
+  && lt_shell_append=yes
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5
+$as_echo "$lt_shell_append" >&6; }
+
+
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+
+
+
+
+
+# test EBCDIC or ASCII
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
+if ${lt_cv_to_host_file_cmd+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+        ;;
+    esac
+    ;;
+  *-*-cygwin* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_noop
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+        ;;
+    esac
+    ;;
+  * ) # unhandled hosts (and "normal" native builds)
+    lt_cv_to_host_file_cmd=func_convert_file_noop
+    ;;
+esac
+
+fi
+
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
+if ${lt_cv_to_tool_file_cmd+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  # Assume ordinary cross tools, or native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+        ;;
+    esac
+    ;;
+esac
+
+fi
+
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+$as_echo_n "checking for $LD option to reload object files... " >&6; }
+if ${lt_cv_ld_reload_flag+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_reload_flag='-r'
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5
+$as_echo "$lt_cv_ld_reload_flag" >&6; }
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    if test "$GCC" != yes; then
+      reload_cmds=false
+    fi
+    ;;
+  darwin*)
+    if test "$GCC" = yes; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
+
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args.
+set dummy ${ac_tool_prefix}objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OBJDUMP"; then
+  ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OBJDUMP=$ac_cv_prog_OBJDUMP
+if test -n "$OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5
+$as_echo "$OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OBJDUMP"; then
+  ac_ct_OBJDUMP=$OBJDUMP
+  # Extract the first word of "objdump", so it can be a program name with args.
+set dummy objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OBJDUMP"; then
+  ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OBJDUMP="objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP
+if test -n "$ac_ct_OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5
+$as_echo "$ac_ct_OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OBJDUMP" = x; then
+    OBJDUMP="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OBJDUMP=$ac_ct_OBJDUMP
+  fi
+else
+  OBJDUMP="$ac_cv_prog_OBJDUMP"
+fi
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5
+$as_echo_n "checking how to recognize dependent libraries... " >&6; }
+if ${lt_cv_deplibs_check_method+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# 'unknown' -- same as 'none', but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making a test program.
+# 'file_magic [[regex]]' -- check by looking for files in the library path
+# that respond to the $file_magic_cmd with the given extended regex.
+# If you have 'file' or equivalent on your system and you're not sure
+# whether 'pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[4-9]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[45]*)
+  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[3-9]*)
+  # PIC code is broken on Interix 3.x; that's why we use |\.a rather than |_pic\.a here
+  lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+$as_echo "$lt_cv_deplibs_check_method" >&6; }
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+  case $host_os in
+  mingw* | pw32*)
+    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+      want_nocaseglob=yes
+    else
+      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
+    fi
+    ;;
+  esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DLLTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DLLTOOL"; then
+  ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DLLTOOL=$ac_cv_prog_DLLTOOL
+if test -n "$DLLTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
+$as_echo "$DLLTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_DLLTOOL"; then
+  ac_ct_DLLTOOL=$DLLTOOL
+  # Extract the first word of "dlltool", so it can be a program name with args.
+set dummy dlltool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DLLTOOL"; then
+  ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DLLTOOL="dlltool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
+if test -n "$ac_ct_DLLTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
+$as_echo "$ac_ct_DLLTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_DLLTOOL" = x; then
+    DLLTOOL="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DLLTOOL=$ac_ct_DLLTOOL
+  fi
+else
+  DLLTOOL="$ac_cv_prog_DLLTOOL"
+fi
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_sharedlib_from_linklib_cmd='unknown'
+
+case $host_os in
+cygwin* | mingw* | pw32* | cegcc*)
+  # Two different shell functions are defined in ltmain.sh;
+  # decide which to use based on the capabilities of $DLLTOOL.
+  case `$DLLTOOL --help 2>&1` in
+  *--identify-strict*)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
+    ;;
+  *)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
+    ;;
+  esac
+  ;;
+*)
+  # fallback: assume linklib IS sharedlib
+  lt_cv_sharedlib_from_linklib_cmd="$ECHO"
+  ;;
+esac
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  for ac_prog in ar
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AR"; then
+  ac_cv_prog_AR="$AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AR=$ac_cv_prog_AR
+if test -n "$AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
+$as_echo "$AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$AR" && break
+  done
+fi
+if test -z "$AR"; then
+  ac_ct_AR=$AR
+  for ac_prog in ar
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_AR"; then
+  ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_AR="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_AR=$ac_cv_prog_ac_ct_AR
+if test -n "$ac_ct_AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
+$as_echo "$ac_ct_AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_AR" && break
+done
+
+  if test "x$ac_ct_AR" = x; then
+    AR="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    AR=$ac_ct_AR
+  fi
+fi
+
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
+$as_echo_n "checking for archiver @FILE support... " >&6; }
+if ${lt_cv_ar_at_file+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ar_at_file=no
+   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  echo conftest.$ac_objext > conftest.lst
+      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
+      { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
+  (eval $lt_ar_try) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+      if test "$ac_status" -eq 0; then
+	# Ensure the archiver fails upon bogus file names.
+	rm -f conftest.$ac_objext libconftest.a
+	{ { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
+  (eval $lt_ar_try) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+	if test "$ac_status" -ne 0; then
+          lt_cv_ar_at_file=@
+        fi
+      fi
+      rm -f conftest.* libconftest.a
+
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
+$as_echo "$lt_cv_ar_at_file" >&6; }
+
+if test "x$lt_cv_ar_at_file" = xno; then
+  archiver_list_spec=
+else
+  archiver_list_spec=$lt_cv_ar_at_file
+fi
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+set dummy ${ac_tool_prefix}strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$STRIP"; then
+  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+STRIP=$ac_cv_prog_STRIP
+if test -n "$STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
+$as_echo "$STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_STRIP"; then
+  ac_ct_STRIP=$STRIP
+  # Extract the first word of "strip", so it can be a program name with args.
+set dummy strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_STRIP"; then
+  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_STRIP="strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
+if test -n "$ac_ct_STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
+$as_echo "$ac_ct_STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_STRIP" = x; then
+    STRIP=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    STRIP=$ac_ct_STRIP
+  fi
+else
+  STRIP="$ac_cv_prog_STRIP"
+fi
+
+test -z "$STRIP" && STRIP=:
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args.
+set dummy ${ac_tool_prefix}ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$RANLIB"; then
+  ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+RANLIB=$ac_cv_prog_RANLIB
+if test -n "$RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5
+$as_echo "$RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_RANLIB"; then
+  ac_ct_RANLIB=$RANLIB
+  # Extract the first word of "ranlib", so it can be a program name with args.
+set dummy ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_RANLIB"; then
+  ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_RANLIB="ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB
+if test -n "$ac_ct_RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5
+$as_echo "$ac_ct_RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_RANLIB" = x; then
+    RANLIB=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    RANLIB=$ac_ct_RANLIB
+  fi
+else
+  RANLIB="$ac_cv_prog_RANLIB"
+fi
+
+test -z "$RANLIB" && RANLIB=:
+
+
+
+
+
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
+fi
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5
+$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; }
+if ${lt_cv_sys_global_symbol_pipe+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[BCDEGRST]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([_A-Za-z][_A-Za-z0-9]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[BCDT]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[ABCDGISTW]'
+  ;;
+hpux*)
+  if test "$host_cpu" = ia64; then
+    symcode='[ABCDEGRST]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[BCDEGRST]'
+  ;;
+osf*)
+  symcode='[BCDEGQRST]'
+  ;;
+solaris*)
+  symcode='[BDRT]'
+  ;;
+sco3.2v5*)
+  symcode='[DT]'
+  ;;
+sysv4.2uw2*)
+  symcode='[DT]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[ABDT]'
+  ;;
+sysv4)
+  symcode='[DFNSTU]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[ABCDGIRSTW]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function
+    # and D for any global variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK '"\
+"     {last_section=section; section=\$ 3};"\
+"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[	 ]\($symcode$symcode*\)[	 ][	 ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5
+  (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+	mv -f "$nlist"T "$nlist"
+      else
+	rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+	  cat <<_LT_EOF > conftest.$ac_ext
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 cannot be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+	  # Now generate the symbol file.
+	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+	  cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+LT_DLSYM_CONST struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+	  cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+	  # Now try linking the two files.
+	  mv conftest.$ac_objext conftstm.$ac_objext
+	  lt_globsym_save_LIBS=$LIBS
+	  lt_globsym_save_CFLAGS=$CFLAGS
+	  LIBS="conftstm.$ac_objext"
+	  CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+	    pipe_works=yes
+	  fi
+	  LIBS=$lt_globsym_save_LIBS
+	  CFLAGS=$lt_globsym_save_CFLAGS
+	else
+	  echo "cannot find nm_test_func in $nlist" >&5
+	fi
+      else
+	echo "cannot find nm_test_var in $nlist" >&5
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5
+    fi
+  else
+    echo "$progname: failed program was:" >&5
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test "$pipe_works" = yes; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+
+fi
+
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5
+$as_echo "failed" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5
+$as_echo "ok" >&6; }
+fi
+
+# Response file support.
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+  nm_file_list_spec='@'
+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
+  nm_file_list_spec='@'
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
+$as_echo_n "checking for sysroot... " >&6; }
+
+# Check whether --with-sysroot was given.
+if test "${with_sysroot+set}" = set; then :
+  withval=$with_sysroot;
+else
+  with_sysroot=no
+fi
+
+
+lt_sysroot=
+case ${with_sysroot} in #(
+ yes)
+   if test "$GCC" = yes; then
+     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+   fi
+   ;; #(
+ /*)
+   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+   ;; #(
+ no|'')
+   ;; #(
+ *)
+   { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5
+$as_echo "${with_sysroot}" >&6; }
+   as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
+   ;;
+esac
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
+$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+
+
+# Check whether --enable-libtool-lock was given.
+if test "${enable_libtool_lock+set}" = set; then :
+  enableval=$enable_libtool_lock;
+fi
+
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+	HPUX_IA64_MODE="32"
+	;;
+      *ELF-64*)
+	HPUX_IA64_MODE="64"
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out which ABI we are using.
+  echo '#line '$LINENO' "configure"' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    if test "$lt_cv_prog_gnu_ld" = yes; then
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -melf32bsmip"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -melf32bmipn32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -melf64bmip"
+	;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -32"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -n32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -64"
+	  ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_i386_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_i386"
+	    ;;
+	  ppc64-*linux*|powerpc64-*linux*)
+	    LD="${LD-ld} -m elf32ppclinux"
+	    ;;
+	  s390x-*linux*)
+	    LD="${LD-ld} -m elf_s390"
+	    ;;
+	  sparc64-*linux*)
+	    LD="${LD-ld} -m elf32_sparc"
+	    ;;
+	esac
+	;;
+      *64-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_x86_64_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_x86_64"
+	    ;;
+	  ppc*-*linux*|powerpc*-*linux*)
+	    LD="${LD-ld} -m elf64ppc"
+	    ;;
+	  s390*-*linux*|s390*-*tpf*)
+	    LD="${LD-ld} -m elf64_s390"
+	    ;;
+	  sparc*-*linux*)
+	    LD="${LD-ld} -m elf64_sparc"
+	    ;;
+	esac
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS="$CFLAGS"
+  CFLAGS="$CFLAGS -belf"
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5
+$as_echo_n "checking whether the C compiler needs -belf... " >&6; }
+if ${lt_cv_cc_needs_belf+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+     cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_cc_needs_belf=yes
+else
+  lt_cv_cc_needs_belf=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+     ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
+$as_echo "$lt_cv_cc_needs_belf" >&6; }
+  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+    CFLAGS="$SAVE_CFLAGS"
+  fi
+  ;;
+*-*solaris*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*)
+        case $host in
+        i?86-*-solaris*)
+          LD="${LD-ld} -m elf_x86_64"
+          ;;
+        sparc*-*-solaris*)
+          LD="${LD-ld} -m elf64_sparc"
+          ;;
+        esac
+        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
+        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
+          LD="${LD-ld}_sol2"
+        fi
+        ;;
+      *)
+	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+	  LD="${LD-ld} -64"
+	fi
+	;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
+
+need_locks="$enable_libtool_lock"
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
+set dummy ${ac_tool_prefix}mt; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$MANIFEST_TOOL"; then
+  ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
+if test -n "$MANIFEST_TOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
+$as_echo "$MANIFEST_TOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
+  ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
+  # Extract the first word of "mt", so it can be a program name with args.
+set dummy mt; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_MANIFEST_TOOL"; then
+  ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
+if test -n "$ac_ct_MANIFEST_TOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_MANIFEST_TOOL" = x; then
+    MANIFEST_TOOL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
+  fi
+else
+  MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
+fi
+
+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
+if ${lt_cv_path_mainfest_tool+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_path_mainfest_tool=no
+  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
+  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
+  cat conftest.err >&5
+  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
+    lt_cv_path_mainfest_tool=yes
+  fi
+  rm -f conftest*
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
+if test "x$lt_cv_path_mainfest_tool" != xyes; then
+  MANIFEST_TOOL=:
+fi
+
+
+
+
+
+
+  case $host_os in
+    rhapsody* | darwin*)
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args.
+set dummy ${ac_tool_prefix}dsymutil; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DSYMUTIL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DSYMUTIL"; then
+  ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DSYMUTIL=$ac_cv_prog_DSYMUTIL
+if test -n "$DSYMUTIL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5
+$as_echo "$DSYMUTIL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_DSYMUTIL"; then
+  ac_ct_DSYMUTIL=$DSYMUTIL
+  # Extract the first word of "dsymutil", so it can be a program name with args.
+set dummy dsymutil; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DSYMUTIL"; then
+  ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DSYMUTIL="dsymutil"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL
+if test -n "$ac_ct_DSYMUTIL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5
+$as_echo "$ac_ct_DSYMUTIL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_DSYMUTIL" = x; then
+    DSYMUTIL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DSYMUTIL=$ac_ct_DSYMUTIL
+  fi
+else
+  DSYMUTIL="$ac_cv_prog_DSYMUTIL"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args.
+set dummy ${ac_tool_prefix}nmedit; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_NMEDIT+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$NMEDIT"; then
+  ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+NMEDIT=$ac_cv_prog_NMEDIT
+if test -n "$NMEDIT"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5
+$as_echo "$NMEDIT" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_NMEDIT"; then
+  ac_ct_NMEDIT=$NMEDIT
+  # Extract the first word of "nmedit", so it can be a program name with args.
+set dummy nmedit; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_NMEDIT"; then
+  ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_NMEDIT="nmedit"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT
+if test -n "$ac_ct_NMEDIT"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5
+$as_echo "$ac_ct_NMEDIT" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_NMEDIT" = x; then
+    NMEDIT=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    NMEDIT=$ac_ct_NMEDIT
+  fi
+else
+  NMEDIT="$ac_cv_prog_NMEDIT"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args.
+set dummy ${ac_tool_prefix}lipo; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_LIPO+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$LIPO"; then
+  ac_cv_prog_LIPO="$LIPO" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_LIPO="${ac_tool_prefix}lipo"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+LIPO=$ac_cv_prog_LIPO
+if test -n "$LIPO"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5
+$as_echo "$LIPO" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_LIPO"; then
+  ac_ct_LIPO=$LIPO
+  # Extract the first word of "lipo", so it can be a program name with args.
+set dummy lipo; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_LIPO+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_LIPO"; then
+  ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_LIPO="lipo"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO
+if test -n "$ac_ct_LIPO"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5
+$as_echo "$ac_ct_LIPO" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_LIPO" = x; then
+    LIPO=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    LIPO=$ac_ct_LIPO
+  fi
+else
+  LIPO="$ac_cv_prog_LIPO"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args.
+set dummy ${ac_tool_prefix}otool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OTOOL"; then
+  ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OTOOL="${ac_tool_prefix}otool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OTOOL=$ac_cv_prog_OTOOL
+if test -n "$OTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5
+$as_echo "$OTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OTOOL"; then
+  ac_ct_OTOOL=$OTOOL
+  # Extract the first word of "otool", so it can be a program name with args.
+set dummy otool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OTOOL"; then
+  ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OTOOL="otool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL
+if test -n "$ac_ct_OTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5
+$as_echo "$ac_ct_OTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OTOOL" = x; then
+    OTOOL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OTOOL=$ac_ct_OTOOL
+  fi
+else
+  OTOOL="$ac_cv_prog_OTOOL"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args.
+set dummy ${ac_tool_prefix}otool64; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OTOOL64+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OTOOL64"; then
+  ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OTOOL64=$ac_cv_prog_OTOOL64
+if test -n "$OTOOL64"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5
+$as_echo "$OTOOL64" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OTOOL64"; then
+  ac_ct_OTOOL64=$OTOOL64
+  # Extract the first word of "otool64", so it can be a program name with args.
+set dummy otool64; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OTOOL64"; then
+  ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OTOOL64="otool64"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64
+if test -n "$ac_ct_OTOOL64"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5
+$as_echo "$ac_ct_OTOOL64" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OTOOL64" = x; then
+    OTOOL64=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OTOOL64=$ac_ct_OTOOL64
+  fi
+else
+  OTOOL64="$ac_cv_prog_OTOOL64"
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5
+$as_echo_n "checking for -single_module linker flag... " >&6; }
+if ${lt_cv_apple_cc_single_mod+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_apple_cc_single_mod=no
+      if test -z "${LT_MULTI_MODULE}"; then
+	# By default we will add the -single_module flag. You can override
+	# by either setting the environment variable LT_MULTI_MODULE
+	# non-empty at configure time, or by adding -multi_module to the
+	# link flags.
+	rm -rf libconftest.dylib*
+	echo "int foo(void){return 1;}" > conftest.c
+	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&5
+	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+	# If there is a non-empty error log, and "single_module"
+	# appears in it, assume the flag caused a linker warning
+        if test -s conftest.err && $GREP single_module conftest.err; then
+	  cat conftest.err >&5
+	# Otherwise, if the output was created with a 0 exit code from
+	# the compiler, it worked.
+	elif test -f libconftest.dylib && test $_lt_result -eq 0; then
+	  lt_cv_apple_cc_single_mod=yes
+	else
+	  cat conftest.err >&5
+	fi
+	rm -rf libconftest.dylib*
+	rm -f conftest.*
+      fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5
+$as_echo "$lt_cv_apple_cc_single_mod" >&6; }
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5
+$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; }
+if ${lt_cv_ld_exported_symbols_list+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_ld_exported_symbols_list=yes
+else
+  lt_cv_ld_exported_symbols_list=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+	LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5
+$as_echo "$lt_cv_ld_exported_symbols_list" >&6; }
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5
+$as_echo_n "checking for -force_load linker flag... " >&6; }
+if ${lt_cv_ld_force_load+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+      echo "$AR cru libconftest.a conftest.o" >&5
+      $AR cru libconftest.a conftest.o 2>&5
+      echo "$RANLIB libconftest.a" >&5
+      $RANLIB libconftest.a 2>&5
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -s conftest.err && $GREP force_load conftest.err; then
+	cat conftest.err >&5
+      elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then
+	lt_cv_ld_force_load=yes
+      else
+	cat conftest.err >&5
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5
+$as_echo "$lt_cv_ld_force_load" >&6; }
+    case $host_os in
+    rhapsody* | darwin1.[012])
+      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x on
+      # if running on 10.5 or later, the deployment target defaults
+      # to the OS version, if on x86, and 10.4, the deployment
+      # target defaults to 10.4. Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+	10.0,*86*-darwin8*|10.0,*-darwin[91]*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+	10.[012]*)
+	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+	10.*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+    fi
+    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
+$as_echo_n "checking how to run the C preprocessor... " >&6; }
+# On Suns, sometimes $CPP names a directory.
+if test -n "$CPP" && test -d "$CPP"; then
+  CPP=
+fi
+if test -z "$CPP"; then
+  if ${ac_cv_prog_CPP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+      # Double quotes because CPP needs to be expanded
+    for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
+    do
+      ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+  # Use a header file that comes with gcc, so configuring glibc
+  # with a fresh cross-compiler works.
+  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+  # <limits.h> exists even on freestanding compilers.
+  # On the NeXT, cc -E runs the code through the compiler's parser,
+  # not just through cpp. "Syntax error" is here to catch this case.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+		     Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+  # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+  # OK, works on sane cases.  Now check whether nonexistent headers
+  # can be detected and how.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+  # Broken: success on invalid input.
+continue
+else
+  # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+  break
+fi
+
+    done
+    ac_cv_prog_CPP=$CPP
+
+fi
+  CPP=$ac_cv_prog_CPP
+else
+  ac_cv_prog_CPP=$CPP
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
+$as_echo "$CPP" >&6; }
+ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+  # Use a header file that comes with gcc, so configuring glibc
+  # with a fresh cross-compiler works.
+  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+  # <limits.h> exists even on freestanding compilers.
+  # On the NeXT, cc -E runs the code through the compiler's parser,
+  # not just through cpp. "Syntax error" is here to catch this case.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+		     Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+  # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+  # OK, works on sane cases.  Now check whether nonexistent headers
+  # can be detected and how.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+  # Broken: success on invalid input.
+continue
+else
+  # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+
+else
+  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
+$as_echo_n "checking for ANSI C header files... " >&6; }
+if ${ac_cv_header_stdc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_header_stdc=yes
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+if test $ac_cv_header_stdc = yes; then
+  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <string.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "memchr" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "free" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+  if test "$cross_compiling" = yes; then :
+  :
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+		   (('a' <= (c) && (c) <= 'i') \
+		     || ('j' <= (c) && (c) <= 'r') \
+		     || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
+#endif
+
+#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
+int
+main ()
+{
+  int i;
+  for (i = 0; i < 256; i++)
+    if (XOR (islower (i), ISLOWER (i))
+	|| toupper (i) != TOUPPER (i))
+      return 2;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_run "$LINENO"; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+  conftest.$ac_objext conftest.beam conftest.$ac_ext
+fi
+
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
+$as_echo "$ac_cv_header_stdc" >&6; }
+if test $ac_cv_header_stdc = yes; then
+
+$as_echo "#define STDC_HEADERS 1" >>confdefs.h
+
+fi
+
+# On IRIX 5.3, sys/types and inttypes.h are conflicting.
+for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
+		  inttypes.h stdint.h unistd.h
+do :
+  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
+ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
+"
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
+_ACEOF
+
+fi
+
+done
+
+
+for ac_header in dlfcn.h
+do :
+  ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default
+"
+if test "x$ac_cv_header_dlfcn_h" = xyes; then :
+  cat >>confdefs.h <<_ACEOF
+#define HAVE_DLFCN_H 1
+_ACEOF
+
+fi
+
+done
+
+
+
+
+
+# Set options
+enable_dlopen=yes
+enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+  if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}as", so it can be a program name with args.
+set dummy ${ac_tool_prefix}as; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AS+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AS"; then
+  ac_cv_prog_AS="$AS" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AS="${ac_tool_prefix}as"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AS=$ac_cv_prog_AS
+if test -n "$AS"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AS" >&5
+$as_echo "$AS" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_AS"; then
+  ac_ct_AS=$AS
+  # Extract the first word of "as", so it can be a program name with args.
+set dummy as; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_AS+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_AS"; then
+  ac_cv_prog_ac_ct_AS="$ac_ct_AS" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_AS="as"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_AS=$ac_cv_prog_ac_ct_AS
+if test -n "$ac_ct_AS"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AS" >&5
+$as_echo "$ac_ct_AS" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_AS" = x; then
+    AS="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    AS=$ac_ct_AS
+  fi
+else
+  AS="$ac_cv_prog_AS"
+fi
+
+  if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DLLTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DLLTOOL"; then
+  ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DLLTOOL=$ac_cv_prog_DLLTOOL
+if test -n "$DLLTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
+$as_echo "$DLLTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_DLLTOOL"; then
+  ac_ct_DLLTOOL=$DLLTOOL
+  # Extract the first word of "dlltool", so it can be a program name with args.
+set dummy dlltool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DLLTOOL"; then
+  ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DLLTOOL="dlltool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
+if test -n "$ac_ct_DLLTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
+$as_echo "$ac_ct_DLLTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_DLLTOOL" = x; then
+    DLLTOOL="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DLLTOOL=$ac_ct_DLLTOOL
+  fi
+else
+  DLLTOOL="$ac_cv_prog_DLLTOOL"
+fi
+
+  if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args.
+set dummy ${ac_tool_prefix}objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OBJDUMP"; then
+  ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OBJDUMP=$ac_cv_prog_OBJDUMP
+if test -n "$OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5
+$as_echo "$OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OBJDUMP"; then
+  ac_ct_OBJDUMP=$OBJDUMP
+  # Extract the first word of "objdump", so it can be a program name with args.
+set dummy objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OBJDUMP"; then
+  ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OBJDUMP="objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP
+if test -n "$ac_ct_OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5
+$as_echo "$ac_ct_OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OBJDUMP" = x; then
+    OBJDUMP="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OBJDUMP=$ac_ct_OBJDUMP
+  fi
+else
+  OBJDUMP="$ac_cv_prog_OBJDUMP"
+fi
+
+  ;;
+esac
+
+test -z "$AS" && AS=as
+
+
+
+
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+
+
+
+
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+
+
+
+
+
+
+
+
+
+            # Check whether --enable-shared was given.
+if test "${enable_shared+set}" = set; then :
+  enableval=$enable_shared; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_shared=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_shared=yes
+fi
+
+
+
+
+
+
+
+
+
+  # Check whether --enable-static was given.
+if test "${enable_static+set}" = set; then :
+  enableval=$enable_static; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+     enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_static=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_static=yes
+fi
+
+
+
+
+
+
+
+
+
+
+# Check whether --with-pic was given.
+if test "${with_pic+set}" = set; then :
+  withval=$with_pic; lt_p=${PACKAGE-default}
+    case $withval in
+    yes|no) pic_mode=$withval ;;
+    *)
+      pic_mode=default
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for lt_pkg in $withval; do
+	IFS="$lt_save_ifs"
+	if test "X$lt_pkg" = "X$lt_p"; then
+	  pic_mode=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  pic_mode=default
+fi
+
+
+test -z "$pic_mode" && pic_mode=default
+
+
+
+
+
+
+
+  # Check whether --enable-fast-install was given.
+if test "${enable_fast_install+set}" = set; then :
+  enableval=$enable_fast_install; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_fast_install=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_fast_install=yes
+fi
+
+
+
+
+
+
+
+
+
+
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+test -z "$LN_S" && LN_S="ln -s"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+if test -n "${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5
+$as_echo_n "checking for objdir... " >&6; }
+if ${lt_cv_objdir+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5
+$as_echo "$lt_cv_objdir" >&6; }
+objdir=$lt_cv_objdir
+
+
+
+
+
+cat >>confdefs.h <<_ACEOF
+#define LT_OBJDIR "$lt_cv_objdir/"
+_ACEOF
+
+
+
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test "X${COLLECT_NAMES+set}" != Xset; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+for cc_temp in $compiler""; do
+  case $cc_temp in
+    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
+    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
+    \-*) ;;
+    *) break;;
+  esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5
+$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; }
+if ${lt_cv_path_MAGIC_CMD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $MAGIC_CMD in
+[\\/*] |  ?:[\\/]*)
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/${ac_tool_prefix}file; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac
+fi
+
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
+$as_echo "$MAGIC_CMD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+
+
+
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5
+$as_echo_n "checking for file... " >&6; }
+if ${lt_cv_path_MAGIC_CMD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $MAGIC_CMD in
+[\\/*] |  ?:[\\/]*)
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/file; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/file"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac
+fi
+
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
+$as_echo "$MAGIC_CMD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  else
+    MAGIC_CMD=:
+  fi
+fi
+
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+
+lt_save_CC="$CC"
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+objext=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+
+
+
+
+
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+
+lt_prog_compiler_no_builtin_flag=
+
+if test "$GCC" = yes; then
+  case $cc_basename in
+  nvcc*)
+    lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;;
+  *)
+    lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;;
+  esac
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5
+$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; }
+if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_rtti_exceptions=no
+   ac_outfile=conftest.$ac_objext
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="-fno-rtti -fno-exceptions"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_rtti_exceptions=yes
+     fi
+   fi
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5
+$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; }
+
+if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then
+    lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions"
+else
+    :
+fi
+
+fi
+
+
+
+
+
+
+  lt_prog_compiler_wl=
+lt_prog_compiler_pic=
+lt_prog_compiler_static=
+
+
+  if test "$GCC" = yes; then
+    lt_prog_compiler_wl='-Wl,'
+    lt_prog_compiler_static='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	lt_prog_compiler_static='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            lt_prog_compiler_pic='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, still need this for old-style
+      # (--disable-auto-import) libraries
+      lt_prog_compiler_pic='-DDLL_EXPORT'
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      lt_prog_compiler_pic='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      lt_prog_compiler_static=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	# +Z the default
+	;;
+      *)
+	lt_prog_compiler_pic='-fPIC'
+	;;
+      esac
+      ;;
+
+    interix[3-9]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      lt_prog_compiler_can_build_shared=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but need to define -shared option too, otherwise
+      # it will coredump.
+      lt_prog_compiler_pic='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	lt_prog_compiler_pic=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      lt_prog_compiler_pic='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      lt_prog_compiler_wl='-Xlinker '
+      if test -n "$lt_prog_compiler_pic"; then
+        lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic"
+      fi
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      lt_prog_compiler_wl='-Wl,'
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	lt_prog_compiler_static='-Bstatic'
+      else
+	lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      lt_prog_compiler_pic='-DDLL_EXPORT'
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      lt_prog_compiler_wl='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+	# +Z the default
+	;;
+      *)
+	lt_prog_compiler_pic='+Z'
+	;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      lt_prog_compiler_static='${wl}-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      lt_prog_compiler_wl='-Wl,'
+      # PIC (with -KPIC) is the default.
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu)
+      case $cc_basename in
+      # old Intel for x86_64 which still supported -KPIC.
+      ecc*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-KPIC'
+	lt_prog_compiler_static='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-fPIC'
+	lt_prog_compiler_static='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='--shared'
+	lt_prog_compiler_static='--static'
+	;;
+      nagfor*)
+	# NAG Fortran compiler
+	lt_prog_compiler_wl='-Wl,-Wl,,'
+	lt_prog_compiler_pic='-PIC'
+	lt_prog_compiler_static='-Bstatic'
+	;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+	# which looks to be a dead project)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-fpic'
+	lt_prog_compiler_static='-Bstatic'
+        ;;
+      ccc*)
+        lt_prog_compiler_wl='-Wl,'
+        # All Alpha code is PIC.
+        lt_prog_compiler_static='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-qpic'
+	lt_prog_compiler_static='-qstaticlink'
+	;;
+      *)
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*)
+	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl=''
+	  ;;
+	*Sun\ F* | *Sun*Fortran*)
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl='-Qoption ld '
+	  ;;
+	*Sun\ C*)
+	  # Sun C 5.9
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl='-Wl,'
+	  ;;
+        *Intel*\ [CF]*Compiler*)
+	  lt_prog_compiler_wl='-Wl,'
+	  lt_prog_compiler_pic='-fPIC'
+	  lt_prog_compiler_static='-static'
+	  ;;
+	*Portland\ Group*)
+	  lt_prog_compiler_wl='-Wl,'
+	  lt_prog_compiler_pic='-fpic'
+	  lt_prog_compiler_static='-Bstatic'
+	  ;;
+	esac
+	;;
+      esac
+      ;;
+
+    newsos6)
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but need to define -shared option too, otherwise
+      # it will coredump.
+      lt_prog_compiler_pic='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      lt_prog_compiler_wl='-Wl,'
+      # All OSF/1 code is PIC.
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    rdos*)
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    solaris*)
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+	lt_prog_compiler_wl='-Qoption ld ';;
+      *)
+	lt_prog_compiler_wl='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      lt_prog_compiler_wl='-Qoption ld '
+      lt_prog_compiler_pic='-PIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec ;then
+	lt_prog_compiler_pic='-Kconform_pic'
+	lt_prog_compiler_static='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    unicos*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_can_build_shared=no
+      ;;
+
+    uts4*)
+      lt_prog_compiler_pic='-pic'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    *)
+      lt_prog_compiler_can_build_shared=no
+      ;;
+    esac
+  fi
+
+case $host_os in
+  # For platforms which do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    lt_prog_compiler_pic=
+    ;;
+  *)
+    lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+    ;;
+esac
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+if ${lt_cv_prog_compiler_pic+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$lt_prog_compiler_pic"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5
+$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; }
+if ${lt_cv_prog_compiler_pic_works+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_pic_works=no
+   ac_outfile=conftest.$ac_objext
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$lt_prog_compiler_pic -DPIC"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_pic_works=yes
+     fi
+   fi
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5
+$as_echo "$lt_cv_prog_compiler_pic_works" >&6; }
+
+if test x"$lt_cv_prog_compiler_pic_works" = xyes; then
+    case $lt_prog_compiler_pic in
+     "" | " "*) ;;
+     *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;;
+     esac
+else
+    lt_prog_compiler_pic=
+     lt_prog_compiler_can_build_shared=no
+fi
+
+fi
+
+
+
+
+
+
+
+
+
+
+
+#
+# Check to make sure the static flag actually works.
+#
+wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
+$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
+if ${lt_cv_prog_compiler_static_works+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_static_works=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&5
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         lt_cv_prog_compiler_static_works=yes
+       fi
+     else
+       lt_cv_prog_compiler_static_works=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5
+$as_echo "$lt_cv_prog_compiler_static_works" >&6; }
+
+if test x"$lt_cv_prog_compiler_static_works" = xyes; then
+    :
+else
+    lt_prog_compiler_static=
+fi
+
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
+$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
+if ${lt_cv_prog_compiler_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_c_o=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_c_o=yes
+     fi
+   fi
+   chmod u+w . 2>&5
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
+$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
+$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
+if ${lt_cv_prog_compiler_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_c_o=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_c_o=yes
+     fi
+   fi
+   chmod u+w . 2>&5
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
+$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
+
+
+
+
+hard_links="nottested"
+if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then
+  # do not overwrite the value of need_locks provided by the user
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
+$as_echo_n "checking if we can lock with hard links... " >&6; }
+  hard_links=yes
+  $RM conftest*
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
+$as_echo "$hard_links" >&6; }
+  if test "$hard_links" = no; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
+$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
+$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
+
+  runpath_var=
+  allow_undefined_flag=
+  always_export_symbols=no
+  archive_cmds=
+  archive_expsym_cmds=
+  compiler_needs_object=no
+  enable_shared_with_static_runtimes=no
+  export_dynamic_flag_spec=
+  export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  hardcode_automatic=no
+  hardcode_direct=no
+  hardcode_direct_absolute=no
+  hardcode_libdir_flag_spec=
+  hardcode_libdir_separator=
+  hardcode_minus_L=no
+  hardcode_shlibpath_var=unsupported
+  inherit_rpath=no
+  link_all_deplibs=unknown
+  module_cmds=
+  module_expsym_cmds=
+  old_archive_from_new_cmds=
+  old_archive_from_expsyms_cmds=
+  thread_safe_flag_spec=
+  whole_archive_flag_spec=
+  # include_expsyms should be a list of space-separated symbols to be *always*
+  # included in the symbol list
+  include_expsyms=
+  # exclude_expsyms can be an extended regexp of symbols to exclude
+  # it will be wrapped by ` (' and `)$', so one must not match beginning or
+  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
+  # as well as any symbol that contains `d'.
+  exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test "$GCC" != yes; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd*)
+    with_gnu_ld=no
+    ;;
+  esac
+
+  ld_shlibs=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test "$with_gnu_ld" = yes; then
+    case $host_os in
+      aix*)
+	# The AIX port of GNU ld has always aspired to compatibility
+	# with the native linker.  However, as the warning in the GNU ld
+	# block says, versions before 2.19.5* couldn't really create working
+	# shared libraries, regardless of the interface used.
+	case `$LD -v 2>&1` in
+	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+	  *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;;
+	  *\ \(GNU\ Binutils\)\ [3-9]*) ;;
+	  *)
+	    lt_use_gnu_ld_interface=yes
+	    ;;
+	esac
+	;;
+      *)
+	lt_use_gnu_ld_interface=yes
+	;;
+    esac
+  fi
+
+  if test "$lt_use_gnu_ld_interface" = yes; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='${wl}'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+    export_dynamic_flag_spec='${wl}--export-dynamic'
+    # ancient GNU ld didn't support --whole-archive et. al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+    else
+      whole_archive_flag_spec=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[3-9]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test "$host_cpu" != ia64; then
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            archive_expsym_cmds=''
+        ;;
+      m68k)
+            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            hardcode_libdir_flag_spec='-L$libdir'
+            hardcode_minus_L=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	allow_undefined_flag=unsupported
+	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	# support --undefined.  This deserves some investigation.  FIXME
+	archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless,
+      # as there is no search path for DLLs.
+      hardcode_libdir_flag_spec='-L$libdir'
+      export_dynamic_flag_spec='${wl}--export-all-symbols'
+      allow_undefined_flag=unsupported
+      always_export_symbols=no
+      enable_shared_with_static_runtimes=yes
+      export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
+      exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	# If the export-symbols file already is a .def file (1st line
+	# is EXPORTS), use it as is; otherwise, prepend...
+	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	  cp $export_symbols $output_objdir/$soname.def;
+	else
+	  echo EXPORTS > $output_objdir/$soname.def;
+	  cat $export_symbols >> $output_objdir/$soname.def;
+	fi~
+	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    haiku*)
+      archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      link_all_deplibs=yes
+      ;;
+
+    interix[3-9]*)
+      hardcode_direct=no
+      hardcode_shlibpath_var=no
+      hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+      export_dynamic_flag_spec='${wl}-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very memory-
+      # consuming, and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+      archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test "$host_os" = linux-dietlibc; then
+	case $cc_basename in
+	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
+	esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+	 && test "$tmp_diet" = no
+      then
+	tmp_addflag=' $pic_flag'
+	tmp_sharedflag='-shared'
+	case $cc_basename,$host_cpu in
+        pgcc*)				# Portland Group C compiler
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag'
+	  ;;
+	pgf77* | pgf90* | pgf95* | pgfortran*)
+					# Portland Group f77 and f90 compilers
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag -Mnomain' ;;
+	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
+	  tmp_addflag=' -i_dynamic' ;;
+	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
+	  tmp_addflag=' -i_dynamic -nofor_main' ;;
+	ifc* | ifort*)			# Intel Fortran compiler
+	  tmp_addflag=' -nofor_main' ;;
+	lf95*)				# Lahey Fortran 8.1
+	  whole_archive_flag_spec=
+	  tmp_sharedflag='--shared' ;;
+	xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+	  tmp_sharedflag='-qmkshrobj'
+	  tmp_addflag= ;;
+	nvcc*)	# Cuda Compiler Driver 2.2
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  compiler_needs_object=yes
+	  ;;
+	esac
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ C*)			# Sun C 5.9
+	  whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  compiler_needs_object=yes
+	  tmp_sharedflag='-G' ;;
+	*Sun\ F*)			# Sun Fortran 8.3
+	  tmp_sharedflag='-G' ;;
+	esac
+	archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+        if test "x$supports_anon_versioning" = xyes; then
+          archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	    echo "local: *; };" >> $output_objdir/$libname.ver~
+	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+        fi
+
+	case $cc_basename in
+	xlf* | bgf* | bgxlf* | mpixlf*)
+	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+	  whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+	  hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+	  archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+	  if test "x$supports_anon_versioning" = xyes; then
+	    archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	      echo "local: *; };" >> $output_objdir/$libname.ver~
+	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+	  fi
+	  ;;
+	esac
+      else
+        ld_shlibs=no
+      fi
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+	wlarc=
+      else
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+	;;
+	*)
+	  # For security reasons, it is highly recommended that you always
+	  # use absolute paths for naming shared libraries, and exclude the
+	  # DT_RUNPATH tag from executables and libraries.  But doing so
+	  # requires that you compile everything twice, which is a pain.
+	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+	    archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+	  else
+	    ld_shlibs=no
+	  fi
+	;;
+      esac
+      ;;
+
+    sunos4*)
+      archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+    esac
+
+    if test "$ld_shlibs" = no; then
+      runpath_var=
+      hardcode_libdir_flag_spec=
+      export_dynamic_flag_spec=
+      whole_archive_flag_spec=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      allow_undefined_flag=unsupported
+      always_export_symbols=yes
+      archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      hardcode_minus_L=yes
+      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+	# Neither direct hardcoding nor static linking is supported with a
+	# broken collect2.
+	hardcode_direct=unsupported
+      fi
+      ;;
+
+    aix[4-9]*)
+      if test "$host_cpu" = ia64; then
+	# On IA64, the linker does run time linking by default, so we don't
+	# have to do anything special.
+	aix_use_runtimelinking=no
+	exp_sym_flag='-Bexport'
+	no_entry_flag=""
+      else
+	# If we're using GNU nm, then we don't want the "-C" option.
+	# -C means demangle to AIX nm, but to GNU nm it means don't demangle.
+	# Also, AIX nm treats weak defined symbols like other global
+	# defined symbols, whereas GNU nm marks them as "W".
+	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+	  export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	else
+	  export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	fi
+	aix_use_runtimelinking=no
+
+	# Test if we are trying to use run time linking or normal
+	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
+	# need to do runtime linking.
+	case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
+	  for ld_flag in $LDFLAGS; do
+	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+	    aix_use_runtimelinking=yes
+	    break
+	  fi
+	  done
+	  ;;
+	esac
+
+	exp_sym_flag='-bexport'
+	no_entry_flag='-bnoentry'
+      fi
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow" add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      archive_cmds=''
+      hardcode_direct=yes
+      hardcode_direct_absolute=yes
+      hardcode_libdir_separator=':'
+      link_all_deplibs=yes
+      file_list_spec='${wl}-f,'
+
+      if test "$GCC" = yes; then
+	case $host_os in aix4.[012]|aix4.[012].*)
+	# We only want to do this on AIX 4.2 and lower, the check
+	# below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	  # We have reworked collect2
+	  :
+	  else
+	  # We have old collect2
+	  hardcode_direct=unsupported
+	  # It fails to find uninstalled libraries when the uninstalled
+	  # path is not listed in the libpath.  Setting hardcode_minus_L
+	  # to unsupported forces relinking
+	  hardcode_minus_L=yes
+	  hardcode_libdir_flag_spec='-L$libdir'
+	  hardcode_libdir_separator=
+	  fi
+	  ;;
+	esac
+	shared_flag='-shared'
+	if test "$aix_use_runtimelinking" = yes; then
+	  shared_flag="$shared_flag "'${wl}-G'
+	fi
+      else
+	# not using gcc
+	if test "$host_cpu" = ia64; then
+	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	# chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+	else
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag='${wl}-G'
+	  else
+	    shared_flag='${wl}-bM:SRE'
+	  fi
+	fi
+      fi
+
+      export_dynamic_flag_spec='${wl}-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      always_export_symbols=yes
+      if test "$aix_use_runtimelinking" = yes; then
+	# Warning - without using the other runtime loading flags (-brtl),
+	# -berok will link without error, but may produce a broken library.
+	allow_undefined_flag='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
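+        # (The probe links a trivial program and reads the "Import File
+        # Strings" section via dump -H, retrying with -HX64 for 64-bit
+        # objects; /usr/lib:/lib is used as a fallback.)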
+        if test "${lt_cv_aix_libpath+set}" = set; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  if ${lt_cv_aix_libpath_+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+
+  lt_aix_libpath_sed='
+      /Import File Strings/,/^$/ {
+	  /^0/ {
+	      s/^0  *\([^ ]*\) *$/\1/
+	      p
+	  }
+      }'
+  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_="/usr/lib:/lib"
+  fi
+
+fi
+
+  aix_libpath=$lt_cv_aix_libpath_
+fi
+
+        hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+        archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+      else
+	if test "$host_cpu" = ia64; then
+	  hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
+	  allow_undefined_flag="-z nodefs"
+	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+	else
+	 # Determine the default libpath from the value encoded in an
+	 # empty executable.
+	 if test "${lt_cv_aix_libpath+set}" = set; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  if ${lt_cv_aix_libpath_+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+
+  lt_aix_libpath_sed='
+      /Import File Strings/,/^$/ {
+	  /^0/ {
+	      s/^0  *\([^ ]*\) *$/\1/
+	      p
+	  }
+      }'
+  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+  if test -z "$lt_cv_aix_libpath_"; then
+    lt_cv_aix_libpath_="/usr/lib:/lib"
+  fi
+
+fi
+
+  aix_libpath=$lt_cv_aix_libpath_
+fi
+
+	 hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+	  # Warning - without using the other run time loading flags,
+	  # -berok will link without error, but may produce a broken library.
+	  no_undefined_flag=' ${wl}-bernotok'
+	  allow_undefined_flag=' ${wl}-berok'
+	  if test "$with_gnu_ld" = yes; then
+	    # We only use this code for GNU lds that support --whole-archive.
+	    whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	  else
+	    # Exported symbols can be pulled into shared objects from archives
+	    whole_archive_flag_spec='$convenience'
+	  fi
+	  archive_cmds_need_lc=yes
+	  # This is similar to how AIX traditionally builds its shared libraries.
+	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+	fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            archive_expsym_cmds=''
+        ;;
+      m68k)
+            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            hardcode_libdir_flag_spec='-L$libdir'
+            hardcode_minus_L=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[45]*)
+      export_dynamic_flag_spec=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      case $cc_basename in
+      cl*)
+	# Native MSVC
+	hardcode_libdir_flag_spec=' '
+	allow_undefined_flag=unsupported
+	always_export_symbols=yes
+	file_list_spec='@'
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+	  else
+	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+	  fi~
+	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+	  linknames='
+	# The linker will not automatically build a static lib if we build a DLL.
+	# _LT_TAGVAR(old_archive_from_new_cmds, )='true'
+	enable_shared_with_static_runtimes=yes
+	exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+	export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
+	# Don't use ranlib
+	old_postinstall_cmds='chmod 644 $oldlib'
+	postlink_cmds='lt_outputfile="@OUTPUT@"~
+	  lt_tool_outputfile="@TOOL_OUTPUT@"~
+	  case $lt_outputfile in
+	    *.exe|*.EXE) ;;
+	    *)
+	      lt_outputfile="$lt_outputfile.exe"
+	      lt_tool_outputfile="$lt_tool_outputfile.exe"
+	      ;;
+	  esac~
+	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+	    $RM "$lt_outputfile.manifest";
+	  fi'
+	;;
+      *)
+	# Assume MSVC wrapper
+	hardcode_libdir_flag_spec=' '
+	allow_undefined_flag=unsupported
+	# Tell ltmain to make .lib files, not .a files.
+	libext=lib
+	# Tell ltmain to make .dll files, not .so files.
+	shrext_cmds=".dll"
+	# FIXME: Setting linknames here is a bad hack.
+	archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+	# The linker will automatically build a .lib file if we build a DLL.
+	old_archive_from_new_cmds='true'
+	# FIXME: Should let the user specify the lib program.
+	old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+	enable_shared_with_static_runtimes=yes
+	;;
+      esac
+      ;;
+
+    darwin* | rhapsody*)
+
+
+  archive_cmds_need_lc=no
+  hardcode_direct=no
+  hardcode_automatic=yes
+  hardcode_shlibpath_var=unsupported
+  if test "$lt_cv_ld_force_load" = "yes"; then
+    whole_archive_flag_spec='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+
+  else
+    whole_archive_flag_spec=''
+  fi
+  link_all_deplibs=yes
+  allow_undefined_flag="$_lt_dar_allow_undefined"
+  case $cc_basename in
+     ifort*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
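+  # On Darwin, shared libraries are built with -dynamiclib/-install_name and
+  # loadable modules with -bundle; export lists are rewritten with a leading
+  # underscore to match Mach-O symbol naming before being handed to the linker.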
+  if test "$_lt_dar_can_shared" = "yes"; then
+    output_verbose_link_cmd=func_echo_all
+    archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+    module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+    archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+    module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+
+  else
+  ld_shlibs=no
+  fi
+
+      ;;
+
+    dgux*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_shlibpath_var=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2.*)
+      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_direct=yes
+      hardcode_minus_L=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    hpux9*)
+      if test "$GCC" = yes; then
+	archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      else
+	archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      fi
+      hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+      hardcode_libdir_separator=:
+      hardcode_direct=yes
+
+      # hardcode_minus_L: Not really in the search PATH,
+      # but as the default location of the library.
+      hardcode_minus_L=yes
+      export_dynamic_flag_spec='${wl}-E'
+      ;;
+
+    hpux10*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test "$with_gnu_ld" = no; then
+	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+	hardcode_libdir_separator=:
+	hardcode_direct=yes
+	hardcode_direct_absolute=yes
+	export_dynamic_flag_spec='${wl}-E'
+	# hardcode_minus_L: Not really in the search PATH,
+	# but as the default location of the library.
+	hardcode_minus_L=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	case $host_cpu in
+	hppa*64*)
+	  archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	esac
+      else
+	case $host_cpu in
+	hppa*64*)
+	  archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+
+	  # Older versions of the 11.00 compiler do not understand -b yet
+	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5
+$as_echo_n "checking if $CC understands -b... " >&6; }
+if ${lt_cv_prog_compiler__b+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler__b=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS -b"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&5
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         lt_cv_prog_compiler__b=yes
+       fi
+     else
+       lt_cv_prog_compiler__b=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5
+$as_echo "$lt_cv_prog_compiler__b" >&6; }
+
+if test x"$lt_cv_prog_compiler__b" = xyes; then
+    archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+else
+    archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+fi
+
+	  ;;
+	esac
+      fi
+      if test "$with_gnu_ld" = no; then
+	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+	hardcode_libdir_separator=:
+
+	case $host_cpu in
+	hppa*64*|ia64*)
+	  hardcode_direct=no
+	  hardcode_shlibpath_var=no
+	  ;;
+	*)
+	  hardcode_direct=yes
+	  hardcode_direct_absolute=yes
+	  export_dynamic_flag_spec='${wl}-E'
+
+	  # hardcode_minus_L: Not really in the search PATH,
+	  # but as the default location of the library.
+	  hardcode_minus_L=yes
+	  ;;
+	esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	# Try to use the -exported_symbol ld option, if it does not
+	# work, assume that -exports_file does not work either and
+	# implicitly export all symbols.
+	# This should be the same for all languages, so no per-tag cache variable.
+	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
+if ${lt_cv_irix_exported_symbol+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  save_LDFLAGS="$LDFLAGS"
+	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+	   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+int foo (void) { return 0; }
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_irix_exported_symbol=yes
+else
+  lt_cv_irix_exported_symbol=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+           LDFLAGS="$save_LDFLAGS"
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
+	if test "$lt_cv_irix_exported_symbol" = yes; then
+          archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+	fi
+      else
+	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      inherit_rpath=yes
+      link_all_deplibs=yes
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+	archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    newsos6)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_direct=yes
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      hardcode_shlibpath_var=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd*)
+      if test -f /usr/libexec/ld.so; then
+	hardcode_direct=yes
+	hardcode_shlibpath_var=no
+	hardcode_direct_absolute=yes
+	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	  archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	  archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+	  hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+	  export_dynamic_flag_spec='${wl}-E'
+	else
+	  case $host_os in
+	   openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
+	     archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+	     hardcode_libdir_flag_spec='-R$libdir'
+	     ;;
+	   *)
+	     archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	     hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+	     ;;
+	  esac
+	fi
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    os2*)
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_minus_L=yes
+      allow_undefined_flag=unsupported
+      archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+      old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+      ;;
+
+    osf3*)
+      if test "$GCC" = yes; then
+	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      else
+	allow_undefined_flag=' -expect_unresolved \*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      ;;
+
+    osf4* | osf5*)	# as osf3* with the addition of -msym flag
+      if test "$GCC" = yes; then
+	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      else
+	allow_undefined_flag=' -expect_unresolved \*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+	# Both c and cxx compiler support -rpath directly
+	hardcode_libdir_flag_spec='-rpath $libdir'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_separator=:
+      ;;
+
+    solaris*)
+      no_undefined_flag=' -z defs'
+      if test "$GCC" = yes; then
+	wlarc='${wl}'
+	archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+	case `$CC -V 2>&1` in
+	*"Compilers 5.0"*)
+	  wlarc=''
+	  archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+	  ;;
+	*)
+	  wlarc='${wl}'
+	  archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+	  ;;
+	esac
+      fi
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_shlibpath_var=no
+      case $host_os in
+      solaris2.[0-5] | solaris2.[0-5].*) ;;
+      *)
+	# The compiler driver will combine and reorder linker options,
+	# but understands `-z linker_flag'.  GCC discards it without `$wl',
+	# but is careful enough not to reorder.
+	# Supported since Solaris 2.6 (maybe 2.5.1?)
+	if test "$GCC" = yes; then
+	  whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+	else
+	  whole_archive_flag_spec='-z allextract$convenience -z defaultextract'
+	fi
+	;;
+      esac
+      link_all_deplibs=yes
+      ;;
+
+    sunos4*)
+      if test "x$host_vendor" = xsequent; then
+	# Use $CC to link under sequent, because it throws in some extra .o
+	# files that make .init and .fini sections work.
+	archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_direct=yes
+      hardcode_minus_L=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+	sni)
+	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  hardcode_direct=yes # is this really true???
+	;;
+	siemens)
+	  ## LD is ld; it makes a PLAMLIB.
+	  ## CC just makes a GrossModule.
+	  archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+	  reload_cmds='$CC -r -o $output$reload_objs'
+	  hardcode_direct=no
+        ;;
+	motorola)
+	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  hardcode_direct=no #Motorola manual says yes, but my tests say they lie
+	;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      hardcode_shlibpath_var=no
+      ;;
+
+    sysv4.3*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_shlibpath_var=no
+      export_dynamic_flag_spec='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	hardcode_shlibpath_var=no
+	runpath_var=LD_RUN_PATH
+	hardcode_runpath_var=yes
+	ld_shlibs=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
+      no_undefined_flag='${wl}-z,text'
+      archive_cmds_need_lc=no
+      hardcode_shlibpath_var=no
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We can NOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
+      no_undefined_flag='${wl}-z,text'
+      allow_undefined_flag='${wl}-z,nodefs'
+      archive_cmds_need_lc=no
+      hardcode_shlibpath_var=no
+      hardcode_libdir_flag_spec='${wl}-R,$libdir'
+      hardcode_libdir_separator=':'
+      link_all_deplibs=yes
+      export_dynamic_flag_spec='${wl}-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_shlibpath_var=no
+      ;;
+
+    *)
+      ld_shlibs=no
+      ;;
+    esac
+
+    if test x$host_vendor = xsni; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+	export_dynamic_flag_spec='${wl}-Blargedynsym'
+	;;
+      esac
+    fi
+  fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5
+$as_echo "$ld_shlibs" >&6; }
+test "$ld_shlibs" = no && can_build_shared=no
+
+with_gnu_ld=$with_gnu_ld
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$archive_cmds_need_lc" in
+x|xyes)
+  # Assume -lc should be added
+  archive_cmds_need_lc=yes
+
+  if test "$enable_shared" = yes && test "$GCC" = yes; then
+    case $archive_cmds in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc since on some
+      # systems, -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
+$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
+if ${lt_cv_archive_cmds_need_lc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  $RM conftest*
+	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+	if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } 2>conftest.err; then
+	  soname=conftest
+	  lib=conftest
+	  libobjs=conftest.$ac_objext
+	  deplibs=
+	  wl=$lt_prog_compiler_wl
+	  pic_flag=$lt_prog_compiler_pic
+	  compiler_flags=-v
+	  linker_flags=-v
+	  verstring=
+	  output_objdir=.
+	  libname=conftest
+	  lt_save_allow_undefined_flag=$allow_undefined_flag
+	  allow_undefined_flag=
+	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
+  (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+	  then
+	    lt_cv_archive_cmds_need_lc=no
+	  else
+	    lt_cv_archive_cmds_need_lc=yes
+	  fi
+	  allow_undefined_flag=$lt_save_allow_undefined_flag
+	else
+	  cat conftest.err 1>&5
+	fi
+	$RM conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5
+$as_echo "$lt_cv_archive_cmds_need_lc" >&6; }
+      archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc
+      ;;
+    esac
+  fi
+  ;;
+esac
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
+$as_echo_n "checking dynamic linker characteristics... " >&6; }
+
+if test "$GCC" = yes; then
+  case $host_os in
+    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+    *) lt_awk_arg="/^libraries:/" ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;;
+    *) lt_sed_strip_eq="s,=/,/,g" ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # If the path contains ";", then we assume it to be the separator;
+    # otherwise default to the standard path separator (i.e. ":").  It is
+    # assumed that no part of a normal pathname contains ";", but that should
+    # be okay in the real world, where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # Ok, now we have the path, separated by spaces, we can step through it
+  # and add multilib dir if necessary.
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+    else
+      test -d "$lt_sys_path" && \
+	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
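+  # Normalise each candidate directory: the awk program below resolves "."
+  # and ".." components from the right and prints every resulting path only
+  # the first time it is seen, so the final search path is de-duplicated.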
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+  lt_foo="";
+  lt_count=0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo="/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[lt_foo]++; }
+  if (lt_freq[lt_foo] == 1) { print lt_foo; }
+}'`
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's,/\([A-Za-z]:\),\1,g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
+case $host_os in
+aix3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='${libname}${release}${shared_ext}$major'
+  ;;
+
+aix[4-9]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test "$host_cpu" = ia64; then
+    # AIX 5 supports IA64
+    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependence libraries.  The import file would start with
+    # the line `#! .'.  This would cause the generated library to
+    # depend on `.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[01] | aix4.[01].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+	   echo ' yes '
+	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+	:
+      else
+	can_build_shared=no
+      fi
+      ;;
+    esac
+    # AIX (on Power*) has no versioning support, so currently we cannot
+    # hardcode the correct soname into the executable.  We could probably add
+    # versioning support to collect2, so additional links could be useful in
+    # the future.
+    if test "$aix_use_runtimelinking" = yes; then
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    else
+      # We preserve .a as extension for shared libraries through AIX4.2
+      # and later when we are not doing run time linking.
+      library_names_spec='${libname}${release}.a $libname.a'
+      soname_spec='${libname}${release}${shared_ext}$major'
+    fi
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='${libname}${shared_ext}'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[45]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # the default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=".dll"
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$cc_basename in
+  yes,*)
+    # gcc
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    esac
+    dynamic_linker='Win32 ld.exe'
+    ;;
+
+  *,cl*)
+    # Native MSVC
+    libname_spec='$name'
+    soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+    library_names_spec='${libname}.dll.lib'
+
+    case $build_os in
+    mingw*)
+      sys_lib_search_path_spec=
+      lt_save_ifs=$IFS
+      IFS=';'
+      for lt_path in $LIB
+      do
+        IFS=$lt_save_ifs
+        # Let DOS variable expansion print the short 8.3 style file name.
+        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+      done
+      IFS=$lt_save_ifs
+      # Convert to MSYS style.
+      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
+      ;;
+    cygwin*)
+      # Convert to unix form, then to dos form, then back to unix form
+      # but this time dos style (no spaces!) so that the unix form looks
+      # like /cygdrive/c/PROGRA~1:/cygdr...
+      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      ;;
+    *)
+      sys_lib_search_path_spec="$LIB"
+      if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
+        # It is most probably a Windows format PATH.
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+      else
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      fi
+      # FIXME: find the short name or the path components, as spaces are
+      # common. (e.g. "Program Files" -> "PROGRA~1")
+      ;;
+    esac
+
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+    dynamic_linker='Win32 link.exe'
+    ;;
+
+  *)
+    # Assume MSVC wrapper
+    library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
+    dynamic_linker='Win32 ld.exe'
+    ;;
+  esac
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+  soname_spec='${libname}${release}${major}$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[23].*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2.*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[01]* | freebsdelf3.[01]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
+  freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+gnu*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+haiku*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    if test "X$HPUX_IA64_MODE" = X32; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+    fi
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+  postinstall_cmds='chmod 555 $lib'
+  # or fails outright, so override atomically:
+  install_override_mode=555
+  ;;
+
+interix[3-9]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+	if test "$lt_cv_prog_gnu_ld" = yes; then
+		version_type=linux # correct to gnu/linux during the next big refactor
+	else
+		version_type=irix
+	fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld are patched to set DT_RUNPATH
+  if ${lt_cv_shlibpath_overrides_runpath+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \
+	 LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\""
+    cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  if  ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
+  lt_cv_shlibpath_overrides_runpath=yes
+fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+
+fi
+
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Append ld.so.conf contents to the search path
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  Since this was broken with cross compilers,
+  # most powerpc-linux boxes support dynamic linking these days and
+  # people can always --disable-shared, the test was removed, and we
+  # assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec="/usr/lib"
+  need_lib_prefix=no
+  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+  case $host_os in
+    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
+    *)				need_version=no  ;;
+  esac
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    case $host_os in
+      openbsd2.[89] | openbsd2.[89].*)
+	shlibpath_overrides_runpath=no
+	;;
+      *)
+	shlibpath_overrides_runpath=yes
+	;;
+      esac
+  else
+    shlibpath_overrides_runpath=yes
+  fi
+  ;;
+
+os2*)
+  libname_spec='$name'
+  shrext_cmds=".dll"
+  need_lib_prefix=no
+  library_names_spec='$libname${shared_ext} $libname.a'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=LIBPATH
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test "$with_gnu_ld" = yes; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec ;then
+    version_type=linux # correct to gnu/linux during the next big refactor
+    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+    soname_spec='$libname${shared_ext}.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=freebsd-elf
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test "$with_gnu_ld" = yes; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+	;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
+$as_echo "$dynamic_linker" >&6; }
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
+$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
+hardcode_action=
+if test -n "$hardcode_libdir_flag_spec" ||
+   test -n "$runpath_var" ||
+   test "X$hardcode_automatic" = "Xyes" ; then
+
+  # We can hardcode non-existent directories.
+  if test "$hardcode_direct" != no &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink, otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one
+     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no &&
+     test "$hardcode_minus_L" != no; then
+    # Linking always hardcodes the temporary library directory.
+    hardcode_action=relink
+  else
+    # We can link without hardcoding, and we can hardcode nonexisting dirs.
+    hardcode_action=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  hardcode_action=unsupported
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5
+$as_echo "$hardcode_action" >&6; }
+
+if test "$hardcode_action" = relink ||
+   test "$inherit_rpath" = yes; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+     test "$enable_shared" = no; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
+
+
+
+
+
+
+  if test "x$enable_dlopen" != xyes; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
+  case $host_os in
+  beos*)
+    lt_cv_dlopen="load_add_on"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen="LoadLibrary"
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen="dlopen"
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+  # if libdl is installed we need to link against it
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
+$as_echo_n "checking for dlopen in -ldl... " >&6; }
+if ${ac_cv_lib_dl_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldl  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dl_dlopen=yes
+else
+  ac_cv_lib_dl_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
+$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
+if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
+else
+
+    lt_cv_dlopen="dyld"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+
+fi
+
+    ;;
+
+  *)
+    ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load"
+if test "x$ac_cv_func_shl_load" = xyes; then :
+  lt_cv_dlopen="shl_load"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5
+$as_echo_n "checking for shl_load in -ldld... " >&6; }
+if ${ac_cv_lib_dld_shl_load+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char shl_load ();
+int
+main ()
+{
+return shl_load ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_shl_load=yes
+else
+  ac_cv_lib_dld_shl_load=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5
+$as_echo "$ac_cv_lib_dld_shl_load" >&6; }
+if test "x$ac_cv_lib_dld_shl_load" = xyes; then :
+  lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"
+else
+  ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen"
+if test "x$ac_cv_func_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
+$as_echo_n "checking for dlopen in -ldl... " >&6; }
+if ${ac_cv_lib_dl_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldl  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dl_dlopen=yes
+else
+  ac_cv_lib_dl_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
+$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
+if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5
+$as_echo_n "checking for dlopen in -lsvld... " >&6; }
+if ${ac_cv_lib_svld_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-lsvld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_svld_dlopen=yes
+else
+  ac_cv_lib_svld_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5
+$as_echo "$ac_cv_lib_svld_dlopen" >&6; }
+if test "x$ac_cv_lib_svld_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5
+$as_echo_n "checking for dld_link in -ldld... " >&6; }
+if ${ac_cv_lib_dld_dld_link+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dld_link ();
+int
+main ()
+{
+return dld_link ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_dld_link=yes
+else
+  ac_cv_lib_dld_dld_link=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5
+$as_echo "$ac_cv_lib_dld_dld_link" >&6; }
+if test "x$ac_cv_lib_dld_dld_link" = xyes; then :
+  lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+    ;;
+  esac
+
+  if test "x$lt_cv_dlopen" != xno; then
+    enable_dlopen=yes
+  else
+    enable_dlopen=no
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS="$CPPFLAGS"
+    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS="$LDFLAGS"
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS="$LIBS"
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5
+$as_echo_n "checking whether a program can dlopen itself... " >&6; }
+if ${lt_cv_dlopen_self+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  	  if test "$cross_compiling" = yes; then :
+  lt_cv_dlopen_self=cross
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+   find out it does not work on some platforms. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&5 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;;
+      x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;;
+      x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;;
+    esac
+  else :
+    # compilation failed
+    lt_cv_dlopen_self=no
+  fi
+fi
+rm -fr conftest*
+
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5
+$as_echo "$lt_cv_dlopen_self" >&6; }
+
+    if test "x$lt_cv_dlopen_self" = xyes; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5
+$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; }
+if ${lt_cv_dlopen_self_static+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  	  if test "$cross_compiling" = yes; then :
+  lt_cv_dlopen_self_static=cross
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+   find out it does not work on some platforms. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&5 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;;
+      x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;;
+      x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;;
+    esac
+  else :
+    # compilation failed
+    lt_cv_dlopen_self_static=no
+  fi
+fi
+rm -fr conftest*
+
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5
+$as_echo "$lt_cv_dlopen_self_static" >&6; }
+    fi
+
+    CPPFLAGS="$save_CPPFLAGS"
+    LDFLAGS="$save_LDFLAGS"
+    LIBS="$save_LIBS"
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+striplib=
+old_striplib=
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5
+$as_echo_n "checking whether stripping libraries is possible... " >&6; }
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP" ; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+    else
+      { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+    fi
+    ;;
+  *)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+    ;;
+  esac
+fi
+
+
+
+
+
+
+
+
+
+
+
+
+  # Report which library types will actually be built
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
+$as_echo_n "checking if libtool supports shared libraries... " >&6; }
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
+$as_echo "$can_build_shared" >&6; }
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
+$as_echo_n "checking whether to build shared libraries... " >&6; }
+  test "$can_build_shared" = "no" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test "$enable_shared" = yes && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[4-9]*)
+    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+      test "$enable_shared" = yes && enable_static=no
+    fi
+    ;;
+  esac
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
+$as_echo "$enable_shared" >&6; }
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
+$as_echo_n "checking whether to build static libraries... " >&6; }
+  # Make sure either enable_shared or enable_static is yes.
+  test "$enable_shared" = yes || enable_static=yes
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
+$as_echo "$enable_static" >&6; }
+
+
+
+
+fi
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+CC="$lt_save_CC"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+        ac_config_commands="$ac_config_commands libtool"
+
+
+
+
+# Only expand once:
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking which extension is used for runtime loadable modules" >&5
+$as_echo_n "checking which extension is used for runtime loadable modules... " >&6; }
+if ${libltdl_cv_shlibext+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+
+module=yes
+eval libltdl_cv_shlibext=$shrext_cmds
+module=no
+eval libltdl_cv_shrext=$shrext_cmds
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libltdl_cv_shlibext" >&5
+$as_echo "$libltdl_cv_shlibext" >&6; }
+if test -n "$libltdl_cv_shlibext"; then
+
+cat >>confdefs.h <<_ACEOF
+#define LT_MODULE_EXT "$libltdl_cv_shlibext"
+_ACEOF
+
+fi
+if test "$libltdl_cv_shrext" != "$libltdl_cv_shlibext"; then
+
+cat >>confdefs.h <<_ACEOF
+#define LT_SHARED_EXT "$libltdl_cv_shrext"
+_ACEOF
+
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking which variable specifies run-time module search path" >&5
+$as_echo_n "checking which variable specifies run-time module search path... " >&6; }
+if ${lt_cv_module_path_var+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_module_path_var="$shlibpath_var"
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_module_path_var" >&5
+$as_echo "$lt_cv_module_path_var" >&6; }
+if test -n "$lt_cv_module_path_var"; then
+
+cat >>confdefs.h <<_ACEOF
+#define LT_MODULE_PATH_VAR "$lt_cv_module_path_var"
+_ACEOF
+
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for the default library search path" >&5
+$as_echo_n "checking for the default library search path... " >&6; }
+if ${lt_cv_sys_dlsearch_path+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_sys_dlsearch_path="$sys_lib_dlsearch_path_spec"
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_dlsearch_path" >&5
+$as_echo "$lt_cv_sys_dlsearch_path" >&6; }
+if test -n "$lt_cv_sys_dlsearch_path"; then
+  sys_dlsearch_path=
+  for dir in $lt_cv_sys_dlsearch_path; do
+    if test -z "$sys_dlsearch_path"; then
+      sys_dlsearch_path="$dir"
+    else
+      sys_dlsearch_path="$sys_dlsearch_path$PATH_SEPARATOR$dir"
+    fi
+  done
+
+cat >>confdefs.h <<_ACEOF
+#define LT_DLSEARCH_PATH "$sys_dlsearch_path"
+_ACEOF
+
+fi
+
+
+LT_DLLOADERS=
+
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+LIBADD_DLOPEN=
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing dlopen" >&5
+$as_echo_n "checking for library containing dlopen... " >&6; }
+if ${ac_cv_search_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_func_search_save_LIBS=$LIBS
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+for ac_lib in '' dl; do
+  if test -z "$ac_lib"; then
+    ac_res="none required"
+  else
+    ac_res=-l$ac_lib
+    LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
+  fi
+  if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_search_dlopen=$ac_res
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext
+  if ${ac_cv_search_dlopen+:} false; then :
+  break
+fi
+done
+if ${ac_cv_search_dlopen+:} false; then :
+
+else
+  ac_cv_search_dlopen=no
+fi
+rm conftest.$ac_ext
+LIBS=$ac_func_search_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_dlopen" >&5
+$as_echo "$ac_cv_search_dlopen" >&6; }
+ac_res=$ac_cv_search_dlopen
+if test "$ac_res" != no; then :
+  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
+
+$as_echo "#define HAVE_LIBDL 1" >>confdefs.h
+
+	if test "$ac_cv_search_dlopen" != "none required" ; then
+	  LIBADD_DLOPEN="-ldl"
+	fi
+	libltdl_cv_lib_dl_dlopen="yes"
+	LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dlopen.la"
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#if HAVE_DLFCN_H
+#  include <dlfcn.h>
+#endif
+
+int
+main ()
+{
+dlopen(0, 0);
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+
+$as_echo "#define HAVE_LIBDL 1" >>confdefs.h
+
+	    libltdl_cv_func_dlopen="yes"
+	    LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dlopen.la"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5
+$as_echo_n "checking for dlopen in -lsvld... " >&6; }
+if ${ac_cv_lib_svld_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-lsvld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_svld_dlopen=yes
+else
+  ac_cv_lib_svld_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5
+$as_echo "$ac_cv_lib_svld_dlopen" >&6; }
+if test "x$ac_cv_lib_svld_dlopen" = xyes; then :
+
+$as_echo "#define HAVE_LIBDL 1" >>confdefs.h
+
+	        LIBADD_DLOPEN="-lsvld" libltdl_cv_func_dlopen="yes"
+		LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dlopen.la"
+fi
+
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+fi
+
+if test x"$libltdl_cv_func_dlopen" = xyes || test x"$libltdl_cv_lib_dl_dlopen" = xyes
+then
+  lt_save_LIBS="$LIBS"
+  LIBS="$LIBS $LIBADD_DLOPEN"
+  for ac_func in dlerror
+do :
+  ac_fn_c_check_func "$LINENO" "dlerror" "ac_cv_func_dlerror"
+if test "x$ac_cv_func_dlerror" = xyes; then :
+  cat >>confdefs.h <<_ACEOF
+#define HAVE_DLERROR 1
+_ACEOF
+
+fi
+done
+
+  LIBS="$lt_save_LIBS"
+fi
+
+
+LIBADD_SHL_LOAD=
+ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load"
+if test "x$ac_cv_func_shl_load" = xyes; then :
+
+$as_echo "#define HAVE_SHL_LOAD 1" >>confdefs.h
+
+	LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}shl_load.la"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5
+$as_echo_n "checking for shl_load in -ldld... " >&6; }
+if ${ac_cv_lib_dld_shl_load+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char shl_load ();
+int
+main ()
+{
+return shl_load ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_shl_load=yes
+else
+  ac_cv_lib_dld_shl_load=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5
+$as_echo "$ac_cv_lib_dld_shl_load" >&6; }
+if test "x$ac_cv_lib_dld_shl_load" = xyes; then :
+
+$as_echo "#define HAVE_SHL_LOAD 1" >>confdefs.h
+
+	    LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}shl_load.la"
+	    LIBADD_SHL_LOAD="-ldld"
+fi
+
+fi
+
+
+
+case $host_os in
+darwin[1567].*)
+# We only want this for pre-Mac OS X 10.4.
+  ac_fn_c_check_func "$LINENO" "_dyld_func_lookup" "ac_cv_func__dyld_func_lookup"
+if test "x$ac_cv_func__dyld_func_lookup" = xyes; then :
+
+$as_echo "#define HAVE_DYLD 1" >>confdefs.h
+
+	LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dyld.la"
+fi
+
+  ;;
+beos*)
+  LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}load_add_on.la"
+  ;;
+cygwin* | mingw* | os2* | pw32*)
+  ac_fn_c_check_decl "$LINENO" "cygwin_conv_path" "ac_cv_have_decl_cygwin_conv_path" "#include <sys/cygwin.h>
+"
+if test "x$ac_cv_have_decl_cygwin_conv_path" = xyes; then :
+  ac_have_decl=1
+else
+  ac_have_decl=0
+fi
+
+cat >>confdefs.h <<_ACEOF
+#define HAVE_DECL_CYGWIN_CONV_PATH $ac_have_decl
+_ACEOF
+
+  LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}loadlibrary.la"
+  ;;
+esac
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5
+$as_echo_n "checking for dld_link in -ldld... " >&6; }
+if ${ac_cv_lib_dld_dld_link+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dld_link ();
+int
+main ()
+{
+return dld_link ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_dld_link=yes
+else
+  ac_cv_lib_dld_dld_link=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5
+$as_echo "$ac_cv_lib_dld_dld_link" >&6; }
+if test "x$ac_cv_lib_dld_dld_link" = xyes; then :
+
+$as_echo "#define HAVE_DLD 1" >>confdefs.h
+
+		LT_DLLOADERS="$LT_DLLOADERS ${lt_dlopen_dir+$lt_dlopen_dir/}dld_link.la"
+fi
+
+
+
+
+LT_DLPREOPEN=
+if test -n "$LT_DLLOADERS"
+then
+  for lt_loader in $LT_DLLOADERS; do
+    LT_DLPREOPEN="$LT_DLPREOPEN-dlpreopen $lt_loader "
+  done
+
+$as_echo "#define HAVE_LIBDLLOADER 1" >>confdefs.h
+
+fi
+
+
+LIBADD_DL="$LIBADD_DLOPEN $LIBADD_SHL_LOAD"
+
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for _ prefix in compiled symbols" >&5
+$as_echo_n "checking for _ prefix in compiled symbols... " >&6; }
+if ${lt_cv_sys_symbol_underscore+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_sys_symbol_underscore=no
+  cat > conftest.$ac_ext <<_LT_EOF
+void nm_test_func(){}
+int main(){nm_test_func;return 0;}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    # Now try to grab the symbols.
+    ac_nlist=conftest.nm
+    if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $ac_nlist\""; } >&5
+  (eval $NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $ac_nlist) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s "$ac_nlist"; then
+      # See whether the symbols have a leading underscore.
+      if grep '^. _nm_test_func' "$ac_nlist" >/dev/null; then
+        lt_cv_sys_symbol_underscore=yes
+      else
+        if grep '^. nm_test_func ' "$ac_nlist" >/dev/null; then
+	  :
+        else
+	  echo "configure: cannot find nm_test_func in $ac_nlist" >&5
+        fi
+      fi
+    else
+      echo "configure: cannot run $lt_cv_sys_global_symbol_pipe" >&5
+    fi
+  else
+    echo "configure: failed program was:" >&5
+    cat conftest.c >&5
+  fi
+  rm -rf conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_symbol_underscore" >&5
+$as_echo "$lt_cv_sys_symbol_underscore" >&6; }
+  sys_symbol_underscore=$lt_cv_sys_symbol_underscore
+
+
+if test x"$lt_cv_sys_symbol_underscore" = xyes; then
+  if test x"$libltdl_cv_func_dlopen" = xyes ||
+     test x"$libltdl_cv_lib_dl_dlopen" = xyes ; then
+	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we have to add an underscore for dlsym" >&5
+$as_echo_n "checking whether we have to add an underscore for dlsym... " >&6; }
+if ${libltdl_cv_need_uscore+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  libltdl_cv_need_uscore=unknown
+          save_LIBS="$LIBS"
+          LIBS="$LIBS $LIBADD_DLOPEN"
+	  if test "$cross_compiling" = yes; then :
+  libltdl_cv_need_uscore=cross
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+   find out it does not work on some platforms. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&5 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) libltdl_cv_need_uscore=no ;;
+      x$lt_dlneed_uscore) libltdl_cv_need_uscore=yes ;;
+      x$lt_dlunknown|x*)  ;;
+    esac
+  else :
+    # compilation failed
+
+  fi
+fi
+rm -fr conftest*
+
+	  LIBS="$save_LIBS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libltdl_cv_need_uscore" >&5
+$as_echo "$libltdl_cv_need_uscore" >&6; }
+  fi
+fi
+
+if test x"$libltdl_cv_need_uscore" = xyes; then
+
+$as_echo "#define NEED_USCORE 1" >>confdefs.h
+
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether deplibs are loaded by dlopen" >&5
+$as_echo_n "checking whether deplibs are loaded by dlopen... " >&6; }
+if ${lt_cv_sys_dlopen_deplibs+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  # PORTME does your system automatically load deplibs for dlopen?
+  # or its logical equivalent (e.g. shl_load for HP-UX < 11)
+  # For now, we just catch OSes we know something about -- in the
+  # future, we'll try to test this programmatically.
+  lt_cv_sys_dlopen_deplibs=unknown
+  case $host_os in
+  aix3*|aix4.1.*|aix4.2.*)
+    # Unknown whether this is true for these versions of AIX, but
+    # we want this `case' here to explicitly catch those versions.
+    lt_cv_sys_dlopen_deplibs=unknown
+    ;;
+  aix[4-9]*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  amigaos*)
+    case $host_cpu in
+    powerpc)
+      lt_cv_sys_dlopen_deplibs=no
+      ;;
+    esac
+    ;;
+  darwin*)
+    # Assuming the user has installed a libdl from somewhere, this is true
+    # If you are looking for one http://www.opendarwin.org/projects/dlcompat
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  freebsd* | dragonfly*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  gnu* | linux* | k*bsd*-gnu | kopensolaris*-gnu)
+    # GNU and its variants, using gnu ld.so (Glibc)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  hpux10*|hpux11*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  interix*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  irix[12345]*|irix6.[01]*)
+    # Catch all versions of IRIX before 6.2, and indicate that we don't
+    # know how it worked for any of those versions.
+    lt_cv_sys_dlopen_deplibs=unknown
+    ;;
+  irix*)
+    # The case above catches anything before 6.2, and it's known that
+    # at 6.2 and later dlopen does load deplibs.
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  netbsd*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  openbsd*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  osf[1234]*)
+    # dlopen did load deplibs (at least at 4.x), but until the 5.x series,
+    # it did *not* use an RPATH in a shared library to find objects the
+    # library depends on, so we explicitly say `no'.
+    lt_cv_sys_dlopen_deplibs=no
+    ;;
+  osf5.0|osf5.0a|osf5.1)
+    # dlopen *does* load deplibs and with the right loader patch applied
+    # it even uses RPATH in a shared library to search for shared objects
+    # that the library depends on, but there's no easy way to know if that
+    # patch is installed.  Since this is the case, all we can really
+    # say is unknown -- it depends on the patch being installed.  If
+    # it is, this changes to `yes'.  Without it, it would be `no'.
+    lt_cv_sys_dlopen_deplibs=unknown
+    ;;
+  osf*)
+    # the two cases above should catch all versions of osf <= 5.1.  Read
+    # the comments above for what we know about them.
+    # At > 5.1, deplibs are loaded *and* any RPATH in a shared library
+    # is used to find them so we can finally say `yes'.
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  qnx*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  solaris*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+    lt_cv_sys_dlopen_deplibs=yes
+    ;;
+  esac
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_dlopen_deplibs" >&5
+$as_echo "$lt_cv_sys_dlopen_deplibs" >&6; }
+if test "$lt_cv_sys_dlopen_deplibs" != yes; then
+
+$as_echo "#define LTDL_DLOPEN_DEPLIBS 1" >>confdefs.h
+
+fi
+
+:
+
+for ac_header in argz.h
+do :
+  ac_fn_c_check_header_compile "$LINENO" "argz.h" "ac_cv_header_argz_h" "$ac_includes_default
+"
+if test "x$ac_cv_header_argz_h" = xyes; then :
+  cat >>confdefs.h <<_ACEOF
+#define HAVE_ARGZ_H 1
+_ACEOF
+
+fi
+
+done
+
+
+ac_fn_c_check_type "$LINENO" "error_t" "ac_cv_type_error_t" "#if defined(HAVE_ARGZ_H)
+#  include <argz.h>
+#endif
+"
+if test "x$ac_cv_type_error_t" = xyes; then :
+
+cat >>confdefs.h <<_ACEOF
+#define HAVE_ERROR_T 1
+_ACEOF
+
+
+else
+
+$as_echo "#define error_t int" >>confdefs.h
+
+
+$as_echo "#define __error_t_defined 1" >>confdefs.h
+
+fi
+
+
+ARGZ_H=
+for ac_func in argz_add argz_append argz_count argz_create_sep argz_insert \
+	argz_next argz_stringify
+do :
+  as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
+ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
+_ACEOF
+
+else
+  ARGZ_H=argz.h; case " $LIBOBJS " in
+  *" argz.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS argz.$ac_objext"
+ ;;
+esac
+
+fi
+done
+
+
+if test -z "$ARGZ_H"; then :
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if argz actually works" >&5
+$as_echo_n "checking if argz actually works... " >&6; }
+if ${lt_cv_sys_argz_works+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $host_os in #(
+	 *cygwin*)
+	   lt_cv_sys_argz_works=no
+	   if test "$cross_compiling" != no; then
+	     lt_cv_sys_argz_works="guessing no"
+	   else
+	     lt_sed_extract_leading_digits='s/^\([0-9\.]*\).*/\1/'
+	     save_IFS=$IFS
+	     IFS=-.
+	     set x `uname -r | sed -e "$lt_sed_extract_leading_digits"`
+	     IFS=$save_IFS
+	     lt_os_major=${2-0}
+	     lt_os_minor=${3-0}
+	     lt_os_micro=${4-0}
+	     if test "$lt_os_major" -gt 1 \
+		|| { test "$lt_os_major" -eq 1 \
+		  && { test "$lt_os_minor" -gt 5 \
+		    || { test "$lt_os_minor" -eq 5 \
+		      && test "$lt_os_micro" -gt 24; }; }; }; then
+	       lt_cv_sys_argz_works=yes
+	     fi
+	   fi
+	   ;; #(
+	 *) lt_cv_sys_argz_works=yes ;;
+	 esac
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_argz_works" >&5
+$as_echo "$lt_cv_sys_argz_works" >&6; }
+     if test "$lt_cv_sys_argz_works" = yes; then :
+
+$as_echo "#define HAVE_WORKING_ARGZ 1" >>confdefs.h
+
+else
+  ARGZ_H=argz.h
+        case " $LIBOBJS " in
+  *" argz.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS argz.$ac_objext"
+ ;;
+esac
+
+fi
+fi
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether libtool supports -dlopen/-dlpreopen" >&5
+$as_echo_n "checking whether libtool supports -dlopen/-dlpreopen... " >&6; }
+if ${libltdl_cv_preloaded_symbols+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$lt_cv_sys_global_symbol_pipe"; then
+    libltdl_cv_preloaded_symbols=yes
+  else
+    libltdl_cv_preloaded_symbols=no
+  fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libltdl_cv_preloaded_symbols" >&5
+$as_echo "$libltdl_cv_preloaded_symbols" >&6; }
+if test x"$libltdl_cv_preloaded_symbols" = xyes; then
+
+$as_echo "#define HAVE_PRELOADED_SYMBOLS 1" >>confdefs.h
+
+fi
+
+
+
+# Check whether --enable-ltdl-install was given.
+if test "${enable_ltdl_install+set}" = set; then :
+  enableval=$enable_ltdl_install;
+fi
+
+
+case ,${enable_ltdl_install},${enable_ltdl_convenience} in
+  *yes*) ;;
+  *) enable_ltdl_convenience=yes ;;
+esac
+
+ if test x"${enable_ltdl_install-no}" != xno; then
+  INSTALL_LTDL_TRUE=
+  INSTALL_LTDL_FALSE='#'
+else
+  INSTALL_LTDL_TRUE='#'
+  INSTALL_LTDL_FALSE=
+fi
+
+  if test x"${enable_ltdl_convenience-no}" != xno; then
+  CONVENIENCE_LTDL_TRUE=
+  CONVENIENCE_LTDL_FALSE='#'
+else
+  CONVENIENCE_LTDL_TRUE='#'
+  CONVENIENCE_LTDL_FALSE=
+fi
+
+
+
+
+
+
+# In order that ltdl.c can compile, find out the first AC_CONFIG_HEADERS
+# the user used.  This is so that ltdl.h can pick up the parent project's
+# config.h file. The first file in AC_CONFIG_HEADERS must contain the
+# definitions required by ltdl.c.
+# FIXME: Remove use of undocumented AC_LIST_HEADERS (2.59 compatibility).
+
+
+
+for ac_header in unistd.h dl.h sys/dl.h dld.h mach-o/dyld.h dirent.h
+do :
+  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
+ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
+"
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
+_ACEOF
+
+fi
+
+done
+
+
+for ac_func in closedir opendir readdir
+do :
+  as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
+ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
+_ACEOF
+
+else
+  case " $LIBOBJS " in
+  *" lt__dirent.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS lt__dirent.$ac_objext"
+ ;;
+esac
+
+fi
+done
+
+for ac_func in strlcat strlcpy
+do :
+  as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
+ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
+_ACEOF
+
+else
+  case " $LIBOBJS " in
+  *" lt__strl.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS lt__strl.$ac_objext"
+ ;;
+esac
+
+fi
+done
+
+
+
+cat >>confdefs.h <<_ACEOF
+#define LT_LIBEXT "$libext"
+_ACEOF
+
+
+name=
+eval "lt_libprefix=\"$libname_spec\""
+
+cat >>confdefs.h <<_ACEOF
+#define LT_LIBPREFIX "$lt_libprefix"
+_ACEOF
+
+
+name=ltdl
+eval "LTDLOPEN=\"$libname_spec\""
+
+
+
+
+## -------- ##
+## Outputs. ##
+## -------- ##
+ac_config_files="$ac_config_files Makefile"
+
+cat >confcache <<\_ACEOF
+# This file is a shell script that caches the results of configure
+# tests run on this system so they can be shared between configure
+# scripts and configure runs, see configure's option --config-cache.
+# It is not useful on other systems.  If it contains results you don't
+# want to keep, you may remove or edit it.
+#
+# config.status only pays attention to the cache file if you give it
+# the --recheck option to rerun configure.
+#
+# `ac_cv_env_foo' variables (set or unset) will be overridden when
+# loading this file, other *unset* `ac_cv_foo' will be assigned the
+# following values.
+
+_ACEOF
+
+# The following way of writing the cache mishandles newlines in values,
+# but we know of no workaround that is simple, portable, and efficient.
+# So, we kill variables containing newlines.
+# Ultrix sh set writes to stderr and can't be redirected directly,
+# and sets the high bit in the cache file unless we assign to the vars.
+(
+  for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
+    eval ac_val=\$$ac_var
+    case $ac_val in #(
+    *${as_nl}*)
+      case $ac_var in #(
+      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+      esac
+      case $ac_var in #(
+      _ | IFS | as_nl) ;; #(
+      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+      *) { eval $ac_var=; unset $ac_var;} ;;
+      esac ;;
+    esac
+  done
+
+  (set) 2>&1 |
+    case $as_nl`(ac_space=' '; set) 2>&1` in #(
+    *${as_nl}ac_space=\ *)
+      # `set' does not quote correctly, so add quotes: double-quote
+      # substitution turns \\\\ into \\, and sed turns \\ into \.
+      sed -n \
+	"s/'/'\\\\''/g;
+	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
+      ;; #(
+    *)
+      # `set' quotes correctly as required by POSIX, so do not add quotes.
+      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+      ;;
+    esac |
+    sort
+) |
+  sed '
+     /^ac_cv_env_/b end
+     t clear
+     :clear
+     s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
+     t end
+     s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
+     :end' >>confcache
+if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
+  if test -w "$cache_file"; then
+    if test "x$cache_file" != "x/dev/null"; then
+      { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
+$as_echo "$as_me: updating cache $cache_file" >&6;}
+      if test ! -f "$cache_file" || test -h "$cache_file"; then
+	cat confcache >"$cache_file"
+      else
+        case $cache_file in #(
+        */* | ?:*)
+	  mv -f confcache "$cache_file"$$ &&
+	  mv -f "$cache_file"$$ "$cache_file" ;; #(
+        *)
+	  mv -f confcache "$cache_file" ;;
+	esac
+      fi
+    fi
+  else
+    { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
+$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
+  fi
+fi
+rm -f confcache
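The block above rewrites confcache from the live shell state, keeping only the *_cv_* cache variables, and replaces $cache_file only when the contents actually changed and the file is writable. As a hedged illustration (the file names are the stock Autoconf defaults, not anything specific to this package), the cache is typically exercised like this:

    ./configure -C              # short for --cache-file=config.cache
    ./config.status --recheck   # rerun configure, reusing any cached ac_cv_* results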
+
+test "x$prefix" = xNONE && prefix=$ac_default_prefix
+# Let make expand exec_prefix.
+test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
+
+DEFS=-DHAVE_CONFIG_H
+
+ac_libobjs=
+ac_ltlibobjs=
+U=
+for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
+  # 1. Remove the extension, and $U if already installed.
+  ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
+  ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
+  # 2. Prepend LIBOBJDIR.  When used with automake>=1.10 LIBOBJDIR
+  #    will be set to the directory where LIBOBJS objects are built.
+  as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
+  as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
+done
+LIBOBJS=$ac_libobjs
+
+LTLIBOBJS=$ac_ltlibobjs
+
+
+ if test -n "$EXEEXT"; then
+  am__EXEEXT_TRUE=
+  am__EXEEXT_FALSE='#'
+else
+  am__EXEEXT_TRUE='#'
+  am__EXEEXT_FALSE=
+fi
+
+if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then
+  as_fn_error $? "conditional \"AMDEP\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
+  as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${INSTALL_LTDL_TRUE}" && test -z "${INSTALL_LTDL_FALSE}"; then
+  as_fn_error $? "conditional \"INSTALL_LTDL\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${CONVENIENCE_LTDL_TRUE}" && test -z "${CONVENIENCE_LTDL_FALSE}"; then
+  as_fn_error $? "conditional \"CONVENIENCE_LTDL\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+LT_CONFIG_H=config.h
+
+: "${CONFIG_STATUS=./config.status}"
+ac_write_fail=0
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files $CONFIG_STATUS"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
+$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
+as_write_fail=0
+cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+# Run this file to recreate the current configuration.
+# Compiler output produced by configure, useful for debugging
+# configure, is in config.log if it exists.
+
+debug=false
+ac_cs_recheck=false
+ac_cs_silent=false
+
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  fi
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+  eval 'as_fn_append ()
+  {
+    eval $1+=\$2
+  }'
+else
+  as_fn_append ()
+  {
+    eval $1=\$$1\$2
+  }
+fi # as_fn_append
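As the comment above notes, as_fn_append uses the shell's native += assignment when the running shell supports it, giving amortized linear growth for repeated appends instead of re-copying the whole string on every call. A minimal sketch of the idiom, using an illustrative variable name (acc) that is not part of the generated script:

    acc=
    for f in Makefile config.h; do
      as_fn_append acc " '$f'"    # acc ends up as: " 'Makefile' 'config.h'"
    done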
+
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+  eval 'as_fn_arith ()
+  {
+    as_val=$(( $* ))
+  }'
+else
+  as_fn_arith ()
+  {
+    as_val=`expr "$@" || test $? -eq 1`
+  }
+fi # as_fn_arith
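as_fn_arith evaluates its arguments without forking when the shell supports $(( )); otherwise it falls back to expr, where the "|| test $? -eq 1" guard keeps expr's exit status 1 (which it returns for a null or zero result) from being treated as a failure. A small illustrative use, not taken from the script itself:

    as_fn_arith 40 + 2
    echo "$as_val"    # prints 42 with either implementation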
+
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+  as_dirname=dirname
+else
+  as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+  case `echo 'xy\c'` in
+  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
+  xy)  ECHO_C='\c';;
+  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
+       ECHO_T='	';;
+  esac;;
+*)
+  ECHO_N='-n';;
+esac
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+  rm -f conf$$.dir/conf$$.file
+else
+  rm -f conf$$.dir
+  mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+  if ln -s conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s='ln -s'
+    # ... but there are two gotchas:
+    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+    # In both cases, we have to default to `cp -p'.
+    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+      as_ln_s='cp -p'
+  elif ln conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s=ln
+  else
+    as_ln_s='cp -p'
+  fi
+else
+  as_ln_s='cp -p'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+  case $as_dir in #(
+  -*) as_dir=./$as_dir;;
+  esac
+  test -d "$as_dir" || eval $as_mkdir_p || {
+    as_dirs=
+    while :; do
+      case $as_dir in #(
+      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+      *) as_qdir=$as_dir;;
+      esac
+      as_dirs="'$as_qdir' $as_dirs"
+      as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_dir" : 'X\(//\)[^/]' \| \
+	 X"$as_dir" : 'X\(//\)$' \| \
+	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      test -d "$as_dir" && break
+    done
+    test -z "$as_dirs" || eval "mkdir $as_dirs"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
+if mkdir -p . 2>/dev/null; then
+  as_mkdir_p='mkdir -p "$as_dir"'
+else
+  test -d ./-p && rmdir ./-p
+  as_mkdir_p=false
+fi
+
+if test -x / >/dev/null 2>&1; then
+  as_test_x='test -x'
+else
+  if ls -dL / >/dev/null 2>&1; then
+    as_ls_L_option=L
+  else
+    as_ls_L_option=
+  fi
+  as_test_x='
+    eval sh -c '\''
+      if test -d "$1"; then
+	test -d "$1/.";
+      else
+	case $1 in #(
+	-*)set "./$1";;
+	esac;
+	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
+	???[sx]*):;;*)false;;esac;fi
+    '\'' sh
+  '
+fi
+as_executable_p=$as_test_x
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
+
+
+exec 6>&1
+## ----------------------------------- ##
+## Main body of $CONFIG_STATUS script. ##
+## ----------------------------------- ##
+_ASEOF
+test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# Save the log message, to keep $0 and so on meaningful, and to
+# report actual input values of CONFIG_FILES etc. instead of their
+# values after options handling.
+ac_log="
+This file was extended by libltdl $as_me 2.4.2, which was
+generated by GNU Autoconf 2.68.  Invocation command line was
+
+  CONFIG_FILES    = $CONFIG_FILES
+  CONFIG_HEADERS  = $CONFIG_HEADERS
+  CONFIG_LINKS    = $CONFIG_LINKS
+  CONFIG_COMMANDS = $CONFIG_COMMANDS
+  $ $0 $@
+
+on `(hostname || uname -n) 2>/dev/null | sed 1q`
+"
+
+_ACEOF
+
+case $ac_config_files in *"
+"*) set x $ac_config_files; shift; ac_config_files=$*;;
+esac
+
+case $ac_config_headers in *"
+"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
+esac
+
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+# Files that config.status was made for.
+config_files="$ac_config_files"
+config_headers="$ac_config_headers"
+config_commands="$ac_config_commands"
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+ac_cs_usage="\
+\`$as_me' instantiates files and other configuration actions
+from templates according to the current configuration.  Unless the files
+and actions are specified as TAGs, all are instantiated by default.
+
+Usage: $0 [OPTION]... [TAG]...
+
+  -h, --help       print this help, then exit
+  -V, --version    print version number and configuration settings, then exit
+      --config     print configuration, then exit
+  -q, --quiet, --silent
+                   do not print progress messages
+  -d, --debug      don't remove temporary files
+      --recheck    update $as_me by reconfiguring in the same conditions
+      --file=FILE[:TEMPLATE]
+                   instantiate the configuration file FILE
+      --header=FILE[:TEMPLATE]
+                   instantiate the configuration header FILE
+
+Configuration files:
+$config_files
+
+Configuration headers:
+$config_headers
+
+Configuration commands:
+$config_commands
+
+Report bugs to <bug-libtool@gnu.org>."
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
+ac_cs_version="\\
+libltdl config.status 2.4.2
+configured by $0, generated by GNU Autoconf 2.68,
+  with options \\"\$ac_cs_config\\"
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This config.status script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+ac_pwd='$ac_pwd'
+srcdir='$srcdir'
+INSTALL='$INSTALL'
+MKDIR_P='$MKDIR_P'
+AWK='$AWK'
+test -n "\$AWK" || AWK=awk
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# The default lists apply if the user does not specify any file.
+ac_need_defaults=:
+while test $# != 0
+do
+  case $1 in
+  --*=?*)
+    ac_option=`expr "X$1" : 'X\([^=]*\)='`
+    ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
+    ac_shift=:
+    ;;
+  --*=)
+    ac_option=`expr "X$1" : 'X\([^=]*\)='`
+    ac_optarg=
+    ac_shift=:
+    ;;
+  *)
+    ac_option=$1
+    ac_optarg=$2
+    ac_shift=shift
+    ;;
+  esac
+
+  case $ac_option in
+  # Handling of the options.
+  -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
+    ac_cs_recheck=: ;;
+  --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
+    $as_echo "$ac_cs_version"; exit ;;
+  --config | --confi | --conf | --con | --co | --c )
+    $as_echo "$ac_cs_config"; exit ;;
+  --debug | --debu | --deb | --de | --d | -d )
+    debug=: ;;
+  --file | --fil | --fi | --f )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    '') as_fn_error $? "missing file argument" ;;
+    esac
+    as_fn_append CONFIG_FILES " '$ac_optarg'"
+    ac_need_defaults=false;;
+  --header | --heade | --head | --hea )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    as_fn_append CONFIG_HEADERS " '$ac_optarg'"
+    ac_need_defaults=false;;
+  --he | --h)
+    # Conflict between --help and --header
+    as_fn_error $? "ambiguous option: \`$1'
+Try \`$0 --help' for more information.";;
+  --help | --hel | -h )
+    $as_echo "$ac_cs_usage"; exit ;;
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil | --si | --s)
+    ac_cs_silent=: ;;
+
+  # This is an error.
+  -*) as_fn_error $? "unrecognized option: \`$1'
+Try \`$0 --help' for more information." ;;
+
+  *) as_fn_append ac_config_targets " $1"
+     ac_need_defaults=false ;;
+
+  esac
+  shift
+done
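Together with the usage text above, this loop is the entire option parser for config.status. A few illustrative invocations (the file names are simply the ones configured here):

    ./config.status Makefile            # regenerate only the Makefile
    ./config.status --header=config.h   # regenerate only the configuration header
    ./config.status --recheck           # rerun configure with its original arguments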
+
+ac_configure_extra_args=
+
+if $ac_cs_silent; then
+  exec 6>/dev/null
+  ac_configure_extra_args="$ac_configure_extra_args --silent"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+if \$ac_cs_recheck; then
+  set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
+  shift
+  \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
+  CONFIG_SHELL='$SHELL'
+  export CONFIG_SHELL
+  exec "\$@"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+exec 5>>config.log
+{
+  echo
+  sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
+## Running $as_me. ##
+_ASBOX
+  $as_echo "$ac_log"
+} >&5
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+#
+# INIT-COMMANDS
+#
+AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
+
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`'
+macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`'
+AS='`$ECHO "$AS" | $SED "$delay_single_quote_subst"`'
+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
+OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`'
+enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`'
+pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`'
+enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`'
+SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`'
+ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`'
+PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`'
+host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`'
+host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`'
+host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`'
+build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`'
+build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`'
+build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`'
+SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`'
+Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`'
+GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`'
+EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`'
+FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`'
+LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`'
+NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`'
+LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`'
+max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`'
+ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`'
+exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
+old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`'
+lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`'
+CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`'
+CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`'
+compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`'
+GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`'
+OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`'
+libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`'
+shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`'
+extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
+archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`'
+enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`'
+export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`'
+whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`'
+compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`'
+old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`'
+old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
+archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`'
+archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`'
+module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`'
+module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`'
+with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`'
+allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`'
+no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`'
+hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`'
+hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`'
+hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`'
+hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`'
+hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`'
+hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`'
+hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`'
+version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`'
+runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`'
+shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`'
+shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`'
+libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`'
+library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`'
+soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`'
+install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`'
+postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
+finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`'
+finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`'
+hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`'
+sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`'
+sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`'
+hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`'
+enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`'
+enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`'
+enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`'
+old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`'
+striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`'
+
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in AS \
+DLLTOOL \
+OBJDUMP \
+SHELL \
+ECHO \
+PATH_SEPARATOR \
+SED \
+GREP \
+EGREP \
+FGREP \
+LD \
+NM \
+LN_S \
+lt_SP2NL \
+lt_NL2SP \
+reload_flag \
+deplibs_check_method \
+file_magic_cmd \
+file_magic_glob \
+want_nocaseglob \
+sharedlib_from_linklib_cmd \
+AR \
+AR_FLAGS \
+archiver_list_spec \
+STRIP \
+RANLIB \
+CC \
+CFLAGS \
+compiler \
+lt_cv_sys_global_symbol_pipe \
+lt_cv_sys_global_symbol_to_cdecl \
+lt_cv_sys_global_symbol_to_c_name_address \
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
+nm_file_list_spec \
+lt_prog_compiler_no_builtin_flag \
+lt_prog_compiler_pic \
+lt_prog_compiler_wl \
+lt_prog_compiler_static \
+lt_cv_prog_compiler_c_o \
+need_locks \
+MANIFEST_TOOL \
+DSYMUTIL \
+NMEDIT \
+LIPO \
+OTOOL \
+OTOOL64 \
+shrext_cmds \
+export_dynamic_flag_spec \
+whole_archive_flag_spec \
+compiler_needs_object \
+with_gnu_ld \
+allow_undefined_flag \
+no_undefined_flag \
+hardcode_libdir_flag_spec \
+hardcode_libdir_separator \
+exclude_expsyms \
+include_expsyms \
+file_list_spec \
+variables_saved_for_relink \
+libname_spec \
+library_names_spec \
+soname_spec \
+install_override_mode \
+finish_eval \
+old_striplib \
+striplib; do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[\\\\\\\`\\"\\\$]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in reload_cmds \
+old_postinstall_cmds \
+old_postuninstall_cmds \
+old_archive_cmds \
+extract_expsyms_cmds \
+old_archive_from_new_cmds \
+old_archive_from_expsyms_cmds \
+archive_cmds \
+archive_expsym_cmds \
+module_cmds \
+module_expsym_cmds \
+export_symbols_cmds \
+prelink_cmds \
+postlink_cmds \
+postinstall_cmds \
+postuninstall_cmds \
+finish_cmds \
+sys_lib_search_path_spec \
+sys_lib_dlsearch_path_spec; do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[\\\\\\\`\\"\\\$]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+ac_aux_dir='$ac_aux_dir'
+xsi_shell='$xsi_shell'
+lt_shell_append='$lt_shell_append'
+
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes.
+if test -n "\${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    TIMESTAMP='$TIMESTAMP'
+    RM='$RM'
+    ofile='$ofile'
+
+
+
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+
+# Handling of arguments.
+for ac_config_target in $ac_config_targets
+do
+  case $ac_config_target in
+    "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h:config-h.in" ;;
+    "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
+    "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;;
+    "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
+
+  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
+  esac
+done
+
+
+# If the user did not use the arguments to specify the items to instantiate,
+# then the envvar interface is used.  Set only those that are not.
+# We use the long form for the default assignment because of an extremely
+# bizarre bug on SunOS 4.1.3.
+if $ac_need_defaults; then
+  test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
+  test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
+  test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
+fi
+
+# Have a temporary directory for convenience.  Make it in the build tree
+# simply because there is no reason against having it here, and in addition,
+# creating and moving files from /tmp can sometimes cause problems.
+# Hook for its removal unless debugging.
+# Note that there is a small window in which the directory will not be cleaned:
+# after its creation but before its name has been assigned to `$tmp'.
+$debug ||
+{
+  tmp= ac_tmp=
+  trap 'exit_status=$?
+  : "${ac_tmp:=$tmp}"
+  { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
+' 0
+  trap 'as_fn_exit 1' 1 2 13 15
+}
+# Create a (secure) tmp directory for tmp files.
+
+{
+  tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
+  test -d "$tmp"
+}  ||
+{
+  tmp=./conf$$-$RANDOM
+  (umask 077 && mkdir "$tmp")
+} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
+ac_tmp=$tmp
+
+# Set up the scripts for CONFIG_FILES section.
+# No need to generate them if there are no CONFIG_FILES.
+# This happens for instance with `./config.status config.h'.
+if test -n "$CONFIG_FILES"; then
+
+
+ac_cr=`echo X | tr X '\015'`
+# On cygwin, bash can eat \r inside `` if the user requested igncr.
+# But we know of no other shell where ac_cr would be empty at this
+# point, so we can use a bashism as a fallback.
+if test "x$ac_cr" = x; then
+  eval ac_cr=\$\'\\r\'
+fi
+ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
+if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
+  ac_cs_awk_cr='\\r'
+else
+  ac_cs_awk_cr=$ac_cr
+fi
+
+echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
+_ACEOF
+
+
+{
+  echo "cat >conf$$subs.awk <<_ACEOF" &&
+  echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
+  echo "_ACEOF"
+} >conf$$subs.sh ||
+  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
+ac_delim='%!_!# '
+for ac_last_try in false false false false false :; do
+  . ./conf$$subs.sh ||
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+
+  ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
+  if test $ac_delim_n = $ac_delim_num; then
+    break
+  elif $ac_last_try; then
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+rm -f conf$$subs.sh
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
+_ACEOF
+sed -n '
+h
+s/^/S["/; s/!.*/"]=/
+p
+g
+s/^[^!]*!//
+:repl
+t repl
+s/'"$ac_delim"'$//
+t delim
+:nl
+h
+s/\(.\{148\}\)..*/\1/
+t more1
+s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
+p
+n
+b repl
+:more1
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t nl
+:delim
+h
+s/\(.\{148\}\)..*/\1/
+t more2
+s/["\\]/\\&/g; s/^/"/; s/$/"/
+p
+b
+:more2
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t delim
+' <conf$$subs.awk | sed '
+/^[^""]/{
+  N
+  s/\n//
+}
+' >>$CONFIG_STATUS || ac_write_fail=1
+rm -f conf$$subs.awk
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+_ACAWK
+cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
+  for (key in S) S_is_set[key] = 1
+  FS = ""
+
+}
+{
+  line = $ 0
+  nfields = split(line, field, "@")
+  substed = 0
+  len = length(field[1])
+  for (i = 2; i < nfields; i++) {
+    key = field[i]
+    keylen = length(key)
+    if (S_is_set[key]) {
+      value = S[key]
+      line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
+      len += length(value) + length(field[++i])
+      substed = 1
+    } else
+      len += 1 + keylen
+  }
+
+  print line
+}
+
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
+  sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
+else
+  cat
+fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
+  || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
+_ACEOF
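The sed pipeline and awk fragments assembled above turn the list in $ac_subst_vars into "$ac_tmp/subs.awk", an awk script that replaces each @VARIABLE@ placeholder in a template with its configured value, splitting values longer than 148 characters across continued string literals. Conceptually the substitution step behaves like this hand-written simplification (the real config.status also handles the delimiter, line-length, and carriage-return machinery):

    awk 'BEGIN { S["prefix"] = "/usr/local" }
         { gsub(/@prefix@/, S["prefix"]); print }' Makefile.in > Makefile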
+
+# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
+# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
+# trailing colons and then remove the whole line if VPATH becomes empty
+# (actually we leave an empty line to preserve line numbers).
+if test "x$srcdir" = x.; then
+  ac_vpsub='/^[	 ]*VPATH[	 ]*=[	 ]*/{
+h
+s///
+s/^/:/
+s/[	 ]*$/:/
+s/:\$(srcdir):/:/g
+s/:\${srcdir}:/:/g
+s/:@srcdir@:/:/g
+s/^:*//
+s/:*$//
+x
+s/\(=[	 ]*\).*/\1/
+G
+s/\n//
+s/^[^=]*=[	 ]*$//
+}'
+fi
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+fi # test -n "$CONFIG_FILES"
+
+# Set up the scripts for CONFIG_HEADERS section.
+# No need to generate them if there are no CONFIG_HEADERS.
+# This happens for instance with `./config.status Makefile'.
+if test -n "$CONFIG_HEADERS"; then
+cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
+BEGIN {
+_ACEOF
+
+# Transform confdefs.h into an awk script `defines.awk', embedded as
+# here-document in config.status, that substitutes the proper values into
+# config.h.in to produce config.h.
+
+# Create a delimiter string that does not exist in confdefs.h, to ease
+# handling of long lines.
+ac_delim='%!_!# '
+for ac_last_try in false false :; do
+  ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
+  if test -z "$ac_tt"; then
+    break
+  elif $ac_last_try; then
+    as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+
+# For the awk script, D is an array of macro values keyed by name;
+# likewise, P contains macro parameters, if any.  Preserve backslash
+# newline sequences.
+
+ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
+sed -n '
+s/.\{148\}/&'"$ac_delim"'/g
+t rset
+:rset
+s/^[	 ]*#[	 ]*define[	 ][	 ]*/ /
+t def
+d
+:def
+s/\\$//
+t bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3"/p
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2"/p
+d
+:bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3\\\\\\n"\\/p
+t cont
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
+t cont
+d
+:cont
+n
+s/.\{148\}/&'"$ac_delim"'/g
+t clear
+:clear
+s/\\$//
+t bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/"/p
+d
+:bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
+b cont
+' <confdefs.h | sed '
+s/'"$ac_delim"'/"\\\
+"/g' >>$CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  for (key in D) D_is_set[key] = 1
+  FS = ""
+}
+/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
+  line = \$ 0
+  split(line, arg, " ")
+  if (arg[1] == "#") {
+    defundef = arg[2]
+    mac1 = arg[3]
+  } else {
+    defundef = substr(arg[1], 2)
+    mac1 = arg[2]
+  }
+  split(mac1, mac2, "(") #)
+  macro = mac2[1]
+  prefix = substr(line, 1, index(line, defundef) - 1)
+  if (D_is_set[macro]) {
+    # Preserve the white space surrounding the "#".
+    print prefix "define", macro P[macro] D[macro]
+    next
+  } else {
+    # Replace #undef with comments.  This is necessary, for example,
+    # in the case of _POSIX_SOURCE, which is predefined and required
+    # on some systems where configure will not decide to define it.
+    if (defundef == "undef") {
+      print "/*", prefix defundef, macro, "*/"
+      next
+    }
+  }
+}
+{ print }
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+  as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
+fi # test -n "$CONFIG_HEADERS"
+
+
+eval set X "  :F $CONFIG_FILES  :H $CONFIG_HEADERS    :C $CONFIG_COMMANDS"
+shift
+for ac_tag
+do
+  case $ac_tag in
+  :[FHLC]) ac_mode=$ac_tag; continue;;
+  esac
+  case $ac_mode$ac_tag in
+  :[FHL]*:*);;
+  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
+  :[FH]-) ac_tag=-:-;;
+  :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
+  esac
+  ac_save_IFS=$IFS
+  IFS=:
+  set x $ac_tag
+  IFS=$ac_save_IFS
+  shift
+  ac_file=$1
+  shift
+
+  case $ac_mode in
+  :L) ac_source=$1;;
+  :[FH])
+    ac_file_inputs=
+    for ac_f
+    do
+      case $ac_f in
+      -) ac_f="$ac_tmp/stdin";;
+      *) # Look for the file first in the build tree, then in the source tree
+	 # (if the path is not absolute).  The absolute path cannot be DOS-style,
+	 # because $ac_f cannot contain `:'.
+	 test -f "$ac_f" ||
+	   case $ac_f in
+	   [\\/$]*) false;;
+	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
+	   esac ||
+	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
+      esac
+      case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
+      as_fn_append ac_file_inputs " '$ac_f'"
+    done
+
+    # Let's still pretend it is `configure' which instantiates (i.e., don't
+    # use $as_me); people would be surprised to read:
+    #    /* config.h.  Generated by config.status.  */
+    configure_input='Generated from '`
+	  $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
+	`' by configure.'
+    if test x"$ac_file" != x-; then
+      configure_input="$ac_file.  $configure_input"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
+$as_echo "$as_me: creating $ac_file" >&6;}
+    fi
+    # Neutralize special characters interpreted by sed in replacement strings.
+    case $configure_input in #(
+    *\&* | *\|* | *\\* )
+       ac_sed_conf_input=`$as_echo "$configure_input" |
+       sed 's/[\\\\&|]/\\\\&/g'`;; #(
+    *) ac_sed_conf_input=$configure_input;;
+    esac
+
+    case $ac_tag in
+    *:-:* | *:-) cat >"$ac_tmp/stdin" \
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
+    esac
+    ;;
+  esac
+
+  ac_dir=`$as_dirname -- "$ac_file" ||
+$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$ac_file" : 'X\(//\)[^/]' \| \
+	 X"$ac_file" : 'X\(//\)$' \| \
+	 X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$ac_file" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+  as_dir="$ac_dir"; as_fn_mkdir_p
+  ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+  # A ".." for each directory in $ac_dir_suffix.
+  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+  case $ac_top_builddir_sub in
+  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+  esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+  .)  # We are building in place.
+    ac_srcdir=.
+    ac_top_srcdir=$ac_top_builddir_sub
+    ac_abs_top_srcdir=$ac_pwd ;;
+  [\\/]* | ?:[\\/]* )  # Absolute name.
+    ac_srcdir=$srcdir$ac_dir_suffix;
+    ac_top_srcdir=$srcdir
+    ac_abs_top_srcdir=$srcdir ;;
+  *) # Relative name.
+    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+    ac_top_srcdir=$ac_top_build_prefix$srcdir
+    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+
+  case $ac_mode in
+  :F)
+  #
+  # CONFIG_FILE
+  #
+
+  case $INSTALL in
+  [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
+  *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
+  esac
+  ac_MKDIR_P=$MKDIR_P
+  case $MKDIR_P in
+  [\\/$]* | ?:[\\/]* ) ;;
+  */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
+  esac
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# If the template does not know about datarootdir, expand it.
+# FIXME: This hack should be removed a few years after 2.60.
+ac_datarootdir_hack=; ac_datarootdir_seen=
+ac_sed_dataroot='
+/datarootdir/ {
+  p
+  q
+}
+/@datadir@/p
+/@docdir@/p
+/@infodir@/p
+/@localedir@/p
+/@mandir@/p'
+case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
+*datarootdir*) ac_datarootdir_seen=yes;;
+*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
+$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  ac_datarootdir_hack='
+  s&@datadir@&$datadir&g
+  s&@docdir@&$docdir&g
+  s&@infodir@&$infodir&g
+  s&@localedir@&$localedir&g
+  s&@mandir@&$mandir&g
+  s&\\\${datarootdir}&$datarootdir&g' ;;
+esac
+_ACEOF
+
+# Neutralize VPATH when `$srcdir' = `.'.
+# Shell code in configure.ac might set extrasub.
+# FIXME: do we really want to maintain this feature?
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_sed_extra="$ac_vpsub
+$extrasub
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+:t
+/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
+s|@configure_input@|$ac_sed_conf_input|;t t
+s&@top_builddir@&$ac_top_builddir_sub&;t t
+s&@top_build_prefix@&$ac_top_build_prefix&;t t
+s&@srcdir@&$ac_srcdir&;t t
+s&@abs_srcdir@&$ac_abs_srcdir&;t t
+s&@top_srcdir@&$ac_top_srcdir&;t t
+s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
+s&@builddir@&$ac_builddir&;t t
+s&@abs_builddir@&$ac_abs_builddir&;t t
+s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
+s&@INSTALL@&$ac_INSTALL&;t t
+s&@MKDIR_P@&$ac_MKDIR_P&;t t
+$ac_datarootdir_hack
+"
+eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
+  >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+
+test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
+  { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
+  { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' \
+      "$ac_tmp/out"`; test -z "$ac_out"; } &&
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined.  Please make sure it is defined" >&5
+$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined.  Please make sure it is defined" >&2;}
+
+  rm -f "$ac_tmp/stdin"
+  case $ac_file in
+  -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
+  *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
+  esac \
+  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+ ;;
+  :H)
+  #
+  # CONFIG_HEADER
+  #
+  if test x"$ac_file" != x-; then
+    {
+      $as_echo "/* $configure_input  */" \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
+    } >"$ac_tmp/config.h" \
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+    if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
+      { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
+$as_echo "$as_me: $ac_file is unchanged" >&6;}
+    else
+      rm -f "$ac_file"
+      mv "$ac_tmp/config.h" "$ac_file" \
+	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
+    fi
+  else
+    $as_echo "/* $configure_input  */" \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
+      || as_fn_error $? "could not create -" "$LINENO" 5
+  fi
+# Compute "$ac_file"'s index in $config_headers.
+_am_arg="$ac_file"
+_am_stamp_count=1
+for _am_header in $config_headers :; do
+  case $_am_header in
+    $_am_arg | $_am_arg:* )
+      break ;;
+    * )
+      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
+  esac
+done
+echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" ||
+$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$_am_arg" : 'X\(//\)[^/]' \| \
+	 X"$_am_arg" : 'X\(//\)$' \| \
+	 X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$_am_arg" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`/stamp-h$_am_stamp_count
+ ;;
+
+  :C)  { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
+$as_echo "$as_me: executing $ac_file commands" >&6;}
+ ;;
+  esac
+
+
+  case $ac_file$ac_mode in
+    "depfiles":C) test x"$AMDEP_TRUE" != x"" || {
+  # Autoconf 2.62 quotes --file arguments for eval, but not when files
+  # are listed without --file.  Let's play safe and only enable the eval
+  # if we detect the quoting.
+  case $CONFIG_FILES in
+  *\'*) eval set x "$CONFIG_FILES" ;;
+  *)   set x $CONFIG_FILES ;;
+  esac
+  shift
+  for mf
+  do
+    # Strip MF so we end up with the name of the file.
+    mf=`echo "$mf" | sed -e 's/:.*$//'`
+    # Check whether this is an Automake generated Makefile or not.
+    # We used to match only the files named `Makefile.in', but
+    # some people rename them; so instead we look at the file content.
+    # Grep'ing the first line is not enough: some people post-process
+    # each Makefile.in and add a new line on top of each file to say so.
+    # Grep'ing the whole file is not good either: AIX grep has a line
+    # limit of 2048, but all sed's we know understand at least 4000.
+    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
+      dirpart=`$as_dirname -- "$mf" ||
+$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$mf" : 'X\(//\)[^/]' \| \
+	 X"$mf" : 'X\(//\)$' \| \
+	 X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$mf" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+    else
+      continue
+    fi
+    # Extract the definition of DEPDIR, am__include, and am__quote
+    # from the Makefile without running `make'.
+    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+    test -z "$DEPDIR" && continue
+    am__include=`sed -n 's/^am__include = //p' < "$mf"`
+    test -z "$am__include" && continue
+    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+    # When using ansi2knr, U may be empty or an underscore; expand it
+    U=`sed -n 's/^U = //p' < "$mf"`
+    # Find all dependency output files, they are included files with
+    # $(DEPDIR) in their names.  We invoke sed twice because it is the
+    # simplest approach to changing $(DEPDIR) to its actual value in the
+    # expansion.
+    for file in `sed -n "
+      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
+      # Make sure the directory exists.
+      test -f "$dirpart/$file" && continue
+      fdir=`$as_dirname -- "$file" ||
+$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$file" : 'X\(//\)[^/]' \| \
+	 X"$file" : 'X\(//\)$' \| \
+	 X"$file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$file" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      as_dir=$dirpart/$fdir; as_fn_mkdir_p
+      # echo "creating $dirpart/$file"
+      echo '# dummy' > "$dirpart/$file"
+    done
+  done
+}
+ ;;
+    "libtool":C)
+
+    # See if we are running on zsh, and set the options which allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}" ; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile="${ofile}T"
+    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+    $RM "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+# The names of the tagged configurations supported by this script.
+available_tags=""
+
+# ### BEGIN LIBTOOL CONFIG
+
+# Which release of libtool.m4 was used?
+macro_version=$macro_version
+macro_revision=$macro_revision
+
+# Assembler program.
+AS=$lt_AS
+
+# DLL creation program.
+DLLTOOL=$lt_DLLTOOL
+
+# Object dumper program.
+OBJDUMP=$lt_OBJDUMP
+
+# Whether or not to build shared libraries.
+build_libtool_libs=$enable_shared
+
+# Whether or not to build static libraries.
+build_old_libs=$enable_static
+
+# What type of objects to build.
+pic_mode=$pic_mode
+
+# Whether or not to optimize for fast installation.
+fast_install=$enable_fast_install
+
+# Shell to use when invoking shell scripts.
+SHELL=$lt_SHELL
+
+# An echo program that protects backslashes.
+ECHO=$lt_ECHO
+
+# The PATH separator for the build system.
+PATH_SEPARATOR=$lt_PATH_SEPARATOR
+
+# The host system.
+host_alias=$host_alias
+host=$host
+host_os=$host_os
+
+# The build system.
+build_alias=$build_alias
+build=$build
+build_os=$build_os
+
+# A sed program that does not truncate output.
+SED=$lt_SED
+
+# Sed that helps us avoid accidentally triggering echo(1) options like -n.
+Xsed="\$SED -e 1s/^X//"
+
+# A grep program that handles long lines.
+GREP=$lt_GREP
+
+# An ERE matcher.
+EGREP=$lt_EGREP
+
+# A literal string matcher.
+FGREP=$lt_FGREP
+
+# A BSD- or MS-compatible name lister.
+NM=$lt_NM
+
+# Whether we need soft or hard links.
+LN_S=$lt_LN_S
+
+# What is the maximum length of a command?
+max_cmd_len=$max_cmd_len
+
+# Object file suffix (normally "o").
+objext=$ac_objext
+
+# Executable file suffix (normally "").
+exeext=$exeext
+
+# whether the shell understands "unset".
+lt_unset=$lt_unset
+
+# turn spaces into newlines.
+SP2NL=$lt_lt_SP2NL
+
+# turn newlines into spaces.
+NL2SP=$lt_lt_NL2SP
+
+# convert \$build file names to \$host format.
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+
+# convert \$build files to toolchain format.
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+
+# Method to check whether dependent libraries are shared objects.
+deplibs_check_method=$lt_deplibs_check_method
+
+# Command to use when deplibs_check_method = "file_magic".
+file_magic_cmd=$lt_file_magic_cmd
+
+# How to find potential files when deplibs_check_method = "file_magic".
+file_magic_glob=$lt_file_magic_glob
+
+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
+want_nocaseglob=$lt_want_nocaseglob
+
+# Command to associate shared and link libraries.
+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
+
+# The archiver.
+AR=$lt_AR
+
+# Flags to create an archive.
+AR_FLAGS=$lt_AR_FLAGS
+
+# How to feed a file listing to the archiver.
+archiver_list_spec=$lt_archiver_list_spec
+
+# A symbol stripping program.
+STRIP=$lt_STRIP
+
+# Commands used to install an old-style archive.
+RANLIB=$lt_RANLIB
+old_postinstall_cmds=$lt_old_postinstall_cmds
+old_postuninstall_cmds=$lt_old_postuninstall_cmds
+
+# Whether to use a lock for old archive extraction.
+lock_old_archive_extraction=$lock_old_archive_extraction
+
+# A C compiler.
+LTCC=$lt_CC
+
+# LTCC compiler flags.
+LTCFLAGS=$lt_CFLAGS
+
+# Take the output of nm and produce a listing of raw symbols and C names.
+global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe
+
+# Transform the output of nm in a proper C declaration.
+global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl
+
+# Transform the output of nm in a C name address pair.
+global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+
+# Transform the output of nm in a C name address pair when lib prefix is needed.
+global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
+# Specify filename containing input files for \$NM.
+nm_file_list_spec=$lt_nm_file_list_spec
+
+# The root where to search for dependent libraries, and in which our libraries should be installed.
+lt_sysroot=$lt_sysroot
+
+# The name of the directory that contains temporary libtool files.
+objdir=$objdir
+
+# Used to examine libraries when file_magic_cmd begins with "file".
+MAGIC_CMD=$MAGIC_CMD
+
+# Must we lock files when doing compilation?
+need_locks=$lt_need_locks
+
+# Manifest tool.
+MANIFEST_TOOL=$lt_MANIFEST_TOOL
+
+# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+DSYMUTIL=$lt_DSYMUTIL
+
+# Tool to change global to local symbols on Mac OS X.
+NMEDIT=$lt_NMEDIT
+
+# Tool to manipulate fat objects and archives on Mac OS X.
+LIPO=$lt_LIPO
+
+# ldd/readelf like tool for Mach-O binaries on Mac OS X.
+OTOOL=$lt_OTOOL
+
+# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
+OTOOL64=$lt_OTOOL64
+
+# Old archive suffix (normally "a").
+libext=$libext
+
+# Shared library suffix (normally ".so").
+shrext_cmds=$lt_shrext_cmds
+
+# The commands to extract the exported symbol list from a shared archive.
+extract_expsyms_cmds=$lt_extract_expsyms_cmds
+
+# Variables whose values should be saved in libtool wrapper scripts and
+# restored at link time.
+variables_saved_for_relink=$lt_variables_saved_for_relink
+
+# Do we need the "lib" prefix for modules?
+need_lib_prefix=$need_lib_prefix
+
+# Do we need a version for libraries?
+need_version=$need_version
+
+# Library versioning type.
+version_type=$version_type
+
+# Shared library runtime path variable.
+runpath_var=$runpath_var
+
+# Shared library path variable.
+shlibpath_var=$shlibpath_var
+
+# Is shlibpath searched before the hard-coded library search path?
+shlibpath_overrides_runpath=$shlibpath_overrides_runpath
+
+# Format of library name prefix.
+libname_spec=$lt_libname_spec
+
+# List of archive names.  First name is the real one, the rest are links.
+# The last name is the one that the linker finds with -lNAME
+library_names_spec=$lt_library_names_spec
+
+# The coded name of the library, if different from the real name.
+soname_spec=$lt_soname_spec
+
+# Permission mode override for installation of shared libraries.
+install_override_mode=$lt_install_override_mode
+
+# Command to use after installation of a shared archive.
+postinstall_cmds=$lt_postinstall_cmds
+
+# Command to use after uninstallation of a shared archive.
+postuninstall_cmds=$lt_postuninstall_cmds
+
+# Commands used to finish a libtool library installation in a directory.
+finish_cmds=$lt_finish_cmds
+
+# As "finish_cmds", except a single script fragment to be evaled but
+# not shown.
+finish_eval=$lt_finish_eval
+
+# Whether we should hardcode library paths into libraries.
+hardcode_into_libs=$hardcode_into_libs
+
+# Compile-time system search path for libraries.
+sys_lib_search_path_spec=$lt_sys_lib_search_path_spec
+
+# Run-time system search path for libraries.
+sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec
+
+# Whether dlopen is supported.
+dlopen_support=$enable_dlopen
+
+# Whether dlopen of programs is supported.
+dlopen_self=$enable_dlopen_self
+
+# Whether dlopen of statically linked programs is supported.
+dlopen_self_static=$enable_dlopen_self_static
+
+# Commands to strip libraries.
+old_striplib=$lt_old_striplib
+striplib=$lt_striplib
+
+
+# The linker used to build libraries.
+LD=$lt_LD
+
+# How to create reloadable object files.
+reload_flag=$lt_reload_flag
+reload_cmds=$lt_reload_cmds
+
+# Commands used to build an old-style archive.
+old_archive_cmds=$lt_old_archive_cmds
+
+# A language specific compiler.
+CC=$lt_compiler
+
+# Is the compiler the GNU compiler?
+with_gcc=$GCC
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+# Additional compiler flags for building library objects.
+pic_flag=$lt_lt_prog_compiler_pic
+
+# How to pass a linker flag through the compiler.
+wl=$lt_lt_prog_compiler_wl
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=$lt_lt_prog_compiler_static
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=$lt_lt_cv_prog_compiler_c_o
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=$archive_cmds_need_lc
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=$lt_export_dynamic_flag_spec
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=$lt_whole_archive_flag_spec
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=$lt_compiler_needs_object
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=$lt_old_archive_from_new_cmds
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds
+
+# Commands used to build a shared archive.
+archive_cmds=$lt_archive_cmds
+archive_expsym_cmds=$lt_archive_expsym_cmds
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=$lt_module_cmds
+module_expsym_cmds=$lt_module_expsym_cmds
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=$lt_with_gnu_ld
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=$lt_allow_undefined_flag
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=$lt_no_undefined_flag
+
+# Flag to hardcode \$libdir into a binary during linking.
+# This must work even if \$libdir does not exist
+hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=$lt_hardcode_libdir_separator
+
+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=$hardcode_direct
+
+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting \${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=$hardcode_direct_absolute
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=$hardcode_minus_L
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=$hardcode_shlibpath_var
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=$hardcode_automatic
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=$inherit_rpath
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=$link_all_deplibs
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=$always_export_symbols
+
+# The commands to list exported symbols.
+export_symbols_cmds=$lt_export_symbols_cmds
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=$lt_exclude_expsyms
+
+# Symbols that must always be exported.
+include_expsyms=$lt_include_expsyms
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=$lt_prelink_cmds
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=$lt_postlink_cmds
+
+# Specify filename containing input files.
+file_list_spec=$lt_file_list_spec
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=$hardcode_action
+
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+
+ltmain="$ac_aux_dir/ltmain.sh"
+
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '$q' "$ltmain" >> "$cfgfile" \
+     || (rm -f "$cfgfile"; exit 1)
+
+  if test x"$xsi_shell" = xyes; then
+  sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
+func_dirname ()\
+{\
+\    case ${1} in\
+\      */*) func_dirname_result="${1%/*}${2}" ;;\
+\      *  ) func_dirname_result="${3}" ;;\
+\    esac\
+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_basename ()$/,/^} # func_basename /c\
+func_basename ()\
+{\
+\    func_basename_result="${1##*/}"\
+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
+func_dirname_and_basename ()\
+{\
+\    case ${1} in\
+\      */*) func_dirname_result="${1%/*}${2}" ;;\
+\      *  ) func_dirname_result="${3}" ;;\
+\    esac\
+\    func_basename_result="${1##*/}"\
+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
+func_stripname ()\
+{\
+\    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
+\    # positional parameters, so assign one to ordinary parameter first.\
+\    func_stripname_result=${3}\
+\    func_stripname_result=${func_stripname_result#"${1}"}\
+\    func_stripname_result=${func_stripname_result%"${2}"}\
+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
+func_split_long_opt ()\
+{\
+\    func_split_long_opt_name=${1%%=*}\
+\    func_split_long_opt_arg=${1#*=}\
+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
+func_split_short_opt ()\
+{\
+\    func_split_short_opt_arg=${1#??}\
+\    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
+func_lo2o ()\
+{\
+\    case ${1} in\
+\      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
+\      *)    func_lo2o_result=${1} ;;\
+\    esac\
+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_xform ()$/,/^} # func_xform /c\
+func_xform ()\
+{\
+    func_xform_result=${1%.*}.lo\
+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_arith ()$/,/^} # func_arith /c\
+func_arith ()\
+{\
+    func_arith_result=$(( $* ))\
+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_len ()$/,/^} # func_len /c\
+func_len ()\
+{\
+    func_len_result=${#1}\
+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+fi
+
+if test x"$lt_shell_append" = xyes; then
+  sed -e '/^func_append ()$/,/^} # func_append /c\
+func_append ()\
+{\
+    eval "${1}+=\\${2}"\
+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
+func_append_quoted ()\
+{\
+\    func_quote_for_eval "${2}"\
+\    eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
+  && mv -f "$cfgfile.tmp" "$cfgfile" \
+    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+
+
+  # Save a `func_append' function call where possible by direct use of '+='
+  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+else
+  # Save a `func_append' function call even when '+=' is not available
+  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
+    && mv -f "$cfgfile.tmp" "$cfgfile" \
+      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+  test 0 -eq $? || _lt_function_replace_fail=:
+fi
+
+if test x"$_lt_function_replace_fail" = x":"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
+fi
+
+
+   mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+
+ ;;
+
+  esac
+done # for ac_tag
+
+
+as_fn_exit 0
+_ACEOF
+ac_clean_files=$ac_clean_files_save
+
+test $ac_write_fail = 0 ||
+  as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
+
+
+# configure is writing to config.log, and then calls config.status.
+# config.status does its own redirection, appending to config.log.
+# Unfortunately, on DOS this fails, as config.log is still kept open
+# by configure, so config.status won't be able to write to it; its
+# output is simply discarded.  So we exec the FD to /dev/null,
+# effectively closing config.log, so it can be properly (re)opened and
+# appended to by config.status.  When coming back to configure, we
+# need to make the FD available again.
+if test "$no_create" != yes; then
+  ac_cs_success=:
+  ac_config_status_args=
+  test "$silent" = yes &&
+    ac_config_status_args="$ac_config_status_args --quiet"
+  exec 5>/dev/null
+  $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
+  exec 5>>config.log
+  # Use ||, not &&, to avoid exiting from the if with $? = 1, which
+  # would make configure fail if this is the last instruction.
+  $ac_cs_success || as_fn_exit 1
+fi
+if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
+$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
+fi
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/configure.ac b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/configure.ac
new file mode 100644
index 0000000..b6e9ea9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/configure.ac
@@ -0,0 +1,75 @@
+# Process this file with autoconf to create configure. -*- autoconf -*-
+#
+#    Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+#    Written by Gary V. Vaughan, 2004
+#
+#    NOTE: The canonical source of this file is maintained with the
+#    GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+#
+# GNU Libltdl is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# As a special exception to the GNU Lesser General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libltdl is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+# copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+####
+
+# This configure.ac is not used at all by the libtool bootstrap, but
+# is copied to the ltdl subdirectory if you libtoolize --ltdl your own
+# project.  Adding LT_WITH_LTDL to your project configure.ac will then
+# configure this directory if your user doesn't want to use the installed
+# libltdl.
+
+AC_PREREQ(2.59)dnl We use AS_HELP_STRING
+
+
+## ------------------------ ##
+## Autoconf initialisation. ##
+## ------------------------ ##
+AC_INIT([libltdl], [2.4.2], [bug-libtool@gnu.org])
+AC_CONFIG_HEADERS([config.h:config-h.in])
+AC_CONFIG_SRCDIR([ltdl.c])
+AC_CONFIG_AUX_DIR([config])
+AC_CONFIG_MACRO_DIR([m4])
+LT_CONFIG_LTDL_DIR([.]) # I am me!
+
+
+## ------------------------ ##
+## Automake Initialisation. ##
+## ------------------------ ##
+
+AM_INIT_AUTOMAKE([gnu])
+
+
+## ------------------------------- ##
+## Libtool specific configuration. ##
+## ------------------------------- ##
+pkgdatadir='${datadir}'"/${PACKAGE}"
+
+
+## ----------------------- ##
+## Libtool initialisation. ##
+## ----------------------- ##
+LT_INIT([dlopen win32-dll])
+_LTDL_SETUP
+
+
+## -------- ##
+## Outputs. ##
+## -------- ##
+AC_CONFIG_FILES([Makefile])
+AC_OUTPUT
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__alloc.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__alloc.h
new file mode 100644
index 0000000..1ceddf0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__alloc.h
@@ -0,0 +1,58 @@
+/* lt__alloc.h -- internal memory management interface
+
+   Copyright (C) 2004 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+*/
+
+#if !defined(LT__ALLOC_H)
+#define LT__ALLOC_H 1
+
+#include "lt_system.h"
+
+LT_BEGIN_C_DECLS
+
+#define MALLOC(tp, n)		(tp*) lt__malloc((n) * sizeof(tp))
+#define REALLOC(tp, mem, n)	(tp*) lt__realloc((mem), (n) * sizeof(tp))
+#define FREE(mem)					LT_STMT_START {	\
+	if (mem) { free ((void *)mem); mem = NULL; }	} LT_STMT_END
+#define MEMREASSIGN(p, q)				LT_STMT_START {	\
+	if ((p) != (q)) { if (p) free (p); (p) = (q); (q) = 0; }	\
+								} LT_STMT_END
+
+/* If set, this function is called when memory allocation has failed.  */
+LT_SCOPE void (*lt__alloc_die) (void);
+
+LT_SCOPE void *lt__malloc (size_t n);
+LT_SCOPE void *lt__zalloc (size_t n);
+LT_SCOPE void *lt__realloc (void *mem, size_t n);
+LT_SCOPE void *lt__memdup (void const *mem, size_t n);
+
+LT_SCOPE char *lt__strdup (const char *string);
+
+LT_END_C_DECLS
+
+#endif /*!defined(LT__ALLOC_H)*/
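The following sketch is not part of the imported lt__alloc.h; it only illustrates how the MALLOC/FREE convenience macros and lt__strdup() declared above are meant to be combined, assuming the matching lt__alloc.c implementation is linked in. The widget type and make_widget() helper are hypothetical.

    #include "lt__alloc.h"

    typedef struct { char *label; int id; } widget;   /* hypothetical client type */

    static widget *
    make_widget (const char *name)
    {
      /* MALLOC (widget, 1) expands to (widget *) lt__malloc ((1) * sizeof (widget)). */
      widget *w = MALLOC (widget, 1);
      if (!w)                       /* defensive: on failure the lt__alloc_die hook,
                                       if set, has already been called (see above) */
        return NULL;

      w->label = lt__strdup (name); /* owned copy of the caller's string */
      w->id    = 0;

      if (!w->label)
        FREE (w);                   /* frees the block and resets w to NULL */

      return w;                     /* NULL if either allocation did not succeed */
    }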
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__dirent.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__dirent.h
new file mode 100644
index 0000000..4f24f82
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__dirent.h
@@ -0,0 +1,87 @@
+/* lt__dirent.h -- internal directory entry scanning interface
+
+   Copyright (C) 2001, 2004, 2006 Free Software Foundation, Inc.
+   Written by Bob Friesenhahn, 2001
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+*/
+
+#if !defined(LT__DIRENT_H)
+#define LT__DIRENT_H 1
+
+#if defined(LT_CONFIG_H)
+#  include LT_CONFIG_H
+#else
+#  include <config.h>
+#endif
+
+#include "lt_system.h"
+
+#ifdef HAVE_DIRENT_H
+/* We have a fully operational dirent subsystem.  */
+#  include <dirent.h>
+#  define D_NAMLEN(dirent) (strlen((dirent)->d_name))
+
+#elif defined __WINDOWS__
+/* Use some wrapper code to emulate dirent on Windows.  */
+#  define WINDOWS_DIRENT_EMULATION 1
+
+#  include <windows.h>
+
+#  define D_NAMLEN(dirent)	(strlen((dirent)->d_name))
+#  define dirent		lt__dirent
+#  define DIR			lt__DIR
+#  define opendir		lt__opendir
+#  define readdir		lt__readdir
+#  define closedir		lt__closedir
+
+LT_BEGIN_C_DECLS
+
+struct dirent
+{
+  char d_name[LT_FILENAME_MAX];
+  int  d_namlen;
+};
+
+typedef struct
+{
+  HANDLE hSearch;
+  WIN32_FIND_DATA Win32FindData;
+  BOOL firsttime;
+  struct dirent file_info;
+} DIR;
+
+
+LT_SCOPE DIR *		opendir		(const char *path);
+LT_SCOPE struct dirent *readdir		(DIR *entry);
+LT_SCOPE void		closedir	(DIR *entry);
+
+LT_END_C_DECLS
+
+#else /* !defined(__WINDOWS__)*/
+ERROR - cannot find dirent
+#endif /*!defined(__WINDOWS__)*/
+
+#endif /*!defined(LT__DIRENT_H)*/
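An illustrative sketch, not part of the imported lt__dirent.h: client code keeps using the ordinary dirent calls and the D_NAMLEN() helper, and on Windows the macros above transparently remap dirent, DIR, opendir, readdir and closedir to the lt__ emulation declared in this header. list_directory() is a hypothetical caller.

    #include <stdio.h>
    #include <string.h>          /* D_NAMLEN() uses strlen() */
    #include "lt__dirent.h"

    static void
    list_directory (const char *path)
    {
      DIR *dir = opendir (path); /* lt__opendir under the Windows emulation */
      struct dirent *entry;

      if (!dir)
        return;

      while ((entry = readdir (dir)) != NULL)
        printf ("%s (%lu chars)\n",
                entry->d_name, (unsigned long) D_NAMLEN (entry));

      closedir (dir);
    }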
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__glibc.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__glibc.h
new file mode 100644
index 0000000..f284773
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__glibc.h
@@ -0,0 +1,83 @@
+/* lt__glibc.h -- support for non glibc environments
+
+   Copyright (C) 2004, 2006, 2007 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+*/
+
+#if !defined(LT__GLIBC_H)
+#define LT__GLIBC_H 1
+
+#if defined(LT_CONFIG_H)
+#  include LT_CONFIG_H
+#else
+#  include <config.h>
+#endif
+
+#if !defined(HAVE_ARGZ_H) || !defined(HAVE_WORKING_ARGZ)
+/* Redefine any glibc symbols we reimplement to import the
+   implementations into our lt__ namespace so we don't ever
+   clash with the system library if our clients use argz_*
+   from there in addition to libltdl.  */
+#  undef  argz_append
+#  define argz_append		lt__argz_append
+#  undef  argz_create_sep
+#  define argz_create_sep	lt__argz_create_sep
+#  undef  argz_insert
+#  define argz_insert		lt__argz_insert
+#  undef  argz_next
+#  define argz_next		lt__argz_next
+#  undef  argz_stringify
+#  define argz_stringify	lt__argz_stringify
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <argz.h>
+
+#ifdef __cplusplus
+}
+#endif
+
+# define slist_concat	lt__slist_concat
+# define slist_cons	lt__slist_cons
+# define slist_delete	lt__slist_delete
+# define slist_remove	lt__slist_remove
+# define slist_reverse	lt__slist_reverse
+# define slist_sort	lt__slist_sort
+# define slist_tail	lt__slist_tail
+# define slist_nth	lt__slist_nth
+# define slist_find	lt__slist_find
+# define slist_length	lt__slist_length
+# define slist_foreach	lt__slist_foreach
+# define slist_box	lt__slist_box
+# define slist_unbox	lt__slist_unbox
+
+#include <slist.h>
+
+#endif /*!defined(LT__GLIBC_H)*/
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__private.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__private.h
new file mode 100644
index 0000000..f4c4a3d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__private.h
@@ -0,0 +1,149 @@
+/* lt__private.h -- internal apis for libltdl
+
+   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+*/
+
+#if !defined(LT__PRIVATE_H)
+#define LT__PRIVATE_H 1
+
+#if defined(LT_CONFIG_H)
+#  include LT_CONFIG_H
+#else
+#  include <config.h>
+#endif
+
+#include <stdio.h>
+#include <ctype.h>
+#include <assert.h>
+#include <errno.h>
+#include <string.h>
+
+#if defined(HAVE_UNISTD_H)
+#  include <unistd.h>
+#endif
+
+/* Import internal interfaces...  */
+#include "lt__alloc.h"
+#include "lt__dirent.h"
+#include "lt__strl.h"
+#include "lt__glibc.h"
+
+/* ...and all exported interfaces.  */
+#include "ltdl.h"
+
+#if defined(WITH_DMALLOC)
+#  include <dmalloc.h>
+#endif
+
+/* DLL building support on win32 hosts;  mostly to work around their
+   ridiculous implementation of data symbol exporting. */
+#ifndef LT_GLOBAL_DATA
+# if defined(__WINDOWS__) || defined(__CYGWIN__)
+#  if defined(DLL_EXPORT)	/* defined by libtool (if required) */
+#   define LT_GLOBAL_DATA	__declspec(dllexport)
+#  endif
+# endif
+# ifndef LT_GLOBAL_DATA
+#  define LT_GLOBAL_DATA	/* static linking or !__WINDOWS__ */
+# endif
+#endif
+
+#ifndef __attribute__
+# if __GNUC__ < 2 || (__GNUC__ == 2 && __GNUC_MINOR__ < 8) || __STRICT_ANSI__
+#  define __attribute__(x)
+# endif
+#endif
+
+#ifndef LT__UNUSED
+# define LT__UNUSED __attribute__ ((__unused__))
+#endif
+
+
+LT_BEGIN_C_DECLS
+
+#if !defined(errno)
+extern int errno;
+#endif
+
+LT_SCOPE void	lt__alloc_die_callback (void);
+
+
+/* For readability:  */
+#define strneq(s1, s2)	(strcmp((s1), (s2)) != 0)
+#define streq(s1, s2)	(!strcmp((s1), (s2)))
+
+
+
+/* --- OPAQUE STRUCTURES DECLARED IN LTDL.H --- */
+
+/* This type is used for the array of interface data sets in each handler. */
+typedef struct {
+  lt_dlinterface_id	key;
+  void *		data;
+} lt_interface_data;
+
+struct lt__handle {
+  lt_dlhandle		next;
+  const lt_dlvtable *	vtable;		/* dlopening interface */
+  lt_dlinfo		info;		/* user visible fields */
+  int			depcount;	/* number of dependencies */
+  lt_dlhandle *		deplibs;	/* dependencies */
+  lt_module		module;		/* system module handle */
+  void *		system;		/* system specific data */
+  lt_interface_data *	interface_data;	/* per caller associated data */
+  int			flags;		/* various boolean stats */
+};
+
+struct lt__advise {
+  unsigned int	try_ext:1;	/* try system library extensions.  */
+  unsigned int	is_resident:1;	/* module can't be unloaded. */
+  unsigned int	is_symglobal:1;	/* module symbols can satisfy
+				   subsequently loaded modules.  */
+  unsigned int	is_symlocal:1;	/* module symbols are only available
+				   locally. */
+  unsigned int	try_preload_only:1;/* only preloaded modules will be tried. */
+};
+
+/* --- ERROR HANDLING --- */
+
+/* Extract the diagnostic strings from the error table macro in the same
+   order as the enumerated indices in lt_error.h. */
+
+#define LT__STRERROR(name)	lt__error_string(LT_CONC(LT_ERROR_,name))
+
+#define LT__GETERROR(lvalue)	      (lvalue) = lt__get_last_error()
+#define LT__SETERRORSTR(errormsg)     lt__set_last_error(errormsg)
+#define LT__SETERROR(errorcode)	      LT__SETERRORSTR(LT__STRERROR(errorcode))
+
+LT_SCOPE const char *lt__error_string	(int errorcode);
+LT_SCOPE const char *lt__get_last_error	(void);
+LT_SCOPE const char *lt__set_last_error	(const char *errormsg);
+
+LT_END_C_DECLS
+
+#endif /*!defined(LT__PRIVATE_H)*/
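A minimal sketch, not part of the imported lt__private.h, of how internal code is expected to use the error macros and the streq() readability helper defined above. check_filename() is hypothetical; the diagnostic text itself comes from the table in lt_error.h.

    #include "lt__private.h"

    static const char *
    check_filename (const char *filename)
    {
      const char *err = NULL;

      if (streq (filename, ""))
        /* Expands to lt__set_last_error (lt__error_string (LT_ERROR_FILE_NOT_FOUND)). */
        LT__SETERROR (FILE_NOT_FOUND);

      LT__GETERROR (err);          /* err = lt__get_last_error () */
      return err;                  /* presumably NULL when nothing was recorded */
    }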
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__strl.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__strl.h
new file mode 100644
index 0000000..5799dc8
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt__strl.h
@@ -0,0 +1,53 @@
+/* lt__strl.h -- size-bounded string copying and concatenation
+
+   Copyright (C) 2004, 2006 Free Software Foundation, Inc.
+   Written by Bob Friesenhahn, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#if !defined(LT__STRL_H)
+#define LT__STRL_H 1
+
+#if defined(LT_CONFIG_H)
+#  include LT_CONFIG_H
+#else
+#  include <config.h>
+#endif
+
+#include <string.h>
+#include "lt_system.h"
+
+#if !defined(HAVE_STRLCAT)
+#  define strlcat(dst,src,dstsize) lt_strlcat(dst,src,dstsize)
+LT_SCOPE size_t lt_strlcat(char *dst, const char *src, const size_t dstsize);
+#endif /* !defined(HAVE_STRLCAT) */
+
+#if !defined(HAVE_STRLCPY)
+#  define strlcpy(dst,src,dstsize) lt_strlcpy(dst,src,dstsize)
+LT_SCOPE size_t lt_strlcpy(char *dst, const char *src, const size_t dstsize);
+#endif /* !defined(HAVE_STRLCPY) */
+
+#endif /*!defined(LT__STRL_H)*/
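Illustrative only, and not part of the imported lt__strl.h: with this header included, strlcpy()/strlcat() can be called unconditionally; on platforms that lack them they resolve to the lt_strlcpy()/lt_strlcat() replacements declared above (the matching lt__strl.c must be linked in). build_module_path() and the ".la" name below are hypothetical.

    #include "lt__strl.h"        /* pulls in lt_system.h for LT_FILENAME_MAX */

    static void
    build_module_path (char *buf, size_t bufsize,
                       const char *dir, const char *name)
    {
      /* Both calls are size-bounded: they never write past bufsize bytes of buf. */
      strlcpy (buf, dir, bufsize);
      strlcat (buf, "/", bufsize);
      strlcat (buf, name, bufsize);
    }

    /* Typical call:
         char path[LT_FILENAME_MAX];
         build_module_path (path, sizeof path, "/usr/lib", "foo.la");   */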
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_dlloader.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_dlloader.h
new file mode 100644
index 0000000..589fd0d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_dlloader.h
@@ -0,0 +1,90 @@
+/* lt_dlloader.h -- dynamic library loader interface
+
+   Copyright (C) 2004, 2007, 2008 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#if !defined(LT_DLLOADER_H)
+#define LT_DLLOADER_H 1
+
+#include <libltdl/lt_system.h>
+
+LT_BEGIN_C_DECLS
+
+typedef	void *	lt_dlloader;
+typedef void *	lt_module;
+typedef void *	lt_user_data;
+typedef struct lt__advise *	lt_dladvise;
+
+/* Function pointer types for module loader vtable entries:  */
+typedef lt_module   lt_module_open	(lt_user_data data,
+					 const char *filename,
+					 lt_dladvise advise);
+typedef int	    lt_module_close	(lt_user_data data,
+					 lt_module module);
+typedef void *	    lt_find_sym		(lt_user_data data, lt_module module,
+					 const char *symbolname);
+typedef int	    lt_dlloader_init	(lt_user_data data);
+typedef int	    lt_dlloader_exit	(lt_user_data data);
+
+/* Default priority is LT_DLLOADER_PREPEND if none is explicitly given.  */
+typedef enum {
+  LT_DLLOADER_PREPEND = 0, LT_DLLOADER_APPEND
+} lt_dlloader_priority;
+
+/* This structure defines a module loader, as populated by the get_vtable
+   entry point of each loader.  */
+typedef struct {
+  const char *		name;
+  const char *		sym_prefix;
+  lt_module_open *	module_open;
+  lt_module_close *	module_close;
+  lt_find_sym *		find_sym;
+  lt_dlloader_init *	dlloader_init;
+  lt_dlloader_exit *	dlloader_exit;
+  lt_user_data		dlloader_data;
+  lt_dlloader_priority	priority;
+} lt_dlvtable;
+
+LT_SCOPE int		lt_dlloader_add	   (const lt_dlvtable *vtable);
+LT_SCOPE lt_dlloader	lt_dlloader_next   (const lt_dlloader loader);
+
+LT_SCOPE lt_dlvtable *	lt_dlloader_remove	(const char *name);
+LT_SCOPE const lt_dlvtable *lt_dlloader_find	(const char *name);
+LT_SCOPE const lt_dlvtable *lt_dlloader_get	(lt_dlloader loader);
+
+
+/* Type of a function to get a loader's vtable:  */
+typedef  const lt_dlvtable *lt_get_vtable	(lt_user_data data);
+
+#ifdef LT_DEBUG_LOADERS
+LT_SCOPE void		lt_dlloader_dump	(void);
+#endif
+
+LT_END_C_DECLS
+
+#endif /*!defined(LT_DLLOADER_H)*/
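A sketch of the consumer side of this interface, not part of the imported lt_dlloader.h. It assumes a vtable obtained from a loader's get_vtable entry point (such as dld_link_LTX_get_vtable defined later in this import), that lt_dlloader_add() returns 0 on success, and that passing NULL to lt_dlloader_next() yields the first registered loader. count_loaders_after_registering() is hypothetical.

    #include <libltdl/lt_dlloader.h>

    static int
    count_loaders_after_registering (const lt_dlvtable *vt)
    {
      int n = 0;
      lt_dlloader loader;

      if (!vt || lt_dlloader_add (vt) != 0)   /* 0 is assumed to mean success */
        return -1;

      /* Walk the registered loaders; NULL is assumed to yield the first one. */
      for (loader = lt_dlloader_next (NULL); loader;
           loader = lt_dlloader_next (loader))
        ++n;

      /* The loader just added should now be reachable by name. */
      return lt_dlloader_find (vt->name) ? n : -1;
    }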
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_error.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_error.h
new file mode 100644
index 0000000..e789b3a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_error.h
@@ -0,0 +1,85 @@
+/* lt_error.h -- error propagation interface
+
+   Copyright (C) 1999, 2000, 2001, 2004, 2007 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1999
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+/* Only include this header file once. */
+#if !defined(LT_ERROR_H)
+#define LT_ERROR_H 1
+
+#include <libltdl/lt_system.h>
+
+LT_BEGIN_C_DECLS
+
+/* Defining error strings alongside their symbolic names in a macro in
+   this way allows us to expand the macro in different contexts with
+   confidence that the enumeration of symbolic names will map correctly
+   onto the table of error strings.  \0 is appended to the strings to
+   explicitly initialize the string terminator. */
+#define lt_dlerror_table						\
+    LT_ERROR(UNKNOWN,		    "unknown error\0")			\
+    LT_ERROR(DLOPEN_NOT_SUPPORTED,  "dlopen support not available\0")	\
+    LT_ERROR(INVALID_LOADER,	    "invalid loader\0")			\
+    LT_ERROR(INIT_LOADER,	    "loader initialization failed\0")	\
+    LT_ERROR(REMOVE_LOADER,	    "loader removal failed\0")		\
+    LT_ERROR(FILE_NOT_FOUND,	    "file not found\0")			\
+    LT_ERROR(DEPLIB_NOT_FOUND,	    "dependency library not found\0")	\
+    LT_ERROR(NO_SYMBOLS,	    "no symbols defined\0")		\
+    LT_ERROR(CANNOT_OPEN,	    "can't open the module\0")		\
+    LT_ERROR(CANNOT_CLOSE,	    "can't close the module\0")		\
+    LT_ERROR(SYMBOL_NOT_FOUND,	    "symbol not found\0")		\
+    LT_ERROR(NO_MEMORY,		    "not enough memory\0")		\
+    LT_ERROR(INVALID_HANDLE,	    "invalid module handle\0")		\
+    LT_ERROR(BUFFER_OVERFLOW,	    "internal buffer overflow\0")	\
+    LT_ERROR(INVALID_ERRORCODE,	    "invalid errorcode\0")		\
+    LT_ERROR(SHUTDOWN,		    "library already shutdown\0")	\
+    LT_ERROR(CLOSE_RESIDENT_MODULE, "can't close resident module\0")	\
+    LT_ERROR(INVALID_MUTEX_ARGS,    "internal error (code withdrawn)\0")\
+    LT_ERROR(INVALID_POSITION,	    "invalid search path insert position\0")\
+    LT_ERROR(CONFLICTING_FLAGS,	    "symbol visibility can be global or local\0")
+
+/* Enumerate the symbolic error names. */
+enum {
+#define LT_ERROR(name, diagnostic)	LT_CONC(LT_ERROR_, name),
+	lt_dlerror_table
+#undef LT_ERROR
+
+	LT_ERROR_MAX
+};
+
+/* Should be max of the error string lengths above (plus one for C++) */
+#define LT_ERROR_LEN_MAX (41)
+
+/* These functions are only useful from inside custom module loaders. */
+LT_SCOPE int	lt_dladderror	(const char *diagnostic);
+LT_SCOPE int	lt_dlseterror	(int errorcode);
+
+
+LT_END_C_DECLS
+
+#endif /*!defined(LT_ERROR_H)*/
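An illustrative second expansion of the X-macro technique the comment above describes; it is not part of the imported lt_error.h. The same lt_dlerror_table that builds the enum can be expanded again to build a parallel table of diagnostic strings. The my_error_strings array is only a sketch (libltdl keeps its own string table in its implementation files).

    #include <libltdl/lt_error.h>

    /* Expand the table a second time, this time keeping the diagnostics. */
    static const char *const my_error_strings[] =
      {
    #define LT_ERROR(name, diagnostic)   diagnostic,
        lt_dlerror_table
    #undef LT_ERROR
      };

    /* Because both expansions walk the table in the same order,
       my_error_strings[LT_ERROR_FILE_NOT_FOUND] is the "file not found"
       diagnostic, and so on for every code below LT_ERROR_MAX. */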
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_system.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_system.h
new file mode 100644
index 0000000..f1545ce
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/lt_system.h
@@ -0,0 +1,166 @@
+/* lt_system.h -- system portability abstraction layer
+
+   Copyright (C) 2004, 2007, 2010 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#if !defined(LT_SYSTEM_H)
+#define LT_SYSTEM_H 1
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <sys/types.h>
+
+/* Some systems do not define EXIT_*, even with STDC_HEADERS.  */
+#if !defined(EXIT_SUCCESS)
+# define EXIT_SUCCESS 0
+#endif
+#if !defined(EXIT_FAILURE)
+# define EXIT_FAILURE 1
+#endif
+
+/* Just pick a big number... */
+#define LT_FILENAME_MAX 2048
+
+
+/* Saves on those hard to debug '\0' typos....  */
+#define LT_EOS_CHAR	'\0'
+
+/* LT_BEGIN_C_DECLS should be used at the beginning of your declarations,
+   so that C++ compilers don't mangle their names.  Use LT_END_C_DECLS at
+   the end of C declarations. */
+#if defined(__cplusplus)
+# define LT_BEGIN_C_DECLS	extern "C" {
+# define LT_END_C_DECLS		}
+#else
+# define LT_BEGIN_C_DECLS	/* empty */
+# define LT_END_C_DECLS		/* empty */
+#endif
+
+/* LT_STMT_START/END are used to create macros which expand to
+   a single compound statement in a portable way.  */
+#if defined (__GNUC__) && !defined (__STRICT_ANSI__) && !defined (__cplusplus)
+#  define LT_STMT_START        (void)(
+#  define LT_STMT_END          )
+#else
+#  if (defined (sun) || defined (__sun__))
+#    define LT_STMT_START      if (1)
+#    define LT_STMT_END        else (void)0
+#  else
+#    define LT_STMT_START      do
+#    define LT_STMT_END        while (0)
+#  endif
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* Canonicalise Windows and Cygwin recognition macros.
+   To match the values set by recent Cygwin compilers, make sure that if
+   __CYGWIN__ is defined (after canonicalisation), __WINDOWS__ is NOT!  */
+#if defined(__CYGWIN32__) && !defined(__CYGWIN__)
+# define __CYGWIN__ __CYGWIN32__
+#endif
+#if defined(__CYGWIN__)
+# if defined(__WINDOWS__)
+#   undef __WINDOWS__
+# endif
+#elif defined(_WIN32)
+# define __WINDOWS__ _WIN32
+#elif defined(WIN32)
+# define __WINDOWS__ WIN32
+#endif
+#if defined(__CYGWIN__) && defined(__WINDOWS__)
+# undef __WINDOWS__
+#endif
+
+
+/* DLL building support on win32 hosts;  mostly to work around their
+   ridiculous implementation of data symbol exporting. */
+#if !defined(LT_SCOPE)
+#  if defined(__WINDOWS__) || defined(__CYGWIN__)
+#    if defined(DLL_EXPORT)		/* defined by libtool (if required) */
+#      define LT_SCOPE	extern __declspec(dllexport)
+#    endif
+#    if defined(LIBLTDL_DLL_IMPORT)	/* define if linking with this dll */
+       /* note: cygwin/mingw compilers can rely instead on auto-import */
+#      define LT_SCOPE	extern __declspec(dllimport)
+#    endif
+#  endif
+#  if !defined(LT_SCOPE)		/* static linking or !__WINDOWS__ */
+#    define LT_SCOPE	extern
+#  endif
+#endif
+
+#if defined(__WINDOWS__)
+/* LT_DIRSEP_CHAR is accepted *in addition* to '/' as a directory
+   separator when it is set. */
+# define LT_DIRSEP_CHAR		'\\'
+# define LT_PATHSEP_CHAR	';'
+#else
+# define LT_PATHSEP_CHAR	':'
+#endif
+
+#if defined(_MSC_VER) /* Visual Studio */
+#  define R_OK 4
+#endif
+
+/* fopen() mode flags for reading a text file */
+#undef	LT_READTEXT_MODE
+#if defined(__WINDOWS__) || defined(__CYGWIN__)
+#  define LT_READTEXT_MODE "rt"
+#else
+#  define LT_READTEXT_MODE "r"
+#endif
+
+/* The extra indirection to the LT__STR and LT__CONC macros is required so
+   that if the arguments to LT_STR() (or LT_CONC()) are themselves macros,
+   they will be expanded before being quoted.   */
+#ifndef LT_STR
+#  define LT__STR(arg)		#arg
+#  define LT_STR(arg)		LT__STR(arg)
+#endif
+
+#ifndef LT_CONC
+#  define LT__CONC(a, b)	a##b
+#  define LT_CONC(a, b)		LT__CONC(a, b)
+#endif
+#ifndef LT_CONC3
+#  define LT__CONC3(a, b, c)	a##b##c
+#  define LT_CONC3(a, b, c)	LT__CONC3(a, b, c)
+#endif
+
+#endif /*!defined(LT_SYSTEM_H)*/
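A small sketch, not part of the imported lt_system.h, showing why the LT_STMT_START/LT_STMT_END pair and the LT_STR() indirection described above matter. The SWAP_INT macro and clamp_swap() function are hypothetical.

    #include <libltdl/lt_system.h>

    /* A multi-statement macro wrapped in LT_STMT_START/END behaves like a
       single statement, so it is safe in an unbraced if/else. */
    #define SWAP_INT(a, b)   LT_STMT_START {                         \
        int swap_tmp_ = (a); (a) = (b); (b) = swap_tmp_; } LT_STMT_END

    static int
    clamp_swap (int *a, int *b)
    {
      if (*a > *b)
        SWAP_INT (*a, *b);  /* one statement: the else below still binds to this if */
      else
        return 0;           /* already ordered */
      return 1;             /* swapped */
    }

    /* LT_STR() stringizes after macro expansion:
       LT_STR (LT_FILENAME_MAX) yields "2048", not "LT_FILENAME_MAX".  */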
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/slist.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/slist.h
new file mode 100644
index 0000000..4d56509
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/libltdl/slist.h
@@ -0,0 +1,96 @@
+/* slist.h -- generalised singly linked lists
+
+   Copyright (C) 2000, 2004, 2009 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2000
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+/* A generalised list.  This is deliberately transparent so that you
+   can make the NEXT field of all your chained data structures first,
+   and then cast them to `(SList *)' so that they can be manipulated
+   by this API.
+
+   Alternatively, you can generate raw SList elements using slist_new(),
+   and put the element data in the USERDATA field.  Either way you
+   get to manage the memory involved by yourself.
+*/
+
+#if !defined(SLIST_H)
+#define SLIST_H 1
+
+#if defined(LTDL)
+#  include <libltdl/lt__glibc.h>
+#  include <libltdl/lt_system.h>
+#else
+#  define LT_SCOPE
+#endif
+
+#include <stddef.h>
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+typedef struct slist {
+  struct slist *next;		/* chain forward pointer*/
+  const void *userdata;		/* for boxed `SList' item */
+} SList;
+
+typedef void *	SListCallback	(SList *item, void *userdata);
+typedef int	SListCompare	(const SList *item1, const SList *item2,
+				 void *userdata);
+
+LT_SCOPE SList *slist_concat	(SList *head, SList *tail);
+LT_SCOPE SList *slist_cons	(SList *item, SList *slist);
+
+LT_SCOPE SList *slist_delete	(SList *slist, void (*delete_fct) (void *item));
+LT_SCOPE SList *slist_remove	(SList **phead, SListCallback *find,
+				 void *matchdata);
+LT_SCOPE SList *slist_reverse	(SList *slist);
+LT_SCOPE SList *slist_sort	(SList *slist, SListCompare *compare,
+				 void *userdata);
+
+LT_SCOPE SList *slist_tail	(SList *slist);
+LT_SCOPE SList *slist_nth	(SList *slist, size_t n);
+LT_SCOPE void *	slist_find	(SList *slist, SListCallback *find,
+				 void *matchdata);
+LT_SCOPE size_t slist_length	(SList *slist);
+
+LT_SCOPE void *	slist_foreach   (SList *slist, SListCallback *foreach,
+				 void *userdata);
+
+LT_SCOPE SList *slist_box	(const void *userdata);
+LT_SCOPE void *	slist_unbox	(SList *item);
+
+#if defined(__cplusplus)
+}
+#endif
+
+#if !defined(LTDL)
+#  undef LT_SCOPE
+#endif
+
+#endif /*!defined(SLIST_H)*/
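A sketch of the "boxed" usage pattern described in the comment at the top of slist.h; it is not part of the imported file and assumes the matching slist.c implementation is linked in. slist_foreach() is assumed to stop early only when the callback returns non-NULL, and cleanup of the boxed cells is omitted for brevity.

    #include <stdio.h>
    #include "slist.h"

    static void *
    print_item (SList *item, void *userdata)
    {
      (void) userdata;
      printf ("  %s\n", (const char *) item->userdata);
      return NULL;                  /* NULL lets the walk continue */
    }

    int
    main (void)
    {
      SList *list = NULL;

      /* slist_box() wraps arbitrary user data in a fresh SList cell;
         slist_cons() pushes that cell onto the head of the chain. */
      list = slist_cons (slist_box ("world"), list);
      list = slist_cons (slist_box ("hello"), list);

      printf ("%lu items:\n", (unsigned long) slist_length (list));
      slist_foreach (list, print_item, NULL);
      return 0;
    }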
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dld_link.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dld_link.c
new file mode 100644
index 0000000..7e882c9
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dld_link.c
@@ -0,0 +1,158 @@
+/* loader-dld_link.c -- dynamic linking with dld
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2006,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+/* Use the preprocessor to rename non-static symbols to avoid namespace
+   collisions when the loader code is statically linked into libltdl.
+   Use the "<module_name>_LTX_" prefix so that the symbol addresses can
+   be fetched from the preloaded symbol list by lt_dlsym():  */
+#define get_vtable	dld_link_LTX_get_vtable
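+
+/* Illustrative sketch (not part of the upstream source): with the rename
+   above, a libtool-generated dlpreopen table could carry an entry such as
+
+     { "dld_link_LTX_get_vtable", (void *) dld_link_LTX_get_vtable }
+
+   (an lt_dlsymlist {name, address} pair), which is how lt_dlsym() can
+   locate this loader's get_vtable() when it is linked statically.  */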
+
+LT_BEGIN_C_DECLS
+LT_SCOPE lt_dlvtable *get_vtable (lt_user_data loader_data);
+LT_END_C_DECLS
+
+
+/* Boilerplate code to set up the vtable for hooking this loader into
+   libltdl's loader list:  */
+static int	 vl_exit  (lt_user_data loader_data);
+static lt_module vm_open  (lt_user_data loader_data, const char *filename,
+                           lt_dladvise advise);
+static int	 vm_close (lt_user_data loader_data, lt_module module);
+static void *	 vm_sym   (lt_user_data loader_data, lt_module module,
+			  const char *symbolname);
+
+static lt_dlvtable *vtable = 0;
+
+/* Return the vtable for this loader; only the name and sym_prefix
+   attributes (plus the virtual function implementations, obviously)
+   change between loaders.  */
+lt_dlvtable *
+get_vtable (lt_user_data loader_data)
+{
+  if (!vtable)
+    {
+      vtable = lt__zalloc (sizeof *vtable);
+    }
+
+  if (vtable && !vtable->name)
+    {
+      vtable->name		= "lt_dld_link";
+      vtable->module_open	= vm_open;
+      vtable->module_close	= vm_close;
+      vtable->find_sym		= vm_sym;
+      vtable->dlloader_exit	= vl_exit;
+      vtable->dlloader_data	= loader_data;
+      vtable->priority		= LT_DLLOADER_APPEND;
+    }
+
+  if (vtable && (vtable->dlloader_data != loader_data))
+    {
+      LT__SETERROR (INIT_LOADER);
+      return 0;
+    }
+
+  return vtable;
+}
+
+
+
+/* --- IMPLEMENTATION --- */
+
+
+#if defined(HAVE_DLD_H)
+#  include <dld.h>
+#endif
+
+/* A function called through the vtable when this loader is no
+   longer needed by the application.  */
+static int
+vl_exit (lt_user_data LT__UNUSED loader_data)
+{
+  vtable = NULL;
+  return 0;
+}
+
+/* A function called through the vtable to open a module with this
+   loader.  Returns an opaque representation of the newly opened
+   module for processing with this loader's other vtable functions.  */
+static lt_module
+vm_open (lt_user_data LT__UNUSED loader_data, const char *filename,
+         lt_dladvise LT__UNUSED advise)
+{
+  lt_module module = lt__strdup (filename);
+
+  if (dld_link (filename) != 0)
+    {
+      LT__SETERROR (CANNOT_OPEN);
+      FREE (module);
+    }
+
+  return module;
+}
+
+/* A function called through the vtable when a particular module
+   should be unloaded.  */
+static int
+vm_close (lt_user_data LT__UNUSED loader_data, lt_module module)
+{
+  int errors = 0;
+
+  if (dld_unlink_by_file ((char*)(module), 1) != 0)
+    {
+      LT__SETERROR (CANNOT_CLOSE);
+      ++errors;
+    }
+  else
+    {
+      FREE (module);
+    }
+
+  return errors;
+}
+
+/* A function called through the vtable to get the address of
+   a symbol loaded from a particular module.  */
+static void *
+vm_sym (lt_user_data LT__UNUSED loader_data, lt_module LT__UNUSED module,
+	const char *name)
+{
+  void *address = dld_get_func (name);
+
+  if (!address)
+    {
+      LT__SETERROR (SYMBOL_NOT_FOUND);
+    }
+
+  return address;
+}
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dlopen.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dlopen.c
new file mode 100644
index 0000000..1d052b4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dlopen.c
@@ -0,0 +1,235 @@
+/* loader-dlopen.c --  dynamic linking with dlopen/dlsym
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2006,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+/* Use the preprocessor to rename non-static symbols to avoid namespace
+   collisions when the loader code is statically linked into libltdl.
+   Use the "<module_name>_LTX_" prefix so that the symbol addresses can
+   be fetched from the preloaded symbol list by lt_dlsym():  */
+#define get_vtable	dlopen_LTX_get_vtable
+
+LT_BEGIN_C_DECLS
+LT_SCOPE lt_dlvtable *get_vtable (lt_user_data loader_data);
+LT_END_C_DECLS
+
+
+/* Boilerplate code to set up the vtable for hooking this loader into
+   libltdl's loader list:  */
+static int	 vl_exit  (lt_user_data loader_data);
+static lt_module vm_open  (lt_user_data loader_data, const char *filename,
+                           lt_dladvise advise);
+static int	 vm_close (lt_user_data loader_data, lt_module module);
+static void *	 vm_sym   (lt_user_data loader_data, lt_module module,
+			  const char *symbolname);
+
+static lt_dlvtable *vtable = 0;
+
+/* Return the vtable for this loader; only the name and sym_prefix
+   attributes (plus the virtual function implementations, obviously)
+   change between loaders.  */
+lt_dlvtable *
+get_vtable (lt_user_data loader_data)
+{
+  if (!vtable)
+    {
+      vtable = (lt_dlvtable *) lt__zalloc (sizeof *vtable);
+    }
+
+  if (vtable && !vtable->name)
+    {
+      vtable->name		= "lt_dlopen";
+#if defined(DLSYM_USCORE)
+      vtable->sym_prefix	= "_";
+#endif
+      vtable->module_open	= vm_open;
+      vtable->module_close	= vm_close;
+      vtable->find_sym		= vm_sym;
+      vtable->dlloader_exit	= vl_exit;
+      vtable->dlloader_data	= loader_data;
+      vtable->priority		= LT_DLLOADER_PREPEND;
+    }
+
+  if (vtable && (vtable->dlloader_data != loader_data))
+    {
+      LT__SETERROR (INIT_LOADER);
+      return 0;
+    }
+
+  return vtable;
+}
+
+
+
+/* --- IMPLEMENTATION --- */
+
+
+#if defined(HAVE_DLFCN_H)
+#  include <dlfcn.h>
+#endif
+
+#if defined(HAVE_SYS_DL_H)
+#  include <sys/dl.h>
+#endif
+
+
+/* We may have to define LT_LAZY_OR_NOW on the command line if we
+   find out it does not work on some platform. */
+#if !defined(LT_LAZY_OR_NOW)
+#  if defined(RTLD_LAZY)
+#    define LT_LAZY_OR_NOW	RTLD_LAZY
+#  else
+#    if defined(DL_LAZY)
+#      define LT_LAZY_OR_NOW	DL_LAZY
+#    endif
+#  endif /* !RTLD_LAZY */
+#endif
+#if !defined(LT_LAZY_OR_NOW)
+#  if defined(RTLD_NOW)
+#    define LT_LAZY_OR_NOW	RTLD_NOW
+#  else
+#    if defined(DL_NOW)
+#      define LT_LAZY_OR_NOW	DL_NOW
+#    endif
+#  endif /* !RTLD_NOW */
+#endif
+#if !defined(LT_LAZY_OR_NOW)
+#  define LT_LAZY_OR_NOW	0
+#endif /* !LT_LAZY_OR_NOW */
+
+/* We only support local and global symbols from modules for loaders
+   that provide such a thing; otherwise the system default is used.  */
+#if !defined(RTLD_GLOBAL)
+#  if defined(DL_GLOBAL)
+#    define RTLD_GLOBAL		DL_GLOBAL
+#  endif
+#endif /* !RTLD_GLOBAL */
+#if !defined(RTLD_LOCAL)
+#  if defined(DL_LOCAL)
+#    define RTLD_LOCAL		DL_LOCAL
+#  endif
+#endif /* !RTLD_LOCAL */
+
+#if defined(HAVE_DLERROR)
+#  define DLERROR(arg)	dlerror ()
+#else
+#  define DLERROR(arg)	LT__STRERROR (arg)
+#endif
+
+#define DL__SETERROR(errorcode) \
+	LT__SETERRORSTR (DLERROR (errorcode))
+
+
+/* A function called through the vtable when this loader is no
+   longer needed by the application.  */
+static int
+vl_exit (lt_user_data LT__UNUSED loader_data)
+{
+  vtable = NULL;
+  return 0;
+}
+
+
+/* A function called through the vtable to open a module with this
+   loader.  Returns an opaque representation of the newly opened
+   module for processing with this loader's other vtable functions.  */
+static lt_module
+vm_open (lt_user_data LT__UNUSED loader_data, const char *filename,
+         lt_dladvise advise)
+{
+  int		module_flags = LT_LAZY_OR_NOW;
+  lt_module	module;
+
+  if (advise)
+    {
+#ifdef RTLD_GLOBAL
+      /* If there is some means of asking for global symbol resolution,
+         do so.  */
+      if (advise->is_symglobal)
+        module_flags |= RTLD_GLOBAL;
+#else
+      /* Otherwise, reset that bit so the caller can tell it wasn't
+         acted on.  */
+      advise->is_symglobal = 0;
+#endif
+
+/* And similarly for local only symbol resolution.  */
+#ifdef RTLD_LOCAL
+      if (advise->is_symlocal)
+        module_flags |= RTLD_LOCAL;
+#else
+      advise->is_symlocal = 0;
+#endif
+    }
+
+  module = dlopen (filename, module_flags);
+
+  if (!module)
+    {
+      DL__SETERROR (CANNOT_OPEN);
+    }
+
+  return module;
+}
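+
+/* Illustrative sketch (not part of the upstream source): a libltdl client
+   reaches the advise handling above through the lt_dladvise calls declared
+   in ltdl.h, roughly as follows ("plugin.la" is a hypothetical module):
+
+     lt_dladvise advise;
+     lt_dlhandle handle = 0;
+
+     if (!lt_dladvise_init (&advise) && !lt_dladvise_global (&advise))
+       handle = lt_dlopenadvise ("plugin.la", advise);
+     lt_dladvise_destroy (&advise);
+
+   When RTLD_GLOBAL is available, is_symglobal is honoured above; otherwise
+   the flag is cleared so the caller can see that it was ignored.  */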
+
+
+/* A function called through the vtable when a particular module
+   should be unloaded.  */
+static int
+vm_close (lt_user_data LT__UNUSED loader_data, lt_module module)
+{
+  int errors = 0;
+
+  if (dlclose (module) != 0)
+    {
+      DL__SETERROR (CANNOT_CLOSE);
+      ++errors;
+    }
+
+  return errors;
+}
+
+
+/* A function called through the vtable to get the address of
+   a symbol loaded from a particular module.  */
+static void *
+vm_sym (lt_user_data LT__UNUSED loader_data, lt_module module, const char *name)
+{
+  void *address = dlsym (module, name);
+
+  if (!address)
+    {
+      DL__SETERROR (SYMBOL_NOT_FOUND);
+    }
+
+  return address;
+}
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dyld.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dyld.c
new file mode 100644
index 0000000..b139d6c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/dyld.c
@@ -0,0 +1,511 @@
+/* loader-dyld.c -- dynamic linking on darwin and OS X
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2006,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Peter O'Gorman, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+/* Use the preprocessor to rename non-static symbols to avoid namespace
+   collisions when the loader code is statically linked into libltdl.
+   Use the "<module_name>_LTX_" prefix so that the symbol addresses can
+   be fetched from the preloaded symbol list by lt_dlsym():  */
+#define get_vtable	dyld_LTX_get_vtable
+
+LT_BEGIN_C_DECLS
+LT_SCOPE lt_dlvtable *get_vtable (lt_user_data loader_data);
+LT_END_C_DECLS
+
+
+/* Boilerplate code to set up the vtable for hooking this loader into
+   libltdl's loader list:  */
+static int	 vl_init  (lt_user_data loader_data);
+static int	 vl_exit  (lt_user_data loader_data);
+static lt_module vm_open  (lt_user_data loader_data, const char *filename,
+                           lt_dladvise advise);
+static int	 vm_close (lt_user_data loader_data, lt_module module);
+static void *	 vm_sym   (lt_user_data loader_data, lt_module module,
+			  const char *symbolname);
+
+static lt_dlvtable *vtable = 0;
+
+/* Return the vtable for this loader; only the name and sym_prefix
+   attributes (plus the virtual function implementations, obviously)
+   change between loaders.  */
+lt_dlvtable *
+get_vtable (lt_user_data loader_data)
+{
+  if (!vtable)
+    {
+      vtable = lt__zalloc (sizeof *vtable);
+    }
+
+  if (vtable && !vtable->name)
+    {
+      vtable->name		= "lt_dyld";
+      vtable->sym_prefix	= "_";
+      vtable->dlloader_init	= vl_init;
+      vtable->module_open	= vm_open;
+      vtable->module_close	= vm_close;
+      vtable->find_sym		= vm_sym;
+      vtable->dlloader_exit	= vl_exit;
+      vtable->dlloader_data	= loader_data;
+      vtable->priority		= LT_DLLOADER_APPEND;
+    }
+
+  if (vtable && (vtable->dlloader_data != loader_data))
+    {
+      LT__SETERROR (INIT_LOADER);
+      return 0;
+    }
+
+  return vtable;
+}
+
+
+
+/* --- IMPLEMENTATION --- */
+
+
+#if defined(HAVE_MACH_O_DYLD_H)
+#  if !defined(__APPLE_CC__) && !defined(__MWERKS__) && !defined(__private_extern__)
+  /* Is this correct? Does it still function properly? */
+#    define __private_extern__ extern
+#  endif
+#  include <mach-o/dyld.h>
+#endif
+
+#include <mach-o/getsect.h>
+
+/* We have to provide some definitions here that are missing from older dyld.h files */
+#if !defined(ENUM_DYLD_BOOL)
+# define ENUM_DYLD_BOOL
+# undef FALSE
+# undef TRUE
+ enum DYLD_BOOL {
+    FALSE,
+    TRUE
+ };
+#endif
+#if !defined(LC_REQ_DYLD)
+# define LC_REQ_DYLD 0x80000000
+#endif
+#if !defined(LC_LOAD_WEAK_DYLIB)
+# define LC_LOAD_WEAK_DYLIB (0x18 | LC_REQ_DYLD)
+#endif
+
+#if !defined(NSADDIMAGE_OPTION_NONE)
+#  define NSADDIMAGE_OPTION_NONE                          0x0
+#endif
+#if !defined(NSADDIMAGE_OPTION_RETURN_ON_ERROR)
+#  define NSADDIMAGE_OPTION_RETURN_ON_ERROR               0x1
+#endif
+#if !defined(NSADDIMAGE_OPTION_WITH_SEARCHING)
+#  define NSADDIMAGE_OPTION_WITH_SEARCHING                0x2
+#endif
+#if !defined(NSADDIMAGE_OPTION_RETURN_ONLY_IF_LOADED)
+#  define NSADDIMAGE_OPTION_RETURN_ONLY_IF_LOADED         0x4
+#endif
+#if !defined(NSADDIMAGE_OPTION_MATCH_FILENAME_BY_INSTALLNAME)
+#  define NSADDIMAGE_OPTION_MATCH_FILENAME_BY_INSTALLNAME 0x8
+#endif
+
+#if !defined(NSLOOKUPSYMBOLINIMAGE_OPTION_BIND)
+#  define NSLOOKUPSYMBOLINIMAGE_OPTION_BIND               0x0
+#endif
+#if !defined(NSLOOKUPSYMBOLINIMAGE_OPTION_BIND_NOW)
+#  define NSLOOKUPSYMBOLINIMAGE_OPTION_BIND_NOW           0x1
+#endif
+#if !defined(NSLOOKUPSYMBOLINIMAGE_OPTION_BIND_FULLY)
+#  define NSLOOKUPSYMBOLINIMAGE_OPTION_BIND_FULLY         0x2
+#endif
+#if !defined(NSLOOKUPSYMBOLINIMAGE_OPTION_RETURN_ON_ERROR)
+#  define NSLOOKUPSYMBOLINIMAGE_OPTION_RETURN_ON_ERROR    0x4
+#endif
+
+#define LT__SYMLOOKUP_OPTS	(NSLOOKUPSYMBOLINIMAGE_OPTION_BIND_NOW \
+				| NSLOOKUPSYMBOLINIMAGE_OPTION_RETURN_ON_ERROR)
+
+#if defined(__BIG_ENDIAN__)
+#  define LT__MAGIC	MH_MAGIC
+#else
+#  define LT__MAGIC	MH_CIGAM
+#endif
+
+#define DYLD__SETMYERROR(errmsg)    LT__SETERRORSTR (dylderror (errmsg))
+#define DYLD__SETERROR(errcode)	    DYLD__SETMYERROR (LT__STRERROR (errcode))
+
+typedef struct mach_header mach_header;
+typedef struct dylib_command dylib_command;
+
+static const char *dylderror (const char *errmsg);
+static const mach_header *lt__nsmodule_get_header (NSModule module);
+static const char *lt__header_get_instnam (const mach_header *mh);
+static const mach_header *lt__match_loadedlib (const char *name);
+static NSSymbol lt__linkedlib_symbol (const char *symname, const mach_header *mh);
+
+static const mach_header *(*lt__addimage)	(const char *image_name,
+						 unsigned long options) = 0;
+static NSSymbol	(*lt__image_symbol)		(const mach_header *image,
+						 const char *symbolName,
+						 unsigned long options) = 0;
+static enum DYLD_BOOL (*lt__image_symbol_p)	(const mach_header *image,
+						 const char *symbolName) = 0;
+static enum DYLD_BOOL (*lt__module_export)	(NSModule module) = 0;
+
+static int dyld_cannot_close				  = 0;
+
+
+/* A function called through the vtable when this loader is no
+   longer needed by the application.  */
+static int
+vl_exit (lt_user_data LT__UNUSED loader_data)
+{
+  vtable = NULL;
+  return 0;
+}
+
+/* A function called through the vtable to initialise this loader.  */
+static int
+vl_init (lt_user_data loader_data)
+{
+  int errors = 0;
+
+  if (! dyld_cannot_close)
+    {
+      if (!_dyld_present ())
+	{
+	  ++errors;
+	}
+      else
+	{
+	  (void) _dyld_func_lookup ("__dyld_NSAddImage",
+				    (unsigned long*) &lt__addimage);
+	  (void) _dyld_func_lookup ("__dyld_NSLookupSymbolInImage",
+				    (unsigned long*)&lt__image_symbol);
+	  (void) _dyld_func_lookup ("__dyld_NSIsSymbolNameDefinedInImage",
+				    (unsigned long*) &lt__image_symbol_p);
+	  (void) _dyld_func_lookup ("__dyld_NSMakePrivateModulePublic",
+				    (unsigned long*) &lt__module_export);
+	  dyld_cannot_close = lt_dladderror ("can't close a dylib");
+	}
+    }
+
+  return errors;
+}
+
+
+/* A function called through the vtable to open a module with this
+   loader.  Returns an opaque representation of the newly opened
+   module for processing with this loader's other vtable functions.  */
+static lt_module
+vm_open (lt_user_data loader_data, const char *filename,
+         lt_dladvise LT__UNUSED advise)
+{
+  lt_module module = 0;
+  NSObjectFileImage ofi = 0;
+
+  if (!filename)
+    {
+      return (lt_module) -1;
+    }
+
+  switch (NSCreateObjectFileImageFromFile (filename, &ofi))
+    {
+    case NSObjectFileImageSuccess:
+      module = NSLinkModule (ofi, filename, NSLINKMODULE_OPTION_RETURN_ON_ERROR
+			     		    | NSLINKMODULE_OPTION_PRIVATE
+			     		    | NSLINKMODULE_OPTION_BINDNOW);
+      NSDestroyObjectFileImage (ofi);
+
+      if (module)
+	{
+	  lt__module_export (module);
+	}
+      break;
+
+    case NSObjectFileImageInappropriateFile:
+      if (lt__image_symbol_p && lt__image_symbol)
+	{
+	  module = (lt_module) lt__addimage(filename,
+					    NSADDIMAGE_OPTION_RETURN_ON_ERROR);
+	}
+      break;
+
+    case NSObjectFileImageFailure:
+    case NSObjectFileImageArch:
+    case NSObjectFileImageFormat:
+    case NSObjectFileImageAccess:
+      /*NOWORK*/
+      break;
+    }
+
+  if (!module)
+    {
+      DYLD__SETERROR (CANNOT_OPEN);
+    }
+
+  return module;
+}
+
+
+/* A function called through the vtable when a particular module
+   should be unloaded.  */
+static int
+vm_close (lt_user_data loader_data, lt_module module)
+{
+  int errors = 0;
+
+  if (module != (lt_module) -1)
+    {
+      const mach_header *mh = (const mach_header *) module;
+      int flags = 0;
+      if (mh->magic == LT__MAGIC)
+	{
+	  lt_dlseterror (dyld_cannot_close);
+	  ++errors;
+	}
+      else
+	{
+	  /* Currently, if a module contains C++ static destructors and it
+	     is unloaded, we get a segfault in atexit() due to compiler and
+	     dynamic loader differences of opinion; this works around that.  */
+	  if ((const struct section *) NULL !=
+	      getsectbynamefromheader (lt__nsmodule_get_header (module),
+				       "__DATA", "__mod_term_func"))
+	    {
+	      flags |= NSUNLINKMODULE_OPTION_KEEP_MEMORY_MAPPED;
+	    }
+#if defined(__ppc__)
+	  flags |= NSUNLINKMODULE_OPTION_RESET_LAZY_REFERENCES;
+#endif
+	  if (!NSUnLinkModule (module, flags))
+	    {
+	      DYLD__SETERROR (CANNOT_CLOSE);
+	      ++errors;
+	    }
+	}
+    }
+
+  return errors;
+}
+
+/* A function called through the vtable to get the address of
+   a symbol loaded from a particular module.  */
+static void *
+vm_sym (lt_user_data loader_data, lt_module module, const char *name)
+{
+  NSSymbol *nssym = 0;
+  const mach_header *mh = (const mach_header *) module;
+  char saveError[256] = "Symbol not found";
+
+  if (module == (lt_module) -1)
+    {
+      void *address, *unused;
+      _dyld_lookup_and_bind (name, (unsigned long*) &address, &unused);
+      return address;
+    }
+
+  if (mh->magic == LT__MAGIC)
+    {
+      if (lt__image_symbol_p && lt__image_symbol)
+	{
+	  if (lt__image_symbol_p (mh, name))
+	    {
+	      nssym = lt__image_symbol (mh, name, LT__SYMLOOKUP_OPTS);
+	    }
+	}
+
+    }
+  else
+    {
+      nssym = NSLookupSymbolInModule (module, name);
+    }
+
+  if (!nssym)
+    {
+      strncpy (saveError, dylderror (LT__STRERROR (SYMBOL_NOT_FOUND)), 255);
+      saveError[255] = 0;
+      if (!mh)
+	{
+	  mh = (mach_header *)lt__nsmodule_get_header (module);
+	}
+      nssym = lt__linkedlib_symbol (name, mh);
+    }
+
+  if (!nssym)
+    {
+      LT__SETERRORSTR (saveError);
+    }
+
+  return nssym ? NSAddressOfSymbol (nssym) : 0;
+}
+
+
+
+
+/* --- HELPER FUNCTIONS --- */
+
+
+/* Return the dyld error string, or the passed in error string if none. */
+static const char *
+dylderror (const char *errmsg)
+{
+  NSLinkEditErrors ler;
+  int lerno;
+  const char *file;
+  const char *errstr;
+
+  NSLinkEditError (&ler, &lerno, &file, &errstr);
+
+  if (! (errstr && *errstr))
+    {
+      errstr = errmsg;
+    }
+
+  return errstr;
+}
+
+/* There should probably be an apple dyld api for this. */
+static const mach_header *
+lt__nsmodule_get_header (NSModule module)
+{
+  int i = _dyld_image_count();
+  const char *modname = NSNameOfModule (module);
+  const mach_header *mh = 0;
+
+  if (!modname)
+    return NULL;
+
+  while (i > 0)
+    {
+      --i;
+      if (strneq (_dyld_get_image_name (i), modname))
+	{
+	  mh = _dyld_get_image_header (i);
+	  break;
+	}
+    }
+
+  return mh;
+}
+
+/* NSAddImage is also used to get the loaded image, but it only works if
+   the lib is installed; for uninstalled libs we need to check the
+   install_names against each other.  Note that this is still broken if
+   DYLD_IMAGE_SUFFIX is set and a different lib was loaded as a result.  */
+static const char *
+lt__header_get_instnam (const mach_header *mh)
+{
+  unsigned long offset = sizeof(mach_header);
+  const char* result   = 0;
+  int j;
+
+  for (j = 0; j < mh->ncmds; j++)
+    {
+      struct load_command *lc;
+
+      lc = (struct load_command*) (((unsigned long) mh) + offset);
+      if (LC_ID_DYLIB == lc->cmd)
+	{
+	  result=(char*)(((dylib_command*) lc)->dylib.name.offset +
+			 (unsigned long) lc);
+	}
+      offset += lc->cmdsize;
+    }
+
+  return result;
+}
+
+static const mach_header *
+lt__match_loadedlib (const char *name)
+{
+  const mach_header *mh	= 0;
+  int i = _dyld_image_count();
+
+  while (i > 0)
+    {
+      const char *id;
+
+      --i;
+      id = lt__header_get_instnam (_dyld_get_image_header (i));
+      if (id && strneq (id, name))
+	{
+	  mh = _dyld_get_image_header (i);
+	  break;
+	}
+    }
+
+  return mh;
+}
+
+/* Safe to assume our mh is good. */
+static NSSymbol
+lt__linkedlib_symbol (const char *symname, const mach_header *mh)
+{
+  NSSymbol symbol = 0;
+
+  if (lt__image_symbol && NSIsSymbolNameDefined (symname))
+    {
+      unsigned long offset = sizeof(mach_header);
+      struct load_command *lc;
+      int j;
+
+      for (j = 0; j < mh->ncmds; j++)
+	{
+	  lc = (struct load_command*) (((unsigned long) mh) + offset);
+	  if ((LC_LOAD_DYLIB == lc->cmd) || (LC_LOAD_WEAK_DYLIB == lc->cmd))
+	    {
+	      unsigned long base = ((dylib_command *) lc)->dylib.name.offset;
+	      char *name = (char *) (base + (unsigned long) lc);
+	      const mach_header *mh1 = lt__match_loadedlib (name);
+
+	      if (!mh1)
+		{
+		  /* Maybe NSAddImage can find it */
+		  mh1 = lt__addimage (name,
+				      NSADDIMAGE_OPTION_RETURN_ONLY_IF_LOADED
+				      | NSADDIMAGE_OPTION_WITH_SEARCHING
+				      | NSADDIMAGE_OPTION_RETURN_ON_ERROR);
+		}
+
+	      if (mh1)
+		{
+		  symbol = lt__image_symbol (mh1, symname, LT__SYMLOOKUP_OPTS);
+		  if (symbol)
+		    break;
+		}
+	    }
+
+	  offset += lc->cmdsize;
+	}
+    }
+
+  return symbol;
+}
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/load_add_on.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/load_add_on.c
new file mode 100644
index 0000000..379f9ba
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/load_add_on.c
@@ -0,0 +1,167 @@
+/* loader-load_add_on.c --  dynamic linking for BeOS
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2006,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+/* Use the preprocessor to rename non-static symbols to avoid namespace
+   collisions when the loader code is statically linked into libltdl.
+   Use the "<module_name>_LTX_" prefix so that the symbol addresses can
+   be fetched from the preloaded symbol list by lt_dlsym():  */
+#define get_vtable	load_add_on_LTX_get_vtable
+
+LT_BEGIN_C_DECLS
+LT_SCOPE lt_dlvtable *get_vtable (lt_user_data loader_data);
+LT_END_C_DECLS
+
+
+/* Boilerplate code to set up the vtable for hooking this loader into
+   libltdl's loader list:  */
+static int	 vl_exit  (lt_user_data loader_data);
+static lt_module vm_open  (lt_user_data loader_data, const char *filename,
+                           lt_dladvise advise);
+static int	 vm_close (lt_user_data loader_data, lt_module module);
+static void *	 vm_sym   (lt_user_data loader_data, lt_module module,
+			  const char *symbolname);
+
+static lt_dlvtable *vtable = 0;
+
+/* Return the vtable for this loader; only the name and sym_prefix
+   attributes (plus the virtual function implementations, obviously)
+   change between loaders.  */
+lt_dlvtable *
+get_vtable (lt_user_data loader_data)
+{
+  if (!vtable)
+    {
+      vtable = lt__zalloc (sizeof *vtable);
+    }
+
+  if (vtable && !vtable->name)
+    {
+      vtable->name		= "lt_load_add_on";
+      vtable->module_open	= vm_open;
+      vtable->module_close	= vm_close;
+      vtable->find_sym		= vm_sym;
+      vtable->dlloader_exit	= vl_exit;
+      vtable->dlloader_data	= loader_data;
+      vtable->priority		= LT_DLLOADER_APPEND;
+    }
+
+  if (vtable && (vtable->dlloader_data != loader_data))
+    {
+      LT__SETERROR (INIT_LOADER);
+      return 0;
+    }
+
+  return vtable;
+}
+
+
+
+/* --- IMPLEMENTATION --- */
+
+
+#include <kernel/image.h>
+
+/* A function called through the vtable when this loader is no
+   longer needed by the application.  */
+static int
+vl_exit (lt_user_data LT__UNUSED loader_data)
+{
+  vtable = NULL;
+  return 0;
+}
+
+/* A function called through the vtable to open a module with this
+   loader.  Returns an opaque representation of the newly opened
+   module for processing with this loader's other vtable functions.  */
+static lt_module
+vm_open (lt_user_data LT__UNUSED loader_data, const char *filename,
+         lt_dladvise LT__UNUSED advise)
+{
+  image_id image = 0;
+
+  if (filename)
+    {
+      image = load_add_on (filename);
+    }
+  else
+    {
+      image_info info;
+      int32 cookie = 0;
+      if (get_next_image_info (0, &cookie, &info) == B_OK)
+	image = load_add_on (info.name);
+    }
+
+  if (image <= 0)
+    {
+      LT__SETERROR (CANNOT_OPEN);
+      image = 0;
+    }
+
+  return (lt_module) image;
+}
+
+
+/* A function called through the vtable when a particular module
+   should be unloaded.  */
+static int
+vm_close (lt_user_data LT__UNUSED loader_data, lt_module module)
+{
+  int errors = 0;
+
+  if (unload_add_on ((image_id) module) != B_OK)
+    {
+      LT__SETERROR (CANNOT_CLOSE);
+      ++errors;
+    }
+
+  return errors;
+}
+
+
+/* A function called through the vtable to get the address of
+   a symbol loaded from a particular module.  */
+static void *
+vm_sym (lt_user_data LT__UNUSED loader_data, lt_module module, const char *name)
+{
+  void *address = 0;
+  image_id image = (image_id) module;
+
+  if (get_image_symbol (image, name, B_SYMBOL_TYPE_ANY, &address) != B_OK)
+    {
+      LT__SETERROR (SYMBOL_NOT_FOUND);
+      address = 0;
+    }
+
+  return address;
+}
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/loadlibrary.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/loadlibrary.c
new file mode 100644
index 0000000..179c009
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/loadlibrary.c
@@ -0,0 +1,369 @@
+/* loader-loadlibrary.c --  dynamic linking for Win32
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2005, 2006,
+                 2007, 2008, 2010 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+#if defined(__CYGWIN__)
+# include <sys/cygwin.h>
+#endif
+
+/* Use the preprocessor to rename non-static symbols to avoid namespace
+   collisions when the loader code is statically linked into libltdl.
+   Use the "<module_name>_LTX_" prefix so that the symbol addresses can
+   be fetched from the preloaded symbol list by lt_dlsym():  */
+#define get_vtable	loadlibrary_LTX_get_vtable
+
+LT_BEGIN_C_DECLS
+LT_SCOPE lt_dlvtable *get_vtable (lt_user_data loader_data);
+LT_END_C_DECLS
+
+
+/* Boilerplate code to set up the vtable for hooking this loader into
+   libltdl's loader list:  */
+static int	 vl_exit  (lt_user_data loader_data);
+static lt_module vm_open  (lt_user_data loader_data, const char *filename,
+                           lt_dladvise advise);
+static int	 vm_close (lt_user_data loader_data, lt_module module);
+static void *	 vm_sym   (lt_user_data loader_data, lt_module module,
+			  const char *symbolname);
+
+static lt_dlinterface_id iface_id = 0;
+static lt_dlvtable *vtable = 0;
+
+/* Return the vtable for this loader; only the name and sym_prefix
+   attributes (plus the virtual function implementations, obviously)
+   change between loaders.  */
+lt_dlvtable *
+get_vtable (lt_user_data loader_data)
+{
+  if (!vtable)
+    {
+      vtable = (lt_dlvtable *) lt__zalloc (sizeof *vtable);
+      iface_id = lt_dlinterface_register ("ltdl loadlibrary", NULL);
+    }
+
+  if (vtable && !vtable->name)
+    {
+      vtable->name		= "lt_loadlibrary";
+      vtable->module_open	= vm_open;
+      vtable->module_close	= vm_close;
+      vtable->find_sym		= vm_sym;
+      vtable->dlloader_exit	= vl_exit;
+      vtable->dlloader_data	= loader_data;
+      vtable->priority		= LT_DLLOADER_APPEND;
+    }
+
+  if (vtable && (vtable->dlloader_data != loader_data))
+    {
+      LT__SETERROR (INIT_LOADER);
+      return 0;
+    }
+
+  return vtable;
+}
+
+
+
+/* --- IMPLEMENTATION --- */
+
+
+#include <windows.h>
+
+#define LOCALFREE(mem)					     LT_STMT_START { \
+	if (mem) { LocalFree ((void *)mem); mem = NULL; }    } LT_STMT_END
+#define LOADLIB__SETERROR(errmsg) LT__SETERRORSTR (loadlibraryerror (errmsg))
+#define LOADLIB_SETERROR(errcode) LOADLIB__SETERROR (LT__STRERROR (errcode))
+
+static const char *loadlibraryerror (const char *default_errmsg);
+static DWORD WINAPI wrap_getthreaderrormode (void);
+static DWORD WINAPI fallback_getthreaderrormode (void);
+static BOOL WINAPI wrap_setthreaderrormode (DWORD mode, DWORD *oldmode);
+static BOOL WINAPI fallback_setthreaderrormode (DWORD mode, DWORD *oldmode);
+
+typedef DWORD (WINAPI getthreaderrormode_type) (void);
+typedef BOOL (WINAPI setthreaderrormode_type) (DWORD, DWORD *);
+
+static getthreaderrormode_type *getthreaderrormode = wrap_getthreaderrormode;
+static setthreaderrormode_type *setthreaderrormode = wrap_setthreaderrormode;
+static char *error_message = 0;
+
+
+/* A function called through the vtable when this loader is no
+   longer needed by the application.  */
+static int
+vl_exit (lt_user_data LT__UNUSED loader_data)
+{
+  vtable = NULL;
+  LOCALFREE (error_message);
+  return 0;
+}
+
+/* A function called through the vtable to open a module with this
+   loader.  Returns an opaque representation of the newly opened
+   module for processing with this loader's other vtable functions.  */
+static lt_module
+vm_open (lt_user_data LT__UNUSED loader_data, const char *filename,
+         lt_dladvise LT__UNUSED advise)
+{
+  lt_module	module	   = 0;
+  char		*ext;
+  char		wpath[MAX_PATH];
+  size_t	len;
+
+  if (!filename)
+    {
+      /* Get the name of the main module */
+      *wpath = 0;
+      GetModuleFileName (NULL, wpath, sizeof (wpath));
+      filename = wpath;
+    }
+  else
+    {
+      len = LT_STRLEN (filename);
+
+      if (len >= MAX_PATH)
+        {
+	  LT__SETERROR (CANNOT_OPEN);
+	  return 0;
+	}
+
+#if HAVE_DECL_CYGWIN_CONV_PATH
+      if (cygwin_conv_path (CCP_POSIX_TO_WIN_A, filename, wpath, MAX_PATH))
+	{
+	  LT__SETERROR (CANNOT_OPEN);
+	  return 0;
+	}
+      len = 0;
+#elif defined(__CYGWIN__)
+      cygwin_conv_to_full_win32_path (filename, wpath);
+      len = 0;
+#else
+      strcpy(wpath, filename);
+#endif
+
+      ext = strrchr (wpath, '.');
+      if (!ext)
+	{
+	  /* Append a `.' to stop Windows from adding an
+	     implicit `.dll' extension. */
+	  if (!len)
+	    len = strlen (wpath);
+
+	  if (len + 1 >= MAX_PATH)
+	    {
+	      LT__SETERROR (CANNOT_OPEN);
+	      return 0;
+	    }
+
+	  wpath[len] = '.';
+	  wpath[len+1] = '\0';
+	}
+    }
+
+  {
+    /* Silence dialog from LoadLibrary on some failures. */
+    DWORD errormode = getthreaderrormode ();
+    DWORD last_error;
+
+    setthreaderrormode (errormode | SEM_FAILCRITICALERRORS, NULL);
+
+    module = LoadLibrary (wpath);
+
+    /* Restore the error mode. */
+    last_error = GetLastError ();
+    setthreaderrormode (errormode, NULL);
+    SetLastError (last_error);
+  }
+
+  /* libltdl expects this function to fail if it is unable
+     to physically load the library.  Sadly, LoadLibrary
+     will search the loaded libraries for a match and return
+     one of them if the path search load fails.
+
+     We check whether LoadLibrary is returning a handle to
+     an already loaded module, and simulate failure if we
+     find one. */
+  {
+    lt_dlhandle cur = 0;
+
+    while ((cur = lt_dlhandle_iterate (iface_id, cur)))
+      {
+        if (!cur->module)
+          {
+            cur = 0;
+            break;
+          }
+
+        if (cur->module == module)
+          {
+            break;
+          }
+      }
+
+    if (!module)
+      LOADLIB_SETERROR (CANNOT_OPEN);
+    else if (cur)
+      {
+        LT__SETERROR (CANNOT_OPEN);
+        module = 0;
+      }
+  }
+
+  return module;
+}
+
+
+/* A function called through the vtable when a particular module
+   should be unloaded.  */
+static int
+vm_close (lt_user_data LT__UNUSED loader_data, lt_module module)
+{
+  int errors = 0;
+
+  if (FreeLibrary ((HMODULE) module) == 0)
+    {
+      LOADLIB_SETERROR (CANNOT_CLOSE);
+      ++errors;
+    }
+
+  return errors;
+}
+
+
+/* A function called through the vtable to get the address of
+   a symbol loaded from a particular module.  */
+static void *
+vm_sym (lt_user_data LT__UNUSED loader_data, lt_module module, const char *name)
+{
+  void *address = (void *) GetProcAddress ((HMODULE) module, name);
+
+  if (!address)
+    {
+      LOADLIB_SETERROR (SYMBOL_NOT_FOUND);
+    }
+
+  return address;
+}
+
+
+
+/* --- HELPER FUNCTIONS --- */
+
+
+/* Return the windows error message, or the passed in error message on
+   failure. */
+static const char *
+loadlibraryerror (const char *default_errmsg)
+{
+  size_t len;
+  LOCALFREE (error_message);
+
+  FormatMessageA (FORMAT_MESSAGE_ALLOCATE_BUFFER |
+                  FORMAT_MESSAGE_FROM_SYSTEM |
+                  FORMAT_MESSAGE_IGNORE_INSERTS,
+                  NULL,
+                  GetLastError (),
+                  0,
+                  (char *) &error_message,
+                  0, NULL);
+
+  /* Remove trailing CRNL */
+  len = LT_STRLEN (error_message);
+  if (len && error_message[len - 1] == '\n')
+    error_message[--len] = LT_EOS_CHAR;
+  if (len && error_message[len - 1] == '\r')
+    error_message[--len] = LT_EOS_CHAR;
+
+  return len ? error_message : default_errmsg;
+}
+
+/* A function called through the getthreaderrormode variable which checks
+   if the system supports GetThreadErrorMode (or GetErrorMode) and arranges
+   for it or a fallback implementation to be called directly in the future.
+   The selected version is then called. */
+static DWORD WINAPI
+wrap_getthreaderrormode (void)
+{
+  HMODULE kernel32 = GetModuleHandleA ("kernel32.dll");
+  getthreaderrormode
+    = (getthreaderrormode_type *) GetProcAddress (kernel32,
+						  "GetThreadErrorMode");
+  if (!getthreaderrormode)
+    getthreaderrormode
+      = (getthreaderrormode_type *) GetProcAddress (kernel32,
+						    "GetErrorMode");
+  if (!getthreaderrormode)
+    getthreaderrormode = fallback_getthreaderrormode;
+  return getthreaderrormode ();
+}
+
+/* A function called through the getthreaderrormode variable for cases
+   where the system does not support GetThreadErrorMode or GetErrorMode */
+static DWORD WINAPI
+fallback_getthreaderrormode (void)
+{
+  /* Prior to Windows Vista, the only way to get the current error
+     mode was to set a new one. In our case, we are setting a new
+     error mode right after "getting" it while ignoring the error
+     mode in effect when setting the new error mode, so that's
+     fairly ok. */
+  return (DWORD) SetErrorMode (SEM_FAILCRITICALERRORS);
+}
+
+/* A function called through the setthreaderrormode variable which checks
+   if the system supports SetThreadErrorMode and arranges for it or a
+   fallback implementation to be called directly in the future.
+   The selected version is then called. */
+static BOOL WINAPI
+wrap_setthreaderrormode (DWORD mode, DWORD *oldmode)
+{
+  HMODULE kernel32 = GetModuleHandleA ("kernel32.dll");
+  setthreaderrormode
+    = (setthreaderrormode_type *) GetProcAddress (kernel32,
+						  "SetThreadErrorMode");
+  if (!setthreaderrormode)
+    setthreaderrormode = fallback_setthreaderrormode;
+  return setthreaderrormode (mode, oldmode);
+}
+
+/* A function called through the setthreaderrormode variable for cases
+   where the system does not support SetThreadErrorMode. */
+static BOOL WINAPI
+fallback_setthreaderrormode (DWORD mode, DWORD *oldmode)
+{
+  /* Prior to Windows 7, there was no way to set the thread local error
+     mode, so set the process global error mode instead. */
+  DWORD old = (DWORD) SetErrorMode (mode);
+  if (oldmode)
+    *oldmode = old;
+  return TRUE;
+}
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/preopen.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/preopen.c
new file mode 100644
index 0000000..7149287
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/preopen.c
@@ -0,0 +1,375 @@
+/* loader-preopen.c -- emulate dynamic linking using preloaded_symbols
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2006,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+/* Use the preprocessor to rename non-static symbols to avoid namespace
+   collisions when the loader code is statically linked into libltdl.
+   Use the "<module_name>_LTX_" prefix so that the symbol addresses can
+   be fetched from the preloaded symbol list by lt_dlsym():  */
+#define get_vtable	preopen_LTX_get_vtable
+
+LT_BEGIN_C_DECLS
+LT_SCOPE lt_dlvtable *get_vtable (lt_user_data loader_data);
+LT_END_C_DECLS
+
+
+/* Boilerplate code to set up the vtable for hooking this loader into
+   libltdl's loader list:  */
+static int	 vl_init  (lt_user_data loader_data);
+static int	 vl_exit  (lt_user_data loader_data);
+static lt_module vm_open  (lt_user_data loader_data, const char *filename,
+                           lt_dladvise advise);
+static int	 vm_close (lt_user_data loader_data, lt_module module);
+static void *	 vm_sym   (lt_user_data loader_data, lt_module module,
+			  const char *symbolname);
+
+static lt_dlvtable *vtable = 0;
+
+/* Return the vtable for this loader; only the name and sym_prefix
+   attributes (plus the virtual function implementations, obviously)
+   change between loaders.  */
+lt_dlvtable *
+get_vtable (lt_user_data loader_data)
+{
+  if (!vtable)
+    {
+      vtable = (lt_dlvtable *) lt__zalloc (sizeof *vtable);
+    }
+
+  if (vtable && !vtable->name)
+    {
+      vtable->name		= "lt_preopen";
+      vtable->sym_prefix	= 0;
+      vtable->module_open	= vm_open;
+      vtable->module_close	= vm_close;
+      vtable->find_sym		= vm_sym;
+      vtable->dlloader_init	= vl_init;
+      vtable->dlloader_exit	= vl_exit;
+      vtable->dlloader_data	= loader_data;
+      vtable->priority		= LT_DLLOADER_PREPEND;
+    }
+
+  if (vtable && (vtable->dlloader_data != loader_data))
+    {
+      LT__SETERROR (INIT_LOADER);
+      return 0;
+    }
+
+  return vtable;
+}
+
+
+
+/* --- IMPLEMENTATION --- */
+
+
+/* Wrapper type to chain together symbol lists of various origins.  */
+typedef struct symlist_chain
+{
+  struct symlist_chain *next;
+  const lt_dlsymlist   *symlist;
+} symlist_chain;
+
+
+static int add_symlist   (const lt_dlsymlist *symlist);
+static int free_symlists (void);
+
+/* The start of the symbol lists chain.  */
+static symlist_chain	       *preloaded_symlists		= 0;
+
+/* A symbol list preloaded before lt_init() was called.  */
+static const	lt_dlsymlist   *default_preloaded_symbols	= 0;
+
+
+/* A function called through the vtable to initialise this loader.  */
+static int
+vl_init (lt_user_data LT__UNUSED loader_data)
+{
+  int errors = 0;
+
+  preloaded_symlists = 0;
+  if (default_preloaded_symbols)
+    {
+      errors = lt_dlpreload (default_preloaded_symbols);
+    }
+
+  return errors;
+}
+
+
+/* A function called through the vtable when this loader is no
+   longer needed by the application.  */
+static int
+vl_exit (lt_user_data LT__UNUSED loader_data)
+{
+  vtable = NULL;
+  free_symlists ();
+  return 0;
+}
+
+
+/* A function called through the vtable to open a module with this
+   loader.  Returns an opaque representation of the newly opened
+   module for processing with this loader's other vtable functions.  */
+static lt_module
+vm_open (lt_user_data LT__UNUSED loader_data, const char *filename,
+         lt_dladvise LT__UNUSED advise)
+{
+  symlist_chain *lists;
+  lt_module	 module = 0;
+
+  if (!preloaded_symlists)
+    {
+      LT__SETERROR (NO_SYMBOLS);
+      goto done;
+    }
+
+  /* Can't use NULL as the reflective symbol header, as NULL is
+     used to mark the end of the entire symbol list.  Self-dlpreopened
+     symbols follow this magic number, chosen to be an unlikely
+     clash with a real module name.  */
+  if (!filename)
+    {
+      filename = "@PROGRAM@";
+    }
+
+  for (lists = preloaded_symlists; lists; lists = lists->next)
+    {
+      const lt_dlsymlist *symbol;
+      for (symbol= lists->symlist; symbol->name; ++symbol)
+	{
+	  if (!symbol->address && streq (symbol->name, filename))
+	    {
+	      /* If the next symbol's name and address is 0, it means
+		 the module just contains the originator and no symbols.
+		 In this case we pretend that we never saw the module and
+	         hope that some other loader will be able to load the module
+	         and have access to its symbols */
+	      const lt_dlsymlist *next_symbol = symbol +1;
+	      if (next_symbol->address && next_symbol->name)
+		{
+	          module = (lt_module) lists->symlist;
+	          goto done;
+		}
+	    }
+	}
+    }
+
+  LT__SETERROR (FILE_NOT_FOUND);
+
+ done:
+  return module;
+}
+
+
+/* A function called through the vtable when a particular module
+   should be unloaded.  */
+static int
+vm_close (lt_user_data LT__UNUSED loader_data, lt_module LT__UNUSED module)
+{
+  /* Just to silence gcc -Wall */
+  module = 0;
+  return 0;
+}
+
+
+/* A function called through the vtable to get the address of
+   a symbol loaded from a particular module.  */
+static void *
+vm_sym (lt_user_data LT__UNUSED loader_data, lt_module module, const char *name)
+{
+  lt_dlsymlist	       *symbol = (lt_dlsymlist*) module;
+
+  symbol +=2;			/* Skip header (originator then libname). */
+
+  while (symbol->name)
+    {
+      if (streq (symbol->name, name))
+	{
+	  return symbol->address;
+	}
+
+      ++symbol;
+    }
+
+  LT__SETERROR (SYMBOL_NOT_FOUND);
+
+  return 0;
+}
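+
+/* Illustrative sketch (not part of the upstream source): the preloaded
+   symbol lists walked by vm_open() and vm_sym() are flat {name, address}
+   arrays.  An entry with a null address names the originator or a module;
+   an entry with a non-null address is one of that module's symbols; a null
+   name terminates the list.  The concrete names below are hypothetical:
+
+     { "@PROGRAM@",       0 },                        -- originator header
+     { "libplugin.a",     0 },                        -- module marker
+     { "plugin_LTX_init", (void *) plugin_LTX_init }, -- module symbols...
+     { 0,                 0 }                         -- end of list
+*/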
+
+
+
+/* --- HELPER FUNCTIONS --- */
+
+
+/* The symbol lists themselves are not allocated from the heap, but
+   we can unhook them and free up the chain of links between them.  */
+static int
+free_symlists (void)
+{
+  symlist_chain *lists;
+
+  lists = preloaded_symlists;
+  while (lists)
+    {
+      symlist_chain *next = lists->next;
+      FREE (lists);
+      lists = next;
+    }
+  preloaded_symlists = 0;
+
+  return 0;
+}
+
+/* Add a new symbol list to the global chain.  */
+static int
+add_symlist (const lt_dlsymlist *symlist)
+{
+  symlist_chain *lists;
+  int		 errors   = 0;
+
+  /* Search for duplicate entries:  */
+  for (lists = preloaded_symlists;
+       lists && lists->symlist != symlist; lists = lists->next)
+    /*NOWORK*/;
+
+  /* Don't add the same list twice:  */
+  if (!lists)
+    {
+      symlist_chain *tmp = (symlist_chain *) lt__zalloc (sizeof *tmp);
+
+      if (tmp)
+	{
+	  tmp->symlist = symlist;
+	  tmp->next = preloaded_symlists;
+	  preloaded_symlists = tmp;
+	}
+      else
+	{
+	  ++errors;
+	}
+    }
+
+  return errors;
+}
+
+
+
+/* --- PRELOADING API CALL IMPLEMENTATIONS --- */
+
+
+/* Save a default symbol list for later.  */
+int
+lt_dlpreload_default (const lt_dlsymlist *preloaded)
+{
+  default_preloaded_symbols = preloaded;
+  return 0;
+}
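+
+/* Illustrative sketch (not part of the upstream source): a statically
+   linked program normally installs its generated table before lt_dlinit()
+   through the LTDL_SET_PRELOADED_SYMBOLS() macro from ltdl.h, which
+   expands to a call to lt_dlpreload_default():
+
+     int main (void)
+     {
+       LTDL_SET_PRELOADED_SYMBOLS ();
+       if (lt_dlinit () != 0)
+         return 1;
+       ...
+     }
+*/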
+
+
+/* Add a symbol list to the global chain, or with a NULL argument,
+   revert to just the default list.  */
+int
+lt_dlpreload (const lt_dlsymlist *preloaded)
+{
+  int errors = 0;
+
+  if (preloaded)
+    {
+      errors = add_symlist (preloaded);
+    }
+  else
+    {
+      free_symlists();
+
+      if (default_preloaded_symbols)
+	{
+	  errors = lt_dlpreload (default_preloaded_symbols);
+	}
+    }
+
+  return errors;
+}
+
+
+/* Open all the preloaded modules from the named originator, executing
+   a callback for each one.  If ORIGINATOR is NULL, then call FUNC for
+   each preloaded module from the program itself.  */
+int
+lt_dlpreload_open (const char *originator, lt_dlpreload_callback_func *func)
+{
+  symlist_chain *list;
+  int		 errors = 0;
+  int		 found  = 0;
+
+  /* For each symlist in the chain...  */
+  for (list = preloaded_symlists; list; list = list->next)
+    {
+      /* ...that was preloaded by the requesting ORIGINATOR... */
+      if ((originator && streq (list->symlist->name, originator))
+          || (!originator && streq (list->symlist->name, "@PROGRAM@")))
+	{
+	  const lt_dlsymlist *symbol;
+	  unsigned int idx = 0;
+
+	  ++found;
+
+	  /* ...load the symbols per source compilation unit:
+	     (we preincrement the index to skip over the originator entry)  */
+	  while ((symbol = &list->symlist[++idx])->name != 0)
+	    {
+	      if ((symbol->address == 0)
+		  && (strneq (symbol->name, "@PROGRAM@")))
+		{
+		  lt_dlhandle handle = lt_dlopen (symbol->name);
+		  if (handle == 0)
+		    {
+		      ++errors;
+		    }
+		  else
+		    {
+		      errors += (*func) (handle);
+		    }
+		}
+	    }
+	}
+    }
+
+  if (!found)
+    {
+      LT__SETERROR(CANNOT_OPEN);
+      ++errors;
+    }
+
+  return errors;
+}
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/shl_load.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/shl_load.c
new file mode 100644
index 0000000..5a09d87
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/loaders/shl_load.c
@@ -0,0 +1,222 @@
+/* loader-shl_load.c --  dynamic linking with shl_load (HP-UX)
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2006,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+/* Use the preprocessor to rename non-static symbols to avoid namespace
+   collisions when the loader code is statically linked into libltdl.
+   Use the "<module_name>_LTX_" prefix so that the symbol addresses can
+   be fetched from the preloaded symbol list by lt_dlsym():  */
+#define get_vtable	shl_load_LTX_get_vtable
+
+LT_BEGIN_C_DECLS
+LT_SCOPE lt_dlvtable *get_vtable (lt_user_data loader_data);
+LT_END_C_DECLS
+
+
+/* Boilerplate code to set up the vtable for hooking this loader into
+   libltdl's loader list:  */
+static int	 vl_exit  (lt_user_data loader_data);
+static lt_module vm_open  (lt_user_data loader_data, const char *filename,
+                           lt_dladvise advise);
+static int	 vm_close (lt_user_data loader_data, lt_module module);
+static void *	 vm_sym   (lt_user_data loader_data, lt_module module,
+			  const char *symbolname);
+
+static lt_dlvtable *vtable = 0;
+
+/* Return the vtable for this loader; only the name and sym_prefix
+   attributes (plus the virtual function implementations, obviously)
+   change between loaders.  */
+lt_dlvtable *
+get_vtable (lt_user_data loader_data)
+{
+  if (!vtable)
+    {
+      vtable = lt__zalloc (sizeof *vtable);
+    }
+
+  if (vtable && !vtable->name)
+    {
+      vtable->name		= "lt_shl_load";
+      vtable->module_open	= vm_open;
+      vtable->module_close	= vm_close;
+      vtable->find_sym		= vm_sym;
+      vtable->dlloader_exit	= vl_exit;
+      vtable->dlloader_data	= loader_data;
+      vtable->priority		= LT_DLLOADER_APPEND;
+    }
+
+  if (vtable && (vtable->dlloader_data != loader_data))
+    {
+      LT__SETERROR (INIT_LOADER);
+      return 0;
+    }
+
+  return vtable;
+}
+
+
+
+/* --- IMPLEMENTATION --- */
+
+
+#if defined(HAVE_DL_H)
+#  include <dl.h>
+#endif
+
+/* some flags are missing on some systems, so we provide
+ * harmless defaults.
+ *
+ * Mandatory:
+ * BIND_IMMEDIATE  - Resolve symbol references when the library is loaded.
+ * BIND_DEFERRED   - Delay code symbol resolution until actual reference.
+ *
+ * Optionally:
+ * BIND_FIRST	   - Place the library at the head of the symbol search
+ * 		     order.
+ * BIND_NONFATAL   - The default BIND_IMMEDIATE behavior is to treat all
+ * 		     unsatisfied symbols as fatal.  This flag allows
+ * 		     binding of unsatisfied code symbols to be deferred
+ * 		     until use.
+ *		     [Perl: For certain libraries, like DCE, deferred
+ *		     binding often causes run time problems. Adding
+ *		     BIND_NONFATAL to BIND_IMMEDIATE still allows
+ *		     unresolved references in situations like this.]
+ * BIND_NOSTART	   - Do not call the initializer for the shared library
+ *		     when the library is loaded, nor on a future call to
+ *		     shl_unload().
+ * BIND_VERBOSE	   - Print verbose messages concerning possible
+ *		     unsatisfied symbols.
+ *
+ * hp9000s700/hp9000s800:
+ * BIND_RESTRICTED - Restrict symbols visible by the library to those
+ *		     present at library load time.
+ * DYNAMIC_PATH	   - Allow the loader to dynamically search for the
+ *		     library specified by the path argument.
+ */
+
+#if !defined(DYNAMIC_PATH)
+#  define DYNAMIC_PATH		0
+#endif
+#if !defined(BIND_RESTRICTED)
+#  define BIND_RESTRICTED	0
+#endif
+
+#define	LT_BIND_FLAGS	(BIND_IMMEDIATE | BIND_NONFATAL | DYNAMIC_PATH)
+
+
+/* A function called through the vtable when this loader is no
+   longer needed by the application.  */
+static int
+vl_exit (lt_user_data LT__UNUSED loader_data)
+{
+  vtable = NULL;
+  return 0;
+}
+
+/* A function called through the vtable to open a module with this
+   loader.  Returns an opaque representation of the newly opened
+   module for processing with this loader's other vtable functions.  */
+static lt_module
+vm_open (lt_user_data LT__UNUSED loader_data, const char *filename,
+         lt_dladvise LT__UNUSED advise)
+{
+  static shl_t self = (shl_t) 0;
+  lt_module module = (lt_module) 0;
+
+  /* Since searching for a symbol against a NULL module handle will also
+     look in everything else that was already loaded and exported with
+     the -E compiler flag, we always cache a handle saved before any
+     modules are loaded.  */
+  if (!self)
+    {
+      void *address;
+      shl_findsym (&self, "main", TYPE_UNDEFINED, &address);
+    }
+
+  if (!filename)
+    {
+      module = self;
+    }
+  else
+    {
+      module = shl_load (filename, LT_BIND_FLAGS, 0L);
+
+      if (!module)
+	{
+	  LT__SETERROR (CANNOT_OPEN);
+	}
+    }
+
+  return module;
+}
+
+/* A function called through the vtable when a particular module
+   should be unloaded.  */
+static int
+vm_close (lt_user_data LT__UNUSED loader_data, lt_module module)
+{
+  int errors = 0;
+
+  if (module && (shl_unload ((shl_t) (module)) != 0))
+    {
+      LT__SETERROR (CANNOT_CLOSE);
+      ++errors;
+    }
+
+  return errors;
+}
+
+
+/* A function called through the vtable to get the address of
+   a symbol loaded from a particular module.  */
+static void *
+vm_sym (lt_user_data LT__UNUSED loader_data, lt_module module, const char *name)
+{
+  void *address = 0;
+
+  /* vm_open should never return a NULL module handle.  */
+  if (module == (lt_module) 0)
+    {
+      LT__SETERROR (INVALID_HANDLE);
+    }
+  else if (!shl_findsym((shl_t*) &module, name, TYPE_UNDEFINED, &address))
+    {
+      if (!address)
+	{
+	  LT__SETERROR (SYMBOL_NOT_FOUND);
+	}
+    }
+
+  return address;
+}
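+
+/* A minimal illustrative sketch (hypothetical, not from the GNU Libtool
+   sources) of how a loader such as this one is hooked into libltdl: the
+   get_vtable() entry point above is called to obtain the vtable, which is
+   then registered with lt_dlloader_add() from lt_dlloader.h.  libltdl
+   normally performs this registration itself during lt_dlinit(); the
+   snippet only makes the flow explicit.  */
+#if 0
+static int
+register_shl_load_loader (void)
+{
+  /* Fetch this loader's vtable; 0 is passed as the opaque loader data.  */
+  lt_dlvtable *vt = get_vtable (0);
+
+  /* lt_dlloader_add returns 0 on success.  */
+  return (vt && lt_dlloader_add (vt) == 0) ? 0 : 1;
+}
+#endif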
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__alloc.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__alloc.c
new file mode 100644
index 0000000..d39e17e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__alloc.c
@@ -0,0 +1,95 @@
+/* lt__alloc.c -- internal memory management interface
+
+   Copyright (C) 2004, 2006, 2007 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+
+#include <stdio.h>
+
+#include "lt__alloc.h"
+
+static void alloc_die_default (void);
+
+void (*lt__alloc_die) (void) = alloc_die_default;
+
+/* Unless overridden, exit on memory failure.  */
+static void
+alloc_die_default (void)
+{
+  fprintf (stderr, "Out of memory.\n");
+  exit (EXIT_FAILURE);
+}
+
+void *
+lt__malloc (size_t n)
+{
+  void *mem;
+
+  if (! (mem = malloc (n)))
+    (*lt__alloc_die) ();
+
+  return mem;
+}
+
+void *
+lt__zalloc (size_t n)
+{
+  void *mem;
+
+  if ((mem = lt__malloc (n)))
+    memset (mem, 0, n);
+
+  return mem;
+}
+
+void *
+lt__realloc (void *mem, size_t n)
+{
+  if (! (mem = realloc (mem, n)))
+    (*lt__alloc_die) ();
+
+  return mem;
+}
+
+void *
+lt__memdup (void const *mem, size_t n)
+{
+  void *newmem;
+
+  if ((newmem = lt__malloc (n)))
+    return memcpy (newmem, mem, n);
+
+  return 0;
+}
+
+char *
+lt__strdup (const char *string)
+{
+  return (char *) lt__memdup (string, strlen (string) +1);
+}
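+
+/* A minimal illustrative sketch (hypothetical, not from the GNU Libtool
+   sources) showing how the lt__alloc_die hook above lets a client of
+   these internal allocators replace the default "print and exit"
+   behaviour; ltdl.c does exactly this with lt__alloc_die_callback().
+   The handler name below is made up.  */
+#if 0
+static void
+my_alloc_failure_handler (void)
+{
+  /* Record the failure instead of exiting; callers of lt__malloc() and
+     friends must then check for a NULL return.  */
+}
+
+static void
+install_alloc_handler (void)
+{
+  lt__alloc_die = my_alloc_failure_handler;
+}
+#endif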
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__dirent.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__dirent.c
new file mode 100644
index 0000000..30dc072
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__dirent.c
@@ -0,0 +1,107 @@
+/* lt__dirent.c -- internal directory entry scanning interface
+
+   Copyright (C) 2001, 2004 Free Software Foundation, Inc.
+   Written by Bob Friesenhahn, 2001
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+
+#include <assert.h>
+#include <stddef.h>
+
+#include "lt__dirent.h"
+
+#if defined(__WINDOWS__)
+
+void
+closedir (DIR *entry)
+{
+  assert (entry != (DIR *) NULL);
+  FindClose (entry->hSearch);
+  free ((void *) entry);
+}
+
+
+DIR *
+opendir (const char *path)
+{
+  char file_spec[LT_FILENAME_MAX];
+  DIR *entry;
+
+  assert (path != (char *) 0);
+  if (lt_strlcpy (file_spec, path, sizeof file_spec) >= sizeof file_spec
+      || lt_strlcat (file_spec, "\\", sizeof file_spec) >= sizeof file_spec)
+    return (DIR *) 0;
+  entry = (DIR *) malloc (sizeof(DIR));
+  if (entry != (DIR *) 0)
+    {
+      entry->firsttime = TRUE;
+      entry->hSearch = FindFirstFile (file_spec, &entry->Win32FindData);
+
+      if (entry->hSearch == INVALID_HANDLE_VALUE)
+	{
+	  if (lt_strlcat (file_spec, "\\*.*", sizeof file_spec) < sizeof file_spec)
+	    {
+	      entry->hSearch = FindFirstFile (file_spec, &entry->Win32FindData);
+	    }
+
+	  if (entry->hSearch == INVALID_HANDLE_VALUE)
+	    {
+	      entry = (free (entry), (DIR *) 0);
+	    }
+	}
+    }
+
+  return entry;
+}
+
+
+struct dirent *
+readdir (DIR *entry)
+{
+  int status;
+
+  if (entry == (DIR *) 0)
+    return (struct dirent *) 0;
+
+  if (!entry->firsttime)
+    {
+      status = FindNextFile (entry->hSearch, &entry->Win32FindData);
+      if (status == 0)
+        return (struct dirent *) 0;
+    }
+
+  entry->firsttime = FALSE;
+  if (lt_strlcpy (entry->file_info.d_name, entry->Win32FindData.cFileName,
+	sizeof entry->file_info.d_name) >= sizeof entry->file_info.d_name)
+    return (struct dirent *) 0;
+  entry->file_info.d_namlen = strlen (entry->file_info.d_name);
+
+  return &entry->file_info;
+}
+
+#endif /*defined(__WINDOWS__)*/
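+
+/* A minimal illustrative sketch (hypothetical, not from the GNU Libtool
+   sources) of the standard dirent-style loop these Windows replacements
+   emulate; ltdl.c's list_files_by_dir() walks directories the same way.  */
+#if 0
+static void
+visit_directory (const char *path)
+{
+  DIR *dir = opendir (path);
+
+  if (dir)
+    {
+      struct dirent *dp;
+
+      while ((dp = readdir (dir)))
+        {
+          /* dp->d_name holds the current entry's file name.  */
+        }
+
+      closedir (dir);
+    }
+}
+#endif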
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__strl.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__strl.c
new file mode 100644
index 0000000..c2cee58
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt__strl.c
@@ -0,0 +1,127 @@
+/* lt__strl.c -- size-bounded string copying and concatenation
+
+   Copyright (C) 2004 Free Software Foundation, Inc.
+   Written by Bob Friesenhahn, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include <assert.h>
+#include <string.h>
+
+#include "lt__strl.h"
+
+/*
+ lt_strlcat appends the NULL-terminated string src to the end of dst.
+ It will append at most dstsize - strlen(dst) - 1 bytes,
+ NULL-terminating the result. The total length of the string which
+ would have been created given sufficient buffer size (may be longer
+ than dstsize) is returned.  This function substitutes for strlcat()
+ which is available under NetBSD, FreeBSD and Solaris 9.
+
+ Buffer overflow can be checked as follows:
+
+   if (lt_strlcat(dst, src, dstsize) >= dstsize)
+     return -1;
+*/
+#if !defined(HAVE_STRLCAT)
+size_t
+lt_strlcat(char *dst, const char *src, const size_t dstsize)
+{
+  size_t length;
+  char *p;
+  const char *q;
+
+  assert(dst != NULL);
+  assert(src != (const char *) NULL);
+  assert(dstsize >= 1);
+
+  length=strlen(dst);
+
+  /*
+    Copy remaining characters from src while constraining length to
+    size - 1.
+  */
+  for ( p = dst + length, q = src;
+        (*q != 0) && (length < dstsize - 1) ;
+        length++, p++, q++ )
+    *p = *q;
+
+  dst[length]='\0';
+
+  /*
+    Add remaining length of src to length.
+  */
+  while (*q++)
+    length++;
+
+  return length;
+}
+#endif /* !defined(HAVE_STRLCAT) */
+
+/*
+  lt_strlcpy copies up to dstsize - 1 characters from the NULL-terminated
+  string src to dst, NULL-terminating the result. The total length of
+  the string which would have been created given sufficient buffer
+  size (may be longer than dstsize) is returned. This function
+  substitutes for strlcpy() which is available under OpenBSD, FreeBSD
+  and Solaris 9.
+
+  Buffer overflow can be checked as  follows:
+
+    if (lt_strlcpy(dst, src, dstsize) >= dstsize)
+      return -1;
+*/
+#if !defined(HAVE_STRLCPY)
+size_t
+lt_strlcpy(char *dst, const char *src, const size_t dstsize)
+{
+  size_t length=0;
+  char *p;
+  const char *q;
+
+  assert(dst != NULL);
+  assert(src != (const char *) NULL);
+  assert(dstsize >= 1);
+
+  /*
+    Copy src to dst within bounds of size-1.
+  */
+  for ( p=dst, q=src, length=0 ;
+        (*q != 0) && (length < dstsize-1) ;
+        length++, p++, q++ )
+    *p = *q;
+
+  dst[length]='\0';
+
+  /*
+    Add remaining length of src to length.
+  */
+  while (*q++)
+    length++;
+
+  return length;
+}
+#endif /* !defined(HAVE_STRLCPY) */
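+
+/* A minimal illustrative sketch (hypothetical, not from the GNU Libtool
+   sources) of the truncation-check idiom described in the comments above,
+   combining lt_strlcpy() and lt_strlcat() to build a path safely;
+   lt__dirent.c's opendir() replacement uses the same pattern.  */
+#if 0
+static int
+build_search_spec (char *buf, size_t bufsize, const char *dir)
+{
+  /* Both functions return the length that *would* have been produced, so
+     a result >= bufsize means the output was truncated.  */
+  if (lt_strlcpy (buf, dir, bufsize) >= bufsize
+      || lt_strlcat (buf, "/*", bufsize) >= bufsize)
+    return -1;          /* truncated */
+
+  return 0;
+}
+#endif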
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt_dlloader.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt_dlloader.c
new file mode 100644
index 0000000..2c99a22
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt_dlloader.c
@@ -0,0 +1,210 @@
+/* lt_dlloader.c -- dynamic library loader interface
+
+   Copyright (C) 2004, 2007, 2008 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2004
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_dlloader.h"
+
+#define RETURN_SUCCESS 0
+#define RETURN_FAILURE 1
+
+static void *	loader_callback (SList *item, void *userdata);
+
+/* A list of all the dlloaders we know about, each stored as a boxed
+   SList item:  */
+static	SList    *loaders		= 0;
+
+
+/* Return NULL, unless the loader in this ITEM has a matching name,
+   in which case we return the matching item so that its address is
+   passed back out (for possible freeing) by slist_remove.  */
+static void *
+loader_callback (SList *item, void *userdata)
+{
+  const lt_dlvtable *vtable = (const lt_dlvtable *) item->userdata;
+  const char *	    name    = (const char *) userdata;
+
+  assert (vtable);
+
+  return streq (vtable->name, name) ? (void *) item : NULL;
+}
+
+
+/* Hook VTABLE into our global LOADERS list according to its own
+   PRIORITY field value.  */
+int
+lt_dlloader_add (const lt_dlvtable *vtable)
+{
+  SList *item;
+
+  if ((vtable == 0)	/* diagnose invalid vtable fields */
+      || (vtable->module_open == 0)
+      || (vtable->module_close == 0)
+      || (vtable->find_sym == 0)
+      || ((vtable->priority != LT_DLLOADER_PREPEND) &&
+	  (vtable->priority != LT_DLLOADER_APPEND)))
+    {
+      LT__SETERROR (INVALID_LOADER);
+      return RETURN_FAILURE;
+    }
+
+  item = slist_box (vtable);
+  if (!item)
+    {
+      (*lt__alloc_die) ();
+
+      /* Let the caller know something went wrong if lt__alloc_die
+	 doesn't abort.  */
+      return RETURN_FAILURE;
+    }
+
+  if (vtable->priority == LT_DLLOADER_PREPEND)
+    {
+      loaders = slist_cons (item, loaders);
+    }
+  else
+    {
+      assert (vtable->priority == LT_DLLOADER_APPEND);
+      loaders = slist_concat (loaders, item);
+    }
+
+  return RETURN_SUCCESS;
+}
+
+#ifdef LT_DEBUG_LOADERS
+static void *
+loader_dump_callback (SList *item, void *userdata)
+{
+  const lt_dlvtable *vtable = (const lt_dlvtable *) item->userdata;
+  fprintf (stderr, ", %s", (vtable && vtable->name) ? vtable->name : "(null)");
+  return 0;
+}
+
+void
+lt_dlloader_dump (void)
+{
+  fprintf (stderr, "loaders: ");
+  if (!loaders)
+    {
+      fprintf (stderr, "(empty)");
+    }
+  else
+    {
+      const lt_dlvtable *head = (const lt_dlvtable *) loaders->userdata;
+      fprintf (stderr, "%s", (head && head->name) ? head->name : "(null)");
+      if (slist_tail (loaders))
+	slist_foreach (slist_tail (loaders), loader_dump_callback, NULL);
+    }
+  fprintf (stderr, "\n");
+}
+#endif
+
+/* An iterator for the global loader list: if LOADER is NULL, then
+   return the first element, otherwise the following element.  */
+lt_dlloader
+lt_dlloader_next (lt_dlloader loader)
+{
+  SList *item = (SList *) loader;
+  return (lt_dlloader) (item ? item->next : loaders);
+}
+
+
+/* Non-destructive unboxing of a loader.  */
+const lt_dlvtable *
+lt_dlloader_get	(lt_dlloader loader)
+{
+  return (const lt_dlvtable *) (loader ? ((SList *) loader)->userdata : NULL);
+}
+
+
+/* Return the contents of the first item in the global loader list
+   with a matching NAME after removing it from that list.  If there
+   was no match, return NULL; if there is an error, return NULL and
+   set an error for lt_dlerror; do not set an error if only resident
+   modules need this loader; in either case, the loader list is not
+   changed if NULL is returned.  */
+lt_dlvtable *
+lt_dlloader_remove (const char *name)
+{
+  const lt_dlvtable *	vtable	= lt_dlloader_find (name);
+  static const char	id_string[] = "lt_dlloader_remove";
+  lt_dlinterface_id	iface;
+  lt_dlhandle		handle = 0;
+  int			in_use = 0;
+  int			in_use_by_resident = 0;
+
+  if (!vtable)
+    {
+      LT__SETERROR (INVALID_LOADER);
+      return 0;
+    }
+
+  /* Fail if there are any open modules which use this loader.  */
+  iface = lt_dlinterface_register (id_string, NULL);
+  while ((handle = lt_dlhandle_iterate (iface, handle)))
+    {
+      lt_dlhandle cur = handle;
+      if (cur->vtable == vtable)
+	{
+	  in_use = 1;
+	  if (lt_dlisresident (handle))
+	    in_use_by_resident = 1;
+	}
+    }
+  lt_dlinterface_free (iface);
+  if (in_use)
+    {
+      if (!in_use_by_resident)
+	LT__SETERROR (REMOVE_LOADER);
+      return 0;
+    }
+
+  /* Call the loader finalisation function.  */
+  if (vtable && vtable->dlloader_exit)
+    {
+      if ((*vtable->dlloader_exit) (vtable->dlloader_data) != 0)
+	{
+	  /* If there is an exit function, and it returns non-zero
+	     then it must set an error, and we will not remove it
+	     from the list.  */
+	  return 0;
+	}
+    }
+
+  /* If we got this far, remove the loader from our global list.  */
+  return (lt_dlvtable *)
+      slist_unbox ((SList *) slist_remove (&loaders, loader_callback, (void *) name));
+}
+
+
+const lt_dlvtable *
+lt_dlloader_find (const char *name)
+{
+  return lt_dlloader_get (slist_find (loaders, loader_callback, (void *) name));
+}
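+
+/* A minimal illustrative sketch (hypothetical, not from the GNU Libtool
+   sources) showing how the iterator pair lt_dlloader_next() and
+   lt_dlloader_get() defined above can walk the global loader list, much
+   as lt_dlloader_dump() does.  */
+#if 0
+static void
+print_loader_names (void)
+{
+  lt_dlloader loader = 0;
+
+  /* Passing 0 yields the first loader; NULL marks the end of the list.  */
+  while ((loader = lt_dlloader_next (loader)))
+    {
+      const lt_dlvtable *vt = lt_dlloader_get (loader);
+
+      if (vt && vt->name)
+        fprintf (stderr, "%s\n", vt->name);
+    }
+}
+#endif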
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt_error.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt_error.c
new file mode 100644
index 0000000..d7af36d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/lt_error.c
@@ -0,0 +1,110 @@
+/* lt_error.c -- error propagation interface
+
+   Copyright (C) 1999, 2000, 2001, 2004, 2005, 2007 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1999
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_error.h"
+
+static const char	*last_error	= 0;
+static const char	error_strings[LT_ERROR_MAX][LT_ERROR_LEN_MAX + 1] =
+  {
+#define LT_ERROR(name, diagnostic)	diagnostic,
+    lt_dlerror_table
+#undef LT_ERROR
+  };
+
+static	const char    **user_error_strings	= 0;
+static	int		errorcount		= LT_ERROR_MAX;
+
+int
+lt_dladderror (const char *diagnostic)
+{
+  int		errindex = 0;
+  int		result	 = -1;
+  const char  **temp     = (const char **) 0;
+
+  assert (diagnostic);
+
+  errindex = errorcount - LT_ERROR_MAX;
+  temp = REALLOC (const char *, user_error_strings, 1 + errindex);
+  if (temp)
+    {
+      user_error_strings		= temp;
+      user_error_strings[errindex]	= diagnostic;
+      result				= errorcount++;
+    }
+
+  return result;
+}
+
+int
+lt_dlseterror (int errindex)
+{
+  int		errors	 = 0;
+
+  if (errindex >= errorcount || errindex < 0)
+    {
+      /* Ack!  Error setting the error message! */
+      LT__SETERROR (INVALID_ERRORCODE);
+      ++errors;
+    }
+  else if (errindex < LT_ERROR_MAX)
+    {
+      /* No error setting the error message! */
+      LT__SETERRORSTR (error_strings[errindex]);
+    }
+  else
+    {
+      /* No error setting the error message! */
+      LT__SETERRORSTR (user_error_strings[errindex - LT_ERROR_MAX]);
+    }
+
+  return errors;
+}
+
+const char *
+lt__error_string (int errorcode)
+{
+  assert (errorcode >= 0);
+  assert (errorcode < LT_ERROR_MAX);
+
+  return error_strings[errorcode];
+}
+
+const char *
+lt__get_last_error (void)
+{
+  return last_error;
+}
+
+const char *
+lt__set_last_error (const char *errormsg)
+{
+  return last_error = errormsg;
+}
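+
+/* A minimal illustrative sketch (hypothetical, not from the GNU Libtool
+   sources) of the intended use of lt_dladderror() and lt_dlseterror()
+   above: a client registers its own diagnostic string once and later
+   raises it by index.  The message text below is made up.  */
+#if 0
+static int my_errcode = -1;
+
+static void
+report_plugin_error (void)
+{
+  /* The diagnostic is stored by pointer, so it must outlive the call;
+     a string literal is sufficient.  */
+  if (my_errcode < 0)
+    my_errcode = lt_dladderror ("plugin rejected by application");
+
+  if (my_errcode >= 0)
+    lt_dlseterror (my_errcode);   /* lt_dlerror() now returns the text */
+}
+#endif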
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/ltdl.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/ltdl.c
new file mode 100644
index 0000000..01853e0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/ltdl.c
@@ -0,0 +1,2464 @@
+/* ltdl.c -- system independent dlopen wrapper
+
+   Copyright (C) 1998, 1999, 2000, 2004, 2005, 2006,
+		 2007, 2008, 2011 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include "lt__private.h"
+#include "lt_system.h"
+#include "lt_dlloader.h"
+
+
+/* --- MANIFEST CONSTANTS --- */
+
+
+/* Standard libltdl search path environment variable name  */
+#undef  LTDL_SEARCHPATH_VAR
+#define LTDL_SEARCHPATH_VAR	"LTDL_LIBRARY_PATH"
+
+/* Standard libtool archive file extension.  */
+#undef  LT_ARCHIVE_EXT
+#define LT_ARCHIVE_EXT	".la"
+
+/* max. filename length */
+#if !defined(LT_FILENAME_MAX)
+#  define LT_FILENAME_MAX	1024
+#endif
+
+#if !defined(LT_LIBEXT)
+#  define LT_LIBEXT "a"
+#endif
+
+#if !defined(LT_LIBPREFIX)
+#  define LT_LIBPREFIX "lib"
+#endif
+
+/* This is the maximum symbol size that won't require malloc/free */
+#undef	LT_SYMBOL_LENGTH
+#define LT_SYMBOL_LENGTH	128
+
+/* This accounts for the _LTX_ separator */
+#undef	LT_SYMBOL_OVERHEAD
+#define LT_SYMBOL_OVERHEAD	5
+
+/* Various boolean flags can be stored in the flags field of an
+   lt_dlhandle... */
+#define LT_DLIS_RESIDENT(handle)  ((handle)->info.is_resident)
+#define LT_DLIS_SYMGLOBAL(handle) ((handle)->info.is_symglobal)
+#define LT_DLIS_SYMLOCAL(handle)  ((handle)->info.is_symlocal)
+
+
+static	const char	objdir[]		= LT_OBJDIR;
+static	const char	archive_ext[]		= LT_ARCHIVE_EXT;
+static  const char	libext[]		= LT_LIBEXT;
+static  const char	libprefix[]		= LT_LIBPREFIX;
+#if defined(LT_MODULE_EXT)
+static	const char	shlib_ext[]		= LT_MODULE_EXT;
+#endif
+/* If the loadable module suffix is not the same as the linkable
+ * shared library suffix, this will be defined. */
+#if defined(LT_SHARED_EXT)
+static	const char	shared_ext[]		= LT_SHARED_EXT;
+#endif
+#if defined(LT_DLSEARCH_PATH)
+static	const char	sys_dlsearch_path[]	= LT_DLSEARCH_PATH;
+#endif
+
+
+
+
+/* --- DYNAMIC MODULE LOADING --- */
+
+
+/* The type of a function used at each iteration of  foreach_dirinpath().  */
+typedef int	foreach_callback_func (char *filename, void *data1,
+				       void *data2);
+/* foreachfile_callback itself calls a function of this type: */
+typedef int	file_worker_func      (const char *filename, void *data);
+
+
+static	int	foreach_dirinpath     (const char *search_path,
+				       const char *base_name,
+				       foreach_callback_func *func,
+				       void *data1, void *data2);
+static	int	find_file_callback    (char *filename, void *data1,
+				       void *data2);
+static	int	find_handle_callback  (char *filename, void *data,
+				       void *ignored);
+static	int	foreachfile_callback  (char *filename, void *data1,
+				       void *data2);
+
+
+static	int     canonicalize_path     (const char *path, char **pcanonical);
+static	int	argzize_path	      (const char *path,
+				       char **pargz, size_t *pargz_len);
+static	FILE   *find_file	      (const char *search_path,
+				       const char *base_name, char **pdir);
+static	lt_dlhandle *find_handle      (const char *search_path,
+				       const char *base_name,
+				       lt_dlhandle *handle,
+				       lt_dladvise advise);
+static	int	find_module	      (lt_dlhandle *handle, const char *dir,
+				       const char *libdir, const char *dlname,
+				       const char *old_name, int installed,
+				       lt_dladvise advise);
+static  int     has_library_ext       (const char *filename);
+static	int	load_deplibs	      (lt_dlhandle handle,  char *deplibs);
+static	int	trim		      (char **dest, const char *str);
+static	int	try_dlopen	      (lt_dlhandle *handle,
+				       const char *filename, const char *ext,
+				       lt_dladvise advise);
+static	int	tryall_dlopen	      (lt_dlhandle *handle,
+				       const char *filename,
+				       lt_dladvise padvise,
+				       const lt_dlvtable *vtable);
+static	int	unload_deplibs	      (lt_dlhandle handle);
+static	int	lt_argz_insert	      (char **pargz, size_t *pargz_len,
+				       char *before, const char *entry);
+static	int	lt_argz_insertinorder (char **pargz, size_t *pargz_len,
+				       const char *entry);
+static	int	lt_argz_insertdir     (char **pargz, size_t *pargz_len,
+				       const char *dirnam, struct dirent *dp);
+static	int	lt_dlpath_insertdir   (char **ppath, char *before,
+				       const char *dir);
+static	int	list_files_by_dir     (const char *dirnam,
+				       char **pargz, size_t *pargz_len);
+static	int	file_not_found	      (void);
+
+#ifdef HAVE_LIBDLLOADER
+static	int	loader_init_callback  (lt_dlhandle handle);
+#endif /* HAVE_LIBDLLOADER */
+
+static	int	loader_init	      (lt_get_vtable *vtable_func,
+				       lt_user_data data);
+
+static	char	       *user_search_path= 0;
+static	lt_dlhandle	handles	= 0;
+static	int		initialized	= 0;
+
+/* Our memory failure callback sets the error message to be passed back
+   up to the client; because this callback returns (it does not abort),
+   callers of the allocation functions must check for failure and return
+   themselves.  */
+void
+lt__alloc_die_callback (void)
+{
+  LT__SETERROR (NO_MEMORY);
+}
+
+#ifdef HAVE_LIBDLLOADER
+/* This function is called to initialise each preloaded module loader,
+   and hook it into the list of loaders to be used when attempting to
+   dlopen an application module.  */
+static int
+loader_init_callback (lt_dlhandle handle)
+{
+  lt_get_vtable *vtable_func = (lt_get_vtable *) lt_dlsym (handle, "get_vtable");
+  return loader_init (vtable_func, 0);
+}
+#endif /* HAVE_LIBDLLOADER */
+
+static int
+loader_init (lt_get_vtable *vtable_func, lt_user_data data)
+{
+  const lt_dlvtable *vtable = 0;
+  int errors = 0;
+
+  if (vtable_func)
+    {
+      vtable = (*vtable_func) (data);
+    }
+
+  /* lt_dlloader_add will LT__SETERROR if it fails.  */
+  errors += lt_dlloader_add (vtable);
+
+  assert (errors || vtable);
+
+  if ((!errors) && vtable->dlloader_init)
+    {
+      if ((*vtable->dlloader_init) (vtable->dlloader_data))
+	{
+	  LT__SETERROR (INIT_LOADER);
+	  ++errors;
+	}
+    }
+
+  return errors;
+}
+
+/* Bootstrap the loader loading with the preopening loader.  */
+#define get_vtable		preopen_LTX_get_vtable
+#define preloaded_symbols	LT_CONC3(lt_, LTDLOPEN, _LTX_preloaded_symbols)
+
+LT_BEGIN_C_DECLS
+LT_SCOPE const lt_dlvtable *	get_vtable (lt_user_data data);
+LT_END_C_DECLS
+#ifdef HAVE_LIBDLLOADER
+extern LT_DLSYM_CONST lt_dlsymlist preloaded_symbols[];
+#endif
+
+/* Initialize libltdl. */
+int
+lt_dlinit (void)
+{
+  int	errors	= 0;
+
+  /* Initialize only at first call. */
+  if (++initialized == 1)
+    {
+      lt__alloc_die	= lt__alloc_die_callback;
+      handles		= 0;
+      user_search_path	= 0; /* empty search path */
+
+      /* First set up the statically loaded preload module loader, so
+	 we can use it to preopen the other loaders we linked in at
+	 compile time.  */
+      errors += loader_init (get_vtable, 0);
+
+      /* Now open all the preloaded module loaders, so the application
+	 can use _them_ to lt_dlopen its own modules.  */
+#ifdef HAVE_LIBDLLOADER
+      if (!errors)
+	{
+	  errors += lt_dlpreload (preloaded_symbols);
+	}
+
+      if (!errors)
+	{
+	  errors += lt_dlpreload_open (LT_STR(LTDLOPEN), loader_init_callback);
+	}
+#endif /* HAVE_LIBDLLOADER */
+    }
+
+#ifdef LT_DEBUG_LOADERS
+  lt_dlloader_dump();
+#endif
+
+  return errors;
+}
+
+int
+lt_dlexit (void)
+{
+  /* shut down libltdl */
+  lt_dlloader *loader   = 0;
+  lt_dlhandle  handle   = handles;
+  int	       errors   = 0;
+
+  if (!initialized)
+    {
+      LT__SETERROR (SHUTDOWN);
+      ++errors;
+      goto done;
+    }
+
+  /* shut down only at last call. */
+  if (--initialized == 0)
+    {
+      int	level;
+
+      while (handles && LT_DLIS_RESIDENT (handles))
+	{
+	  handles = handles->next;
+	}
+
+      /* close all modules */
+      for (level = 1; handle; ++level)
+	{
+	  lt_dlhandle cur = handles;
+	  int saw_nonresident = 0;
+
+	  while (cur)
+	    {
+	      lt_dlhandle tmp = cur;
+	      cur = cur->next;
+	      if (!LT_DLIS_RESIDENT (tmp))
+		{
+		  saw_nonresident = 1;
+		  if (tmp->info.ref_count <= level)
+		    {
+		      if (lt_dlclose (tmp))
+			{
+			  ++errors;
+			}
+		      /* Make sure that the handle pointed to by 'cur' still exists.
+			 lt_dlclose recursively closes dependent libraries which removes
+			 them from the linked list.  One of these might be the one
+			 pointed to by 'cur'.  */
+		      if (cur)
+			{
+			  for (tmp = handles; tmp; tmp = tmp->next)
+			    if (tmp == cur)
+			      break;
+			  if (! tmp)
+			    cur = handles;
+			}
+		    }
+		}
+	    }
+	  /* done if only resident modules are left */
+	  if (!saw_nonresident)
+	    break;
+	}
+
+      /* When removing loaders, we can only find out failure by testing
+	 the error string, so avoid a spurious one from an earlier
+	 failed command. */
+      if (!errors)
+	LT__SETERRORSTR (0);
+
+      /* close all loaders */
+      for (loader = (lt_dlloader *) lt_dlloader_next (NULL); loader;)
+	{
+	  lt_dlloader *next   = (lt_dlloader *) lt_dlloader_next (loader);
+	  lt_dlvtable *vtable = (lt_dlvtable *) lt_dlloader_get (loader);
+
+	  if ((vtable = lt_dlloader_remove ((char *) vtable->name)))
+	    {
+	      FREE (vtable);
+	    }
+	  else
+	    {
+	      /* ignore errors due to resident modules */
+	      const char *err;
+	      LT__GETERROR (err);
+	      if (err)
+		++errors;
+	    }
+
+	  loader = next;
+	}
+
+      FREE(user_search_path);
+    }
+
+ done:
+  return errors;
+}
+
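+/* A minimal illustrative sketch (hypothetical, not from the GNU Libtool
+   sources) of the usual client-side pairing of lt_dlinit() and lt_dlexit()
+   above with lt_dlopenext(), lt_dlsym() and lt_dlclose().  The module name
+   "plugin" and the entry point "run" are made up.  */
+#if 0
+static int
+run_plugin (void)
+{
+  int status = 1;
+
+  if (lt_dlinit () != 0)
+    return status;
+
+  {
+    /* lt_dlopenext tries the platform's module and archive extensions.  */
+    lt_dlhandle plugin = lt_dlopenext ("plugin");
+
+    if (plugin)
+      {
+        int (*run) (void) = (int (*) (void)) lt_dlsym (plugin, "run");
+
+        if (run)
+          status = (*run) ();
+
+        lt_dlclose (plugin);
+      }
+  }
+
+  lt_dlexit ();
+  return status;
+}
+#endif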
+
+/* Try VTABLE or, if VTABLE is NULL, all available loaders for FILENAME.
+   If the library is not successfully loaded, return non-zero.  Otherwise,
+   the dlhandle is stored at the address given in PHANDLE.  */
+static int
+tryall_dlopen (lt_dlhandle *phandle, const char *filename,
+	       lt_dladvise advise, const lt_dlvtable *vtable)
+{
+  lt_dlhandle	handle		= handles;
+  const char *	saved_error	= 0;
+  int		errors		= 0;
+
+#ifdef LT_DEBUG_LOADERS
+  fprintf (stderr, "tryall_dlopen (%s, %s)\n",
+	   filename ? filename : "(null)",
+	   vtable ? vtable->name : "(ALL)");
+#endif
+
+  LT__GETERROR (saved_error);
+
+  /* check whether the module was already opened */
+  for (;handle; handle = handle->next)
+    {
+      if ((handle->info.filename == filename) /* dlopen self: 0 == 0 */
+	  || (handle->info.filename && filename
+	      && streq (handle->info.filename, filename)))
+	{
+	  break;
+	}
+    }
+
+  if (handle)
+    {
+      ++handle->info.ref_count;
+      *phandle = handle;
+      goto done;
+    }
+
+  handle = *phandle;
+  if (filename)
+    {
+      /* Comment out the check of file permissions using access.
+	 This call seems to always return -1 with error EACCES.
+      */
+      /* We need to catch missing file errors early so that
+	 file_not_found() can detect what happened.
+      if (access (filename, R_OK) != 0)
+	{
+	  LT__SETERROR (FILE_NOT_FOUND);
+	  ++errors;
+	  goto done;
+	} */
+
+      handle->info.filename = lt__strdup (filename);
+      if (!handle->info.filename)
+	{
+	  ++errors;
+	  goto done;
+	}
+    }
+  else
+    {
+      handle->info.filename = 0;
+    }
+
+  {
+    lt_dlloader loader = lt_dlloader_next (0);
+    const lt_dlvtable *loader_vtable;
+
+    do
+      {
+	if (vtable)
+	  loader_vtable = vtable;
+	else
+	  loader_vtable = lt_dlloader_get (loader);
+
+#ifdef LT_DEBUG_LOADERS
+	fprintf (stderr, "Calling %s->module_open (%s)\n",
+		 (loader_vtable && loader_vtable->name) ? loader_vtable->name : "(null)",
+		 filename ? filename : "(null)");
+#endif
+	handle->module = (*loader_vtable->module_open) (loader_vtable->dlloader_data,
+							filename, advise);
+#ifdef LT_DEBUG_LOADERS
+	fprintf (stderr, "  Result: %s\n",
+		 handle->module ? "Success" : "Failed");
+#endif
+
+	if (handle->module != 0)
+	  {
+	    if (advise)
+	      {
+		handle->info.is_resident  = advise->is_resident;
+		handle->info.is_symglobal = advise->is_symglobal;
+		handle->info.is_symlocal  = advise->is_symlocal;
+	      }
+	    break;
+	  }
+      }
+    while (!vtable && (loader = lt_dlloader_next (loader)));
+
+    /* If VTABLE was given but couldn't open the module, or VTABLE wasn't
+       given but we exhausted all loaders without opening the module, bail
+       out!  */
+    if ((vtable && !handle->module)
+	|| (!vtable && !loader))
+      {
+	FREE (handle->info.filename);
+	++errors;
+	goto done;
+      }
+
+    handle->vtable = loader_vtable;
+  }
+
+  LT__SETERRORSTR (saved_error);
+
+ done:
+  return errors;
+}
+
+
+static int
+tryall_dlopen_module (lt_dlhandle *handle, const char *prefix,
+		      const char *dirname, const char *dlname,
+		      lt_dladvise advise)
+{
+  int      error	= 0;
+  char     *filename	= 0;
+  size_t   filename_len	= 0;
+  size_t   dirname_len	= LT_STRLEN (dirname);
+
+  assert (handle);
+  assert (dirname);
+  assert (dlname);
+#if defined(LT_DIRSEP_CHAR)
+  /* Only canonicalized names (i.e. with DIRSEP chars already converted)
+     should make it into this function:  */
+  assert (strchr (dirname, LT_DIRSEP_CHAR) == 0);
+#endif
+
+  if (dirname_len > 0)
+    if (dirname[dirname_len -1] == '/')
+      --dirname_len;
+  filename_len = dirname_len + 1 + LT_STRLEN (dlname);
+
+  /* Allocate memory, and combine DIRNAME and MODULENAME into it.
+     The PREFIX (if any) is handled below.  */
+  filename  = MALLOC (char, filename_len + 1);
+  if (!filename)
+    return 1;
+
+  sprintf (filename, "%.*s/%s", (int) dirname_len, dirname, dlname);
+
+  /* Now that we have combined DIRNAME and MODULENAME, if there is
+     also a PREFIX to contend with, simply recurse with the arguments
+     shuffled.  Otherwise, attempt to open FILENAME as a module.  */
+  if (prefix)
+    {
+      error += tryall_dlopen_module (handle, (const char *) 0,
+				     prefix, filename, advise);
+    }
+  else if (tryall_dlopen (handle, filename, advise, 0) != 0)
+    {
+      ++error;
+    }
+
+  FREE (filename);
+  return error;
+}
+
+static int
+find_module (lt_dlhandle *handle, const char *dir, const char *libdir,
+	     const char *dlname,  const char *old_name, int installed,
+	     lt_dladvise advise)
+{
+  /* Try to open the old library first; if it was dlpreopened,
+     we want the preopened version of it, even if a dlopenable
+     module is available.  */
+  if (old_name && tryall_dlopen (handle, old_name,
+			  advise, lt_dlloader_find ("lt_preopen") ) == 0)
+    {
+      return 0;
+    }
+
+  /* Try to open the dynamic library.  */
+  if (dlname)
+    {
+      /* try to open the installed module */
+      if (installed && libdir)
+	{
+	  if (tryall_dlopen_module (handle, (const char *) 0,
+				    libdir, dlname, advise) == 0)
+	    return 0;
+	}
+
+      /* try to open the not-installed module */
+      if (!installed)
+	{
+	  if (tryall_dlopen_module (handle, dir, objdir,
+				    dlname, advise) == 0)
+	    return 0;
+	}
+
+      /* maybe it was moved to another directory */
+      {
+	  if (dir && (tryall_dlopen_module (handle, (const char *) 0,
+					    dir, dlname, advise) == 0))
+	    return 0;
+      }
+    }
+
+  return 1;
+}
+
+
+static int
+canonicalize_path (const char *path, char **pcanonical)
+{
+  char *canonical = 0;
+
+  assert (path && *path);
+  assert (pcanonical);
+
+  canonical = MALLOC (char, 1+ LT_STRLEN (path));
+  if (!canonical)
+    return 1;
+
+  {
+    size_t dest = 0;
+    size_t src;
+    for (src = 0; path[src] != LT_EOS_CHAR; ++src)
+      {
+	/* Path separators are not copied to the beginning or end of
+	   the destination, or if another separator would follow
+	   immediately.  */
+	if (path[src] == LT_PATHSEP_CHAR)
+	  {
+	    if ((dest == 0)
+		|| (path[1+ src] == LT_PATHSEP_CHAR)
+		|| (path[1+ src] == LT_EOS_CHAR))
+	      continue;
+	  }
+
+	/* Anything other than a directory separator is copied verbatim.  */
+	if ((path[src] != '/')
+#if defined(LT_DIRSEP_CHAR)
+	    && (path[src] != LT_DIRSEP_CHAR)
+#endif
+	    )
+	  {
+	    canonical[dest++] = path[src];
+	  }
+	/* Directory separators are converted and copied only if they are
+	   not at the end of a path -- i.e. before a path separator or
+	   NULL terminator.  */
+	else if ((path[1+ src] != LT_PATHSEP_CHAR)
+		 && (path[1+ src] != LT_EOS_CHAR)
+#if defined(LT_DIRSEP_CHAR)
+		 && (path[1+ src] != LT_DIRSEP_CHAR)
+#endif
+		 && (path[1+ src] != '/'))
+	  {
+	    canonical[dest++] = '/';
+	  }
+      }
+
+    /* Add an end-of-string marker at the end.  */
+    canonical[dest] = LT_EOS_CHAR;
+  }
+
+  /* Assign new value.  */
+  *pcanonical = canonical;
+
+  return 0;
+}
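+
+/* Illustrative worked example (not from the GNU Libtool sources) of the
+   rules implemented above: empty and trailing path elements are dropped,
+   directory separators are normalised to '/', repeated slashes are
+   collapsed, and slashes before a path separator or the end of the string
+   are removed.  For instance, a search path such as
+
+     "/usr/lib/:.//plugins/:"
+
+   canonicalizes to
+
+     "/usr/lib:./plugins"
+*/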
+
+static int
+argzize_path (const char *path, char **pargz, size_t *pargz_len)
+{
+  error_t error;
+
+  assert (path);
+  assert (pargz);
+  assert (pargz_len);
+
+  if ((error = argz_create_sep (path, LT_PATHSEP_CHAR, pargz, pargz_len)))
+    {
+      switch (error)
+	{
+	case ENOMEM:
+	  LT__SETERROR (NO_MEMORY);
+	  break;
+	default:
+	  LT__SETERROR (UNKNOWN);
+	  break;
+	}
+
+      return 1;
+    }
+
+  return 0;
+}
+
+/* Repeatedly call FUNC with each LT_PATHSEP_CHAR delimited element
+   of SEARCH_PATH and references to DATA1 and DATA2, until FUNC returns
+   non-zero or all elements are exhausted.  If BASE_NAME is non-NULL,
+   it is appended to each SEARCH_PATH element before FUNC is called.  */
+static int
+foreach_dirinpath (const char *search_path, const char *base_name,
+		   foreach_callback_func *func, void *data1, void *data2)
+{
+  int	 result		= 0;
+  size_t filenamesize	= 0;
+  size_t lenbase	= LT_STRLEN (base_name);
+  size_t argz_len	= 0;
+  char *argz		= 0;
+  char *filename	= 0;
+  char *canonical	= 0;
+
+  if (!search_path || !*search_path)
+    {
+      LT__SETERROR (FILE_NOT_FOUND);
+      goto cleanup;
+    }
+
+  if (canonicalize_path (search_path, &canonical) != 0)
+    goto cleanup;
+
+  if (argzize_path (canonical, &argz, &argz_len) != 0)
+    goto cleanup;
+
+  {
+    char *dir_name = 0;
+    while ((dir_name = argz_next (argz, argz_len, dir_name)))
+      {
+	size_t lendir = LT_STRLEN (dir_name);
+
+	if (1+ lendir + lenbase >= filenamesize)
+	{
+	  FREE (filename);
+	  filenamesize	= 1+ lendir + 1+ lenbase; /* "/d" + '/' + "f" + '\0' */
+	  filename	= MALLOC (char, filenamesize);
+	  if (!filename)
+	    goto cleanup;
+	}
+
+	assert (filenamesize > lendir);
+	strcpy (filename, dir_name);
+
+	if (base_name && *base_name)
+	  {
+	    if (filename[lendir -1] != '/')
+	      filename[lendir++] = '/';
+	    strcpy (filename +lendir, base_name);
+	  }
+
+	if ((result = (*func) (filename, data1, data2)))
+	  {
+	    break;
+	  }
+      }
+  }
+
+ cleanup:
+  FREE (argz);
+  FREE (canonical);
+  FREE (filename);
+
+  return result;
+}
+
+/* If FILEPATH can be opened, store the name of the directory component
+   in DATA1, and the opened FILE* structure address in DATA2.  Otherwise
+   DATA1 is unchanged, but DATA2 is set to a pointer to NULL.  */
+static int
+find_file_callback (char *filename, void *data1, void *data2)
+{
+  char	     **pdir	= (char **) data1;
+  FILE	     **pfile	= (FILE **) data2;
+  int	     is_done	= 0;
+
+  assert (filename && *filename);
+  assert (pdir);
+  assert (pfile);
+
+  if ((*pfile = fopen (filename, LT_READTEXT_MODE)))
+    {
+      char *dirend = strrchr (filename, '/');
+
+      if (dirend > filename)
+	*dirend   = LT_EOS_CHAR;
+
+      FREE (*pdir);
+      *pdir   = lt__strdup (filename);
+      is_done = (*pdir == 0) ? -1 : 1;
+    }
+
+  return is_done;
+}
+
+static FILE *
+find_file (const char *search_path, const char *base_name, char **pdir)
+{
+  FILE *file = 0;
+
+  foreach_dirinpath (search_path, base_name, find_file_callback, pdir, &file);
+
+  return file;
+}
+
+static int
+find_handle_callback (char *filename, void *data, void *data2)
+{
+  lt_dlhandle  *phandle		= (lt_dlhandle *) data;
+  int		notfound	= access (filename, R_OK);
+  lt_dladvise   advise		= (lt_dladvise) data2;
+
+  /* Bail out if file cannot be read...  */
+  if (notfound)
+    return 0;
+
+  /* Try to dlopen the file, but do not continue searching in any
+     case.  */
+  if (tryall_dlopen (phandle, filename, advise, 0) != 0)
+    *phandle = 0;
+
+  return 1;
+}
+
+/* If HANDLE was found return it, otherwise return 0.  If HANDLE was
+   found but could not be opened, *HANDLE will be set to 0.  */
+static lt_dlhandle *
+find_handle (const char *search_path, const char *base_name,
+	     lt_dlhandle *phandle, lt_dladvise advise)
+{
+  if (!search_path)
+    return 0;
+
+  if (!foreach_dirinpath (search_path, base_name, find_handle_callback,
+			  phandle, advise))
+    return 0;
+
+  return phandle;
+}
+
+#if !defined(LTDL_DLOPEN_DEPLIBS)
+static int
+load_deplibs (lt_dlhandle handle, char * LT__UNUSED deplibs)
+{
+  handle->depcount = 0;
+  return 0;
+}
+
+#else /* defined(LTDL_DLOPEN_DEPLIBS) */
+static int
+load_deplibs (lt_dlhandle handle, char *deplibs)
+{
+  char	*p, *save_search_path = 0;
+  int   depcount = 0;
+  int	i;
+  char	**names = 0;
+  int	errors = 0;
+
+  handle->depcount = 0;
+
+  if (!deplibs)
+    {
+      return errors;
+    }
+  ++errors;
+
+  if (user_search_path)
+    {
+      save_search_path = lt__strdup (user_search_path);
+      if (!save_search_path)
+	goto cleanup;
+    }
+
+  /* extract search paths and count deplibs */
+  p = deplibs;
+  while (*p)
+    {
+      if (!isspace ((unsigned char) *p))
+	{
+	  char *end = p+1;
+	  while (*end && !isspace((unsigned char) *end))
+	    {
+	      ++end;
+	    }
+
+	  if (strncmp(p, "-L", 2) == 0 || strncmp(p, "-R", 2) == 0)
+	    {
+	      char save = *end;
+	      *end = 0; /* set a temporary string terminator */
+	      if (lt_dladdsearchdir(p+2))
+		{
+		  goto cleanup;
+		}
+	      *end = save;
+	    }
+	  else
+	    {
+	      ++depcount;
+	    }
+
+	  p = end;
+	}
+      else
+	{
+	  ++p;
+	}
+    }
+
+
+  if (!depcount)
+    {
+      errors = 0;
+      goto cleanup;
+    }
+
+  names = MALLOC (char *, depcount);
+  if (!names)
+    goto cleanup;
+
+  /* now only extract the actual deplibs */
+  depcount = 0;
+  p = deplibs;
+  while (*p)
+    {
+      if (isspace ((unsigned char) *p))
+	{
+	  ++p;
+	}
+      else
+	{
+	  char *end = p+1;
+	  while (*end && !isspace ((unsigned char) *end))
+	    {
+	      ++end;
+	    }
+
+	  if (strncmp(p, "-L", 2) != 0 && strncmp(p, "-R", 2) != 0)
+	    {
+	      char *name;
+	      char save = *end;
+	      *end = 0; /* set a temporary string terminator */
+	      if (strncmp(p, "-l", 2) == 0)
+		{
+		  size_t name_len = 3+ /* "lib" */ LT_STRLEN (p + 2);
+		  name = MALLOC (char, 1+ name_len);
+		  if (name)
+		    sprintf (name, "lib%s", p+2);
+		}
+	      else
+		name = lt__strdup(p);
+
+	      if (!name)
+		goto cleanup_names;
+
+	      names[depcount++] = name;
+	      *end = save;
+	    }
+	  p = end;
+	}
+    }
+
+  /* load the deplibs (in reverse order)
+     At this stage, don't worry if the deplibs do not load correctly,
+     they may already be statically linked into the loading application
+     for instance.  There will be a more enlightening error message
+     later on if the loaded module cannot resolve all of its symbols.  */
+  if (depcount)
+    {
+      lt_dlhandle cur = handle;
+      int	j = 0;
+
+      cur->deplibs = MALLOC (lt_dlhandle, depcount);
+      if (!cur->deplibs)
+	goto cleanup_names;
+
+      for (i = 0; i < depcount; ++i)
+	{
+	  cur->deplibs[j] = lt_dlopenext(names[depcount-1-i]);
+	  if (cur->deplibs[j])
+	    {
+	      ++j;
+	    }
+	}
+
+      cur->depcount	= j;	/* Number of successfully loaded deplibs */
+      errors		= 0;
+    }
+
+ cleanup_names:
+  for (i = 0; i < depcount; ++i)
+    {
+      FREE (names[i]);
+    }
+
+ cleanup:
+  FREE (names);
+  /* restore the old search path */
+  if (save_search_path) {
+    MEMREASSIGN (user_search_path, save_search_path);
+  }
+
+  return errors;
+}
+#endif /* defined(LTDL_DLOPEN_DEPLIBS) */
+
+static int
+unload_deplibs (lt_dlhandle handle)
+{
+  int i;
+  int errors = 0;
+  lt_dlhandle cur = handle;
+
+  if (cur->depcount)
+    {
+      for (i = 0; i < cur->depcount; ++i)
+	{
+	  if (!LT_DLIS_RESIDENT (cur->deplibs[i]))
+	    {
+	      errors += lt_dlclose (cur->deplibs[i]);
+	    }
+	}
+      FREE (cur->deplibs);
+    }
+
+  return errors;
+}
+
+static int
+trim (char **dest, const char *str)
+{
+  /* remove the leading and trailing "'" from str
+     and store the result in dest */
+  const char *end   = strrchr (str, '\'');
+  size_t len	    = LT_STRLEN (str);
+  char *tmp;
+
+  FREE (*dest);
+
+  if (!end || end == str)
+    return 1;
+
+  if (len > 3 && str[0] == '\'')
+    {
+      tmp = MALLOC (char, end - str);
+      if (!tmp)
+	return 1;
+
+      memcpy(tmp, &str[1], (end - str) - 1);
+      tmp[(end - str) - 1] = LT_EOS_CHAR;
+      *dest = tmp;
+    }
+  else
+    {
+      *dest = 0;
+    }
+
+  return 0;
+}
+
+/* Read the .la file FILE. */
+static int
+parse_dotla_file(FILE *file, char **dlname, char **libdir, char **deplibs,
+    char **old_name, int *installed)
+{
+  int		errors = 0;
+  size_t	line_len = LT_FILENAME_MAX;
+  char *	line = MALLOC (char, line_len);
+
+  if (!line)
+    {
+      LT__SETERROR (FILE_NOT_FOUND);
+      return 1;
+    }
+
+  while (!feof (file))
+    {
+      line[line_len-2] = '\0';
+      if (!fgets (line, (int) line_len, file))
+	{
+	  break;
+	}
+
+      /* Handle the case where we occasionally need to read a line
+	 that is longer than the initial buffer size.
+	 Behave even if the file contains NUL bytes due to corruption. */
+      while (line[line_len-2] != '\0' && line[line_len-2] != '\n' && !feof (file))
+	{
+	  line = REALLOC (char, line, line_len *2);
+	  if (!line)
+	    {
+	      ++errors;
+	      goto cleanup;
+	    }
+	  line[line_len * 2 - 2] = '\0';
+	  if (!fgets (&line[line_len -1], (int) line_len +1, file))
+	    {
+	      break;
+	    }
+	  line_len *= 2;
+	}
+
+      if (line[0] == '\n' || line[0] == '#')
+	{
+	  continue;
+	}
+
+#undef  STR_DLNAME
+#define STR_DLNAME	"dlname="
+      if (strncmp (line, STR_DLNAME, sizeof (STR_DLNAME) - 1) == 0)
+	{
+	  errors += trim (dlname, &line[sizeof (STR_DLNAME) - 1]);
+	}
+
+#undef  STR_OLD_LIBRARY
+#define STR_OLD_LIBRARY	"old_library="
+      else if (strncmp (line, STR_OLD_LIBRARY,
+	    sizeof (STR_OLD_LIBRARY) - 1) == 0)
+	{
+	  errors += trim (old_name, &line[sizeof (STR_OLD_LIBRARY) - 1]);
+	}
+
+      /* Windows native tools do not understand the POSIX paths we store
+	 in libdir. */
+#undef  STR_LIBDIR
+#define STR_LIBDIR	"libdir="
+      else if (strncmp (line, STR_LIBDIR, sizeof (STR_LIBDIR) - 1) == 0)
+	{
+	  errors += trim (libdir, &line[sizeof(STR_LIBDIR) - 1]);
+#ifdef __WINDOWS__
+	  /* Disallow following unix-style paths on MinGW.  */
+	  if (*libdir && (**libdir == '/' || **libdir == '\\'))
+	    **libdir = '\0';
+#endif
+	}
+
+#undef  STR_DL_DEPLIBS
+#define STR_DL_DEPLIBS	"dependency_libs="
+      else if (strncmp (line, STR_DL_DEPLIBS,
+	    sizeof (STR_DL_DEPLIBS) - 1) == 0)
+	{
+	  errors += trim (deplibs, &line[sizeof (STR_DL_DEPLIBS) - 1]);
+	}
+      else if (streq (line, "installed=yes\n"))
+	{
+	  *installed = 1;
+	}
+      else if (streq (line, "installed=no\n"))
+	{
+	  *installed = 0;
+	}
+
+#undef  STR_LIBRARY_NAMES
+#define STR_LIBRARY_NAMES "library_names="
+      else if (!*dlname && strncmp (line, STR_LIBRARY_NAMES,
+	    sizeof (STR_LIBRARY_NAMES) - 1) == 0)
+	{
+	  char *last_libname;
+	  errors += trim (dlname, &line[sizeof (STR_LIBRARY_NAMES) - 1]);
+	  if (!errors
+	      && *dlname
+	      && (last_libname = strrchr (*dlname, ' ')) != 0)
+	    {
+	      last_libname = lt__strdup (last_libname + 1);
+	      if (!last_libname)
+		{
+		  ++errors;
+		  goto cleanup;
+		}
+	      MEMREASSIGN (*dlname, last_libname);
+	    }
+	}
+
+      if (errors)
+	break;
+    }
+cleanup:
+  FREE (line);
+  return errors;
+}
+
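+/* Illustrative example (hypothetical contents, not from the GNU Libtool
+   sources) of a .la file, showing the keys that parse_dotla_file() above
+   extracts (dlname, library_names, old_library, dependency_libs, libdir,
+   installed):
+
+     dlname='libplugin.so.0'
+     library_names='libplugin.so.0.0.0 libplugin.so.0 libplugin.so'
+     old_library='libplugin.a'
+     dependency_libs=' -L/opt/deps/lib -lfoo'
+     libdir='/usr/local/lib'
+     installed=yes
+*/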
+
+/* Try to open FILENAME as a module. */
+static int
+try_dlopen (lt_dlhandle *phandle, const char *filename, const char *ext,
+	    lt_dladvise advise)
+{
+  const char *	saved_error	= 0;
+  char *	archive_name	= 0;
+  char *	canonical	= 0;
+  char *	base_name	= 0;
+  char *	dir		= 0;
+  char *	name		= 0;
+  char *        attempt		= 0;
+  int		errors		= 0;
+  lt_dlhandle	newhandle;
+
+  assert (phandle);
+  assert (*phandle == 0);
+
+#ifdef LT_DEBUG_LOADERS
+  fprintf (stderr, "try_dlopen (%s, %s)\n",
+	   filename ? filename : "(null)",
+	   ext ? ext : "(null)");
+#endif
+
+  LT__GETERROR (saved_error);
+
+  /* dlopen self? */
+  if (!filename)
+    {
+      *phandle = (lt_dlhandle) lt__zalloc (sizeof (struct lt__handle));
+      if (*phandle == 0)
+	return 1;
+
+      newhandle	= *phandle;
+
+      /* lt_dlclose()ing yourself is very bad!  Disallow it.  */
+      newhandle->info.is_resident = 1;
+
+      if (tryall_dlopen (&newhandle, 0, advise, 0) != 0)
+	{
+	  FREE (*phandle);
+	  return 1;
+	}
+
+      goto register_handle;
+    }
+
+  assert (filename && *filename);
+
+  if (ext)
+    {
+      attempt = MALLOC (char, LT_STRLEN (filename) + LT_STRLEN (ext) + 1);
+      if (!attempt)
+	return 1;
+
+      sprintf(attempt, "%s%s", filename, ext);
+    }
+  else
+    {
+      attempt = lt__strdup (filename);
+      if (!attempt)
+	return 1;
+    }
+
+  /* Doing this immediately allows internal functions to safely
+     assume only canonicalized paths are passed.  */
+  if (canonicalize_path (attempt, &canonical) != 0)
+    {
+      ++errors;
+      goto cleanup;
+    }
+
+  /* If the canonical module name is a path (relative or absolute)
+     then split it into a directory part and a name part.  */
+  base_name = strrchr (canonical, '/');
+  if (base_name)
+    {
+      size_t dirlen = (1+ base_name) - canonical;
+
+      dir = MALLOC (char, 1+ dirlen);
+      if (!dir)
+	{
+	  ++errors;
+	  goto cleanup;
+	}
+
+      strncpy (dir, canonical, dirlen);
+      dir[dirlen] = LT_EOS_CHAR;
+
+      ++base_name;
+    }
+  else
+    MEMREASSIGN (base_name, canonical);
+
+  assert (base_name && *base_name);
+
+  ext = strrchr (base_name, '.');
+  if (!ext)
+    {
+      ext = base_name + LT_STRLEN (base_name);
+    }
+
+  /* extract the module name from the file name */
+  name = MALLOC (char, ext - base_name + 1);
+  if (!name)
+    {
+      ++errors;
+      goto cleanup;
+    }
+
+  /* canonicalize the module name */
+  {
+    int i;
+    for (i = 0; i < ext - base_name; ++i)
+      {
+	if (isalnum ((unsigned char)(base_name[i])))
+	  {
+	    name[i] = base_name[i];
+	  }
+	else
+	  {
+	    name[i] = '_';
+	  }
+      }
+    name[ext - base_name] = LT_EOS_CHAR;
+  }
+
+  /* Before trawling through the filesystem in search of a module,
+     check whether we are opening a preloaded module.  */
+  if (!dir)
+    {
+      const lt_dlvtable *vtable	= lt_dlloader_find ("lt_preopen");
+
+      if (vtable)
+	{
+	  /* libprefix + name + "." + libext + NULL */
+	  archive_name = MALLOC (char, strlen (libprefix) + LT_STRLEN (name) + strlen (libext) + 2);
+	  *phandle = (lt_dlhandle) lt__zalloc (sizeof (struct lt__handle));
+
+	  if ((*phandle == NULL) || (archive_name == NULL))
+	    {
+	      ++errors;
+	      goto cleanup;
+	    }
+	  newhandle = *phandle;
+
+	  /* Preloaded modules are always named according to their old
+	     archive name.  */
+	  if (strncmp(name, "lib", 3) == 0)
+	    {
+	      sprintf (archive_name, "%s%s.%s", libprefix, name + 3, libext);
+	    }
+	  else
+	    {
+	      sprintf (archive_name, "%s.%s", name, libext);
+	    }
+
+	  if (tryall_dlopen (&newhandle, archive_name, advise, vtable) == 0)
+	    {
+	      goto register_handle;
+	    }
+
+	  /* If we're still here, there was no matching preloaded module,
+	     so put things back as we found them, and continue searching.  */
+	  FREE (*phandle);
+	  newhandle = NULL;
+	}
+    }
+
+  /* If we are allowing only preloaded modules, and we didn't find
+     anything yet, give up on the search here.  */
+  if (advise && advise->try_preload_only)
+    {
+      goto cleanup;
+    }
+
+  /* Check whether we are opening a libtool module (.la extension).  */
+  if (ext && streq (ext, archive_ext))
+    {
+      /* this seems to be a libtool module */
+      FILE *	file	 = 0;
+      char *	dlname	 = 0;
+      char *	old_name = 0;
+      char *	libdir	 = 0;
+      char *	deplibs	 = 0;
+
+      /* if we can't find the installed flag, it is probably an
+	 installed libtool archive, produced with an old version
+	 of libtool */
+      int	installed = 1;
+
+      /* Now try to open the .la file.  If there is no directory name
+	 component, try to find it first in user_search_path and then other
+	 prescribed paths.  Otherwise (or in any case if the module was not
+	 yet found) try opening just the module name as passed.  */
+      if (!dir)
+	{
+	  const char *search_path = user_search_path;
+
+	  if (search_path)
+	    file = find_file (user_search_path, base_name, &dir);
+
+	  if (!file)
+	    {
+	      search_path = getenv (LTDL_SEARCHPATH_VAR);
+	      if (search_path)
+		file = find_file (search_path, base_name, &dir);
+	    }
+
+#if defined(LT_MODULE_PATH_VAR)
+	  if (!file)
+	    {
+	      search_path = getenv (LT_MODULE_PATH_VAR);
+	      if (search_path)
+		file = find_file (search_path, base_name, &dir);
+	    }
+#endif
+#if defined(LT_DLSEARCH_PATH)
+	  if (!file && *sys_dlsearch_path)
+	    {
+	      file = find_file (sys_dlsearch_path, base_name, &dir);
+	    }
+#endif
+	}
+      else
+	{
+	  file = fopen (attempt, LT_READTEXT_MODE);
+	}
+
+      /* If we didn't find the file by now, it really isn't there.  Set
+	 the status flag, and bail out.  */
+      if (!file)
+	{
+	  LT__SETERROR (FILE_NOT_FOUND);
+	  ++errors;
+	  goto cleanup;
+	}
+
+      /* read the .la file */
+      if (parse_dotla_file(file, &dlname, &libdir, &deplibs,
+	    &old_name, &installed) != 0)
+	++errors;
+
+      fclose (file);
+
+      /* allocate the handle */
+      *phandle = (lt_dlhandle) lt__zalloc (sizeof (struct lt__handle));
+      if (*phandle == 0)
+	++errors;
+
+      if (errors)
+	{
+	  FREE (dlname);
+	  FREE (old_name);
+	  FREE (libdir);
+	  FREE (deplibs);
+	  FREE (*phandle);
+	  goto cleanup;
+	}
+
+      assert (*phandle);
+
+      if (load_deplibs (*phandle, deplibs) == 0)
+	{
+	  newhandle = *phandle;
+	  /* find_module may replace newhandle */
+	  if (find_module (&newhandle, dir, libdir, dlname, old_name,
+			   installed, advise))
+	    {
+	      unload_deplibs (*phandle);
+	      ++errors;
+	    }
+	}
+      else
+	{
+	  ++errors;
+	}
+
+      FREE (dlname);
+      FREE (old_name);
+      FREE (libdir);
+      FREE (deplibs);
+
+      if (errors)
+	{
+	  FREE (*phandle);
+	  goto cleanup;
+	}
+
+      if (*phandle != newhandle)
+	{
+	  unload_deplibs (*phandle);
+	}
+    }
+  else
+    {
+      /* not a libtool module */
+      *phandle = (lt_dlhandle) lt__zalloc (sizeof (struct lt__handle));
+      if (*phandle == 0)
+	{
+	  ++errors;
+	  goto cleanup;
+	}
+
+      newhandle = *phandle;
+
+      /* If the module has no directory name component, try to find it
+	 first in user_search_path and then other prescribed paths.
+	 Otherwise (or in any case if the module was not yet found) try
+	 opening just the module name as passed.  */
+      if ((dir || (!find_handle (user_search_path, base_name,
+				 &newhandle, advise)
+		   && !find_handle (getenv (LTDL_SEARCHPATH_VAR), base_name,
+				    &newhandle, advise)
+#if defined(LT_MODULE_PATH_VAR)
+		   && !find_handle (getenv (LT_MODULE_PATH_VAR), base_name,
+				    &newhandle, advise)
+#endif
+#if defined(LT_DLSEARCH_PATH)
+		   && !find_handle (sys_dlsearch_path, base_name,
+				    &newhandle, advise)
+#endif
+		   )))
+	{
+	  if (tryall_dlopen (&newhandle, attempt, advise, 0) != 0)
+	    {
+	      newhandle = NULL;
+	    }
+	}
+
+      if (!newhandle)
+	{
+	  FREE (*phandle);
+	  ++errors;
+	  goto cleanup;
+	}
+    }
+
+ register_handle:
+  MEMREASSIGN (*phandle, newhandle);
+
+  if ((*phandle)->info.ref_count == 0)
+    {
+      (*phandle)->info.ref_count	= 1;
+      MEMREASSIGN ((*phandle)->info.name, name);
+
+      (*phandle)->next	= handles;
+      handles		= *phandle;
+    }
+
+  LT__SETERRORSTR (saved_error);
+
+ cleanup:
+  FREE (dir);
+  FREE (attempt);
+  FREE (name);
+  if (!canonical)		/* was MEMREASSIGNed */
+    FREE (base_name);
+  FREE (canonical);
+  FREE (archive_name);
+
+  return errors;
+}
+
+
+/* If the last error message stored was `FILE_NOT_FOUND', then return
+   non-zero.  */
+static int
+file_not_found (void)
+{
+  const char *error = 0;
+
+  LT__GETERROR (error);
+  if (error == LT__STRERROR (FILE_NOT_FOUND))
+    return 1;
+
+  return 0;
+}
+
+
+/* Return non-zero if FILENAME already bears a suitable library
+   extension, otherwise return 0.  */
+static int
+has_library_ext (const char *filename)
+{
+  const char *	ext     = 0;
+
+  assert (filename);
+
+  ext = strrchr (filename, '.');
+
+  if (ext && ((streq (ext, archive_ext))
+#if defined(LT_MODULE_EXT)
+	     || (streq (ext, shlib_ext))
+#endif
+#if defined(LT_SHARED_EXT)
+	     || (streq (ext, shared_ext))
+#endif
+    ))
+    {
+      return 1;
+    }
+
+  return 0;
+}
+
+
+/* Initialise and configure a user lt_dladvise opaque object.  */
+
+int
+lt_dladvise_init (lt_dladvise *padvise)
+{
+  lt_dladvise advise = (lt_dladvise) lt__zalloc (sizeof (struct lt__advise));
+  *padvise = advise;
+  return (advise ? 0 : 1);
+}
+
+int
+lt_dladvise_destroy (lt_dladvise *padvise)
+{
+  if (padvise)
+    FREE(*padvise);
+  return 0;
+}
+
+int
+lt_dladvise_ext (lt_dladvise *padvise)
+{
+  assert (padvise && *padvise);
+  (*padvise)->try_ext = 1;
+  return 0;
+}
+
+int
+lt_dladvise_resident (lt_dladvise *padvise)
+{
+  assert (padvise && *padvise);
+  (*padvise)->is_resident = 1;
+  return 0;
+}
+
+int
+lt_dladvise_local (lt_dladvise *padvise)
+{
+  assert (padvise && *padvise);
+  (*padvise)->is_symlocal = 1;
+  return 0;
+}
+
+int
+lt_dladvise_global (lt_dladvise *padvise)
+{
+  assert (padvise && *padvise);
+  (*padvise)->is_symglobal = 1;
+  return 0;
+}
+
+int
+lt_dladvise_preload (lt_dladvise *padvise)
+{
+  assert (padvise && *padvise);
+  (*padvise)->try_preload_only = 1;
+  return 0;
+}
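+
+/* Usage sketch: how a caller might combine the advise calls above with
+   lt_dlopenadvise() below (the helper name open_global_module is
+   illustrative only, not part of the libltdl API):
+
+     static lt_dlhandle
+     open_global_module (const char *filename)
+     {
+       lt_dlhandle handle = 0;
+       lt_dladvise advise;
+
+       if (lt_dladvise_init (&advise) != 0)
+         return 0;
+
+       if (lt_dladvise_ext (&advise) == 0
+           && lt_dladvise_global (&advise) == 0)
+         handle = lt_dlopenadvise (filename, advise);
+
+       lt_dladvise_destroy (&advise);
+       return handle;
+     }
+*/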
+
+/* Libtool-1.5.x interface for loading a new module named FILENAME.  */
+lt_dlhandle
+lt_dlopen (const char *filename)
+{
+  return lt_dlopenadvise (filename, NULL);
+}
+
+
+/* If FILENAME has an ARCHIVE_EXT or MODULE_EXT extension, try to
+   open the FILENAME as passed.  Otherwise try appending ARCHIVE_EXT,
+   and if a file is still not found try again with MODULE_EXT appended
+   instead.  */
+lt_dlhandle
+lt_dlopenext (const char *filename)
+{
+  lt_dlhandle	handle	= 0;
+  lt_dladvise	advise;
+
+  if (!lt_dladvise_init (&advise) && !lt_dladvise_ext (&advise))
+    handle = lt_dlopenadvise (filename, advise);
+
+  lt_dladvise_destroy (&advise);
+  return handle;
+}
+
+
+lt_dlhandle
+lt_dlopenadvise (const char *filename, lt_dladvise advise)
+{
+  lt_dlhandle	handle	= 0;
+  int		errors	= 0;
+  const char *	saved_error	= 0;
+
+  LT__GETERROR (saved_error);
+
+  /* Can't have symbols hidden and visible at the same time!  */
+  if (advise && advise->is_symlocal && advise->is_symglobal)
+    {
+      LT__SETERROR (CONFLICTING_FLAGS);
+      return 0;
+    }
+
+  if (!filename
+      || !advise
+      || !advise->try_ext
+      || has_library_ext (filename))
+    {
+      /* Just in case we missed a code path in try_dlopen() that reports
+	 an error, but forgot to reset handle... */
+      if (try_dlopen (&handle, filename, NULL, advise) != 0)
+	return 0;
+
+      return handle;
+    }
+  else if (filename && *filename)
+    {
+
+      /* First try appending ARCHIVE_EXT.  */
+      errors += try_dlopen (&handle, filename, archive_ext, advise);
+
+      /* If we found FILENAME, stop searching -- whether we were able to
+	 load the file as a module or not.  If the file exists but loading
+	 failed, it is better to return an error message here than to
+	 report FILE_NOT_FOUND when the alternatives (foo.so etc) are not
+	 in the module search path.  */
+      if (handle || ((errors > 0) && !file_not_found ()))
+	return handle;
+
+#if defined(LT_MODULE_EXT)
+      /* Try appending SHLIB_EXT.   */
+      LT__SETERRORSTR (saved_error);
+      errors = try_dlopen (&handle, filename, shlib_ext, advise);
+
+      /* As before, if the file was found but loading failed, return now
+	 with the current error message.  */
+      if (handle || ((errors > 0) && !file_not_found ()))
+	return handle;
+#endif
+
+#if defined(LT_SHARED_EXT)
+      /* Try appending SHARED_EXT.   */
+      LT__SETERRORSTR (saved_error);
+      errors = try_dlopen (&handle, filename, shared_ext, advise);
+
+      /* As before, if the file was found but loading failed, return now
+	 with the current error message.  */
+      if (handle || ((errors > 0) && !file_not_found ()))
+	return handle;
+#endif
+    }
+
+  /* Still here?  Then we really did fail to locate any of the file
+     names we tried.  */
+  LT__SETERROR (FILE_NOT_FOUND);
+  return 0;
+}
+
+
+static int
+lt_argz_insert (char **pargz, size_t *pargz_len, char *before,
+		const char *entry)
+{
+  error_t error;
+
+  /* Prior to Sep 8, 2005, newlib had a bug where argz_insert(pargz,
+     pargz_len, NULL, entry) failed with EINVAL.  */
+  if (before)
+    error = argz_insert (pargz, pargz_len, before, entry);
+  else
+    error = argz_append (pargz, pargz_len, entry, 1 + strlen (entry));
+
+  if (error)
+    {
+      switch (error)
+	{
+	case ENOMEM:
+	  LT__SETERROR (NO_MEMORY);
+	  break;
+	default:
+	  LT__SETERROR (UNKNOWN);
+	  break;
+	}
+      return 1;
+    }
+
+  return 0;
+}
+
+static int
+lt_argz_insertinorder (char **pargz, size_t *pargz_len, const char *entry)
+{
+  char *before = 0;
+
+  assert (pargz);
+  assert (pargz_len);
+  assert (entry && *entry);
+
+  if (*pargz)
+    while ((before = argz_next (*pargz, *pargz_len, before)))
+      {
+	int cmp = strcmp (entry, before);
+
+	if (cmp < 0)  break;
+	if (cmp == 0) return 0;	/* No duplicates! */
+      }
+
+  return lt_argz_insert (pargz, pargz_len, before, entry);
+}
+
+static int
+lt_argz_insertdir (char **pargz, size_t *pargz_len, const char *dirnam,
+		   struct dirent *dp)
+{
+  char   *buf	    = 0;
+  size_t buf_len    = 0;
+  char   *end	    = 0;
+  size_t end_offset = 0;
+  size_t dir_len    = 0;
+  int    errors	    = 0;
+
+  assert (pargz);
+  assert (pargz_len);
+  assert (dp);
+
+  dir_len = LT_STRLEN (dirnam);
+  end     = dp->d_name + D_NAMLEN(dp);
+
+  /* Ignore version numbers.  */
+  {
+    char *p;
+    for (p = end; p -1 > dp->d_name; --p)
+      if (strchr (".0123456789", p[-1]) == 0)
+	break;
+
+    if (*p == '.')
+      end = p;
+  }
+
+  /* Ignore filename extension.  */
+  {
+    char *p;
+    for (p = end -1; p > dp->d_name; --p)
+      if (*p == '.')
+	{
+	  end = p;
+	  break;
+	}
+  }
+
+  /* Prepend the directory name.  */
+  end_offset	= end - dp->d_name;
+  buf_len	= dir_len + 1+ end_offset;
+  buf		= MALLOC (char, 1+ buf_len);
+  if (!buf)
+    return ++errors;
+
+  assert (buf);
+
+  strcpy  (buf, dirnam);
+  strcat  (buf, "/");
+  strncat (buf, dp->d_name, end_offset);
+  buf[buf_len] = LT_EOS_CHAR;
+
+  /* Try to insert (in order) into ARGZ/ARGZ_LEN.  */
+  if (lt_argz_insertinorder (pargz, pargz_len, buf) != 0)
+    ++errors;
+
+  FREE (buf);
+
+  return errors;
+}
+
+static int
+list_files_by_dir (const char *dirnam, char **pargz, size_t *pargz_len)
+{
+  DIR	*dirp	  = 0;
+  int    errors	  = 0;
+
+  assert (dirnam && *dirnam);
+  assert (pargz);
+  assert (pargz_len);
+  assert (dirnam[LT_STRLEN(dirnam) -1] != '/');
+
+  dirp = opendir (dirnam);
+  if (dirp)
+    {
+      struct dirent *dp	= 0;
+
+      while ((dp = readdir (dirp)))
+	if (dp->d_name[0] != '.')
+	  if (lt_argz_insertdir (pargz, pargz_len, dirnam, dp))
+	    {
+	      ++errors;
+	      break;
+	    }
+
+      closedir (dirp);
+    }
+  else
+    ++errors;
+
+  return errors;
+}
+
+
+/* If there are any files in DIRNAME, call the function passed in
+   DATA1 (with the name of each file and DATA2 as arguments).  */
+static int
+foreachfile_callback (char *dirname, void *data1, void *data2)
+{
+  file_worker_func *func = *(file_worker_func **) data1;
+
+  int	  is_done  = 0;
+  char   *argz     = 0;
+  size_t  argz_len = 0;
+
+  if (list_files_by_dir (dirname, &argz, &argz_len) != 0)
+    goto cleanup;
+  if (!argz)
+    goto cleanup;
+
+  {
+    char *filename = 0;
+    while ((filename = argz_next (argz, argz_len, filename)))
+      if ((is_done = (*func) (filename, data2)))
+	break;
+  }
+
+ cleanup:
+  FREE (argz);
+
+  return is_done;
+}
+
+
+/* Call FUNC for each unique extensionless file in SEARCH_PATH, along
+   with DATA.  The filenames passed to FUNC would be suitable for
+   passing to lt_dlopenext.  The extensions are stripped so that
+   individual modules do not generate several entries (e.g. libfoo.la,
+   libfoo.so, libfoo.so.1, libfoo.so.1.0.0).  If SEARCH_PATH is NULL,
+   then the same directories that lt_dlopen would search are examined.  */
+int
+lt_dlforeachfile (const char *search_path,
+		  int (*func) (const char *filename, void *data),
+		  void *data)
+{
+  int is_done = 0;
+  file_worker_func **fpptr = &func;
+
+  if (search_path)
+    {
+      /* If a specific path was passed, search only the directories
+	 listed in it.  */
+      is_done = foreach_dirinpath (search_path, 0,
+				   foreachfile_callback, fpptr, data);
+    }
+  else
+    {
+      /* Otherwise search the default paths.  */
+      is_done = foreach_dirinpath (user_search_path, 0,
+				   foreachfile_callback, fpptr, data);
+      if (!is_done)
+	{
+	  is_done = foreach_dirinpath (getenv(LTDL_SEARCHPATH_VAR), 0,
+				       foreachfile_callback, fpptr, data);
+	}
+
+#if defined(LT_MODULE_PATH_VAR)
+      if (!is_done)
+	{
+	  is_done = foreach_dirinpath (getenv(LT_MODULE_PATH_VAR), 0,
+				       foreachfile_callback, fpptr, data);
+	}
+#endif
+#if defined(LT_DLSEARCH_PATH)
+      if (!is_done && *sys_dlsearch_path)
+	{
+	  is_done = foreach_dirinpath (sys_dlsearch_path, 0,
+				       foreachfile_callback, fpptr, data);
+	}
+#endif
+    }
+
+  return is_done;
+}
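+
+/* Usage sketch for lt_dlforeachfile(): FUNC receives each extensionless
+   candidate name and may stop the walk early by returning non-zero.
+   The callback and directory below are illustrative only, and <stdio.h>
+   is assumed to be included.
+
+     static int
+     print_candidate (const char *filename, void *data)
+     {
+       (void) data;
+       puts (filename);
+       return 0;
+     }
+
+     ...
+     lt_dlforeachfile ("/usr/lib/myapp", print_candidate, NULL);
+*/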
+
+int
+lt_dlclose (lt_dlhandle handle)
+{
+  lt_dlhandle cur, last;
+  int errors = 0;
+
+  /* check whether the handle is valid */
+  last = cur = handles;
+  while (cur && handle != cur)
+    {
+      last = cur;
+      cur = cur->next;
+    }
+
+  if (!cur)
+    {
+      LT__SETERROR (INVALID_HANDLE);
+      ++errors;
+      goto done;
+    }
+
+  cur = handle;
+  cur->info.ref_count--;
+
+  /* Note that even with resident modules, we must track the ref_count
+     correctly in case the user decides to reset the residency flag
+     later (even though the API makes no provision for that at the
+     moment).  */
+  if (cur->info.ref_count <= 0 && !LT_DLIS_RESIDENT (cur))
+    {
+      lt_user_data data = cur->vtable->dlloader_data;
+
+      if (cur != handles)
+	{
+	  last->next = cur->next;
+	}
+      else
+	{
+	  handles = cur->next;
+	}
+
+      errors += cur->vtable->module_close (data, cur->module);
+      errors += unload_deplibs (handle);
+
+      /* It is up to the callers to free the data itself.  */
+      FREE (cur->interface_data);
+
+      FREE (cur->info.filename);
+      FREE (cur->info.name);
+      FREE (cur);
+
+      goto done;
+    }
+
+  if (LT_DLIS_RESIDENT (handle))
+    {
+      LT__SETERROR (CLOSE_RESIDENT_MODULE);
+      ++errors;
+    }
+
+ done:
+  return errors;
+}
+
+void *
+lt_dlsym (lt_dlhandle place, const char *symbol)
+{
+  size_t lensym;
+  char	lsym[LT_SYMBOL_LENGTH];
+  char	*sym;
+  void *address;
+  lt_user_data data;
+  lt_dlhandle handle;
+
+  if (!place)
+    {
+      LT__SETERROR (INVALID_HANDLE);
+      return 0;
+    }
+
+  handle = place;
+
+  if (!symbol)
+    {
+      LT__SETERROR (SYMBOL_NOT_FOUND);
+      return 0;
+    }
+
+  lensym = LT_STRLEN (symbol) + LT_STRLEN (handle->vtable->sym_prefix)
+					+ LT_STRLEN (handle->info.name);
+
+  if (lensym + LT_SYMBOL_OVERHEAD < LT_SYMBOL_LENGTH)
+    {
+      sym = lsym;
+    }
+  else
+    {
+      sym = MALLOC (char, lensym + LT_SYMBOL_OVERHEAD + 1);
+      if (!sym)
+	{
+	  LT__SETERROR (BUFFER_OVERFLOW);
+	  return 0;
+	}
+    }
+
+  data = handle->vtable->dlloader_data;
+  if (handle->info.name)
+    {
+      const char *saved_error;
+
+      LT__GETERROR (saved_error);
+
+      /* this is a libtool module */
+      if (handle->vtable->sym_prefix)
+	{
+	  strcpy(sym, handle->vtable->sym_prefix);
+	  strcat(sym, handle->info.name);
+	}
+      else
+	{
+	  strcpy(sym, handle->info.name);
+	}
+
+      strcat(sym, "_LTX_");
+      strcat(sym, symbol);
+
+      /* try "modulename_LTX_symbol" */
+      address = handle->vtable->find_sym (data, handle->module, sym);
+      if (address)
+	{
+	  if (sym != lsym)
+	    {
+	      FREE (sym);
+	    }
+	  return address;
+	}
+      LT__SETERRORSTR (saved_error);
+    }
+
+  /* otherwise try "symbol" */
+  if (handle->vtable->sym_prefix)
+    {
+      strcpy(sym, handle->vtable->sym_prefix);
+      strcat(sym, symbol);
+    }
+  else
+    {
+      strcpy(sym, symbol);
+    }
+
+  address = handle->vtable->find_sym (data, handle->module, sym);
+  if (sym != lsym)
+    {
+      FREE (sym);
+    }
+
+  return address;
+}
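+
+/* Example of the symbol mangling handled above: for a libtool module
+   whose canonicalized name is "foo", lt_dlsym (handle, "run") first
+   tries "foo_LTX_run" and then falls back to plain "run".  A module
+   author can export an unambiguous entry point with the usual libtool
+   idiom (the names foo and run are illustrative only):
+
+     #define run foo_LTX_run
+
+     int
+     run (void)
+     {
+       return 0;
+     }
+*/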
+
+const char *
+lt_dlerror (void)
+{
+  const char *error;
+
+  LT__GETERROR (error);
+  LT__SETERRORSTR (0);
+
+  return error;
+}
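+
+/* Caller-side sketch tying lt_dlopenext(), lt_dlsym(), lt_dlerror() and
+   lt_dlclose() together.  The module and symbol names are illustrative
+   only; lt_dlinit() is assumed to have succeeded already and <stdio.h>
+   to be included.
+
+     typedef int entry_func (void);
+
+     static int
+     run_plugin (const char *name)
+     {
+       lt_dlhandle handle = lt_dlopenext (name);
+       entry_func *entry;
+       int result = -1;
+
+       if (!handle)
+         {
+           fprintf (stderr, "%s: %s\n", name, lt_dlerror ());
+           return -1;
+         }
+
+       entry = (entry_func *) lt_dlsym (handle, "run");
+       if (entry)
+         result = (*entry) ();
+       else
+         fprintf (stderr, "%s: %s\n", name, lt_dlerror ());
+
+       lt_dlclose (handle);
+       return result;
+     }
+*/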
+
+static int
+lt_dlpath_insertdir (char **ppath, char *before, const char *dir)
+{
+  int    errors		= 0;
+  char  *canonical	= 0;
+  char  *argz		= 0;
+  size_t argz_len	= 0;
+
+  assert (ppath);
+  assert (dir && *dir);
+
+  if (canonicalize_path (dir, &canonical) != 0)
+    {
+      ++errors;
+      goto cleanup;
+    }
+
+  assert (canonical && *canonical);
+
+  /* If *PPATH is empty, set it to DIR.  */
+  if (*ppath == 0)
+    {
+      assert (!before);		/* BEFORE cannot be set without PPATH.  */
+      assert (dir);		/* Without DIR, don't call this function!  */
+
+      *ppath = lt__strdup (dir);
+      if (*ppath == 0)
+	++errors;
+
+      goto cleanup;
+    }
+
+  assert (ppath && *ppath);
+
+  if (argzize_path (*ppath, &argz, &argz_len) != 0)
+    {
+      ++errors;
+      goto cleanup;
+    }
+
+  /* Convert BEFORE into an equivalent offset into ARGZ.  This only works
+     if *PPATH is already canonicalized, and hence does not change length
+     with respect to ARGZ.  We canonicalize each entry as it is added to
+     the search path, and don't call this function with (uncanonicalized)
+     user paths, so this is a fair assumption.  */
+  if (before)
+    {
+      assert (*ppath <= before);
+      assert ((int) (before - *ppath) <= (int) strlen (*ppath));
+
+      before = before - *ppath + argz;
+    }
+
+  if (lt_argz_insert (&argz, &argz_len, before, dir) != 0)
+    {
+      ++errors;
+      goto cleanup;
+    }
+
+  argz_stringify (argz, argz_len, LT_PATHSEP_CHAR);
+  MEMREASSIGN(*ppath, argz);
+
+ cleanup:
+  FREE (argz);
+  FREE (canonical);
+
+  return errors;
+}
+
+int
+lt_dladdsearchdir (const char *search_dir)
+{
+  int errors = 0;
+
+  if (search_dir && *search_dir)
+    {
+      if (lt_dlpath_insertdir (&user_search_path, 0, search_dir) != 0)
+	++errors;
+    }
+
+  return errors;
+}
+
+int
+lt_dlinsertsearchdir (const char *before, const char *search_dir)
+{
+  int errors = 0;
+
+  if (before)
+    {
+      if ((before < user_search_path)
+	  || (before >= user_search_path + LT_STRLEN (user_search_path)))
+	{
+	  LT__SETERROR (INVALID_POSITION);
+	  return 1;
+	}
+    }
+
+  if (search_dir && *search_dir)
+    {
+      if (lt_dlpath_insertdir (&user_search_path,
+			       (char *) before, search_dir) != 0)
+	{
+	  ++errors;
+	}
+    }
+
+  return errors;
+}
+
+int
+lt_dlsetsearchpath (const char *search_path)
+{
+  int   errors	    = 0;
+
+  FREE (user_search_path);
+
+  if (!search_path || !LT_STRLEN (search_path))
+    {
+      return errors;
+    }
+
+  if (canonicalize_path (search_path, &user_search_path) != 0)
+    ++errors;
+
+  return errors;
+}
+
+const char *
+lt_dlgetsearchpath (void)
+{
+  const char *saved_path;
+
+  saved_path = user_search_path;
+
+  return saved_path;
+}
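+
+/* Usage sketch for the search path calls above: directories are added
+   with lt_dladdsearchdir(), the whole path can be replaced with
+   lt_dlsetsearchpath(), and lt_dlgetsearchpath() returns the current
+   value.  The directories are illustrative only and <stdio.h> is
+   assumed.
+
+     lt_dladdsearchdir ("/usr/lib/myapp");
+     lt_dladdsearchdir ("/usr/local/lib/myapp");
+     printf ("module search path: %s\n", lt_dlgetsearchpath ());
+*/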
+
+int
+lt_dlmakeresident (lt_dlhandle handle)
+{
+  int errors = 0;
+
+  if (!handle)
+    {
+      LT__SETERROR (INVALID_HANDLE);
+      ++errors;
+    }
+  else
+    {
+      handle->info.is_resident = 1;
+    }
+
+  return errors;
+}
+
+int
+lt_dlisresident	(lt_dlhandle handle)
+{
+  if (!handle)
+    {
+      LT__SETERROR (INVALID_HANDLE);
+      return -1;
+    }
+
+  return LT_DLIS_RESIDENT (handle);
+}
+
+
+
+/* --- MODULE INFORMATION --- */
+
+typedef struct {
+  const char *id_string;
+  lt_dlhandle_interface *iface;
+} lt__interface_id;
+
+lt_dlinterface_id
+lt_dlinterface_register (const char *id_string, lt_dlhandle_interface *iface)
+{
+  lt__interface_id *interface_id = (lt__interface_id *) lt__malloc (sizeof *interface_id);
+
+  /* If lt__malloc fails, it will LT__SETERROR (NO_MEMORY), which
+     can then be detected with lt_dlerror() if we return 0.  */
+  if (interface_id)
+    {
+      interface_id->id_string = lt__strdup (id_string);
+      if (!interface_id->id_string)
+	FREE (interface_id);
+      else
+	interface_id->iface = iface;
+    }
+
+  return (lt_dlinterface_id) interface_id;
+}
+
+void lt_dlinterface_free (lt_dlinterface_id key)
+{
+  lt__interface_id *interface_id = (lt__interface_id *)key;
+  FREE (interface_id->id_string);
+  FREE (interface_id);
+}
+
+void *
+lt_dlcaller_set_data (lt_dlinterface_id key, lt_dlhandle handle, void *data)
+{
+  int n_elements = 0;
+  void *stale = (void *) 0;
+  lt_dlhandle cur = handle;
+  int i;
+
+  if (cur->interface_data)
+    while (cur->interface_data[n_elements].key)
+      ++n_elements;
+
+  for (i = 0; i < n_elements; ++i)
+    {
+      if (cur->interface_data[i].key == key)
+	{
+	  stale = cur->interface_data[i].data;
+	  break;
+	}
+    }
+
+  /* Ensure that there is enough room in this handle's interface_data
+     array to accept a new element (and an empty end marker).  */
+  if (i == n_elements)
+    {
+      lt_interface_data *temp
+	= REALLOC (lt_interface_data, cur->interface_data, 2+ n_elements);
+
+      if (!temp)
+	{
+	  stale = 0;
+	  goto done;
+	}
+
+      cur->interface_data = temp;
+
+      /* We only need this if we needed to allocate a new interface_data.  */
+      cur->interface_data[i].key	= key;
+      cur->interface_data[1+ i].key	= 0;
+    }
+
+  cur->interface_data[i].data = data;
+
+ done:
+  return stale;
+}
+
+void *
+lt_dlcaller_get_data (lt_dlinterface_id key, lt_dlhandle handle)
+{
+  void *result = (void *) 0;
+  lt_dlhandle cur = handle;
+
+  /* Locate the index of the element with a matching KEY.  */
+  if (cur->interface_data)
+    {
+      int i;
+      for (i = 0; cur->interface_data[i].key; ++i)
+	{
+	  if (cur->interface_data[i].key == key)
+	    {
+	      result = cur->interface_data[i].data;
+	      break;
+	    }
+	}
+    }
+
+  return result;
+}
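+
+/* Usage sketch for the per-handle data API above: register an interface
+   id once, then attach and retrieve caller-owned data on any handle.
+   The id string and struct plugin_state are illustrative only.
+
+     static lt_dlinterface_id my_key;
+     ...
+     my_key = lt_dlinterface_register ("myapp-plugins", NULL);
+
+     lt_dlcaller_set_data (my_key, handle, state);
+     ...
+     state = (struct plugin_state *) lt_dlcaller_get_data (my_key, handle);
+*/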
+
+const lt_dlinfo *
+lt_dlgetinfo (lt_dlhandle handle)
+{
+  if (!handle)
+    {
+      LT__SETERROR (INVALID_HANDLE);
+      return 0;
+    }
+
+  return &(handle->info);
+}
+
+
+lt_dlhandle
+lt_dlhandle_iterate (lt_dlinterface_id iface, lt_dlhandle place)
+{
+  lt_dlhandle handle = place;
+  lt__interface_id *iterator = (lt__interface_id *) iface;
+
+  assert (iface); /* iface is a required argument */
+
+  if (!handle)
+    handle = handles;
+  else
+    handle = handle->next;
+
+  /* advance while the interface check fails */
+  while (handle && iterator->iface
+	 && ((*iterator->iface) (handle, iterator->id_string) != 0))
+    {
+      handle = handle->next;
+    }
+
+  return handle;
+}
+
+
+lt_dlhandle
+lt_dlhandle_fetch (lt_dlinterface_id iface, const char *module_name)
+{
+  lt_dlhandle handle = 0;
+
+  assert (iface); /* iface is a required argument */
+
+  while ((handle = lt_dlhandle_iterate (iface, handle)))
+    {
+      lt_dlhandle cur = handle;
+      if (cur && cur->info.name && streq (cur->info.name, module_name))
+	break;
+    }
+
+  return handle;
+}
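+
+/* Usage sketch for walking the registered handles: my_key is a
+   previously registered lt_dlinterface_id (illustrative only) and
+   <stdio.h> is assumed.
+
+     lt_dlhandle handle = 0;
+
+     while ((handle = lt_dlhandle_iterate (my_key, handle)))
+       {
+         const lt_dlinfo *info = lt_dlgetinfo (handle);
+         printf ("%s (ref_count=%d)\n",
+                 info->name ? info->name : "(unnamed)",
+                 info->ref_count);
+       }
+*/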
+
+
+int
+lt_dlhandle_map (lt_dlinterface_id iface,
+		 int (*func) (lt_dlhandle handle, void *data), void *data)
+{
+  lt__interface_id *iterator = (lt__interface_id *) iface;
+  lt_dlhandle cur = handles;
+
+  assert (iface); /* iface is a required argument */
+
+  while (cur)
+    {
+      int errorcode = 0;
+
+      /* advance while the interface check fails */
+      while (cur && iterator->iface
+	     && ((*iterator->iface) (cur, iterator->id_string) != 0))
+	{
+	  cur = cur->next;
+	}
+
+      if (!cur)
+	break;
+
+      if ((errorcode = (*func) (cur, data)) != 0)
+	return errorcode;
+
+      cur = cur->next;
+    }
+
+  return 0;
+}
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/ltdl.h b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/ltdl.h
new file mode 100644
index 0000000..749a54d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/ltdl.h
@@ -0,0 +1,163 @@
+/* ltdl.h -- generic dlopen functions
+
+   Copyright (C) 1998-2000, 2004, 2005,
+                 2007, 2008 Free Software Foundation, Inc.
+   Written by Thomas Tanner, 1998
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+/* Only include this header file once. */
+#if !defined(LTDL_H)
+#define LTDL_H 1
+
+#include <libltdl/lt_system.h>
+#include <libltdl/lt_error.h>
+#include <libltdl/lt_dlloader.h>
+
+LT_BEGIN_C_DECLS
+
+
+/* LT_STRLEN can be used safely on NULL pointers.  */
+#define LT_STRLEN(s)	(((s) && (s)[0]) ? strlen (s) : 0)
+
+
+/* --- DYNAMIC MODULE LOADING API --- */
+
+
+typedef	struct lt__handle *lt_dlhandle;	/* A loaded module.  */
+
+/* Initialisation and finalisation functions for libltdl. */
+LT_SCOPE int	    lt_dlinit		(void);
+LT_SCOPE int	    lt_dlexit		(void);
+
+/* Module search path manipulation.  */
+LT_SCOPE int	    lt_dladdsearchdir	 (const char *search_dir);
+LT_SCOPE int	    lt_dlinsertsearchdir (const char *before,
+						  const char *search_dir);
+LT_SCOPE int 	    lt_dlsetsearchpath	 (const char *search_path);
+LT_SCOPE const char *lt_dlgetsearchpath	 (void);
+LT_SCOPE int	    lt_dlforeachfile	 (
+			const char *search_path,
+			int (*func) (const char *filename, void *data),
+			void *data);
+
+/* User module loading advisors.  */
+LT_SCOPE int	    lt_dladvise_init	 (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_destroy  (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_ext	 (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_resident (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_local	 (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_global   (lt_dladvise *advise);
+LT_SCOPE int	    lt_dladvise_preload	 (lt_dladvise *advise);
+
+/* Portable libltdl versions of the system dlopen() API. */
+LT_SCOPE lt_dlhandle lt_dlopen		(const char *filename);
+LT_SCOPE lt_dlhandle lt_dlopenext	(const char *filename);
+LT_SCOPE lt_dlhandle lt_dlopenadvise	(const char *filename,
+					 lt_dladvise advise);
+LT_SCOPE void *	    lt_dlsym		(lt_dlhandle handle, const char *name);
+LT_SCOPE const char *lt_dlerror		(void);
+LT_SCOPE int	    lt_dlclose		(lt_dlhandle handle);
+
+
+
+/* --- PRELOADED MODULE SUPPORT --- */
+
+
+/* A preopened symbol. Arrays of this type comprise the exported
+   symbols for a dlpreopened module. */
+typedef struct {
+  const char *name;
+  void       *address;
+} lt_dlsymlist;
+
+typedef int lt_dlpreload_callback_func (lt_dlhandle handle);
+
+LT_SCOPE int	lt_dlpreload	     (const lt_dlsymlist *preloaded);
+LT_SCOPE int	lt_dlpreload_default (const lt_dlsymlist *preloaded);
+LT_SCOPE int	lt_dlpreload_open    (const char *originator,
+				      lt_dlpreload_callback_func *func);
+
+#define lt_preloaded_symbols	lt__PROGRAM__LTX_preloaded_symbols
+/* Ensure C linkage.  */
+extern LT_DLSYM_CONST lt_dlsymlist lt__PROGRAM__LTX_preloaded_symbols[];
+
+#define LTDL_SET_PRELOADED_SYMBOLS() \
+	lt_dlpreload_default(lt_preloaded_symbols)
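+
+/* Typical start-up sketch for a program linked with dlpreopened modules
+   (illustrative only; the preloaded symbol table is supplied by the
+   libtool link step):
+
+     int
+     main (int argc, char *argv[])
+     {
+       LTDL_SET_PRELOADED_SYMBOLS ();
+
+       if (lt_dlinit () != 0)
+         return 1;
+
+       ...open and use modules here...
+
+       return lt_dlexit ();
+     }
+*/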
+
+
+
+
+/* --- MODULE INFORMATION --- */
+
+
+/* Associating user data with loaded modules. */
+typedef void * lt_dlinterface_id;
+typedef int lt_dlhandle_interface (lt_dlhandle handle, const char *id_string);
+
+LT_SCOPE lt_dlinterface_id lt_dlinterface_register (const char *id_string,
+					  lt_dlhandle_interface *iface);
+LT_SCOPE void	lt_dlinterface_free (lt_dlinterface_id key);
+LT_SCOPE void *	lt_dlcaller_set_data  (lt_dlinterface_id key,
+					  lt_dlhandle handle, void *data);
+LT_SCOPE void *	lt_dlcaller_get_data  (lt_dlinterface_id key,
+					  lt_dlhandle handle);
+
+
+/* Read only information pertaining to a loaded module. */
+typedef	struct {
+  char *	filename;	/* file name */
+  char *	name;		/* module name */
+  int		ref_count;	/* number of times lt_dlopened minus
+				   number of times lt_dlclosed. */
+  unsigned int	is_resident:1;	/* module can't be unloaded. */
+  unsigned int	is_symglobal:1;	/* module symbols can satisfy
+				   subsequently loaded modules.  */
+  unsigned int	is_symlocal:1;	/* module symbols are only available
+				   locally. */
+} lt_dlinfo;
+
+LT_SCOPE const lt_dlinfo *lt_dlgetinfo	    (lt_dlhandle handle);
+
+LT_SCOPE lt_dlhandle	lt_dlhandle_iterate (lt_dlinterface_id iface,
+					     lt_dlhandle place);
+LT_SCOPE lt_dlhandle	lt_dlhandle_fetch   (lt_dlinterface_id iface,
+					     const char *module_name);
+LT_SCOPE int		lt_dlhandle_map	    (lt_dlinterface_id iface,
+				int (*func) (lt_dlhandle handle, void *data),
+				void *data);
+
+
+
+/* Deprecated module residency management API. */
+LT_SCOPE int	    lt_dlmakeresident	(lt_dlhandle handle);
+LT_SCOPE int	    lt_dlisresident	(lt_dlhandle handle);
+
+#define lt_ptr void *
+
+LT_END_C_DECLS
+
+#endif /*!defined(LTDL_H)*/
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/slist.c b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/slist.c
new file mode 100644
index 0000000..25906a4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/libtool/libltdl/slist.c
@@ -0,0 +1,379 @@
+/* slist.c -- generalised singly linked lists
+
+   Copyright (C) 2000, 2004, 2007, 2008, 2009 Free Software Foundation, Inc.
+   Written by Gary V. Vaughan, 2000
+
+   NOTE: The canonical source of this file is maintained with the
+   GNU Libtool package.  Report bugs to bug-libtool@gnu.org.
+
+GNU Libltdl is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+As a special exception to the GNU Lesser General Public License,
+if you distribute this file as part of a program or library that
+is built using GNU Libtool, you may include this file under the
+same distribution terms that you use for the rest of that program.
+
+GNU Libltdl is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with GNU Libltdl; see the file COPYING.LIB.  If not, a
+copy can be downloaded from  http://www.gnu.org/licenses/lgpl.html,
+or obtained by writing to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+*/
+
+#include <assert.h>
+
+#include "slist.h"
+#include <stddef.h>
+#include <stdlib.h>
+
+static SList *	slist_sort_merge    (SList *left, SList *right,
+				     SListCompare *compare, void *userdata);
+
+
+/* Call DELETE repeatedly on each element of HEAD.
+
+   CAVEAT: If you call this when HEAD is the start of a list of boxed
+           items, you must remember that each item passed back to your
+	   DELETE function will be a boxed item that must be slist_unbox()ed
+	   before operating on its contents.
+
+   e.g. void boxed_delete (void *item) { item_free (slist_unbox (item)); }
+        ...
+	  slist = slist_delete (slist, boxed_delete);
+	...
+*/
+SList *
+slist_delete (SList *head, void (*delete_fct) (void *item))
+{
+  assert (delete_fct);
+
+  while (head)
+    {
+      SList *next = head->next;
+      (*delete_fct) (head);
+      head = next;
+    }
+
+  return 0;
+}
+
+/* Call FIND repeatedly with MATCHDATA and each item of *PHEAD, until
+   FIND returns non-NULL, or the list is exhausted.  If a match is found
+   the matching item is destructively removed from *PHEAD, and the value
+   returned by the matching call to FIND is returned.
+
+   CAVEAT: To avoid memory leaks, unless you already have the address of
+           the stale item, you should probably return that from FIND if
+	   it makes a successful match.  Don't forget to slist_unbox()
+	   every item in a boxed list before operating on its contents.   */
+SList *
+slist_remove (SList **phead, SListCallback *find, void *matchdata)
+{
+  SList *stale = 0;
+  void *result = 0;
+
+  assert (find);
+
+  if (!phead || !*phead)
+    return 0;
+
+  /* Does the head of the passed list match? */
+  result = (*find) (*phead, matchdata);
+  if (result)
+    {
+      stale = *phead;
+      *phead = stale->next;
+    }
+  /* what about the rest of the elements? */
+  else
+    {
+      SList *head;
+      for (head = *phead; head->next; head = head->next)
+	{
+	  result = (*find) (head->next, matchdata);
+	  if (result)
+	    {
+	      stale		= head->next;
+	      head->next	= stale->next;
+	      break;
+	    }
+	}
+    }
+
+  return (SList *) result;
+}
+
+/* Call FIND repeatedly with each element of SLIST and MATCHDATA, until
+   FIND returns non-NULL, or the list is exhausted.  If a match is found
+   the value returned by the matching call to FIND is returned. */
+void *
+slist_find (SList *slist, SListCallback *find, void *matchdata)
+{
+  void *result = 0;
+
+  assert (find);
+
+  for (; slist; slist = slist->next)
+    {
+      result = (*find) (slist, matchdata);
+      if (result)
+	break;
+    }
+
+  return result;
+}
+
+/* Return a single list, composed by destructively concatenating the
+   items in HEAD and TAIL.  The values of HEAD and TAIL are undefined
+   after calling this function.
+
+   CAVEAT: Don't mix boxed and unboxed items in a single list.
+
+   e.g.  slist1 = slist_concat (slist1, slist2);  */
+SList *
+slist_concat (SList *head, SList *tail)
+{
+  SList *last;
+
+  if (!head)
+    {
+      return tail;
+    }
+
+  last = head;
+  while (last->next)
+    last = last->next;
+
+  last->next = tail;
+
+  return head;
+}
+
+/* Return a single list, composed by destructively appending all of
+   the items in SLIST to ITEM.  The values of ITEM and SLIST are undefined
+   after calling this function.
+
+   CAVEAT:  Don't mix boxed and unboxed items in a single list.
+
+   e.g.  slist1 = slist_cons (slist_box (data), slist1);  */
+SList *
+slist_cons (SList *item, SList *slist)
+{
+  if (!item)
+    {
+      return slist;
+    }
+
+  assert (!item->next);
+
+  item->next = slist;
+  return item;
+}
+
+/* Return a list starting at the second item of SLIST.  */
+SList *
+slist_tail (SList *slist)
+{
+  return slist ? slist->next : NULL;
+}
+
+/* Return a list starting at the Nth item of SLIST.  If SLIST is less
+   than N items long, NULL is returned.  Just to be confusing, list items
+   are counted from 1; so, to get the 2nd element of slist:
+
+   e.g. shared_list = slist_nth (slist, 2);  */
+SList *
+slist_nth (SList *slist, size_t n)
+{
+  for (;n > 1 && slist; n--)
+    slist = slist->next;
+
+  return slist;
+}
+
+/* Return the number of items in SLIST.  A list with no items has
+   length 0, a list with one item has length 1, and so on.  */
+size_t
+slist_length (SList *slist)
+{
+  size_t n;
+
+  for (n = 0; slist; ++n)
+    slist = slist->next;
+
+  return n;
+}
+
+/* Destructively reverse the order of items in SLIST.  The value of SLIST
+   is undefined after calling this function.
+
+  CAVEAT: You must store the result of this function, or you might not
+          be able to get all the items except the first one back again.
+
+  e.g.    slist = slist_reverse (slist);  */
+SList *
+slist_reverse (SList *slist)
+{
+  SList *result = 0;
+  SList *next;
+
+  while (slist)
+    {
+      next		= slist->next;
+      slist->next	= result;
+      result		= slist;
+      slist 		= next;
+    }
+
+  return result;
+}
+
+/* Call FOREACH once for each item in SLIST, passing both the item and
+   USERDATA on each call. */
+void *
+slist_foreach (SList *slist, SListCallback *foreach, void *userdata)
+{
+  void *result = 0;
+
+  assert (foreach);
+
+  while (slist)
+    {
+      SList *next = slist->next;
+      result = (*foreach) (slist, userdata);
+
+      if (result)
+	break;
+
+      slist = next;
+    }
+
+  return result;
+}
+
+/* Destructively merge the items of two ordered lists LEFT and RIGHT,
+   returning a single sorted list containing the items of both --  Part of
+   the merge sort algorithm.  The values of LEFT and RIGHT are undefined
+   after calling this function.
+
+   At each iteration, add another item to the merged list by taking the
+   lowest valued item from the head of either LEFT or RIGHT, determined
+   by passing those items and USERDATA to COMPARE.  COMPARE should return
+   less than 0 if the head of LEFT has the lower value, greater than 0 if
+   the head of RIGHT has the lower value, otherwise 0.  */
+static SList *
+slist_sort_merge (SList *left, SList *right, SListCompare *compare,
+		  void *userdata)
+{
+  SList merged, *insert;
+
+  insert = &merged;
+
+  while (left && right)
+    {
+      if ((*compare) (left, right, userdata) <= 0)
+	{
+	  insert = insert->next = left;
+	  left = left->next;
+	}
+      else
+	{
+	  insert = insert->next = right;
+	  right = right->next;
+	}
+    }
+
+  insert->next = left ? left : right;
+
+  return merged.next;
+}
+
+/* Perform a destructive merge sort on the items in SLIST, by repeatedly
+   calling COMPARE with a pair of items from SLIST along with USERDATA
+   at every iteration.  COMPARE is a function as defined above for
+   slist_sort_merge().  The value of SLIST is undefined after calling
+   this function.
+
+   e.g.  slist = slist_sort (slist, compare, 0);  */
+SList *
+slist_sort (SList *slist, SListCompare *compare, void *userdata)
+{
+  SList *left, *right;
+
+  if (!slist)
+    return slist;
+
+  /* Be sure that LEFT and RIGHT never contain the same item.  */
+  left = slist;
+  right = slist->next;
+
+  if (!right)
+    return left;
+
+  /* Skip two items with RIGHT and one with SLIST, until RIGHT falls off
+     the end.  SLIST must be about half way along.  */
+  while (right && (right = right->next))
+    {
+      if (!right || !(right = right->next))
+	break;
+      slist = slist->next;
+    }
+  right = slist->next;
+  slist->next = 0;
+
+  /* Sort LEFT and RIGHT, then merge the two.  */
+  return slist_sort_merge (slist_sort (left, compare, userdata),
+			   slist_sort (right, compare, userdata),
+			   compare, userdata);
+}
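+
+/* Sketch of a COMPARE callback for slist_sort() over a boxed list of C
+   strings (illustrative only; it assumes the SListCompare signature
+   declared in slist.h, taking the two items plus USERDATA, and that
+   <string.h> is included):
+
+     static int
+     compare_strings (const SList *item1, const SList *item2, void *userdata)
+     {
+       (void) userdata;
+       return strcmp ((const char *) item1->userdata,
+                      (const char *) item2->userdata);
+     }
+
+     ...
+     slist = slist_sort (slist, compare_strings, NULL);
+*/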
+
+
+/* Aside from using the functions above to manage chained structures of
+   any type that has a NEXT pointer as its first field, SLISTs can
+   be comprised of boxed items.  The boxes are chained together in
+   that case, so there is no need for a NEXT field in the item proper.
+   Some care must be taken to slist_box and slist_unbox each item in
+   a boxed list at the appropriate points to avoid leaking the memory
+   used for the boxes.  It is usually a very bad idea to mix boxed and
+   non-boxed items in a single list.  */
+
+/* Return a `boxed' freshly mallocated 1 element list containing
+   USERDATA.  */
+SList *
+slist_box (const void *userdata)
+{
+  SList *item = (SList *) malloc (sizeof *item);
+
+  if (item)
+    {
+      item->next     = 0;
+      item->userdata = userdata;
+    }
+
+  return item;
+}
+
+/* Return the contents of a `boxed' ITEM, recycling the box itself.  */
+void *
+slist_unbox (SList *item)
+{
+  void *userdata = 0;
+
+  if (item)
+    {
+      /* Strip the const, because responsibility for this memory
+	 passes to the caller on return.  */
+      userdata = (void *) item->userdata;
+      free (item);
+    }
+
+  return userdata;
+}
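+
+/* End-to-end sketch of a boxed list (illustrative only): box each item
+   as it is added, and unbox inside the delete callback when tearing the
+   list down, as described above.
+
+     static void
+     boxed_free (void *item)
+     {
+       free (slist_unbox ((SList *) item));
+     }
+
+     ...
+     SList *list = 0;
+     list = slist_cons (slist_box (strdup ("first")), list);
+     list = slist_cons (slist_box (strdup ("second")), list);
+     ...
+     list = slist_delete (list, boxed_free);
+*/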
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/be/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/be/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..f5a4512
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/be/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/da/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/da/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..7a5badb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/da/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/de/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/de/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..ef209cf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/de/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/es/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/es/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..5afbd6e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/es/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/fi/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/fi/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..cc9ea0c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/fi/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/fr/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/fr/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..85109c0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/fr/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ga/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ga/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..94495a8
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ga/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/gl/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/gl/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..c3ec093
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/gl/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/he/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/he/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..5e8d924
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/he/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/hr/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/hr/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..a3d3307
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/hr/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/id/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/id/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..5ef2ce3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/id/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/it/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/it/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..f232b0c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/it/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ja/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ja/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..990a70f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ja/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ko/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ko/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..a00dcfc
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ko/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/lt/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/lt/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..a956d85
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/lt/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/nl/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/nl/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..1f15184
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/nl/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/pl/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/pl/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..a054031
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/pl/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/pt_BR/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/pt_BR/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..28ced11
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/pt_BR/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ru/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ru/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..d868841
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/ru/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/sv/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/sv/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..ec5fbf5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/sv/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/tr/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/tr/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..002a950
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/tr/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/uk/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/uk/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..e24a088
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/uk/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/vi/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/vi/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..acfdd46
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/vi/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/zh_CN/LC_MESSAGES/make.mo b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/zh_CN/LC_MESSAGES/make.mo
new file mode 100644
index 0000000..61cffca
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/locale/zh_CN/LC_MESSAGES/make.mo
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/aclocal-1.14.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/aclocal-1.14.1
new file mode 100644
index 0000000..973fb81
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/aclocal-1.14.1
@@ -0,0 +1,98 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.40.8.
+.TH ACLOCAL "1" "September 2014" "aclocal 1.14.1" "User Commands"
+.SH NAME
+aclocal \- manual page for aclocal 1.14.1
+.SH SYNOPSIS
+.B aclocal
+[\fIOPTION\fR]...
+.SH DESCRIPTION
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+.SH OPTIONS
+.TP
+\fB\-\-automake\-acdir\fR=\fIDIR\fR
+directory holding automake\-provided m4 files
+.TP
+\fB\-\-system\-acdir\fR=\fIDIR\fR
+directory holding third\-party system\-wide files
+.TP
+\fB\-\-diff\fR[=\fICOMMAND\fR]
+run COMMAND [diff \fB\-u]\fR on M4 files that would be
+changed (implies \fB\-\-install\fR and \fB\-\-dry\-run\fR)
+.TP
+\fB\-\-dry\-run\fR
+pretend to, but do not actually update any file
+.TP
+\fB\-\-force\fR
+always update output file
+.TP
+\fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-I\fR DIR
+add directory to search list for .m4 files
+.TP
+\fB\-\-install\fR
+copy third\-party files to the first \fB\-I\fR directory
+.TP
+\fB\-\-output\fR=\fIFILE\fR
+put output in FILE (default aclocal.m4)
+.TP
+\fB\-\-print\-ac\-dir\fR
+print name of directory holding system\-wide
+third\-party m4 files, then exit
+.TP
+\fB\-\-verbose\fR
+don't be silent
+.TP
+\fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-W\fR, \fB\-\-warnings\fR=\fICATEGORY\fR
+report the warnings falling in CATEGORY
+.SS "Warning categories include:"
+.TP
+syntax
+dubious syntactic constructs (default)
+.TP
+unsupported
+unknown macros (default)
+.TP
+all
+all the warnings (default)
+.TP
+no\-CATEGORY
+turn off warnings in CATEGORY
+.TP
+none
+turn off all the warnings
+.TP
+error
+treat warnings as errors
+.SH AUTHOR
+Written by Tom Tromey <tromey@redhat.com>
+.IP
+and Alexandre Duret\-Lutz <adl@gnu.org>.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-automake@gnu.org>.
+.br
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2013 Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl\-2.0.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+The full documentation for
+.B aclocal
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B aclocal
+programs are properly installed at your site, the command
+.IP
+.B info aclocal
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/aclocal.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/aclocal.1
new file mode 100644
index 0000000..fb7c34e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/aclocal.1
@@ -0,0 +1 @@
+.so man1/aclocal-1.14.1
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoconf.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoconf.1
new file mode 100644
index 0000000..daf3997
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoconf.1
@@ -0,0 +1,113 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH AUTOCONF "1" "September 2010" "GNU Autoconf 2.68" "User Commands"
+.SH NAME
+autoconf \- Generate configuration scripts
+.SH SYNOPSIS
+.B autoconf
+[\fIOPTION\fR]... [\fITEMPLATE-FILE\fR]
+.SH DESCRIPTION
+Generate a configuration script from a TEMPLATE\-FILE if given, or
+`configure.ac' if present, or else `configure.in'.  Output is sent
+to the standard output if TEMPLATE\-FILE is given, else into
+`configure'.
+.SS "Operation modes:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-V\fR, \fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely report processing
+.TP
+\fB\-d\fR, \fB\-\-debug\fR
+don't remove temporary files
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+consider all files obsolete
+.TP
+\fB\-o\fR, \fB\-\-output\fR=\fIFILE\fR
+save output in FILE (stdout is the default)
+.TP
+\fB\-W\fR, \fB\-\-warnings\fR=\fICATEGORY\fR
+report the warnings falling in CATEGORY [syntax]
+.SS "Warning categories include:"
+.TP
+`cross'
+cross compilation issues
+.TP
+`obsolete'
+obsolete constructs
+.TP
+`syntax'
+dubious syntactic constructs
+.TP
+`all'
+all the warnings
+.TP
+`no\-CATEGORY'
+turn off the warnings on CATEGORY
+.TP
+`none'
+turn off all the warnings
+.TP
+`error'
+warnings are errors
+.PP
+The environment variables `M4' and `WARNINGS' are honored.
+.SS "Library directories:"
+.TP
+\fB\-B\fR, \fB\-\-prepend\-include\fR=\fIDIR\fR
+prepend directory DIR to search path
+.TP
+\fB\-I\fR, \fB\-\-include\fR=\fIDIR\fR
+append directory DIR to search path
+.SS "Tracing:"
+.TP
+\fB\-t\fR, \fB\-\-trace\fR=\fIMACRO[\fR:FORMAT]
+report the list of calls to MACRO
+.TP
+\fB\-i\fR, \fB\-\-initialization\fR
+also trace Autoconf's initialization process
+.PP
+In tracing mode, no configuration script is created.  FORMAT defaults
+to `$f:$l:$n:$%'; see `autom4te \fB\-\-help\fR' for information about FORMAT.
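+.PP
+Purely as an illustration of the tracing options above (the macro name is an
+arbitrary choice), listing every AC_CONFIG_FILES call with its location and
+first argument could be written as:
+.IP
+.nf
+# print "file:line: macro(first-argument)" for each call in configure.ac
+autoconf \-\-trace=\(aqAC_CONFIG_FILES:$f:$l: $n($1)\(aq
+.fi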
+.SH AUTHOR
+Written by David J. MacKenzie and Akim Demaille.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-autoconf@gnu.org>.
+.br
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
+.PP
+The full documentation for
+.B autoconf
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B autoconf
+programs are properly installed at your site, the command
+.IP
+.B info autoconf
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoheader.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoheader.1
new file mode 100644
index 0000000..ea3e607
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoheader.1
@@ -0,0 +1,108 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH AUTOHEADER "1" "September 2010" "GNU Autoconf 2.68" "User Commands"
+.SH NAME
+autoheader \- Create a template header for configure
+.SH SYNOPSIS
+.B autoheader
+[\fIOPTION\fR]... [\fITEMPLATE-FILE\fR]
+.SH DESCRIPTION
+Create a template file of C `#define' statements for `configure' to
+use.  To this end, scan TEMPLATE\-FILE, or `configure.ac' if present,
+or else `configure.in'.
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-V\fR, \fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely report processing
+.TP
+\fB\-d\fR, \fB\-\-debug\fR
+don't remove temporary files
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+consider all files obsolete
+.TP
+\fB\-W\fR, \fB\-\-warnings\fR=\fICATEGORY\fR
+report the warnings falling in CATEGORY
+.SS "Warning categories include:"
+.TP
+`cross'
+cross compilation issues
+.TP
+`gnu'
+GNU coding standards (default in gnu and gnits modes)
+.TP
+`obsolete'
+obsolete features or constructions
+.TP
+`override'
+user redefinitions of Automake rules or variables
+.TP
+`portability'
+portability issues (default in gnu and gnits modes)
+.TP
+`syntax'
+dubious syntactic constructs (default)
+.TP
+`unsupported'
+unsupported or incomplete features (default)
+.TP
+`all'
+all the warnings
+.TP
+`no\-CATEGORY'
+turn off warnings in CATEGORY
+.TP
+`none'
+turn off all the warnings
+.TP
+`error'
+treat warnings as errors
+.SS "Library directories:"
+.TP
+\fB\-B\fR, \fB\-\-prepend\-include\fR=\fIDIR\fR
+prepend directory DIR to search path
+.TP
+\fB\-I\fR, \fB\-\-include\fR=\fIDIR\fR
+append directory DIR to search path
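+.PP
+As a small illustrative example (not part of the upstream help output),
+regenerating the template header even when it appears current, while also
+searching an assumed local `m4' directory, might be:
+.IP
+.nf
+# rewrite the template header (typically config.h.in) unconditionally
+autoheader \-\-force \-I m4
+.fi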
+.SH AUTHOR
+Written by Roland McGrath and Akim Demaille.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-autoconf@gnu.org>.
+.br
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
+.PP
+The full documentation for
+.B autoheader
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B autoheader
+programs are properly installed at your site, the command
+.IP
+.B info autoheader
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autom4te.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autom4te.1
new file mode 100644
index 0000000..941192a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autom4te.1
@@ -0,0 +1,192 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH AUTOM4TE "1" "September 2010" "GNU Autoconf 2.68" "User Commands"
+.SH NAME
+autom4te \- Generate files and scripts thanks to M4
+.SH SYNOPSIS
+.B autom4te
+[\fIOPTION\fR]... [\fIFILES\fR]
+.SH DESCRIPTION
+Run GNU M4 on the FILES, avoiding useless runs.  Output the traces if tracing,
+the frozen file if freezing, otherwise the expansion of the FILES.
+.PP
+If some of the FILES are named `FILE.m4f' they are considered to be M4
+frozen files of all the previous files (which are therefore not loaded).
+If `FILE.m4f' is not found, then `FILE.m4' will be used, together with
+all the previous files.
+.PP
+Some files may be optional, i.e., will only be processed if found in the
+include path, but then must end in `.m4?';  the question mark is not part of
+the actual file name.
+.SS "Operation modes:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-V\fR, \fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely report processing
+.TP
+\fB\-d\fR, \fB\-\-debug\fR
+don't remove temporary files
+.TP
+\fB\-o\fR, \fB\-\-output\fR=\fIFILE\fR
+save output in FILE (defaults to `\-', stdout)
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+don't rely on cached values
+.TP
+\fB\-W\fR, \fB\-\-warnings\fR=\fICATEGORY\fR
+report the warnings falling in CATEGORY
+.TP
+\fB\-l\fR, \fB\-\-language\fR=\fILANG\fR
+specify the set of M4 macros to use
+.TP
+\fB\-C\fR, \fB\-\-cache\fR=\fIDIRECTORY\fR
+preserve results for future runs in DIRECTORY
+.TP
+\fB\-\-no\-cache\fR
+disable the cache
+.TP
+\fB\-m\fR, \fB\-\-mode\fR=\fIOCTAL\fR
+change the non trace output file mode (0666)
+.TP
+\fB\-M\fR, \fB\-\-melt\fR
+don't use M4 frozen files
+.SS "Languages include:"
+.TP
+`Autoconf'
+create Autoconf configure scripts
+.TP
+`Autotest'
+create Autotest test suites
+.TP
+`M4sh'
+create M4sh shell scripts
+.TP
+`M4sugar'
+create M4sugar output
+.SS "Warning categories include:"
+.TP
+`cross'
+cross compilation issues
+.TP
+`gnu'
+GNU coding standards (default in gnu and gnits modes)
+.TP
+`obsolete'
+obsolete features or constructions
+.TP
+`override'
+user redefinitions of Automake rules or variables
+.TP
+`portability'
+portability issues (default in gnu and gnits modes)
+.TP
+`syntax'
+dubious syntactic constructs (default)
+.TP
+`unsupported'
+unsupported or incomplete features (default)
+.TP
+`all'
+all the warnings
+.TP
+`no\-CATEGORY'
+turn off warnings in CATEGORY
+.TP
+`none'
+turn off all the warnings
+.TP
+`error'
+treat warnings as errors
+.PP
+The environment variables `M4' and `WARNINGS' are honored.
+.SS "Library directories:"
+.TP
+\fB\-B\fR, \fB\-\-prepend\-include\fR=\fIDIR\fR
+prepend directory DIR to search path
+.TP
+\fB\-I\fR, \fB\-\-include\fR=\fIDIR\fR
+append directory DIR to search path
+.SS "Tracing:"
+.TP
+\fB\-t\fR, \fB\-\-trace\fR=\fIMACRO[\fR:FORMAT]
+report the MACRO invocations
+.TP
+\fB\-p\fR, \fB\-\-preselect\fR=\fIMACRO\fR
+prepare to trace MACRO in a future run
+.SS "Freezing:"
+.TP
+\fB\-F\fR, \fB\-\-freeze\fR
+produce an M4 frozen state file for FILES
+.SS "FORMAT defaults to `$f:$l:$n:$%', and can use the following escapes:"
+.TP
+$$
+literal $
+.TP
+$f
+file where macro was called
+.TP
+$l
+line where macro was called
+.TP
+$d
+nesting depth of macro call
+.TP
+$n
+name of the macro
+.TP
+$NUM
+argument NUM, unquoted and with newlines
+.TP
+$SEP@
+all arguments, with newlines, quoted, and separated by SEP
+.TP
+$SEP*
+all arguments, with newlines, unquoted, and separated by SEP
+.TP
+$SEP%
+all arguments, without newlines, unquoted, and separated by SEP
+.PP
+SEP can be empty for the default (comma for @ and *, colon for %),
+a single character for that character, or {STRING} to use a string.
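+.PP
+Purely for illustration (the input file is assumed to exist), reporting every
+AC_SUBST call in a package's `configure.ac', one per line, could look like:
+.IP
+.nf
+# use the Autoconf macro set and print "AC_SUBST: VAR" for each call
+autom4te \-\-language=Autoconf \-\-trace=\(aqAC_SUBST:$n: $1\(aq configure.ac
+.fi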
+.SH AUTHOR
+Written by Akim Demaille.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-autoconf@gnu.org>.
+.br
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
+.PP
+The full documentation for
+.B autom4te
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B autom4te
+programs are properly installed at your site, the command
+.IP
+.B info autom4te
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/automake-1.14.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/automake-1.14.1
new file mode 100644
index 0000000..344924b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/automake-1.14.1
@@ -0,0 +1,154 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.40.8.
+.TH AUTOMAKE "1" "September 2014" "automake 1.14.1" "User Commands"
+.SH NAME
+automake \- manual page for automake 1.14.1
+.SH SYNOPSIS
+.B automake-1.14
+[\fIOPTION\fR]... [\fIMakefile\fR]...
+.SH DESCRIPTION
+Generate Makefile.in for configure from Makefile.am.
+.SS "Operation modes:"
+.TP
+\fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely list files processed
+.TP
+\fB\-\-no\-force\fR
+only update Makefile.in's that are out of date
+.TP
+\fB\-W\fR, \fB\-\-warnings\fR=\fICATEGORY\fR
+report the warnings falling in CATEGORY
+.SS "Dependency tracking:"
+.TP
+\fB\-i\fR, \fB\-\-ignore\-deps\fR
+disable dependency tracking code
+.TP
+\fB\-\-include\-deps\fR
+enable dependency tracking code
+.SS "Flavors:"
+.TP
+\fB\-\-foreign\fR
+set strictness to foreign
+.TP
+\fB\-\-gnits\fR
+set strictness to gnits
+.TP
+\fB\-\-gnu\fR
+set strictness to gnu
+.SS "Library files:"
+.TP
+\fB\-a\fR, \fB\-\-add\-missing\fR
+add missing standard files to package
+.TP
+\fB\-\-libdir\fR=\fIDIR\fR
+set directory storing library files
+.TP
+\fB\-\-print\-libdir\fR
+print directory storing library files
+.TP
+\fB\-c\fR, \fB\-\-copy\fR
+with \fB\-a\fR, copy missing files (default is symlink)
+.TP
+\fB\-f\fR, \fB\-\-force\-missing\fR
+force update of standard files
+.SS "Warning categories include:"
+.TP
+gnu
+GNU coding standards (default in gnu and gnits modes)
+.TP
+obsolete
+obsolete features or constructions
+.TP
+override
+user redefinitions of Automake rules or variables
+.TP
+portability
+portability issues (default in gnu and gnits modes)
+.TP
+extra\-portability
+extra portability issues related to obscure tools
+.TP
+syntax
+dubious syntactic constructs (default)
+.TP
+unsupported
+unsupported or incomplete features (default)
+.TP
+all
+all the warnings
+.TP
+no\-CATEGORY
+turn off warnings in CATEGORY
+.TP
+none
+turn off all the warnings
+.TP
+error
+treat warnings as errors
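+.PP
+For illustration only (not part of the generated help text), a common
+invocation that copies the missing standard helper files into the tree and
+enables every warning category listed above would be:
+.IP
+.nf
+# add missing helper scripts as copies rather than symlinks, warn about all
+automake \-\-add\-missing \-\-copy \-Wall
+.fi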
+.SS "Files automatically distributed if found (always):"
+.PP
+.nf
+ABOUT\-GNU       INSTALL             config.rpath        ltconfig
+ABOUT\-NLS       NEWS                config.sub          ltmain.sh
+AUTHORS          README              depcomp             mdate\-sh
+BACKLOG          THANKS              install\-sh          missing
+COPYING          TODO                libversion.in       mkinstalldirs
+COPYING.DOC      ar\-lib              ltcf\-c.sh           py\-compile
+COPYING.LESSER   compile             ltcf\-cxx.sh         texinfo.tex
+COPYING.LIB      config.guess        ltcf\-gcj.sh         ylwrap
+ChangeLog
+.fi
+.SS "Files automatically distributed if found (under certain conditions):"
+.PP
+.nf
+acconfig.h       config.h.bot        configure           configure.in
+aclocal.m4       config.h.top        configure.ac        stamp\-vti
+.fi
+.SH AUTHOR
+Written by Tom Tromey <tromey@redhat.com>
+.IP
+and Alexandre Duret\-Lutz <adl@gnu.org>.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-automake@gnu.org>.
+.br
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2013 Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl\-2.0.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+The full documentation for
+.B automake
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B automake
+programs are properly installed at your site, the command
+.IP
+.B info automake
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/automake.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/automake.1
new file mode 100644
index 0000000..50fb8f1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/automake.1
@@ -0,0 +1 @@
+.so man1/automake-1.14.1
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoreconf.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoreconf.1
new file mode 100644
index 0000000..474614d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoreconf.1
@@ -0,0 +1,133 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH AUTORECONF "1" "September 2010" "GNU Autoconf 2.68" "User Commands"
+.SH NAME
+autoreconf \- Update generated configuration files
+.SH SYNOPSIS
+.B autoreconf
+[\fIOPTION\fR]... [\fIDIRECTORY\fR]...
+.SH DESCRIPTION
+Run `autoconf' (and `autoheader', `aclocal', `automake', `autopoint'
+(formerly `gettextize'), and `libtoolize' where appropriate)
+repeatedly to remake the GNU Build System files in specified
+DIRECTORIES and their subdirectories (defaulting to `.').
+.PP
+By default, it only remakes those files that are older than their
+sources.  If you install new versions of the GNU Build System,
+you can make `autoreconf' remake all of the files by giving it the
+`\-\-force' option.
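+.PP
+As an illustrative example only, regenerating the whole build system in a
+freshly checked\-out source tree is commonly done with something like:
+.IP
+.nf
+# copy missing auxiliary files, rebuild even up-to-date outputs, be verbose
+autoreconf \-\-install \-\-force \-\-verbose
+.fi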
+.SS "Operation modes:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-V\fR, \fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely report processing
+.TP
+\fB\-d\fR, \fB\-\-debug\fR
+don't remove temporary files
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+consider all files obsolete
+.TP
+\fB\-i\fR, \fB\-\-install\fR
+copy missing auxiliary files
+.TP
+\fB\-\-no\-recursive\fR
+don't rebuild sub\-packages
+.TP
+\fB\-s\fR, \fB\-\-symlink\fR
+with \fB\-i\fR, install symbolic links instead of copies
+.TP
+\fB\-m\fR, \fB\-\-make\fR
+when applicable, re\-run ./configure && make
+.TP
+\fB\-W\fR, \fB\-\-warnings\fR=\fICATEGORY\fR
+report the warnings falling in CATEGORY [syntax]
+.SS "Warning categories include:"
+.TP
+`cross'
+cross compilation issues
+.TP
+`gnu'
+GNU coding standards (default in gnu and gnits modes)
+.TP
+`obsolete'
+obsolete features or constructions
+.TP
+`override'
+user redefinitions of Automake rules or variables
+.TP
+`portability'
+portability issues (default in gnu and gnits modes)
+.TP
+`syntax'
+dubious syntactic constructs (default)
+.TP
+`unsupported'
+unsupported or incomplete features (default)
+.TP
+`all'
+all the warnings
+.TP
+`no\-CATEGORY'
+turn off warnings in CATEGORY
+.TP
+`none'
+turn off all the warnings
+.TP
+`error'
+treat warnings as errors
+.PP
+The environment variable `WARNINGS' is honored.  Some subtools might
+support other warning types; using `all' is encouraged.
+.SS "Library directories:"
+.TP
+\fB\-B\fR, \fB\-\-prepend\-include\fR=\fIDIR\fR
+prepend directory DIR to search path
+.TP
+\fB\-I\fR, \fB\-\-include\fR=\fIDIR\fR
+append directory DIR to search path
+.PP
+The environment variables AUTOM4TE, AUTOCONF, AUTOHEADER, AUTOMAKE,
+ACLOCAL, AUTOPOINT, LIBTOOLIZE, M4, and MAKE are honored.
+.SH AUTHOR
+Written by David J. MacKenzie and Akim Demaille.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-autoconf@gnu.org>.
+.br
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
+.PP
+The full documentation for
+.B autoreconf
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B autoreconf
+programs are properly installed at your site, the command
+.IP
+.B info autoreconf
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoscan.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoscan.1
new file mode 100644
index 0000000..fd29b8f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoscan.1
@@ -0,0 +1,70 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH AUTOSCAN "1" "September 2010" "GNU Autoconf 2.68" "User Commands"
+.SH NAME
+autoscan \- Generate a preliminary configure.in
+.SH SYNOPSIS
+.B autoscan
+[\fIOPTION\fR]... [\fISRCDIR\fR]
+.SH DESCRIPTION
+Examine source files in the directory tree rooted at SRCDIR, or the
+current directory if none is given.  Search the source files for
+common portability problems, check for incompleteness of
+`configure.ac', and create a file `configure.scan' which is a
+preliminary `configure.ac' for that package.
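+.PP
+For illustration (the rename is only a suggested follow\-up step, not
+something autoscan performs itself), a first pass over a new source tree
+might be:
+.IP
+.nf
+# scan the current directory, then adopt the result as a starting point
+autoscan && mv configure.scan configure.ac
+.fi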
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-V\fR, \fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely report processing
+.TP
+\fB\-d\fR, \fB\-\-debug\fR
+don't remove temporary files
+.SS "Library directories:"
+.TP
+\fB\-B\fR, \fB\-\-prepend\-include\fR=\fIDIR\fR
+prepend directory DIR to search path
+.TP
+\fB\-I\fR, \fB\-\-include\fR=\fIDIR\fR
+append directory DIR to search path
+.SH AUTHOR
+Written by David J. MacKenzie and Akim Demaille.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-autoconf@gnu.org>.
+.br
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
+.PP
+The full documentation for
+.B autoscan
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B autoscan
+programs are properly installed at your site, the command
+.IP
+.B info autoscan
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoupdate.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoupdate.1
new file mode 100644
index 0000000..808f0a7
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/autoupdate.1
@@ -0,0 +1,72 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH AUTOUPDATE "1" "September 2010" "GNU Autoconf 2.68" "User Commands"
+.SH NAME
+autoupdate \- Update a configure.in to a newer Autoconf
+.SH SYNOPSIS
+.B autoupdate
+[\fIOPTION\fR]... [\fITEMPLATE-FILE\fR]...
+.SH DESCRIPTION
+Update each TEMPLATE\-FILE if given, or `configure.ac' if present,
+or else `configure.in', to the syntax of the current version of
+Autoconf.  The original files are backed up.
+.SS "Operation modes:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-V\fR, \fB\-\-version\fR
+print version number, then exit
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely report processing
+.TP
+\fB\-d\fR, \fB\-\-debug\fR
+don't remove temporary files
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+consider all files obsolete
+.SS "Library directories:"
+.TP
+\fB\-B\fR, \fB\-\-prepend\-include\fR=\fIDIR\fR
+prepend directory DIR to search path
+.TP
+\fB\-I\fR, \fB\-\-include\fR=\fIDIR\fR
+append directory DIR to search path
+.SH AUTHOR
+Written by David J. MacKenzie and Akim Demaille.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-autoconf@gnu.org>.
+.br
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
+.PP
+The full documentation for
+.B autoupdate
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B autoupdate
+programs are properly installed at your site, the command
+.IP
+.B info autoupdate
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/config.guess.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/config.guess.1
new file mode 100644
index 0000000..ff31f8e
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/config.guess.1
@@ -0,0 +1,62 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH CONFIG.GUESS "1" "September 2010" "GNU Autoconf 2.67.76-13418-dirty" "User Commands"
+.SH NAME
+config.guess \- guess the build system triplet
+.SH SYNOPSIS
+.B config.guess
+[\fIOPTION\fR]
+.SH DESCRIPTION
+The GNU build system distinguishes three types of machines, the
+`build' machine on which the compilers are run, the `host' machine
+on which the package being built will run, and, exclusively when you
+build a compiler, assembler etc., the `target' machine, for which the
+compiler being built will produce code.
+
+This script will guess the type of the `build' machine.
+.PP
+Output the configuration name of the system `config.guess' is run on.
+.SS "Operation modes:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-t\fR, \fB\-\-time\-stamp\fR
+print date of last modification, then exit
+.TP
+\fB\-v\fR, \fB\-\-version\fR
+print version number, then exit
+.SH "ENVIRONMENT VARIABLES"
+config.guess might need to compile and run C code, hence it needs a
+compiler for the `build' machine: use the environment variable
+`CC_FOR_BUILD' to specify the compiler for the build machine.  If
+`CC_FOR_BUILD' is not specified, `CC' will be used.  Be sure to
+specify `CC_FOR_BUILD' if `CC' is a cross-compiler to the `host'
+machine.
+
+  CC_FOR_BUILD    a native C compiler, defaults to `cc'
+  CC              a native C compiler, the previous variable is preferred
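+.PP
+As an illustration of the environment handling above (the compiler name and
+script location are assumptions), guessing the build triplet with an
+explicitly chosen build\-machine compiler might look like:
+.IP
+.nf
+# force a specific compiler for the build machine while guessing the triplet
+CC_FOR_BUILD=cc ./config.guess
+.fi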
+.SH "REPORTING BUGS"
+Report bugs and patches to <config\-patches@gnu.org>.
+.PP
+.br
+Originally written by Per Bothner.
+.br
+Copyright \(co 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free
+.br
+Software Foundation, Inc.
+.PP
+.br
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/config.sub.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/config.sub.1
new file mode 100644
index 0000000..46b9bfe
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/config.sub.1
@@ -0,0 +1,43 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH CONFIG.SUB "1" "September 2010" "GNU Autoconf 2.67.76-13418-dirty" "User Commands"
+.SH NAME
+config.sub \- validate and canonicalize a configuration triplet
+.SH SYNOPSIS
+.B config.sub
+[\fIOPTION\fR] \fICPU-MFR-OPSYS\fR
+.SH DESCRIPTION
+.IP
+\&../build\-aux/config.sub [OPTION] ALIAS
+.PP
+Canonicalize a configuration name.
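+.PP
+Purely as an example (the exact output depends on the config.sub version in
+use), canonicalizing a shorthand alias might look like:
+.IP
+.nf
+# expand a short alias into a full cpu-vendor-os triplet
+./config.sub x86_64\-linux
+# typically prints: x86_64\-pc\-linux\-gnu
+.fi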
+.SS "Operation modes:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-t\fR, \fB\-\-time\-stamp\fR
+print date of last modification, then exit
+.TP
+\fB\-v\fR, \fB\-\-version\fR
+print version number, then exit
+.SH "REPORTING BUGS"
+Report bugs and patches to <config\-patches@gnu.org>.
+.SH COPYRIGHT
+Copyright \(co 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free
+Software Foundation, Inc.
+.PP
+.br
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/cp.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/cp.1
new file mode 100644
index 0000000..a21b192
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/cp.1
@@ -0,0 +1,181 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.35.
+.TH CP "1" "December 2014" "GNU coreutils 8.21" "User Commands"
+.SH NAME
+cp \- copy files and directories
+.SH SYNOPSIS
+.B cp
+[\fIOPTION\fR]... [\fI-T\fR] \fISOURCE DEST\fR
+.br
+.B cp
+[\fIOPTION\fR]... \fISOURCE\fR... \fIDIRECTORY\fR
+.br
+.B cp
+[\fIOPTION\fR]... \fI-t DIRECTORY SOURCE\fR...
+.SH DESCRIPTION
+.\" Add any additional description here
+.PP
+Copy SOURCE to DEST, or multiple SOURCE(s) to DIRECTORY.
+.PP
+Mandatory arguments to long options are mandatory for short options too.
+.TP
+\fB\-a\fR, \fB\-\-archive\fR
+same as \fB\-dR\fR \fB\-\-preserve\fR=\fIall\fR
+.TP
+\fB\-\-attributes\-only\fR
+don't copy the file data, just the attributes
+.TP
+\fB\-\-backup\fR[=\fICONTROL\fR]
+make a backup of each existing destination file
+.TP
+\fB\-b\fR
+like \fB\-\-backup\fR but does not accept an argument
+.TP
+\fB\-\-copy\-contents\fR
+copy contents of special files when recursive
+.TP
+\fB\-d\fR
+same as \fB\-\-no\-dereference\fR \fB\-\-preserve\fR=\fIlinks\fR
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+if an existing destination file cannot be
+opened, remove it and try again (this option
+is ignored when the \fB\-n\fR option is also used)
+.TP
+\fB\-i\fR, \fB\-\-interactive\fR
+prompt before overwrite (overrides a previous \fB\-n\fR
+option)
+.TP
+\fB\-H\fR
+follow command\-line symbolic links in SOURCE
+.TP
+\fB\-l\fR, \fB\-\-link\fR
+hard link files instead of copying
+.TP
+\fB\-L\fR, \fB\-\-dereference\fR
+always follow symbolic links in SOURCE
+.TP
+\fB\-n\fR, \fB\-\-no\-clobber\fR
+do not overwrite an existing file (overrides
+a previous \fB\-i\fR option)
+.TP
+\fB\-P\fR, \fB\-\-no\-dereference\fR
+never follow symbolic links in SOURCE
+.TP
+\fB\-p\fR
+same as \fB\-\-preserve\fR=\fImode\fR,ownership,timestamps
+.TP
+\fB\-\-preserve\fR[=\fIATTR_LIST\fR]
+preserve the specified attributes (default:
+mode,ownership,timestamps), if possible
+additional attributes: context, links, xattr,
+all
+.TP
+\fB\-\-no\-preserve\fR=\fIATTR_LIST\fR
+don't preserve the specified attributes
+.TP
+\fB\-\-parents\fR
+use full source file name under DIRECTORY
+.TP
+\fB\-R\fR, \fB\-r\fR, \fB\-\-recursive\fR
+copy directories recursively
+.TP
+\fB\-\-reflink\fR[=\fIWHEN\fR]
+control clone/CoW copies. See below
+.TP
+\fB\-\-remove\-destination\fR
+remove each existing destination file before
+attempting to open it (contrast with \fB\-\-force\fR)
+.TP
+\fB\-\-sparse\fR=\fIWHEN\fR
+control creation of sparse files. See below
+.TP
+\fB\-\-strip\-trailing\-slashes\fR
+remove any trailing slashes from each SOURCE
+argument
+.TP
+\fB\-s\fR, \fB\-\-symbolic\-link\fR
+make symbolic links instead of copying
+.TP
+\fB\-S\fR, \fB\-\-suffix\fR=\fISUFFIX\fR
+override the usual backup suffix
+.TP
+\fB\-t\fR, \fB\-\-target\-directory\fR=\fIDIRECTORY\fR
+copy all SOURCE arguments into DIRECTORY
+.TP
+\fB\-T\fR, \fB\-\-no\-target\-directory\fR
+treat DEST as a normal file
+.TP
+\fB\-u\fR, \fB\-\-update\fR
+copy only when the SOURCE file is newer
+than the destination file or when the
+destination file is missing
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+explain what is being done
+.TP
+\fB\-x\fR, \fB\-\-one\-file\-system\fR
+stay on this file system
+.TP
+\fB\-\-help\fR
+display this help and exit
+.TP
+\fB\-\-version\fR
+output version information and exit
+.PP
+By default, sparse SOURCE files are detected by a crude heuristic and the
+corresponding DEST file is made sparse as well.  That is the behavior
+selected by \fB\-\-sparse\fR=\fIauto\fR.  Specify \fB\-\-sparse\fR=\fIalways\fR to create a sparse DEST
+file whenever the SOURCE file contains a long enough sequence of zero bytes.
+Use \fB\-\-sparse\fR=\fInever\fR to inhibit creation of sparse files.
+.PP
+When \fB\-\-reflink\fR[=\fIalways\fR] is specified, perform a lightweight copy, where the
+data blocks are copied only when modified.  If this is not possible the copy
+fails, or if \fB\-\-reflink\fR=\fIauto\fR is specified, fall back to a standard copy.
+.PP
+The backup suffix is '~', unless set with \fB\-\-suffix\fR or SIMPLE_BACKUP_SUFFIX.
+The version control method may be selected via the \fB\-\-backup\fR option or through
+the VERSION_CONTROL environment variable.  Here are the values:
+.TP
+none, off
+never make backups (even if \fB\-\-backup\fR is given)
+.TP
+numbered, t
+make numbered backups
+.TP
+existing, nil
+numbered if numbered backups exist, simple otherwise
+.TP
+simple, never
+always make simple backups
+.PP
+As a special case, cp makes a backup of SOURCE when the force and backup
+options are given and SOURCE and DEST are the same name for an existing,
+regular file.
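+.PP
+As a hedged illustration of the backup behaviour described above (the file
+names are hypothetical):
+.IP
+.nf
+# overwrite notes.txt with draft.txt, keeping a numbered backup such as
+# notes.txt.~1~ of any existing destination file
+cp \-\-force \-\-backup=numbered draft.txt notes.txt
+.fi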
+.SH AUTHOR
+Written by Torbjorn Granlund, David MacKenzie, and Jim Meyering.
+.SH "REPORTING BUGS"
+Report cp bugs to bug\-coreutils@gnu.org
+.br
+GNU coreutils home page: <http://www.gnu.org/software/coreutils/>
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>
+.br
+Report cp translation bugs to <http://translationproject.org/team/>
+.SH COPYRIGHT
+Copyright \(co 2013 Free Software Foundation, Inc.
+License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+The full documentation for
+.B cp
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B cp
+programs are properly installed at your site, the command
+.IP
+.B info coreutils \(aqcp invocation\(aq
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/ifnames.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/ifnames.1
new file mode 100644
index 0000000..dd8aaa3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/ifnames.1
@@ -0,0 +1,57 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.38.2.
+.TH IFNAMES "1" "September 2010" "GNU Autoconf 2.68" "User Commands"
+.SH NAME
+ifnames \- Extract CPP conditionals from a set of files
+.SH SYNOPSIS
+.B ifnames
+[\fIOPTION\fR]... [\fIFILE\fR]...
+.SH DESCRIPTION
+Scan all of the C source FILES (or the standard input, if none are
+given) and write to the standard output a sorted list of all the
+identifiers that appear in those files in `#if', `#elif', `#ifdef', or
+`#ifndef' directives.  Print each identifier on a line, followed by a
+space\-separated list of the files in which that identifier occurs.
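+.PP
+For example (the directory layout is assumed), listing the preprocessor
+conditionals used throughout a project's C sources could be done with:
+.IP
+.nf
+# one identifier per line, followed by the files that test it
+ifnames src/*.c src/*.h
+.fi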
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print this help, then exit
+.TP
+\fB\-V\fR, \fB\-\-version\fR
+print version number, then exit
+.SH AUTHOR
+Written by David J. MacKenzie and Paul Eggert.
+.SH "REPORTING BUGS"
+Report bugs to <bug\-autoconf@gnu.org>.
+.br
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+.BR autoconf (1),
+.BR automake (1),
+.BR autoreconf (1),
+.BR autoupdate (1),
+.BR autoheader (1),
+.BR autoscan (1),
+.BR config.guess (1),
+.BR config.sub (1),
+.BR ifnames (1),
+.BR libtool (1).
+.PP
+The full documentation for
+.B ifnames
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B ifnames
+programs are properly installed at your site, the command
+.IP
+.B info ifnames
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/libtool.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/libtool.1
new file mode 100644
index 0000000..08b1186
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/libtool.1
@@ -0,0 +1,365 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.40.4.
+.TH LIBTOOL "1" "October 2011" "libtool 2.4.2" "User Commands"
+.SH NAME
+libtool \- manual page for libtool 2.4.2
+.SH SYNOPSIS
+.B libtool
+[\fIOPTION\fR]... [\fIMODE-ARG\fR]...
+.br
+.B libtool
+[\fIOPTION\fR]... \fI--mode=compile COMPILE-COMMAND\fR... \fISOURCEFILE\fR
+.br
+.B libtool
+[\fIOPTION\fR]... \fI--mode=link LINK-COMMAND\fR...
+.br
+.B libtool
+[\fIOPTION\fR]... \fI--mode=execute COMMAND \fR[\fIARGS\fR]...
+.br
+.B libtool
+[\fIOPTION\fR]... \fI--mode=install INSTALL-COMMAND\fR...
+.br
+.B libtool
+[\fIOPTION\fR]... \fI--mode=finish \fR[\fILIBDIR\fR]...
+.br
+.B libtool
+[\fIOPTION\fR]... \fI--mode=uninstall RM \fR[\fIRM-OPTION\fR]... \fIFILE\fR...
+.br
+.B libtool
+[\fIOPTION\fR]... \fI--mode=clean RM \fR[\fIRM-OPTION\fR]... \fIFILE\fR...
+.SH DESCRIPTION
+Provide generalized library\-building support services.
+.TP
+\fB\-\-config\fR
+show all configuration variables
+.TP
+\fB\-\-debug\fR
+enable verbose shell tracing
+.TP
+\fB\-n\fR, \fB\-\-dry\-run\fR
+display commands without modifying any files
+.TP
+\fB\-\-features\fR
+display basic configuration information and exit
+.TP
+\fB\-\-mode\fR=\fIMODE\fR
+use operation mode MODE
+.TP
+\fB\-\-preserve\-dup\-deps\fR
+don't remove duplicate dependency libraries
+.TP
+\fB\-\-quiet\fR, \fB\-\-silent\fR
+don't print informational messages
+.TP
+\fB\-\-no\-quiet\fR, \fB\-\-no\-silent\fR
+print informational messages (default)
+.TP
+\fB\-\-no\-warn\fR
+don't display warning messages
+.TP
+\fB\-\-tag\fR=\fITAG\fR
+use configuration variables from tag TAG
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+print more informational messages than default
+.TP
+\fB\-\-no\-verbose\fR
+don't print the extra informational messages
+.TP
+\fB\-\-version\fR
+print version information
+.TP
+\fB\-h\fR, \fB\-\-help\fR, \fB\-\-help\-all\fR
+print short, long, or detailed help message
+.PP
+MODE must be one of the following:
+.TP
+clean
+remove files from the build directory
+.TP
+compile
+compile a source file into a libtool object
+.TP
+execute
+automatically set library path, then run a program
+.TP
+finish
+complete the installation of libtool libraries
+.TP
+install
+install libraries or executables
+.TP
+link
+create a library or an executable
+.TP
+uninstall
+remove libraries from an installed directory
+.PP
+MODE\-ARGS vary depending on the MODE.  When passed as first option,
+`\-\-mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+.PP
+GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.PP
+Description of compile mode:
+.PP
+Compile a source file into a libtool library object.
+.PP
+This mode accepts the following additional options:
+.TP
+\fB\-o\fR OUTPUT\-FILE
+set the output file name to OUTPUT\-FILE
+.TP
+\fB\-no\-suppress\fR
+do not suppress compiler output for multiple passes
+.TP
+\fB\-prefer\-pic\fR
+try to build PIC objects only
+.TP
+\fB\-prefer\-non\-pic\fR
+try to build non\-PIC objects only
+.TP
+\fB\-shared\fR
+do not build a `.o' file suitable for static linking
+.TP
+\fB\-static\fR
+only build a `.o' file suitable for static linking
+.TP
+\fB\-Wc\fR,FLAG
+pass FLAG directly to the compiler
+.PP
+COMPILE\-COMMAND is a command to be used in creating a `standard' object file
+from the given SOURCEFILE.
+.PP
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix `.c' with the
+library object suffix, `.lo'.
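+.PP
+A minimal sketch of compile mode, assuming an ordinary C source file `foo.c':
+.IP
+.nf
+# produces foo.lo (and, behind the scenes, PIC and non-PIC objects)
+libtool \-\-mode=compile gcc \-g \-O2 \-c foo.c
+.fi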
+.PP
+Description of link mode:
+.PP
+Link object files or libraries together to form another library, or to
+create an executable program.
+.PP
+LINK\-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+.PP
+The following components of LINK\-COMMAND are treated specially:
+.TP
+\fB\-all\-static\fR
+do not do any dynamic linking at all
+.TP
+\fB\-avoid\-version\fR
+do not add a version suffix if possible
+.TP
+\fB\-bindir\fR BINDIR
+specify path to binaries directory (for systems where
+libraries must be found in the PATH setting at runtime)
+.TP
+\fB\-dlopen\fR FILE
+`\-dlpreopen' FILE if it cannot be dlopened at runtime
+.TP
+\fB\-dlpreopen\fR FILE
+link in FILE and add its symbols to lt_preloaded_symbols
+.TP
+\fB\-export\-dynamic\fR
+allow symbols from OUTPUT\-FILE to be resolved with dlsym(3)
+.TP
+\fB\-export\-symbols\fR SYMFILE
+try to export only the symbols listed in SYMFILE
+.TP
+\fB\-export\-symbols\-regex\fR REGEX
+try to export only the symbols matching REGEX
+.TP
+\fB\-LLIBDIR\fR
+search LIBDIR for required installed libraries
+.TP
+\fB\-lNAME\fR
+OUTPUT\-FILE requires the installed library libNAME
+.TP
+\fB\-module\fR
+build a library that can be dlopened
+.TP
+\fB\-no\-fast\-install\fR
+disable the fast\-install mode
+.TP
+\fB\-no\-install\fR
+link a not\-installable executable
+.TP
+\fB\-no\-undefined\fR
+declare that a library does not refer to external symbols
+.TP
+\fB\-o\fR OUTPUT\-FILE
+create OUTPUT\-FILE from the specified objects
+.TP
+\fB\-objectlist\fR FILE
+Use a list of object files found in FILE to specify objects
+.TP
+\fB\-precious\-files\-regex\fR REGEX
+don't remove output files matching REGEX
+.TP
+\fB\-release\fR RELEASE
+specify package release information
+.TP
+\fB\-rpath\fR LIBDIR
+the created library will eventually be installed in LIBDIR
+.TP
+\fB\-R[\fR ]LIBDIR
+add LIBDIR to the runtime path of programs and libraries
+.TP
+\fB\-shared\fR
+only do dynamic linking of libtool libraries
+.TP
+\fB\-shrext\fR SUFFIX
+override the standard shared library file extension
+.TP
+\fB\-static\fR
+do not do any dynamic linking of uninstalled libtool libraries
+.TP
+\fB\-static\-libtool\-libs\fR
+do not do any dynamic linking of libtool libraries
+.TP
+\fB\-version\-info\fR CURRENT[:REVISION[:AGE]]
+specify library version info [each variable defaults to 0]
+.TP
+\fB\-weak\fR LIBNAME
+declare that the target provides the LIBNAME interface
+.HP
+\fB\-Wc\fR,FLAG
+.TP
+\fB\-Xcompiler\fR FLAG
+pass linker\-specific FLAG directly to the compiler
+.HP
+\fB\-Wl\fR,FLAG
+.TP
+\fB\-Xlinker\fR FLAG
+pass linker\-specific FLAG directly to the linker
+.TP
+\fB\-XCClinker\fR FLAG
+pass link\-specific FLAG to the compiler driver (CC)
+.PP
+All other options (arguments beginning with `\-') are ignored.
+.PP
+Every other argument is treated as a filename.  Files ending in `.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+.PP
+If the OUTPUT\-FILE ends in `.la', then a libtool library is created,
+only library objects (`.lo' files) may be specified, and `\-rpath' is
+required, except when creating a convenience library.
+.PP
+If OUTPUT\-FILE ends in `.a' or `.lib', then a standard library is created
+using `ar' and `ranlib', or on Windows using `lib'.
+.PP
+If OUTPUT\-FILE ends in `.lo' or `.o', then a reloadable object file
+is created, otherwise an executable program is created.
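+.PP
+Continuing that sketch, linking the resulting object into an installable
+libtool library (the installation directory is only an example) might be:
+.IP
+.nf
+# create libfoo.la, to be installed eventually under /usr/local/lib
+libtool \-\-mode=link gcc \-g \-O2 \-o libfoo.la foo.lo \-rpath /usr/local/lib
+.fi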
+.PP
+Description of execute mode:
+.PP
+Automatically set library path, then run a program.
+.PP
+This mode accepts the following additional options:
+.TP
+\fB\-dlopen\fR FILE
+add the directory containing FILE to the library path
+.PP
+This mode sets the library path environment variable according to `\-dlopen'
+flags.
+.PP
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+.PP
+Then, COMMAND is executed, with ARGS as arguments.
+.PP
+Description of install mode:
+.PP
+Install executables or libraries.
+.PP
+INSTALL\-COMMAND is the installation command.  The first component should be
+either the `install' or `cp' program.
+.PP
+The following components of INSTALL\-COMMAND are treated specially:
+.TP
+\fB\-inst\-prefix\-dir\fR PREFIX\-DIR
+Use PREFIX\-DIR as a staging area for installation
+.PP
+The rest of the components are interpreted as arguments to that command (only
+BSD\-compatible install options are recognized).
+.PP
+Description of finish mode:
+.PP
+Complete the installation of libtool libraries.
+.PP
+Each LIBDIR is a directory that contains libtool libraries.
+.PP
+The commands that this mode executes may require superuser privileges.  Use
+the `\-\-dry\-run' option if you just want to see what would be executed.
+.PP
+Description of uninstall mode:
+.PP
+Remove libraries from an installation directory.
+.PP
+RM is the name of the program to use to delete files associated with each FILE
+(typically `/bin/rm').  RM\-OPTIONS are options (such as `\-f') to be passed
+to RM.
+.PP
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM.
+.PP
+Description of clean mode:
+.PP
+Remove files from the build directory.
+.PP
+RM is the name of the program to use to delete files associated with each FILE
+(typically `/bin/rm').  RM\-OPTIONS are options (such as `\-f') to be passed
+to RM.
+.PP
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM.
+.PP
+When reporting a bug, please describe a test case to reproduce it and
+include the following information:
+.TP
+host\-triplet:
+x86_64\-apple\-darwin11.2.0
+.TP
+shell:
+/bin/sh
+.TP
+compiler:
+gcc
+.TP
+compiler flags:
+\fB\-g\fR \fB\-O2\fR
+.TP
+linker:
+/usr/llvm\-gcc\-4.2/libexec/gcc/i686\-apple\-darwin11/4.2.1/ld (gnu? no)
+.TP
+libtool:
+(GNU libtool) 2.4.2
+.TP
+automake:
+automake (GNU automake) 1.11.1
+.TP
+autoconf:
+autoconf (GNU Autoconf) 2.68
+.SH AUTHOR
+Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+.SH "REPORTING BUGS"
+Report bugs to <bug\-libtool@gnu.org>.
+.SH COPYRIGHT
+Copyright \(co 2011 Free Software Foundation, Inc.
+.br
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+.SH "SEE ALSO"
+The full documentation for
+.B libtool
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B libtool
+programs are properly installed at your site, the command
+.IP
+.B info libtool
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/libtoolize.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/libtoolize.1
new file mode 100644
index 0000000..1e53702
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/libtoolize.1
@@ -0,0 +1,110 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.40.4.
+.TH LIBTOOLIZE "1" "October 2011" "libtoolize 2.4.1a" "User Commands"
+.SH NAME
+libtoolize \- manual page for libtoolize 2.4.1a
+.SH SYNOPSIS
+.B libtoolize
+[\fIOPTION\fR]...
+.SH DESCRIPTION
+Prepare a package to use libtool.
+.TP
+\fB\-c\fR, \fB\-\-copy\fR
+copy files rather than symlinking them
+.TP
+\fB\-\-debug\fR
+enable verbose shell tracing
+.TP
+\fB\-n\fR, \fB\-\-dry\-run\fR
+print commands rather than running them
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+replace existing files
+.TP
+\fB\-i\fR, \fB\-\-install\fR
+copy missing auxiliary files
+.TP
+\fB\-\-ltdl\fR[=\fIDIR\fR]
+install libltdl sources [default: libltdl]
+.TP
+\fB\-\-no\-warn\fR
+don't display warning messages
+.TP
+\fB\-\-nonrecursive\fR
+prepare ltdl for non\-recursive make
+.TP
+\fB\-q\fR, \fB\-\-quiet\fR
+work silently
+.TP
+\fB\-\-recursive\fR
+prepare ltdl for recursive make
+.TP
+\fB\-\-subproject\fR
+prepare ltdl to configure and build independently
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+verbosely report processing
+.TP
+\fB\-\-version\fR
+print version information and exit
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+print short or long help message
+.PP
+The following space\- or comma\-delimited options can be passed to libtoolize
+via the environment variable LIBTOOLIZE_OPTIONS; unknown environment
+options are ignored:
+.TP
+\fB\-\-debug\fR
+enable verbose shell tracing
+.TP
+\fB\-\-no\-warn\fR
+don't display warning messages
+.TP
+\fB\-\-quiet\fR
+work silently
+.TP
+\fB\-\-verbose\fR
+verbosely report processing
+.PP
+You must `cd' to the top directory of your package before you run
+`libtoolize'.
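+.PP
+For illustration only, a typical run from the top directory of a package that
+prefers copies over symlinks and replaces stale files would be:
+.IP
+.nf
+# copy ltmain.sh and related files into the tree, overwriting older copies
+libtoolize \-\-copy \-\-force
+.fi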
+.PP
+When reporting a bug, please describe a test case to reproduce it and
+include the following information:
+.TP
+host\-triplet:
+x86_64\-apple\-darwin11.2.0
+.TP
+libtoolize:
+(GNU libtool) 2.4.1a
+.TP
+automake:
+automake (GNU automake) 1.11.1
+.TP
+autoconf:
+autoconf (GNU Autoconf) 2.68
+.SH AUTHOR
+Written by Gary V. Vaughan <gary@gnu.org>, 2003
+.SH "REPORTING BUGS"
+Report bugs to <bug\-libtool@gnu.org>.
+.br
+GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+.SH COPYRIGHT
+Copyright \(co 2010 Free Software Foundation, Inc.
+.br
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+.SH "SEE ALSO"
+The full documentation for
+.B libtoolize
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B libtoolize
+programs are properly installed at your site, the command
+.IP
+.B info libtoolize
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/m4.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/m4.1
new file mode 100644
index 0000000..51233f0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/m4.1
@@ -0,0 +1,151 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.43.3.
+.TH M4 "1" "September 2013" "GNU M4 1.4.17" "User Commands"
+.SH NAME
+m4 \- macro processor
+.SH SYNOPSIS
+.B m4
+[\fIOPTION\fR]... [\fIFILE\fR]...
+.SH DESCRIPTION
+Process macros in FILEs.  If no FILE or if FILE is `\-', standard input
+is read.
+.PP
+Mandatory or optional arguments to long options are mandatory or optional
+for short options too.
+.SS "Operation modes:"
+.TP
+\fB\-\-help\fR
+display this help and exit
+.TP
+\fB\-\-version\fR
+output version information and exit
+.TP
+\fB\-E\fR, \fB\-\-fatal\-warnings\fR
+once: warnings become errors, twice: stop
+execution at first error
+.TP
+\fB\-i\fR, \fB\-\-interactive\fR
+unbuffer output, ignore interrupts
+.TP
+\fB\-P\fR, \fB\-\-prefix\-builtins\fR
+force a `m4_' prefix to all builtins
+.TP
+\fB\-Q\fR, \fB\-\-quiet\fR, \fB\-\-silent\fR
+suppress some warnings for builtins
+.TP
+\fB\-\-warn\-macro\-sequence\fR[=\fIREGEXP\fR]
+warn if macro definition matches REGEXP,
+.IP
+default \e$\e({[^}]*}\e|[0\-9][0\-9]+\e)
+.SS "Preprocessor features:"
+.TP
+\fB\-D\fR, \fB\-\-define=NAME\fR[=\fIVALUE\fR]
+define NAME as having VALUE, or empty
+.TP
+\fB\-I\fR, \fB\-\-include\fR=\fIDIRECTORY\fR
+append DIRECTORY to include path
+.TP
+\fB\-s\fR, \fB\-\-synclines\fR
+generate `#line NUM "FILE"' lines
+.TP
+\fB\-U\fR, \fB\-\-undefine\fR=\fINAME\fR
+undefine NAME
+.SS "Limits control:"
+.TP
+\fB\-g\fR, \fB\-\-gnu\fR
+override \fB\-G\fR to re\-enable GNU extensions
+.TP
+\fB\-G\fR, \fB\-\-traditional\fR
+suppress all GNU extensions
+.TP
+\fB\-H\fR, \fB\-\-hashsize\fR=\fIPRIME\fR
+set symbol lookup hash table size [509]
+.TP
+\fB\-L\fR, \fB\-\-nesting\-limit\fR=\fINUMBER\fR
+change nesting limit, 0 for unlimited [0]
+.SS "Frozen state files:"
+.TP
+\fB\-F\fR, \fB\-\-freeze\-state\fR=\fIFILE\fR
+produce a frozen state on FILE at end
+.TP
+\fB\-R\fR, \fB\-\-reload\-state\fR=\fIFILE\fR
+reload a frozen state from FILE at start
+.SS "Debugging:"
+.TP
+\fB\-d\fR, \fB\-\-debug\fR[=\fIFLAGS\fR]
+set debug level (no FLAGS implies `aeq')
+.TP
+\fB\-\-debugfile\fR[=\fIFILE\fR]
+redirect debug and trace output to FILE
+(default stderr, discard if empty string)
+.TP
+\fB\-l\fR, \fB\-\-arglength\fR=\fINUM\fR
+restrict macro tracing size
+.TP
+\fB\-t\fR, \fB\-\-trace\fR=\fINAME\fR
+trace NAME when it is defined
+.SS "FLAGS is any of:"
+.TP
+a
+show actual arguments
+.TP
+c
+show before collect, after collect and after call
+.TP
+e
+show expansion
+.TP
+f
+say current input file name
+.TP
+i
+show changes in input files
+.TP
+l
+say current input line number
+.TP
+p
+show results of path searches
+.TP
+q
+quote values as necessary, with a or e flag
+.TP
+t
+trace for all macro calls, not only traceon'ed
+.TP
+x
+add a unique macro call id, useful with c flag
+.TP
+V
+shorthand for all of the above flags
+.PP
+If defined, the environment variable `M4PATH' is a colon\-separated list
+of directories included after any specified by `\-I'.
+.PP
+Exit status is 0 for success, 1 for failure, 63 for frozen file version
+mismatch, or whatever value was passed to the m4exit macro.
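+.PP
+A one\-line illustration of macro expansion with a definition supplied on the
+command line (the macro name and value are arbitrary):
+.IP
+.nf
+# should print: Hello world
+echo \(aqHello NAME\(aq | m4 \-DNAME=world
+.fi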
+.SH AUTHOR
+Written by Rene' Seindal.
+.SH "REPORTING BUGS"
+Report bugs to: bug\-m4@gnu.org
+.br
+GNU M4 home page: <http://www.gnu.org/software/m4/>
+.br
+General help using GNU software: <http://www.gnu.org/gethelp/>
+.SH COPYRIGHT
+Copyright \(co 2013 Free Software Foundation, Inc.
+License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
+.br
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+.SH "SEE ALSO"
+The full documentation for
+.B m4
+is maintained as a Texinfo manual.  If the
+.B info
+and
+.B m4
+programs are properly installed at your site, the command
+.IP
+.B info m4
+.PP
+should give you access to the complete manual.
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/make.1 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/make.1
new file mode 100644
index 0000000..c76ce25
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/share/man/man1/make.1
@@ -0,0 +1,359 @@
+.TH MAKE 1 "22 August 1989" "GNU" "LOCAL USER COMMANDS"
+.SH NAME
+make \- GNU make utility to maintain groups of programs
+.SH SYNOPSIS
+.B "make "
+[
+.B \-f
+.I makefile
+] [ options ] ... [ targets ] ...
+.SH WARNING
+This man page is an extract of the documentation of GNU
+.IR make .
+It is updated only occasionally, because the GNU project does not use nroff.
+For complete, current documentation, refer to the Info file
+.B make.info
+which is made from the Texinfo source file
+.BR make.texi .
+.SH DESCRIPTION
+.LP
+The purpose of the
+.I make
+utility is to determine automatically which
+pieces of a large program need to be recompiled, and issue the commands to
+recompile them.
+The manual describes the GNU implementation of
+.IR make ,
+which was written by Richard Stallman and Roland McGrath, and is
+currently maintained by Paul Smith.
+Our examples show C programs, since they are most common, but you can use
+.I make
+with any programming language whose compiler can be run with a
+shell command.
+In fact,
+.I make
+is not limited to programs.
+You can use it to describe any task where some files must be
+updated automatically from others whenever the others change.
+.LP
+To prepare to use
+.IR make ,
+you must write a file called the
+.I makefile
+that describes the relationships among files in your program, and states
+the commands for updating each file.
+In a program, typically the executable file is updated from object
+files, which are in turn made by compiling source files.
+.LP
+Once a suitable makefile exists, each time you change some source files,
+this simple shell command:
+.sp 1
+.RS
+.B make
+.RE
+.sp 1
+suffices to perform all necessary recompilations.
+The
+.I make
+program uses the makefile data base and the last-modification times
+of the files to decide which of the files need to be updated.
+For each of those files, it issues the commands recorded in the data base.
+.LP
+.I make
+executes commands in the
+.I makefile
+to update
+one or more target
+.IR names ,
+where
+.I name
+is typically a program.
+If no
+.B \-f
+option is present,
+.I make
+will look for the makefiles
+.IR GNUmakefile ,
+.IR makefile ,
+and
+.IR Makefile ,
+in that order.
+.LP
+Normally you should call your makefile either
+.I makefile
+or
+.IR Makefile .
+(We recommend
+.I Makefile
+because it appears prominently near the beginning of a directory
+listing, right near other important files such as
+.IR  README .)
+The first name checked,
+.IR GNUmakefile ,
+is not recommended for most makefiles.
+You should use this name if you have a makefile that is specific to GNU
+.IR make ,
+and will not be understood by other versions of
+.IR make .
+If
+.I makefile
+is `\-', the standard input is read.
+.LP
+.I make
+updates a target if it depends on prerequisite files
+that have been modified since the target was last modified,
+or if the target does not exist.
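+.PP
+As a purely illustrative invocation (the makefile name and target are
+hypothetical), building one target from an explicitly named makefile with up
+to four parallel jobs could be written as:
+.IP
+.nf
+# read build.mk instead of the default names, run at most 4 jobs at once
+make \-f build.mk \-j4 prog
+.fi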
+.SH OPTIONS
+.sp 1
+.TP 0.5i
+.BR \-b , " \-m"
+These options are ignored for compatibility with other versions of
+.IR make .
+.TP 0.5i
+.BR \-B , " \-\-always\-make"
+Unconditionally make all targets.
+.TP 0.5i
+\fB\-C\fR \fIdir\fR, \fB\-\-directory\fR=\fIdir\fR
+Change to directory
+.I dir
+before reading the makefiles or doing anything else.
+If multiple
+.B \-C
+options are specified, each is interpreted relative to the
+previous one:
+.BR "\-C " /
+.BR "\-C " etc
+is equivalent to
+.BR "\-C " /etc.
+This is typically used with recursive invocations of
+.IR make .
+.TP 0.5i
+.B \-d
+Print debugging information in addition to normal processing.
+The debugging information says which files are being considered for
+remaking, which file-times are being compared and with what results,
+which files actually need to be remade, which implicit rules are
+considered and which are applied---everything interesting about how
+.I make
+decides what to do.
+.TP 0.5i
+.BI \-\-debug "[=FLAGS]"
+Print debugging information in addition to normal processing.
+If the
+.I FLAGS
+are omitted, then the behavior is the same as if
+.B \-d
+was specified.
+.I FLAGS
+may be
+.I a
+for all debugging output (same as using
+.BR \-d ),
+.I b
+for basic debugging,
+.I v
+for more verbose basic debugging,
+.I i
+for showing implicit rules,
+.I j
+for details on invocation of commands, and
+.I m
+for debugging while remaking makefiles.
+.TP 0.5i
+.BR \-e , " \-\-environment\-overrides"
+Give variables taken from the environment precedence
+over variables from makefiles.
+.TP 0.5i
+\fB\-f\fR \fIfile\fR, \fB\-\-file\fR=\fIfile\fR, \fB\-\-makefile\fR=\fIFILE\fR
+Use
+.I file
+as a makefile.
+.TP 0.5i
+.BR \-i , " \-\-ignore\-errors"
+Ignore all errors in commands executed to remake files.
+.TP 0.5i
+\fB\-I\fR \fIdir\fR, \fB\-\-include\-dir\fR=\fIdir\fR
+Specifies a directory
+.I dir
+to search for included makefiles.
+If several
+.B \-I
+options are used to specify several directories, the directories are
+searched in the order specified.
+Unlike the arguments to other flags of
+.IR make ,
+directories given with
+.B \-I
+flags may come directly after the flag:
+.BI \-I dir
+is allowed, as well as
+.BI "\-I " dir.
+This syntax is allowed for compatibility with the C
+preprocessor's
+.B \-I
+flag.
+.TP 0.5i
+\fB\-j\fR [\fIjobs\fR], \fB\-\-jobs\fR[=\fIjobs\fR]
+Specifies the number of
+.I jobs
+(commands) to run simultaneously.
+If there is more than one
+.B \-j
+option, the last one is effective.
+If the
+.B \-j
+option is given without an argument,
+.IR make
+will not limit the number of jobs that can run simultaneously.
+.TP 0.5i
+.BR \-k , " \-\-keep\-going"
+Continue as much as possible after an error.
+While the target that failed, and those that depend on it, cannot
+be remade, the other dependencies of these targets can be processed
+all the same.
+.TP 0.5i
+\fB\-l\fR [\fIload\fR], \fB\-\-load\-average\fR[=\fIload\fR]
+Specifies that no new jobs (commands) should be started if there are
+other jobs running and the load average is at least
+.I load
+(a floating-point number).
+With no argument, removes a previous load limit.
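+.sp 1
+.RS
+For illustration (a hypothetical invocation, not a recommendation):
+.sp 1
+.nf
+# run up to 8 recipes in parallel, but start no new ones while
+# other jobs are running and the load average is at least 4
+make \-j 8 \-l 4
+.fi
+.RE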
+.TP 0.5i
+.BR \-L , " \-\-check\-symlink\-times"
+Use the latest mtime between symlinks and target.
+.TP 0.5i
+.BR \-n , " \-\-just\-print" , " \-\-dry\-run" , " \-\-recon"
+Print the commands that would be executed, but do not execute them (except in
+certain circumstances).
+.TP 0.5i
+\fB\-o\fR \fIfile\fR, \fB\-\-old\-file\fR=\fIfile\fR, \fB\-\-assume\-old\fR=\fIfile\fR
+Do not remake the file
+.I file
+even if it is older than its dependencies, and do not remake anything
+on account of changes in
+.IR file .
+Essentially the file is treated as very old and its rules are ignored.
+.TP 0.5i
+.BR \-p , " \-\-print\-data\-base"
+Print the data base (rules and variable values) that results from
+reading the makefiles; then execute as usual or as otherwise
+specified.
+This also prints the version information given by the
+.B \-v
+switch (see below).
+To print the data base without trying to remake any files, use
+.B make
+.B \-p
+.BI \-f /dev/null.
+.TP 0.5i
+.BR \-q , " \-\-question"
+``Question mode''.
+Do not run any commands, or print anything; just return an exit status
+that is zero if the specified targets are already up to date, nonzero
+otherwise.
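+.sp 1
+.RS
+An illustrative use from a shell script (the target name
+.I all
+is hypothetical):
+.sp 1
+.nf
+# rebuild only if something is out of date
+make \-q all || make all
+.fi
+.RE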
+.TP 0.5i
+.BR \-r , " \-\-no\-builtin\-rules"
+Eliminate use of the built\-in implicit rules.
+Also clear out the default list of suffixes for suffix rules.
+.TP 0.5i
+.BR \-R , " \-\-no\-builtin\-variables"
+Don't define any built\-in variables.
+.TP 0.5i
+.BR \-s , " \-\-silent" , " \-\-quiet"
+Silent operation; do not print the commands as they are executed.
+.TP 0.5i
+.BR \-S , " \-\-no\-keep\-going" , " \-\-stop"
+Cancel the effect of the
+.B \-k
+option.
+This is never necessary except in a recursive
+.I make
+where
+.B \-k
+might be inherited from the top-level
+.I make
+via MAKEFLAGS or if you set
+.B \-k
+in MAKEFLAGS in your environment.
+.TP 0.5i
+.BR \-t , " \-\-touch"
+Touch files (mark them up to date without really changing them)
+instead of running their commands.
+This is used to pretend that the commands were done, in order to fool
+future invocations of
+.IR make .
+.TP 0.5i
+.BR \-v , " \-\-version"
+Print the version of the
+.I make
+program plus a copyright, a list of authors and a notice that there
+is no warranty.
+.TP 0.5i
+.BR \-w , " \-\-print\-directory"
+Print a message containing the working directory
+before and after other processing.
+This may be useful for tracking down errors from complicated nests of
+recursive
+.I make
+commands.
+.TP 0.5i
+.B \-\-no\-print\-directory
+Turn off
+.BR \-w ,
+even if it was turned on implicitly.
+.TP 0.5i
+\fB\-W\fR \fIfile\fR, \fB\-\-what\-if\fR=\fIfile\fR, \fB\-\-new\-file\fR=\fIfile\fR, \fB\-\-assume\-new\fR=\fIfile\fR
+Pretend that the target
+.I file
+has just been modified.
+When used with the
+.B \-n
+flag, this shows you what would happen if you were to modify that file.
+Without
+.BR \-n ,
+it is almost the same as running a
+.I touch
+command on the given file before running
+.IR make ,
+except that the modification time is changed only in the imagination of
+.IR make .
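+.sp 1
+.RS
+For illustration (the file name
+.I main.c
+is hypothetical):
+.sp 1
+.nf
+# show what would be remade if main.c were modified,
+# without touching or rebuilding anything
+make \-n \-W main.c
+.fi
+.RE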
+.TP 0.5i
+.B \-\-warn\-undefined\-variables
+Warn when an undefined variable is referenced.
+.SH "EXIT STATUS"
+GNU
+.I make
+exits with a status of zero if all makefiles were successfully parsed
+and no targets that were built failed.  A status of one will be returned
+if the
+.B \-q
+flag was used and
+.I make
+determines that a target needs to be rebuilt.  A status of two will be
+returned if any errors were encountered.
+.SH "SEE ALSO"
+.I "The GNU Make Manual"
+.SH BUGS
+See the chapter `Problems and Bugs' in
+.IR "The GNU Make Manual" .
+.SH AUTHOR
+This manual page contributed by Dennis Morse of Stanford University.
+It has been reworked by Roland McGrath.  Further updates contributed by
+Mike Frysinger.
+.SH "COPYRIGHT"
+Copyright (C) 1992, 1993, 1996, 1999, 2007 Free Software Foundation, Inc.
+This file is part of GNU
+.IR make .
+.LP
+GNU Make is free software; you can redistribute it and/or modify it under the
+terms of the GNU General Public License as published by the Free Software
+Foundation; either version 3 of the License, or (at your option) any later
+version.
+.LP
+GNU Make is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+.LP
+You should have received a copy of the GNU General Public License along with
+this program.  If not, see
+.IR http://www.gnu.org/licenses/ .
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/aclocal b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/aclocal
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/aclocal
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 file in $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order into which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in first files we have scanned should override those from
+# later files.  So they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted , we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
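+# For illustration only (not taken from any real macro file): a line such as
+#   # serial 7
+# matches $serial_line_rx with '7' captured as the serial, and '7', '1.2',
+# or '1.10.3' would all satisfy $serial_number_rx.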
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directory containing extra m4 files for macros definition,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # reraise default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # the destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However working around this is not
+	      # worth the trouble since nobody runs aclocal on a
+	      # read-only tree anyway.
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  This makes the 'm4'
+      # directory to occur twice here and fail on the second call to
+      # scan_m4_dirs([m4]) when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there be rather a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on white lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # File included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code .= "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contain harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
+
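+  # Each line read below follows one of the --trace formats requested
+  # above: "file::macro-name" or "file::macro-name::arguments".  A
+  # hypothetical example: "m4/ax_foo.m4::AC_DEFUN::AX_FOO".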
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add looks like outside the project, copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+# creates aclocal.m4t and then renames it to aclocal.m4, but the
+# rebuild rules generated by Automake create aclocal.m4 directly --
+# this would give two ways to get the same file, with a different
+  # name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings (default)
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, adds any directory listed in the 'dirlist' file.
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some file have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directory specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options nor AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/aclocal-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/aclocal-1.14
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/aclocal-1.14
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 file in $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order into which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in first files we have scanned should override those from
+# later files.  So they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted , we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directory containing extra m4 files for macros definition,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # reraise default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # the destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However working around this is not
+	      # worth the trouble since nobody runs aclocal on a
+	      # read-only tree anyway.
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  This makes the 'm4'
+      # directory to occur twice here and fail on the second call to
+      # scan_m4_dirs([m4]) when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there be rather a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
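+
+# Illustration (hypothetical macro names): if %map contains AX_PTHREAD and
+# AM_INIT_AUTOMAKE, the eval above defines roughly
+#   sub search {
+#     my $found = 0;
+#     if (/\bAX_PTHREAD(?!\w)/) { add_macro ("AX_PTHREAD"); $found = 1; }
+#     if (/\bAM_INIT_AUTOMAKE(?!\w)/) { add_macro ("AM_INIT_AUTOMAKE"); $found = 1; }
+#     return $found;
+#   };
+# scan_configure_dep then calls &search with each input line in $_.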
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on white lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
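+
+# Illustration (hypothetical input): given lines such as
+#   AC_REQUIRE([AM_PROG_AR])dnl pick an archiver
+#   m4_include([m4/extra.m4])
+# in configure.ac or an included file, the dnl comment is stripped first,
+# AM_PROG_AR ends up in @rlist and is passed to add_macro, and m4/extra.m4
+# ends up in @ilist and is scanned recursively.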
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from an M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # Files included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
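+
+# Illustration (hypothetical file names): if %FILES holds both 'm4/local.m4'
+# and 'm4/ax_pthread.m4', and %file_includes records that m4/local.m4
+# already m4_include's m4/ax_pthread.m4, then only m4/local.m4 survives:
+#   my %keep = strip_redundant_includes ('m4/local.m4' => 1,
+#                                        'm4/ax_pthread.m4' => 1);
+# otherwise ax_pthread.m4 would end up in aclocal.m4 twice.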
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code .= "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contain harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
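+
+# Illustration: each line read from the autom4te pipe has the form
+# FILE::MACRO[::ARGS...], for instance (hypothetical paths)
+#   configure.ac::AC_CONFIG_MACRO_DIR_TRACE::m4
+#   m4/ax_pthread.m4::AC_DEFUN::AX_PTHREAD::...definition body...
+# split (/::/) yields ($file, $macro, $arg1, ...); only the first three
+# fields are examined above.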
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add looks like it is outside the project, copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+  # creates aclocal.m4t and then renames it to aclocal.m4, but the
+  # rebuild rules generated by Automake create aclocal.m4 directly --
+  # this would give two ways to get the same file, with a different
+  # name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in a loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
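+
+# Illustration (hypothetical layout): for a tree using 'ACLOCAL_AMFLAGS = -I m4'
+# with a local m4/ax_pthread.m4, the generated aclocal.m4 references the file
+# instead of copying it:
+#   m4_include([m4/ax_pthread.m4])
+# while macros found outside the project (Automake's own, or system-wide ones
+# when --install is not used) are copied into aclocal.m4 verbatim.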
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, add any directory listed in the 'dirlist' file.
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
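+
+# Illustration (hypothetical contents): a 'dirlist' file in the system acdir
+# may list extra directories, one glob pattern per line:
+#   /usr/local/share/aclocal
+#   /opt/*/share/aclocal    # vendor-provided macros
+# every matched directory that exists is appended to @system_includes.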
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
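+
+# Illustration (hypothetical paths): with
+#   ACLOCAL_PATH=/opt/m4:/usr/local/share/aclocal
+# and @system_includes initially ('/usr/share/aclocal'), the search order
+# becomes
+#   /opt/m4  /usr/local/share/aclocal  /usr/share/aclocal
+# (empty entries and non-directories are skipped).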
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some files have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directory specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options or AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoconf
new file mode 100755
index 0000000..c27f59b
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoconf
@@ -0,0 +1,500 @@
+#! /bin/sh
+# Generated from autoconf.in; do not edit by hand.
+# autoconf -- create `configure' using m4 macros
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1999, 2000, 2001, 2002, 2003,
+# 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+# Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+
+# as_fn_error STATUS ERROR
+# ------------------------
+# Output "`basename $0`: error: ERROR" to stderr. Then exit the script with
+# STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+usage="\
+Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Generate a configuration script from a TEMPLATE-FILE if given, or
+\`configure.ac' if present, or else \`configure.in'.  Output is sent
+to the standard output if TEMPLATE-FILE is given, else into
+\`configure'.
+
+Operation modes:
+  -h, --help                print this help, then exit
+  -V, --version             print version number, then exit
+  -v, --verbose             verbosely report processing
+  -d, --debug               don't remove temporary files
+  -f, --force               consider all files obsolete
+  -o, --output=FILE         save output in FILE (stdout is the default)
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY [syntax]
+
+Warning categories include:
+  \`cross'         cross compilation issues
+  \`obsolete'      obsolete constructs
+  \`syntax'        dubious syntactic constructs
+  \`all'           all the warnings
+  \`no-CATEGORY'   turn off the warnings on CATEGORY
+  \`none'          turn off all the warnings
+  \`error'         warnings are error
+
+The environment variables \`M4' and \`WARNINGS' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the list of calls to MACRO
+  -i, --initialization        also trace Autoconf's initialization process
+
+In tracing mode, no configuration script is created.  FORMAT defaults
+to \`\$f:\$l:\$n:\$%'; see \`autom4te --help' for information about FORMAT.
+
+Report bugs to <bug-autoconf@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>."
+
+version="\
+autoconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille."
+
+help="\
+Try \`$as_me --help' for more information."
+
+exit_missing_arg='
+  as_fn_error $? "option \`$1'\'' requires an argument$as_nl$help"'
+# restore font-lock: '
+
+# Variables.
+: ${AUTOM4TE='/x86_64-apple-darwin/bin/autom4te'}
+autom4te_options=
+outfile=
+verbose=false
+
+# Parse command line.
+while test $# -gt 0 ; do
+  option=`expr "x$1" : 'x\(--[^=]*\)' \| \
+	       "x$1" : 'x\(-.\)'`
+  optarg=`expr "x$1" : 'x--[^=]*=\(.*\)' \| \
+	       "x$1" : 'x-.\(.*\)'`
+  case $1 in
+    --version | -V )
+       echo "$version" ; exit ;;
+    --help | -h )
+       $as_echo "$usage"; exit ;;
+
+    --verbose | -v )
+       verbose=:
+       autom4te_options="$autom4te_options $1"; shift ;;
+
+    # Arguments passed as is to autom4te.
+    --debug      | -d   | \
+    --force      | -f   | \
+    --include=*  | -I?* | \
+    --prepend-include=* | -B?* | \
+    --warnings=* | -W?* )
+       case $1 in
+	 *\'*) arg=`$as_echo "$1" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$1 ;;
+       esac
+       autom4te_options="$autom4te_options '$arg'"; shift ;;
+    # Options with separated arg passed as is to autom4te.
+    --include  | -I | \
+    --prepend-include  | -B | \
+    --warnings | -W )
+       test $# = 1 && eval "$exit_missing_arg"
+       case $2 in
+	 *\'*) arg=`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$2 ;;
+       esac
+       autom4te_options="$autom4te_options $option '$arg'"
+       shift; shift ;;
+
+    --trace=* | -t?* )
+       traces="$traces --trace='"`$as_echo "$optarg" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift ;;
+    --trace | -t )
+       test $# = 1 && eval "$exit_missing_arg"
+       traces="$traces --trace='"`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift; shift ;;
+    --initialization | -i )
+       autom4te_options="$autom4te_options --melt"
+       shift;;
+
+    --output=* | -o?* )
+       outfile=$optarg
+       shift ;;
+    --output | -o )
+       test $# = 1 && eval "$exit_missing_arg"
+       outfile=$2
+       shift; shift ;;
+
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       exec >&2
+       as_fn_error $? "invalid option \`$1'$as_nl$help" ;; #`
+    * )
+       break ;;
+  esac
+done
+
+# Find the input file.
+case $# in
+  0)
+    if test -f configure.ac; then
+      if test -f configure.in; then
+	$as_echo "$as_me: warning: both \`configure.ac' and \`configure.in' are present." >&2
+	$as_echo "$as_me: warning: proceeding with \`configure.ac'." >&2
+      fi
+      infile=configure.ac
+    elif test -f configure.in; then
+      infile=configure.in
+    else
+      as_fn_error $? "no input file"
+    fi
+    test -z "$traces" && test -z "$outfile" && outfile=configure;;
+  1)
+    infile=$1 ;;
+  *) exec >&2
+     as_fn_error $? "invalid number of arguments$as_nl$help" ;;
+esac
+
+# Unless specified, the output is stdout.
+test -z "$outfile" && outfile=-
+
+# Run autom4te with expansion.
+eval set x "$autom4te_options" \
+  --language=autoconf --output=\"\$outfile\" "$traces" \"\$infile\"
+shift
+$verbose && $as_echo "$as_me: running $AUTOM4TE $*" >&2
+exec "$AUTOM4TE" "$@"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoheader b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoheader
new file mode 100755
index 0000000..aa7f7dc
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoheader
@@ -0,0 +1,304 @@
+#! /usr/bin/perl
+# -*- Perl -*-
+# Generated from autoheader.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoheader -- create `config.h.in' from `configure.ac'
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Roland McGrath.
+# Rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, "$pkgdatadir";
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use strict;
+
+# Using `do FILE', we need `local' vars.
+use vars qw ($config_h %verbatim %symbol);
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/x86_64-apple-darwin/bin/autom4te';
+local $config_h;
+my $config_h_in;
+my @prepend_include;
+my @include;
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Create a template file of C \`\#define\' statements for \`configure\' to
+use.  To this end, scan TEMPLATE-FILE, or \`configure.ac\' if present,
+or else \`configure.in\'.
+
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -f, --force              consider all files obsolete
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+" . Autom4te::ChannelDefs::usage () . "
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "autoheader (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Roland McGrath and Akim Demaille.
+";
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  parse_WARNINGS;
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'W|warnings=s'        => \&parse_warnings);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('ah');
+switch_warning 'obsolete';
+parse_args;
+
+# Preach.
+my $config_h_top = find_file ("config.h.top?",
+			      reverse (@prepend_include), @include);
+my $config_h_bot = find_file ("config.h.bot?",
+			      reverse (@prepend_include), @include);
+my $acconfig_h = find_file ("acconfig.h?",
+			    reverse (@prepend_include), @include);
+if ($config_h_top || $config_h_bot || $acconfig_h)
+  {
+    my $msg = << "END";
+    Using auxiliary files such as \`acconfig.h\', \`config.h.bot\'
+    and \`config.h.top\', to define templates for \`config.h.in\'
+    is deprecated and discouraged.
+
+    Using the third argument of \`AC_DEFINE\' and
+    \`AC_DEFINE_UNQUOTED\' allows one to define a template without
+    \`acconfig.h\':
+
+      AC_DEFINE([NEED_FUNC_MAIN], 1,
+		[Define if a function \`main\' is needed.])
+
+    More sophisticated templates can also be produced, see the
+    documentation.
+END
+    $msg =~ s/^    /WARNING: /gm;
+    msg 'obsolete', $msg;
+  }
+
+# Set up autoconf.
+my $autoconf = "'$autom4te' --language=autoconf ";
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+$autoconf .= ' --debug' if $debug;
+$autoconf .= ' --force' if $force;
+$autoconf .= ' --verbose' if $verbose;
+
+# ----------------------- #
+# Real work starts here.  #
+# ----------------------- #
+
+# Source what the traces are trying to tell us.
+verb "$me: running $autoconf to trace from $ARGV[0]";
+my $quoted_tmp = shell_quote ($tmp);
+xsystem ("$autoconf"
+	 # If you change this list, update the
+	 # `Autoheader-preselections' section of autom4te.in.
+	 . ' --trace AC_CONFIG_HEADERS:\'$$config_h ||= \'"\'"\'$1\'"\'"\';\''
+	 . ' --trace AH_OUTPUT:\'$$verbatim{\'"\'"\'$1\'"\'"\'} = \'"\'"\'$2\'"\'"\';\''
+	 . ' --trace AC_DEFINE_TRACE_LITERAL:\'$$symbol{\'"\'"\'$1\'"\'"\'} = 1;\''
+	 . " " . shell_quote ($ARGV[0]) . " >$quoted_tmp/traces.pl");
+
+local (%verbatim, %symbol);
+debug "$me: \`do'ing $tmp/traces.pl:\n" . `sed 's/^/| /' $quoted_tmp/traces.pl`;
+do "$tmp/traces.pl";
+warn "couldn't parse $tmp/traces.pl: $@" if $@;
+unless ($config_h)
+  {
+    error "error: AC_CONFIG_HEADERS not found in $ARGV[0]";
+    exit 1;
+  }
+
+# We template only the first CONFIG_HEADER.
+$config_h =~ s/ .*//;
+# Support "outfile[:infile]", defaulting infile="outfile.in".
+($config_h, $config_h_in) = split (':', $config_h, 2);
+$config_h_in ||= "$config_h.in";
+
+# %SYMBOL might contain things like `F77_FUNC(name,NAME)', but we keep
+# only the name of the macro.
+%symbol = map { s/\(.*//; $_ => 1 } keys %symbol;
+
+my $out = new Autom4te::XFile ("> " . open_quote ("$tmp/config.hin"));
+
+# Don't write "do not edit" -- it will get copied into the
+# config.h, which it's ok to edit.
+print $out "/* $config_h_in.  Generated from $ARGV[0] by autoheader.  */\n";
+
+# Dump the top.
+if ($config_h_top)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_top));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+# Dump `acconfig.h', except for its bottom portion.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    while ($_ = $in->getline)
+      {
+	last if /\@BOTTOM\@/;
+	next if /\@TOP\@/;
+	print $out $_;
+      }
+  }
+
+# Dump the templates from `configure.ac'.
+foreach (sort keys %verbatim)
+  {
+    print $out "\n$verbatim{$_}\n";
+  }
+
+# Dump bottom portion of `acconfig.h'.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    my $dump = 0;
+    while ($_ = $in->getline)
+      {
+	print $out $_ if $dump;
+	$dump = 1  if /\@BOTTOM\@/;
+      }
+  }
+
+# Dump the bottom.
+if ($config_h_bot)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_bot));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+$out->close;
+
+# Check that all the symbols have a template.
+{
+  my $in = new Autom4te::XFile ("< " . open_quote ("$tmp/config.hin"));
+  my $suggest_ac_define = 1;
+  while ($_ = $in->getline)
+    {
+      my ($symbol) = /^\#\s*\w+\s+(\w+)/
+	or next;
+      delete $symbol{$symbol};
+    }
+  foreach (sort keys %symbol)
+    {
+      msg 'syntax', "warning: missing template: $_";
+      if ($suggest_ac_define)
+	{
+	  msg 'syntax',  "Use AC_DEFINE([$_], [], [Description])";
+	  $suggest_ac_define = 0;
+	}
+
+    }
+  exit 1
+    if keys %symbol;
+}
+
+update_file ("$tmp/config.hin", "$config_h_in", $force);
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autom4te b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autom4te
new file mode 100755
index 0000000..d112913
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autom4te
@@ -0,0 +1,1075 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autom4te.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autom4te - Wrapper around M4 libraries.
+# Copyright (C) 2001, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::C4che;
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Data directory.
+my $pkgdatadir = $ENV{'AC_MACRODIR'} || '//share/autoconf';
+
+# $LANGUAGE{LANGUAGE} -- Automatic options for LANGUAGE.
+my %language;
+
+my $output = '-';
+
+# Mode of the output file except for traces.
+my $mode = "0666";
+
+# If melt, don't use frozen files.
+my $melt = 0;
+
+# Names of the cache directory, cache directory index, trace cache
+# prefix, and output cache prefix.  And the IO object for the index.
+my $cache;
+my $icache;
+my $tcache;
+my $ocache;
+my $icache_file;
+
+my $flock_implemented = 'yes';
+
+# The macros to trace mapped to their format, as specified by the
+# user.
+my %trace;
+
+# The macros the user will want to trace in the future.
+# We need `include' to get the included file, `m4_pattern_forbid' and
+# `m4_pattern_allow' to check the output.
+#
+# FIXME: What about `sinclude'?
+my @preselect = ('include',
+		 'm4_pattern_allow', 'm4_pattern_forbid',
+		 '_m4_warn');
+
+# M4 include path.
+my @include;
+
+# Do we freeze?
+my $freeze = 0;
+
+# $M4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+# Some non-GNU m4's don't reject the --help option, so give them /dev/null.
+fatal "need GNU m4 1.4 or later: $m4"
+  if system "$m4 --help </dev/null 2>&1 | grep reload-state >/dev/null";
+
+# Set some high recursion limit as the default limit, 250, has already
+# been hit with AC_OUTPUT.  Don't override the user's choice.
+$m4 .= ' --nesting-limit=1024'
+  if " $m4 " !~ / (--nesting-limit(=[0-9]+)?|-L[0-9]*) /;
+
+
+# @M4_BUILTIN -- M4 builtins and a useful comment.
+my @m4_builtin = `echo dumpdef | $m4 2>&1 >/dev/null`;
+map { s/:.*//;s/\W// } @m4_builtin;
+
+
+# %M4_BUILTIN_ALTERNATE_NAME
+# --------------------------
+# The builtins are renamed, e.g., `define' is renamed `m4_define'.
+# So map `define' to `m4_define' and conversely.
+# Some macros don't follow this scheme: be sure to properly map to their
+# alternate name too.
+#
+# FIXME: Trace status of renamed builtins was fixed in M4 1.4.5, which
+# we now depend on; do we still need to do this mapping?
+#
+# So we will merge them, i.e., tracing `BUILTIN' or tracing
+# `m4_BUILTIN' will be the same: tracing both, but honoring the
+# *last* trace specification.
+#
+# FIXME: This is not enough: in the output `$0' will be `BUILTIN'
+# sometimes and `m4_BUILTIN' at others.  We should return a unique name,
+# the one specified by the user.
+#
+# FIXME: To be absolutely rigorous, I would say that given that we
+# _redefine_ divert (instead of _copying_ it), divert and the like
+# should not be part of this list.
+my %m4_builtin_alternate_name;
+@m4_builtin_alternate_name{"$_", "m4_$_"} = ("m4_$_", "$_")
+  foreach (grep { !/m4wrap|m4exit|dnl|ifelse|__.*__/ } @m4_builtin);
+@m4_builtin_alternate_name{"ifelse", "m4_if"}   = ("m4_if", "ifelse");
+@m4_builtin_alternate_name{"m4exit", "m4_exit"} = ("m4_exit", "m4exit");
+@m4_builtin_alternate_name{"m4wrap", "m4_wrap"} = ("m4_wrap", "m4wrap");
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILES]
+
+Run GNU M4 on the FILES, avoiding useless runs.  Output the traces if tracing,
+the frozen file if freezing, otherwise the expansion of the FILES.
+
+If some of the FILES are named \`FILE.m4f\' they are considered to be M4
+frozen files of all the previous files (which are therefore not loaded).
+If \`FILE.m4f\' is not found, then \`FILE.m4\' will be used, together with
+all the previous files.
+
+Some files may be optional, i.e., will only be processed if found in the
+include path, but then must end in \`.m4?\';  the question mark is not part of
+the actual file name.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -o, --output=FILE        save output in FILE (defaults to \`-\', stdout)
+  -f, --force              don\'t rely on cached values
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+  -l, --language=LANG      specify the set of M4 macros to use
+  -C, --cache=DIRECTORY    preserve results for future runs in DIRECTORY
+      --no-cache           disable the cache
+  -m, --mode=OCTAL         change the non-trace output file mode (0666)
+  -M, --melt               don\'t use M4 frozen files
+
+Languages include:
+  \`Autoconf\'   create Autoconf configure scripts
+  \`Autotest\'   create Autotest test suites
+  \`M4sh\'       create M4sh shell scripts
+  \`M4sugar\'    create M4sugar output
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variables \`M4\' and \`WARNINGS\' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the MACRO invocations
+  -p, --preselect=MACRO       prepare to trace MACRO in a future run
+
+Freezing:
+  -F, --freeze   produce an M4 frozen state file for FILES
+
+FORMAT defaults to \`\$f:\$l:\$n:\$%\', and can use the following escapes:
+  \$\$     literal \$
+  \$f     file where macro was called
+  \$l     line where macro was called
+  \$d     nesting depth of macro call
+  \$n     name of the macro
+  \$NUM   argument NUM, unquoted and with newlines
+  \$SEP\@  all arguments, with newlines, quoted, and separated by SEP
+  \$SEP*  all arguments, with newlines, unquoted, and separated by SEP
+  \$SEP%  all arguments, without newlines, unquoted, and separated by SEP
+SEP can be empty for the default (comma for \@ and *, colon for %),
+a single character for that character, or {STRING} to use a string.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
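+# As a worked example of the FORMAT escapes documented above (hypothetical
+# invocation): `autom4te -t AC_SUBST:'$f:$l:$1' configure.ac' would print one
+# line per AC_SUBST call, e.g. `configure.ac:3:exec_prefix'.
+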
+# $VERSION
+# --------
+$version =  <<"EOF";
+autom4te (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Akim Demaille.
+EOF
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# $OPTION
+# files_to_options (@FILE)
+# ------------------------
+# Transform Autom4te conventions (e.g., using foo.m4f to designate a frozen
+# file) into a suitable command line for M4 (e.g., using --reload-state).
+# parse_args guarantees that we will see at most one frozen file, and that
+# if a frozen file is present, it is the first argument.
+sub files_to_options (@)
+{
+  my (@file) = @_;
+  my @res;
+  foreach my $file (@file)
+    {
+      my $arg = shell_quote ($file);
+      if ($file =~ /\.m4f$/)
+	{
+	  $arg = "--reload-state=$arg";
+	  # If the user downgraded M4 from 1.6 to 1.4.x after freezing
+	  # the file, then we ensure the frozen __m4_version__ will
+	  # not cause m4_init to make the wrong decision about the
+	  # current M4 version.
+	  $arg .= " --undefine=__m4_version__"
+	    unless grep {/__m4_version__/} @m4_builtin;
+	}
+      push @res, $arg;
+    }
+  return join ' ', @res;
+}
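+
+# For example (with made-up file names), files_to_options ('aclocal.m4f',
+# 'foo.m4') would return "--reload-state=aclocal.m4f foo.m4", possibly with
+# an extra --undefine=__m4_version__ as described above.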
+
+
+# load_configuration ($FILE)
+# --------------------------
+# Load the configuration $FILE.
+sub load_configuration ($)
+{
+  my ($file) = @_;
+  use Text::ParseWords;
+
+  my $cfg = new Autom4te::XFile ("< " . open_quote ($file));
+  my $lang;
+  while ($_ = $cfg->getline)
+    {
+      chomp;
+      # Comments.
+      next
+	if /^\s*(\#.*)?$/;
+
+      my @words = shellwords ($_);
+      my $type = shift @words;
+      if ($type eq 'begin-language:')
+	{
+	  fatal "$file:$.: end-language missing for: $lang"
+	    if defined $lang;
+	  $lang = lc $words[0];
+	}
+      elsif ($type eq 'end-language:')
+	{
+	  error "$file:$.: end-language mismatch: $lang"
+	    if $lang ne lc $words[0];
+	  $lang = undef;
+	}
+      elsif ($type eq 'args:')
+	{
+	  fatal "$file:$.: no current language"
+	    unless defined $lang;
+	  push @{$language{$lang}}, @words;
+	}
+      else
+	{
+	  error "$file:$.: unknown directive: $type";
+	}
+    }
+}
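+
+# The directives recognized above give configuration stanzas of the form
+# (hypothetical example):
+#
+#   begin-language: "Autotest"
+#   args: --include /share/autoconf
+#   args: autotest/autotest.m4f
+#   end-language: "Autotest"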
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  # We want to look for the early options, which should not be found
+  # in the configuration file.  Prepend to the user arguments.
+  # Perform this repeatedly so that we can use --language in language
+  # definitions.  Beware that there can be several --language
+  # invocations.
+  my @language;
+  do {
+    @language = ();
+    use Getopt::Long;
+    Getopt::Long::Configure ("pass_through", "permute");
+    GetOptions ("l|language=s" => \@language);
+
+    foreach (@language)
+      {
+	error "unknown language: $_"
+	  unless exists $language{lc $_};
+	unshift @ARGV, @{$language{lc $_}};
+      }
+  } while @language;
+
+  # --debug is useless: it is parsed below.
+  if (exists $ENV{'AUTOM4TE_DEBUG'})
+    {
+      print STDERR "$me: concrete arguments:\n";
+      foreach my $arg (@ARGV)
+	{
+	  print STDERR "| $arg\n";
+	}
+    }
+
+  # Process the arguments for real this time.
+  my @trace;
+  my @prepend_include;
+  parse_WARNINGS;
+  getopt
+    (
+     # Operation modes:
+     "o|output=s"   => \$output,
+     "W|warnings=s" => \&parse_warnings,
+     "m|mode=s"     => \$mode,
+     "M|melt"       => \$melt,
+
+     # Library directories:
+     "B|prepend-include=s" => \@prepend_include,
+     "I|include=s"         => \@include,
+
+     # Tracing:
+     # Using a hash for traces is tempting.  Unfortunately, upon `-t FOO',
+     # instead of mapping `FOO' to undef, Getopt maps it to `1', preventing
+     # us from distinguishing `-t FOO' from `-t FOO=1'.  So let's do it
+     # by hand.
+     "t|trace=s"     => \@trace,
+     "p|preselect=s" => \@preselect,
+
+     # Freezing.
+     "F|freeze" => \$freeze,
+
+     # Caching.
+     "C|cache=s" => \$cache,
+     "no-cache"  => sub { $cache = undef; },
+    );
+
+  fatal "too few arguments
+Try `$me --help' for more information."
+    unless @ARGV;
+
+  # Freezing:
+  # We cannot trace at the same time (well, we can, but it sounds insane).
+  # And it implies melting: there is risk not to update properly using
+  # old frozen files, and worse yet: we could load a frozen file and
+  # refreeze it!  A sort of caching :)
+  fatal "cannot freeze and trace"
+    if $freeze && @trace;
+  $melt = 1
+    if $freeze;
+
+  # Names of the cache directory, cache directory index, trace cache
+  # prefix, and output cache prefix.  If the cache is not to be
+  # preserved, default to a temporary directory (automatically removed
+  # on exit).
+  $cache = $tmp
+    unless $cache;
+  $icache = "$cache/requests";
+  $tcache = "$cache/traces.";
+  $ocache = "$cache/output.";
+
+  # Normalize the includes: the first occurrence is enough; several are
+  # a pain since they introduce a useless difference in the path, which
+  # invalidates the cache.  And strip `.', which is implicit and always
+  # first.
+  @include = grep { !/^\.$/ } uniq (reverse(@prepend_include), @include);
+
+  # Convert @trace to %trace, and work around the M4 builtins tracing
+  # problem.
+  # The default format is `$f:$l:$n:$%'.
+  foreach (@trace)
+    {
+      /^([^:]+)(?::(.*))?$/ms;
+      $trace{$1} = defined $2 ? $2 : '$f:$l:$n:$%';
+      $trace{$m4_builtin_alternate_name{$1}} = $trace{$1}
+	if exists $m4_builtin_alternate_name{$1};
+    }
+
+  # Work around the M4 builtins tracing problem for @PRESELECT.
+  # FIXME: Is this still needed, now that we rely on M4 1.4.5?
+  push (@preselect,
+	map { $m4_builtin_alternate_name{$_} }
+	grep { exists $m4_builtin_alternate_name{$_} } @preselect);
+
+  # If we find a frozen file, then all the files before it are
+  # discarded: the frozen file is supposed to include them all.
+  #
+  # We don't want to depend upon m4's --include to find the top level
+  # files, so we use `find_file' here.  Try to get a canonical name,
+  # as it's part of the key for caching.  And some files are optional
+  # (also handled by `find_file').
+  my @argv;
+  foreach (@ARGV)
+    {
+      if ($_ eq '-')
+	{
+	  push @argv, $_;
+	}
+      elsif (/\.m4f$/)
+	{
+	  # Frozen files are optional => pass a `?' to `find_file'.
+	  my $file = find_file ("$_?", @include);
+	  if (!$melt && $file)
+	    {
+	      @argv = ($file);
+	    }
+	  else
+	    {
+	      s/\.m4f$/.m4/;
+	      push @argv, find_file ($_, @include);
+	    }
+	}
+      else
+	{
+	  my $file = find_file ($_, @include);
+	  push @argv, $file
+	    if $file;
+	}
+    }
+  @ARGV = @argv;
+}
+
+
+# handle_m4 ($REQ, @MACRO)
+# ------------------------
+# Run m4 on the input files, and save the traces on the @MACRO.
+sub handle_m4 ($@)
+{
+  my ($req, @macro) = @_;
+
+  # GNU m4 appends when using --debugfile/--error-output.
+  unlink ($tcache . $req->id . "t");
+
+  # Run m4.
+  #
+  # We don't output directly to the cache files, to avoid problems
+  # when we are interrupted (that leaves corrupted files).
+  xsystem ("$m4 "
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . ' --debug=aflq'
+	   . (!exists $ENV{'AUTOM4TE_NO_FATAL'} ? ' --fatal-warning' : '')
+	   . " --error-output=" . shell_quote ("$tcache" . $req->id . "t")
+	   . join (' --trace=', '', map { shell_quote ($_) } sort @macro)
+	   . " " . files_to_options (@ARGV)
+	   . " > " . shell_quote ("$ocache" . $req->id . "t"));
+
+  # Everything went ok: preserve the outputs.
+  foreach my $file (map { $_ . $req->id } ($tcache, $ocache))
+    {
+      use File::Copy;
+      move ("${file}t", "$file")
+	or fatal "cannot rename ${file}t as $file: $!";
+    }
+}
+
+
+# warn_forbidden ($WHERE, $WORD, %FORBIDDEN)
+# ------------------------------------------
+# $WORD is forbidden.  Warn with a dedicated error message if in
+# %FORBIDDEN, otherwise a simple `error: possibly undefined macro'
+# will do.
+my $first_warn_forbidden = 1;
+sub warn_forbidden ($$%)
+{
+  my ($where, $word, %forbidden) = @_;
+  my $message;
+
+  for my $re (sort keys %forbidden)
+    {
+      if ($word =~ $re)
+	{
+	  $message = $forbidden{$re};
+	  last;
+	}
+    }
+  $message ||= "possibly undefined macro: $word";
+  warn "$where: error: $message\n";
+  if ($first_warn_forbidden)
+    {
+      warn <<EOF;
+      If this token and others are legitimate, please use m4_pattern_allow.
+      See the Autoconf documentation.
+EOF
+      $first_warn_forbidden = 0;
+    }
+}
+
+
+# handle_output ($REQ, $OUTPUT)
+# -----------------------------
+# Run m4 on the input files, perform quadrigraphs substitution, check for
+# forbidden tokens, and save into $OUTPUT.
+sub handle_output ($$)
+{
+  my ($req, $output) = @_;
+
+  verb "creating $output";
+
+  # Load the forbidden/allowed patterns.
+  handle_traces ($req, "$tmp/patterns",
+		 ('m4_pattern_forbid' => 'forbid:$1:$2',
+		  'm4_pattern_allow'  => 'allow:$1'));
+  my @patterns = new Autom4te::XFile ("< " . open_quote ("$tmp/patterns"))->getlines;
+  chomp @patterns;
+  my %forbidden =
+    map { /^forbid:([^:]+):.+$/ => /^forbid:[^:]+:(.+)$/ } @patterns;
+  my $forbidden = join ('|', map { /^forbid:([^:]+)/ } @patterns) || "^\$";
+  my $allowed   = join ('|', map { /^allow:([^:]+)/  } @patterns) || "^\$";
+
+  verb "forbidden tokens: $forbidden";
+  verb "forbidden token : $_ => $forbidden{$_}"
+    foreach (sort keys %forbidden);
+  verb "allowed   tokens: $allowed";
+
+  # Read the (cached) raw M4 output, produce the actual result.  We
+  # have to use the 2nd arg to have Autom4te::XFile honor the third, but then
+  # stdout is to be handled by hand :(.  Don't use fdopen as it means
+  # we will close STDOUT, which we already do in END.
+  my $out = new Autom4te::XFile;
+  if ($output eq '-')
+    {
+      $out->open (">$output");
+    }
+  else
+    {
+      $out->open($output, O_CREAT | O_WRONLY | O_TRUNC, oct ($mode));
+    }
+  fatal "cannot create $output: $!"
+    unless $out;
+  my $in = new Autom4te::XFile ("< " . open_quote ($ocache . $req->id));
+
+  my %prohibited;
+  my $res;
+  while ($_ = $in->getline)
+    {
+      s/\s+$//;
+      s/__oline__/$./g;
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+
+      $res = $_;
+
+      # Don't complain in comments.  Well, until we have something
+      # better, don't consider `#include' etc. to be comments.
+      s/\#.*//
+	unless /^\#\s*(if|include|endif|ifdef|ifndef|define)\b/;
+      foreach (split (/\W+/))
+	{
+	  $prohibited{$_} = $.
+	    if !/^$/ && /$forbidden/o && !/$allowed/o && ! exists $prohibited{$_};
+	}
+
+      # Performed *last*: the empty quadrigraph.
+      $res =~ s/\@&t\@//g;
+
+      print $out "$res\n";
+    }
+
+  $out->close();
+
+  # If no forbidden words, we're done.
+  return
+    if ! %prohibited;
+
+  # Locate the forbidden words in the last input file.
+  # This is unsatisfying but...
+  $exit_code = 1;
+  if ($ARGV[$#ARGV] ne '-')
+    {
+      my $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+      my $file = new Autom4te::XFile ("< " . open_quote ($ARGV[$#ARGV]));
+
+      while ($_ = $file->getline)
+	{
+	  # Don't complain in comments.  Well, until we have something
+	  # better, don't consider `#include' etc. to be comments.
+	  s/\#.*//
+	    unless /^\#(if|include|endif|ifdef|ifndef|define)\b/;
+
+	  # Complain once per word, but possibly several times per line.
+	  while (/$prohibited/)
+	    {
+	      my $word = $1;
+	      warn_forbidden ("$ARGV[$#ARGV]:$.", $word, %forbidden);
+	      delete $prohibited{$word};
+	      # If we're done, exit.
+	      return
+		if ! %prohibited;
+	      $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+	    }
+	}
+    }
+  warn_forbidden ("$output:$prohibited{$_}", $_, %forbidden)
+    foreach (sort { $prohibited{$a} <=> $prohibited{$b} } keys %prohibited);
+}
+
+
+## --------------------- ##
+## Handling the traces.  ##
+## --------------------- ##
+
+
+# $M4_MACRO
+# trace_format_to_m4 ($FORMAT)
+# ----------------------------
+# Convert a trace $FORMAT into a M4 trace processing macro's body.
+sub trace_format_to_m4 ($)
+{
+  my ($format) = @_;
+  my $underscore = $_;
+  my %escape = (# File name.
+		'f' => '$1',
+		# Line number.
+		'l' => '$2',
+		# Depth.
+		'd' => '$3',
+		# Name (also available as $0).
+		'n' => '$4',
+		# Escaped dollar.
+		'$' => '$');
+
+  my $res = '';
+  $_ = $format;
+  while ($_)
+    {
+      # $n -> $(n + 4)
+      if (s/^\$(\d+)//)
+	{
+	  $res .= "\$" . ($1 + 4);
+	}
+      # $x, no separator given.
+      elsif (s/^\$([fldn\$])//)
+	{
+	  $res .= $escape{$1};
+	}
+      # $.x or ${sep}x.
+      elsif (s/^\$\{([^}]*)\}([@*%])//
+	    || s/^\$(.?)([@*%])//)
+	{
+	  # $@, list of quoted effective arguments.
+	  if ($2 eq '@')
+	    {
+	      $res .= ']at_at([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $*, list of unquoted effective arguments.
+	  elsif ($2 eq '*')
+	    {
+	      $res .= ']at_star([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $%, list of flattened unquoted effective arguments.
+	  elsif ($2 eq '%')
+	    {
+	      $res .= ']at_percent([' . ($1 ? $1 : ':') . '], $@)[';
+	    }
+	}
+      elsif (/^(\$.)/)
+	{
+	  error "invalid escape: $1";
+	}
+      else
+	{
+	  s/^([^\$]+)//;
+	  $res .= $1;
+	}
+    }
+
+  $_ = $underscore;
+  return '[[' . $res . ']]';
+}
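+
+# For instance, the default format `$f:$l:$n:$%' should come out of this
+# function roughly as [[$1:$2:$4:]at_percent([:], $@)[]]; the at_* helpers
+# are defined by handle_traces below.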
+
+
+# handle_traces($REQ, $OUTPUT, %TRACE)
+# ------------------------------------
+# We use M4 itself to process the traces.  But to avoid name clashes when
+# processing the traces, the builtins are disabled, and moved into `at_'.
+# Actually, all the low level processing macros are in `at_' (and `_at_').
+# To avoid clashes between user macros and `at_' macros, the macros which
+# implement tracing are in `AT_'.
+#
+# Having $REQ is needed to neutralize the macros which have been traced,
+# but are not wanted now.
+sub handle_traces ($$%)
+{
+  my ($req, $output, %trace) = @_;
+
+  verb "formatting traces for `$output': " . join (', ', sort keys %trace);
+
+  # Processing the traces.
+  my $trace_m4 = new Autom4te::XFile ("> " . open_quote ("$tmp/traces.m4"));
+
+  $_ = <<'EOF';
+  divert(-1)
+  changequote([, ])
+  # _at_MODE(SEPARATOR, ELT1, ELT2...)
+  # ----------------------------------
+  # List the elements, separating them with SEPARATOR.
+  # MODE can be:
+  #  `at'       -- the elements are enclosed in brackets.
+  #  `star'     -- the elements are listed as they are.
+  #  `percent'  -- the elements are `flattened': runs of white space are
+  #                collapsed to single spaces, and no newline remains.
+  define([_at_at],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[[$2]]],
+	     [[[$2]][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_percent],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [at_flatten([$2])],
+	     [at_flatten([$2])[$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_star],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[$2]],
+	     [[$2][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  # FLATTEN quotes its result.
+  # Note that the second pattern is `newline, tab or space'.  Don't lose
+  # the tab!
+  define([at_flatten],
+  [at_patsubst(at_patsubst([[[$1]]], [\\\n]), [[\n\t ]+], [ ])])
+
+  define([at_args],    [at_shift(at_shift(at_shift(at_shift(at_shift($@)))))])
+  define([at_at],      [_$0([$1], at_args($@))])
+  define([at_percent], [_$0([$1], at_args($@))])
+  define([at_star],    [_$0([$1], at_args($@))])
+
+EOF
+  s/^  //mg;s/\\t/\t/mg;s/\\n/\n/mg;
+  print $trace_m4 $_;
+
+  # If you trace `define', then on `define([m4_exit], defn([m4exit]))' you
+  # will produce
+  #
+  #    AT_define([m4sugar.m4], [115], [1], [define], [m4_exit], <m4exit>)
+  #
+  # Since `<m4exit>' is not quoted, the outer m4, when processing
+  # `trace.m4' will exit prematurely.  Hence, move all the builtins to
+  # the `at_' name space.
+
+  print $trace_m4 "# Copy the builtins.\n";
+  map { print $trace_m4 "define([at_$_], defn([$_]))\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+  print $trace_m4 "# Disable them.\n";
+  map { print $trace_m4 "at_undefine([$_])\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+
+  # Neutralize traces: we don't want traces of cached requests (%REQUEST).
+  print $trace_m4
+   "## -------------------------------------- ##\n",
+   "## By default neutralize all the traces.  ##\n",
+   "## -------------------------------------- ##\n",
+   "\n";
+  print $trace_m4 "at_define([AT_$_], [at_dnl])\n"
+    foreach (sort keys %{$req->macro});
+  print $trace_m4 "\n";
+
+  # Implement traces for current requests (%TRACE).
+  print $trace_m4
+    "## ------------------------- ##\n",
+    "## Trace processing macros.  ##\n",
+    "## ------------------------- ##\n",
+    "\n";
+  foreach (sort keys %trace)
+    {
+      # Trace requests can embed \n.
+      (my $comment = "Trace $_:$trace{$_}") =~ s/^/\# /;
+      print $trace_m4 "$comment\n";
+      print $trace_m4 "at_define([AT_$_],\n";
+      print $trace_m4 trace_format_to_m4 ($trace{$_}) . ")\n\n";
+    }
+  print $trace_m4 "\n";
+
+  # Reenable output.
+  print $trace_m4 "at_divert(0)at_dnl\n";
+
+  # Transform the traces from m4 into an m4 input file.
+  # Typically, transform:
+  #
+  # | m4trace:configure.ac:3: -1- AC_SUBST([exec_prefix], [NONE])
+  #
+  # into
+  #
+  # | AT_AC_SUBST([configure.ac], [3], [1], [AC_SUBST], [exec_prefix], [NONE])
+  #
+  # Pay attention that the file name might include colons, if under DOS
+  # for instance, so we don't use `[^:]+'.
+  my $traces = new Autom4te::XFile ("< " . open_quote ($tcache . $req->id));
+  while ($_ = $traces->getline)
+    {
+      # Trace with arguments, as in the example above.  We don't try
+      # to match the trailing parenthesis as it might be on a
+      # separate line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^(]+)\((.*)$}
+       {AT_$4([$1], [$2], [$3], [$4], $5};
+      # Traces without arguments, always on a single line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^)]*)\n$}
+       {AT_$4([$1], [$2], [$3], [$4])\n};
+      print $trace_m4 "$_";
+    }
+  $trace_m4->close;
+
+  my $in = new Autom4te::XFile ("$m4 " . shell_quote ("$tmp/traces.m4") . " |");
+  my $out = new Autom4te::XFile ("> " . open_quote ($output));
+
+  # This is dubious: should we really transform the quadrigraphs in
+  # traces?  It might break balanced [ ] etc. in the output.  The
+  # consensus seems to be that traces are more useful this way.
+  while ($_ = $in->getline)
+    {
+      # It makes no sense to try to transform __oline__.
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+      s/\@&t\@//g;
+      print $out $_;
+    }
+}
+
+
+# $BOOL
+# up_to_date ($REQ)
+# -----------------
+# Are the cache files of $REQ up to date?
+# $REQ is `valid' if it corresponds to the request and exists, which
+# does not mean it is up to date.  It is up to date if, in addition,
+# its files are younger than its dependencies.
+sub up_to_date ($)
+{
+  my ($req) = @_;
+
+  return 0
+    if ! $req->valid;
+
+  my $tfile = $tcache . $req->id;
+  my $ofile = $ocache . $req->id;
+
+  # We can't answer properly if the traces are not computed since we
+  # need to know what other files were included.  Actually, if any of
+  # the cache files is missing, we are not up to date.
+  return 0
+    if ! -f $tfile || ! -f $ofile;
+
+  # The cache files must be younger than the newest of the
+  # dependencies.
+  my $tmtime = mtime ($tfile);
+  my $omtime = mtime ($ofile);
+  my ($file, $mtime) = ($tmtime < $omtime
+			? ($ofile, $omtime) : ($tfile, $tmtime));
+
+  # We depend at least upon the arguments.
+  my @dep = @ARGV;
+
+  # stdin is always out of date.
+  if (grep { $_ eq '-' } @dep)
+    { return 0 }
+
+  # Files may include others.  We can use traces since we just checked
+  # if they are available.
+  handle_traces ($req, "$tmp/dependencies",
+		 ('include'    => '$1',
+		  'm4_include' => '$1'));
+  my $deps = new Autom4te::XFile ("< " . open_quote ("$tmp/dependencies"));
+  while ($_ = $deps->getline)
+    {
+      chomp;
+      my $file = find_file ("$_?", @include);
+      # If a file which used to be included is no longer there, then
+      # don't say it's missing (it might no longer be included).  But
+      # of course, that causes the output to be outdated (as if the
+      # time stamp of that missing file was newer).
+      return 0
+	if ! $file;
+      push @dep, $file;
+    }
+
+  # If $FILE is older than one of its dependencies, it is outdated.
+  return up_to_date_p ($file, @dep);
+}
+
+
+## ---------- ##
+## Freezing.  ##
+## ---------- ##
+
+# freeze ($OUTPUT)
+# ----------------
+sub freeze ($)
+{
+  my ($output) = @_;
+
+  # When processing the file with diversion disabled, there must be no
+  # output but comments and empty lines.
+  my $result = xqx ("$m4"
+		    . ' --fatal-warning'
+		    . join (' --include=', '', map { shell_quote ($_) } @include)
+		    . ' --define=divert'
+		    . " " . files_to_options (@ARGV)
+		    . ' </dev/null');
+  $result =~ s/#.*\n//g;
+  $result =~ s/^\n//mg;
+
+  fatal "freezing produced output:\n$result"
+    if $result;
+
+  # If freezing produces output, something went wrong: a bad `divert',
+  # or an improper paren etc.
+  xsystem ("$m4"
+	   . ' --fatal-warning'
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . " --freeze-state=" . shell_quote ($output)
+	   . " " . files_to_options (@ARGV)
+	   . ' </dev/null');
+}
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('am4t');
+load_configuration ($ENV{'AUTOM4TE_CFG'} || "$pkgdatadir/autom4te.cfg");
+load_configuration ("$ENV{'HOME'}/.autom4te.cfg")
+  if exists $ENV{'HOME'} && -f "$ENV{'HOME'}/.autom4te.cfg";
+load_configuration (".autom4te.cfg")
+  if -f ".autom4te.cfg";
+parse_args;
+
+# Freezing does not involve the cache.
+if ($freeze)
+  {
+    freeze ($output);
+    exit $exit_code;
+  }
+
+# We need our cache directory.  Don't fail with parallel creation.
+if (! -d "$cache")
+  {
+    mkdir "$cache", 0755
+      or -d "$cache"
+      or fatal "cannot create $cache: $!";
+  }
+
+# Open the index for update, and lock it.  autom4te handles several
+# files, but the index is the first and last file to be updated, so
+# locking it is sufficient.
+$icache_file = new Autom4te::XFile $icache, O_RDWR|O_CREAT;
+$icache_file->lock (LOCK_EX)
+  if ($flock_implemented eq "yes");
+
+# Read the cache index if available and newer than autom4te itself.
+# If autom4te is newer, then some structures such as C4che might
+# have changed, which would corrupt its processing.
+Autom4te::C4che->load ($icache_file)
+  if -f $icache && mtime ($icache) > mtime ($0);
+
+# Add the new trace requests.
+my $req = Autom4te::C4che->request ('input' => \@ARGV,
+				    'path'  => \@include,
+				    'macro' => [keys %trace, @preselect]);
+
+# If $REQ's cache files are not up to date, or simply if the user
+# discarded them (-f), declare it invalid.
+$req->valid (0)
+  if $force || ! up_to_date ($req);
+
+# We now know whether we can trust the Request object.  Say it.
+verb "the trace request object is:\n" . $req->marshall;
+
+# We need to run M4 if (i) the user wants it (--force), or (ii) $REQ is
+# invalid.
+handle_m4 ($req, keys %{$req->macro})
+  if $force || ! $req->valid;
+
+# Issue the warnings each time autom4te is run.
+my $separator = "\n" . ('-' x 25) . " END OF WARNING " . ('-' x 25) . "\n\n";
+handle_traces ($req, "$tmp/warnings",
+	       ('_m4_warn' => "\$1::\$f:\$l::\$2::\$3$separator"));
+# Swallow excessive newlines.
+for (split (/\n*$separator\n*/o, contents ("$tmp/warnings")))
+{
+  # The message looks like:
+  # | syntax::input.as:5::ouch
+  # | ::input.as:4: baz is expanded from...
+  # | input.as:2: bar is expanded from...
+  # | input.as:3: foo is expanded from...
+  # | input.as:5: the top level
+  # In particular, m4_warn guarantees that either $stackdump is empty, or
+  # it consists of lines where only the last line ends in "top level".
+  my ($cat, $loc, $msg, $stacktrace) = split ('::', $_, 4);
+  msg $cat, $loc, "warning: $msg",
+    partial => ($stacktrace =~ /top level$/) + 0;
+  for (split /\n/, $stacktrace)
+    {
+      my ($loc, $trace) = split (': ', $_, 2);
+      msg $cat, $loc, $trace, partial => ($trace !~ /top level$/) + 0;
+    }
+}
+
+# Now output...
+if (%trace)
+  {
+    # Always produce traces, since even if the output is young enough,
+    # there is no guarantee that the traces use the same *format*
+    # (e.g., `-t FOO:foo' and `-t FOO:bar' are both using the same M4
+    # traces, hence the M4 traces cache is usable, but its formatting
+    # will yield different results).
+    handle_traces ($req, $output, %trace);
+  }
+else
+  {
+    # Actual M4 expansion, if the user wants it, or if $output is old
+    # (STDOUT is pretty old).
+    handle_output ($req, $output)
+      if $force || mtime ($output) < mtime ($ocache . $req->id);
+  }
+
+# If we ran up to here, the cache is valid.
+$req->valid (1);
+Autom4te::C4che->save ($icache_file);
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/automake b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/automake
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/automake
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns out that other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing single-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sorts of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN='[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
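+# E.g. `foo.o bar.o: common.h' (illustrative names) has targets `foo.o bar.o'
+# and dependency part ` common.h'; $RULE_PATTERN below captures these as $1
+# and $2 respectively.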
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
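+# E.g. `FOO += bar baz' (an illustrative assignment) yields $1 = `FOO',
+# $2 = `+' and $3 = `bar baz'.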
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
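+# E.g. `1.14a' matches with $1 set to `a' (an alpha release), and
+# `1.14a-nl1' additionally captures the `-nl1' fork suffix in $2
+# (version strings here are illustrative only).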
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
+
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location to a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINK/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps languages names onto objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# $KNOWN_EXTENSIONS_PATTERN, which is used when parsing
+# the input am file.
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries= ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
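+# For example, subst ('AMDEP_TRUE') returns 'AMDEP_TRUE' enclosed in a
+# pair of '@' signs, as used when emitting the dependency 'include'
+# lines in handle_languages below.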
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with non-strictly-increasing paths, e.g., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
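+# For example, verbose_var ('CC') is 'AM_V_CC'.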
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
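+# For example, verbose_private_var ('CC') is 'am__v_CC'.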
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For silent rules, set up the verbosity variable for NAME and its
+# dispatcher, expanding to VAL-IF-SILENT when silent and to
+# VAL-IF-VERBOSE (defaulting to empty) otherwise.
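+#
+# For illustration, define_verbose_var ('lt', '--silent') (as done by
+# define_verbose_libtool below) defines AM_V_lt as a dispatcher that
+# selects between am__v_lt_0 ('--silent', the silent value) and
+# am__v_lt_1 (empty, the verbose value) according to $(V) at make time.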
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
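+# verbose_nodep_flag (NAME)
+# -------------------------
+# Like verbose_flag, but the 'am__nodep' configure substitution is
+# appended to the variable name (see the 'VERBOSE-NODEP' transform in
+# handle_languages below).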
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of %SILENT%: variable to expand to '@' when silent.
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
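+# For example, define_verbose_tagvar ('GEN') makes AM_V_GEN expand to
+# '@echo "  GEN     " $@;' in silent mode and to nothing in verbose mode.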
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
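+# Define the always-available bits of the silent-rules machinery:
+# AM_V_P, AM_V_GEN and AM_V_at.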
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
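+    # For illustration (a hypothetical recipe in a user Makefile.am):
+    #   $(AM_V_P) && set -x || echo "  GEN     " $@;
+    # i.e. trace the commands in verbose mode, print a terse line otherwise.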
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Make sure no variable VAR in @LIST is overridden in the Makefile.am;
+# suggest using AM_VAR instead when one is.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
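+#
+# For illustration, a hypothetical call appending $(EXEEXT) to every
+# entry of 'bin_PROGRAMS' unconditionally would be:
+#   append_exeext { 1 } 'bin_PROGRAMS';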
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE the source file to transform
+#   %TRANSFORM contains extra arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # Since the next major version of automake (2.0) will
+                  # make the behaviour that so far is only activated by
+                  # the 'subdir-objects' option mandatory, it's better
+                  # if we start warning users who are not using that
+                  # option.
+                  # As suggested by Peter Johansson, we strive to avoid
+                  # the warning when it would be irrelevant, i.e., if
+                  # all source files sit in "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least a source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+of the corresponding sources.
+You are advised to start using the 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inference rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of four values followed by additional transform flags for
+		# file_contents.  The four values are the derived flag prefix
+		# (e.g. for 'foo_CFLAGS', it is 'foo'), the name of the
+		# source file, the base name of the output file, and
+		# the extension for the object file.
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the source they are
+	    # derived from is not.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  Not only is this more
+                # efficient than listing the object files to be removed
+                # individually (which would cause an 'rm' invocation for
+                # each of them -- very inefficient, see bug#10697);
+                # listing them individually would also leave stale object
+                # files in the subdirectory whenever a source file there
+                # is removed or renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
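+        # For example, an object 'sub/foo.$(OBJEXT)' contributes the
+        # dependency file 'sub/$(DEPDIR)/foo.Po', and 'sub/foo.lo'
+        # contributes 'sub/$(DEPDIR)/foo.Plo'.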
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# ---------------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# Result is $LINKER: a true (non-empty) value if a linker is needed to
+# deal with the objects, and an empty value otherwise.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.o' or '$o')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlpreopen
+	       # are pure nonsense.  Diagnosing this doesn't seem very
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clean LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
+
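+# handle_LIBOBJS ($VAR, $COND, $LT)
+# ---------------------------------
+# Handle a LIBOBJS (or LTLIBOBJS, when $LT is 'LT') reference seen in
+# variable $VAR under condition $COND: register the dependency file of
+# each C source recorded in %libsources, and require those sources
+# unless they are built sources.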
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
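+# handle_ALLOCA ($VAR, $COND, $LT)
+# --------------------------------
+# Handle an ALLOCA (or LTALLOCA, when $LT is 'LT') reference seen in
+# variable $VAR under condition $COND: require 'alloca.c' and register
+# its dependency file.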
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
+
+# Canonicalize the input parameter.
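+# For example, canonicalize ('lib-foo.la') gives 'lib_foo_la': every
+# character other than letters, digits, '_' and '@' becomes '_'.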
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
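+# For example, check_canonical_spelling ('lib-foo.a', '_SOURCES')
+# returns 'lib_foo_a' and rejects any 'lib-foo.a_SOURCES' definition,
+# telling the user to write 'lib_foo_a_SOURCES' instead.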
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+   return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use '-I.@am__isrc@'
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
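+# Handle the *_PROGRAMS primaries: emit the build rule and the
+# per-program link and dependency variables for each listed program.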
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
+
+
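+# Handle the *_LIBRARIES primaries (static archives built with
+# $(AR) $(ARFLAGS)).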
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we at least look at this.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
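+
+  # For illustration only (hypothetical input): listing the same Libtool
+  # library under two install locations in the same condition, e.g.
+  #
+  #    lib_LTLIBRARIES    = libfoo.la
+  #    pkglib_LTLIBRARIES = libfoo.la
+  #
+  # is ambiguous for the check above, since only one '-rpath' can be
+  # passed at link time, and is reported as an error.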
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true there is only one condition and
+	      # there is no point defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
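+
+      # A sketch of the loop's effect, assuming an unconditional
+      # 'lib_LTLIBRARIES = libfoo.la': $rpath becomes the literal
+      # '-rpath $(libdir)', which the 'ltlibrary' rule passes to the link;
+      # for conditionally installed libraries the helper variable
+      # $(am_libfoo_la_rpath) is defined per condition and used instead.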
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to cleanup .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+# See if any _SOURCES variables were misspelled.
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
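+
+# For example (hypothetical input), a Makefile.am defining
+#
+#    foo_SOURCES = foo.c
+#
+# without listing 'foo' in any *_PROGRAMS, *_LIBRARIES, or similar primary
+# is flagged here: "variable 'foo_SOURCES' is defined but no program or
+# library has 'foo' as canonical name (possible typo)".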
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
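+
+# A minimal sketch of what the scanner above extracts from a hypothetical
+# doc/foo.texi containing:
+#
+#    @setfilename foo.info
+#    @include version.texi
+#
+# Here $OUTFILE is 'foo.info' and $VFILE is 'version.texi' (any name
+# matching 'vers*.texi' is accepted); a missing @setfilename is an error.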
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice); while the latter need to be output for
+  # each Texinfo source.
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuses Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
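+
+# In rough terms, for an in-tree 'foo.texi' producing 'foo.info' in the
+# current directory, this emits the generic (suffix-rule based) rules from
+# the 'texibuild' fragment and returns the optional dirstamp plus the
+# cleanable by-products 'foo.dvi', 'foo.pdf', 'foo.ps' and 'foo.html'.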
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files are important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #	  |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files always get built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date source-tree copy of the .info file existed,
+      #       make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) are triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
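+
+# Illustrative (hypothetical) input for the section scan above:
+#
+#    man_MANS       = foo.1 baz.3
+#    dist_man5_MANS = qux.5
+#
+# yields sections 1, 3 and 5; the 'dist_' prefix additionally distributes
+# qux.5, and a 'notrans_' prefix would exempt pages from the installed-name
+# transformation.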
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
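+
+# Typical use (sketch): a user-written 'dist-hook:' rule in Makefile.am is
+# picked up here, marked .PHONY, and completed in the conditions where it
+# is not defined, so that targets such as 'dist' can depend on it safely.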
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents for no reason.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check all items in variable $VARIABLE as directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
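+
+# Rough example of the substitution above: when 'prefix' is a known
+# configure-substituted variable, '${prefix}/share' becomes '@prefix@/share',
+# while '${some_shell_var}/share' is left untouched (callers then skip it,
+# since it still contains a '$').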
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
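+
+# For instance (hypothetical paths), while processing 'doc/Makefile.am'
+# ($relative_dir eq 'doc'), an input 'doc/version.m4' is rewritten as
+# '$(srcdir)/version.m4', whereas 'build-aux/version.m4' becomes
+# '$(top_srcdir)/build-aux/version.m4'.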
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUT[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by other AC_CONFIG_FOOs.
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
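+
+# Sketch (hypothetical input): while processing doc/, a plain input
+# 'doc/Makefile.in' is rewritten to '$(srcdir)/Makefile.in' and added to
+# the distribution; an input that is itself another AC_CONFIG_FILES output
+# in a different directory is referenced via '$(top_builddir)/' instead.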
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %MAKE_LIST.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
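+	# Hypothetical example: with a user-set 'SUFFIXES = .bar' and
+	# internal suffixes '.c' and '.o', the emitted header would contain:
+	#   .SUFFIXES:
+	#   .SUFFIXES: .bar .c .o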
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by changing the order of dependencies to the "all" because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
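+  # Sketch of the wiring for a user target 'foo' (with SUBDIRS set):
+  #   foo: foo-recursive
+  #   foo-am: foo-local
+  #   foo-local:          (empty rule, so users can extend it)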
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If $file is a variable, make sure we don't call 'rm -f' without args.
+      $rm = "test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
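+      # E.g., for '$(CLEANFILES)' the resulting recipe line is
+      #   -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)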
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
+
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
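+# For example, sorting ('all-am', '.PHONY', 'clean-am') with target_cmp
+# yields ('all-am', 'clean-am', '.PHONY').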
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing about this guy, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
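+  # E.g., for $test_suffix = '.sh' the prefix is 'SH_', so the variables
+  # handled below are SH_LOG_DRIVER, SH_LOG_COMPILE and friends; the empty
+  # suffix uses the unprefixed LOG_DRIVER/LOG_COMPILE variables.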
+  # The "test driver" program, delegated to handle the test protocol used by
+  # the test scripts.  By default, it's assumed that no protocol is used, so
+  # we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
+	  my $nhelper = 1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
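+	  # Illustration (assuming TEST_EXTENSIONS = '.sh .test' and no
+	  # $(EXEEXT) handling), the generated helper chain would be:
+	  #   am__test_logs1 = $(TESTS:=.log)
+	  #   am__test_logs2 = $(am__test_logs1:.sh.log=.log)
+	  #   TEST_LOGS      = $(am__test_logs2:.test.log=.log)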
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
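+  # E.g. 'Makefile:top.in:bot.in' yields ('Makefile', 'top.in', 'bot.in'),
+  # while a plain 'Makefile' yields ('Makefile', 'Makefile.in').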
+
+  return ($output, @inputs);
+}
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as Makefile:top.in:mid.in:bot.in.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
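+  # Each trace line then looks roughly like
+  #   configure.ac:12::1::AM_CONDITIONAL::FOO::test -f foo
+  # i.e. "file:line::depth::macro::arg1::...", which is what the split
+  # on '::' below expects.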
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
+  @cond_stack = ();
+  my $where;
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-argument forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 1.6.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate version mismatch to missing.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: Return value is LANG_SUBDIR if the resulting object
+# file should be in a subdir if the source file is, LANG_PROCESS if
+# file is to be dealt with, LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs,
+	      # so distribute the generated files.
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards),
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code, do not run C compiler
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
+
+# Add output rules to invoke valac and create stamp file as a witness
+# to handle multiple outputs. This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
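+	# E.g. a 'parse.c' output gets a 'parse.h' header, and 'parse.cxx'
+	# gets 'parse.hxx'.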
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
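+# E.g., if a Makefile.am provides a hypothetical '.zoo.c' suffix rule,
+# derive_suffix ('.zoo', '.o') follows the chain '.zoo' -> '.c' and stops
+# at '.c', an extension Automake knows how to handle.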
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
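+# E.g. ('', 'DEBUG') yields 'DEBUG_TRUE'; with negation ('!', 'DEBUG') the
+# string is flipped by conditional_negate (i.e. to 'DEBUG_FALSE').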
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; this should be the empty
+# string to define the variable unconditionally.  The third argument
+# is a list holding the values to use for the variable.  The value is
+# pretty printed in the output file.
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR, whose content is the list of file names formed by
+# appending the $EXTENSION to each element of @BASENAME.
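+# E.g., with @BASENAME = ('foo', 'bar') and $EXTENSION = 'o', $VAR is
+# defined to "foo.o bar.o".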
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution by the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of 'AM_$flag' in the link
+  # command into '${derived}_$flag' if it exists.
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
+  # If the computed command is the same as the generic command, use
+  # the generic linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a backslash.
+# Might modify $LINE.
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
+
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format they were input.  However, POSIX compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover nobody
+	    # would really write such long lines by hand since it is
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tool, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
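+# Remove backslash-newline continuations, squeeze remaining whitespace runs
+# into single spaces, and trim the ends; e.g. (illustrative) the string
+# "a.o \\\n    b.o" flattens to "a.o b.o".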
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated with $KEY in %PAIRS, as used in $TOKEN
+# (which should be ?KEY? or one of the special %...% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($TOKEN, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
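+# For example (illustrative %PAIRS of ('DIR' => 'src', 'LIBTOOL' => 1)):
+#   '%DIR%'      -> 'src'
+#   '%?LIBTOOL%' -> 'TRUE'
+#   '?LIBTOOL?'  -> ''    (the guarded line is kept)
+#   '?!LIBTOOL?' -> '##%' (the guarded line is later stripped)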
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., no recognition of
+# rule declarations or of make variable definitions).
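+# For instance (hypothetical fragment), with LIBTOOL => 1 a template line
+#   ?LIBTOOL?clean-libtool:
+# is emitted as "clean-libtool:"; with LIBTOOL => 0 it becomes
+# "##%clean-libtool:" and is removed by the ##%-stripping below.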
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides we don't need more than two consecutive new-lines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If we are a rule, keep eating lines as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If we are a comment, eat as many comment lines as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known. $IS_AM iff the caller is
+# reading an Automake file (as opposed to the user's Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate the relationship from the optional actions: the first
+	  # "newline-tab" not preceded by a backslash (continuation
+	  # line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several times (e.g. for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
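+# For example (illustrative): am_primary_prefixes ('PROGRAMS', 0, 'bin',
+# 'noinst', 'check') returns the subset of {'bin', 'noinst', 'check',
+# 'EXTRA', ...} for which a <prefix>_PROGRAMS variable is actually
+# defined in the Makefile.am being processed.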
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A directory that is not explicitly valid is allowed if Xdir is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns a list of [$location, $value] pairs, where the $value's are the
+# values of all the where_HOW variables, and $location is their associated
+# location (the place where their parent variables were defined).
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
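+# A call might look like this (illustrative arguments only):
+#   am_install_var ('-candist', 'data', 'DATA', 'data', 'pkgdata');
+# which processes variables such as data_DATA or dist_pkgdata_DATA and
+# emits the corresponding install rules from data.am.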
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, with install-exec? (or install-data?).
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be determined at configure time.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure. (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory as the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Verify that $FILE exists in $DIRECTORY, or install it there if it is missing.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Verify that each of @FILES exists in $DIRECTORY, or install it if missing.
+# $MYSTRICT is the strictness level at which this file becomes required.
+# Worker threads may queue up the action to be serialized by the master,
+# if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
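+# For example (illustrative), require_build_directory ('sub/dir') returns
+# "sub/dir/$(am__dirstamp)" and emits a rule that creates the directory
+# and touches that stamp file; a later call with './sub//dir' reuses the
+# same stamp file.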
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
+
+################################################################
+
+# Push a list of files onto '@dist_common'.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warning issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations.
+  #  * $force_generation is set.
+  #  * any of the output dependencies is younger than the output
+  #  * the contents of the output are different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # The output's dependencies are split into two sets:
+  #  * dependencies which are also configure dependencies
+  #    (these do not change from one Makefile.am to the next)
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning setting is now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
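+
+# Illustration: with three input Makefile.am's, "AUTOMAKE_JOBS=8 automake"
+# makes this return 3 (capped at the number of makefiles), while an unset
+# or non-numeric AUTOMAKE_JOBS makes it return 0, so the serial path is
+# taken further below.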
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles, the message queues
+  # collect all serializations needed for respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
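+
+# A minimal, self-contained sketch of the same dispatch pattern used
+# above (queue the work, one undef sentinel per worker, then join).
+# It is illustrative only, is never called by automake itself, and
+# assumes the same thread support guarded by $perl_threads.
+sub _demo_queue_dispatch
+{
+  my (@jobs) = @_;
+  my $queue = Thread::Queue->new;
+  my @workers = map {
+    threads->new (sub
+      {
+        # Dequeuing undef ends the loop for this worker.
+        while (my $job = $queue->dequeue)
+          {
+            print "demo worker " . threads->tid . ": $job\n";
+          }
+        return 0;
+      });
+  } (1 .. 2);
+  # Push all jobs plus one sentinel per worker, then reap the workers.
+  $queue->enqueue (@jobs, (undef) x @workers);
+  $_->join foreach @workers;
+}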
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/automake-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/automake-1.14
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/automake-1.14
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns out that other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing single-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sort of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN='[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
+
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location to a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINKS/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps language names onto objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# Maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# on $KNOWN_EXTENSIONS_PATTERN, which is used when parsing
+# the input am file.
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries= ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with non strictly increasing paths, i.e., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For silent rules, set up VAR and its dispatcher, to expand to
+# VAL-IF-SILENT if silent, and to VAL-IF-VERBOSE (defaulting to
+# empty) if not.
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of %SILENT%: variable to expand to '@' when silent.
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
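+
+# For illustration only: define_verbose_tagvar ('CC') ultimately puts
+# text along these lines into the generated Makefile.in (shown before
+# config.status substitutes @AM_V@ and @AM_DEFAULT_V@):
+#
+#   AM_V_CC = $(am__v_CC_@AM_V@)
+#   am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
+#   am__v_CC_0 = @echo "  CC      " $@;
+#   am__v_CC_1 =
+#
+# so "make V=1" picks the empty (verbose) flavor and "make V=0" the
+# terse "  CC" echo.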
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Make sure each variable VAR in @LIST does not exist, suggest using AM_VAR
+# otherwise.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
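+
+# A hypothetical call, for illustration only (the real call sites are
+# elsewhere in this script): appending $(EXEEXT) to every entry of
+# bin_PROGRAMS unconditionally would look like
+#
+#   append_exeext { 1 } 'bin_PROGRAMS';
+#
+# whereas a predicate that returns false for a name leaves that entry
+# untouched.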
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
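+
+# For illustration only (hypothetical fragment): if configure.ac calls
+# AC_LIBOBJ([strdup]) and the Makefile.am also lists the file by hand,
+#
+#   foo_LDADD   = @LIBOBJS@
+#   foo_SOURCES = foo.c strdup.c
+#
+# the check above reports that the automatically discovered file
+# 'strdup.c' should not be explicitly mentioned.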
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of the resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE is the source file to transform
+#   %TRANSFORM contains extra arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
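+		#
+		# Hypothetical illustration: with
+		#   bin_PROGRAMS = foo
+		#   foo_SOURCES  = util.c
+		#   foo_CFLAGS   = -DFOO
+		# 'util.c' is compiled into 'foo-util.$(OBJEXT)'; adding
+		#   foo_SHORTNAME = f
+		# would shorten that to 'f-util.$(OBJEXT)'.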
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # Since the next major version of automake (2.0) will
+                  # make mandatory the behaviour that, so far, is only
+                  # activated by the 'subdir-objects' option, it's
+                  # better if we start warning users not using that
+                  # option.
+                  # As suggested by Peter Johansson, we strive to avoid
+                  # the warning when it would be irrelevant, i.e., if
+                  # all source files sit in the "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least a source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+of the corresponding sources.
+You are advised to start using 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
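+
+	    # Hypothetical illustration: for 'foo_SOURCES = sub/bar.c',
+	    # the object is 'sub/bar.$(OBJEXT)' when 'subdir-objects' is
+	    # enabled, and plain 'bar.$(OBJEXT)' in the current directory
+	    # (with the warning above) when it is not.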
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inference rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of five values followed by additional transform flags for
+		# file_contents.  The five values are the derived flag prefix
+		# (e.g. for 'foo_CFLAGS', it is 'foo'), the name of the
+		# source file, the base name of the output file, the
+		# extension for the object file, and the source extension.
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the source they are
+	    # derived from is not.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
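+
+	# Hypothetical illustration of the derived-source case: for
+	#   foo_SOURCES = parse.y
+	# the Yacc rewrite reports a derived '.c' source, so 'parse.c' is
+	# pushed back onto @files above and turned into 'parse.$(OBJEXT)'
+	# on the next iteration (and distributed, since the .y source is
+	# not nodist_).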
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  Not only is this more
+                # efficient than listing the object files to be removed
+                # individually (which would cause an 'rm' invocation for
+                # each of them -- very inefficient, see bug#10697), but
+                # listing them individually would also leave stale object
+                # files in the subdirectory whenever a source file there
+                # is removed or renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# --------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated and returned).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# Result is $LINKER: a value that is true if a linker is needed to deal
+# with the objects.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.$(OBJEXT)' or '.lo')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
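+	#
+	# Hypothetical illustration: for 'bin_PROGRAMS = foo' with no
+	# foo_SOURCES given at all, Automake assumes 'foo_SOURCES = foo.c';
+	# setting
+	#   AM_DEFAULT_SOURCE_EXT = .cpp
+	# makes the assumed default 'foo.cpp' instead.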
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
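+
+# For illustration only (hypothetical fragment):
+#
+#   bin_PROGRAMS       = foo
+#   foo_SOURCES        = main.c
+#   nodist_foo_SOURCES = built.c
+#
+# defines am_foo_OBJECTS and nodist_foo_OBJECTS and gloms them into
+#   foo_OBJECTS = $(am_foo_OBJECTS) $(nodist_foo_OBJECTS)
+# while only $(foo_SOURCES) contributes to the distributed sources.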
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlopreopen
+	       # are pure nonsense.  Diagnosing this doesn't seem very
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
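+
+# For illustration only (hypothetical fragment):
+#
+#   foo_LDADD = libbar.a @LIBOBJS@ -lm -Wl,-z,defs
+#
+# 'libbar.a' is copied into foo_DEPENDENCIES, '@LIBOBJS@' triggers
+# handle_LIBOBJS, '-lm' is skipped silently, and the '-Wl,...' flag is
+# diagnosed as belonging in 'foo_LDFLAGS'.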
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clean LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
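+
+# For illustration only (hypothetical layout): with
+# AC_CONFIG_LIBOBJ_DIR([lib]) in configure.ac and the 'subdir-objects'
+# option enabled, a Makefile.am in a sibling 'src' directory gets
+#   LIBOBJDIR = ../lib/
+# and the $(LIBOBJS) files are scheduled for removal by
+# "make mostlyclean"; without 'subdir-objects' this setup is an error.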
+
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
+
+# Canonicalize the input parameter.
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
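+
+# For illustration only (hypothetical fragment): for
+#
+#   bin_PROGRAMS = send-mail
+#
+# the canonical name is 'send_mail', so 'send_mail_SOURCES' must be
+# used; a stray 'send-mail_SOURCES' is rejected with a hint to use the
+# canonical spelling.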
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+   return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use '-I.@am__isrc@'
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
+
+
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
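+
+      # Hypothetical illustration: 'lib_LTLIBRARIES = mypkg.la' would
+      # normally be flagged (standard names match 'lib*.la'), but with
+      #   mypkg_la_LDFLAGS = -module
+      # the Libtool module name 'mypkg.la' is accepted as-is.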
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true there is only one condition and
+	      # there is no point in defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
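+
+      # Hypothetical illustration: for 'lib_LTLIBRARIES = libfoo.la'
+      # the computed value is '-rpath $(libdir)', while a library
+      # listed only in noinst_, check_ or EXTRA_ LTLIBRARIES gets no
+      # -rpath at all, since its final destination is unknown.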
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to cleanup .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+# See if any _SOURCES, _LIBADD, _LDADD, _LDFLAGS, or _DEPENDENCIES
+# variables were misspelled (i.e., do not match any canonical target).
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
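+
+# For illustration only (hypothetical input): a manual containing
+#
+#   @setfilename foo.info
+#   @include version.texi
+#
+# yields $OUTFILE = 'foo.info' and $VFILE = 'version.texi'; a manual
+# with no @setfilename at all is reported as an error.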
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice), while the latter need to be output for
+  # each Texinfo source.
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuses Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by the input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files are important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #   |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files always get built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date source-tree copy of the .info file existed,
+      #       make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
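+      #
+      # For instance (a sketch): case (2) can be requested with a
+      # Makefile.am line such as
+      #   AUTOMAKE_OPTIONS = info-in-builddir
+      # which switches $insrc off below, so the '.info' files are
+      # generated in the build directory instead of $(srcdir).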
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) is triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
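+	  # (For instance, the first manual handled here gets 'stamp-vti',
+	  # and subsequent ones get 'stamp-1', 'stamp-2', and so on.)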
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
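+#
+# Rough example: given a Makefile.am fragment such as
+#   if FOO
+#   dist-hook:
+#           find $(distdir) -name '*~' -delete
+#   endif
+# user_phony_rule ('dist-hook') marks 'dist-hook' as .PHONY and emits an
+# empty '@FOO_FALSE@dist-hook:' rule for the other condition, so 'dist'
+# can depend on 'dist-hook' unconditionally.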
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents for no reason.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check all items in variable $VARIABLE as directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
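+#
+# For instance, if 'bindir' is AC_SUBSTed but 'my_local_var' is not,
+#   substitute_ac_subst_variables ('${bindir}/${my_local_var}')
+# yields '@bindir@/${my_local_var}' ('my_local_var' being a made-up name
+# used only for illustration).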
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
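+#
+# For example, while processing 'sub/Makefile.am' ($relative_dir eq 'sub'),
+#   prepend_srcdir ('sub/foo.in', 'common/bar.in')
+# returns ('$(srcdir)/foo.in', '$(top_srcdir)/common/bar.in').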
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUT[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by other AC_CONFIG_FOOs.
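+#
+# Rough example: in 'sub/Makefile.am', for
+#   AC_CONFIG_FILES([sub/Makefile:sub/Makefile.in:common.in])
+# neither 'sub/Makefile.in' nor 'common.in' is itself an AC_CONFIG_FILES
+# output, so they are rewritten to '$(srcdir)/Makefile.in' and
+# '$(top_srcdir)/common.in' and added to the distribution.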
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %make_list.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by changing the order of dependencies of the "all" target, because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
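+#
+# For instance (sketch): with AM_EXTRA_RECURSIVE_TARGETS([foo]) in
+# configure.ac, the generated Makefile.in gains a 'foo' target that runs
+# 'foo-recursive' (or 'foo-am' when there is no SUBDIRS), plus an empty
+# 'foo-local' hook that each Makefile.am may extend.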
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If $file is a variable, make sure we don't call 'rm -f' without args.
+      $rm ="test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
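+
+  # For instance, '$(CLEANFILES)' registered as CLEAN above ends up in
+  # the clean rules roughly as
+  #   -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)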
+
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
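+#
+# For example, sorting ('install', '.PHONY', 'clean', '.MAKE') with
+# target_cmp yields ('clean', 'install', '.MAKE', '.PHONY').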
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing about this guy, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
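+
+  # For instance (sketch), a '.sh' test extension yields $pfx = 'SH_',
+  # so the variables set up below are SH_LOG_DRIVER and
+  #   SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS)
+  # letting users run their shell tests through an interpreter by
+  # setting SH_LOG_COMPILER in Makefile.am.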
+  # The "test driver" program, deputed to handle tests protocol used by
+  # test scripts.  By default, it's assumed that no protocol is used, so
+  # we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
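+#
+# For example, '.sh' and '.test' match the pattern below and are accepted;
+# an extension containing whitespace or other odd characters is rejected;
+# and the literal '@EXEEXT@' placeholder is accepted when configure
+# AC_SUBSTs EXEEXT.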
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
+	  my $nhelper=1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
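+
+	  # For instance, with TEST_EXTENSIONS = .sh .test and no EXEEXT,
+	  # the chain built above amounts to:
+	  #   am__test_logs1 = $(TESTS:=.log)
+	  #   am__test_logs2 = $(am__test_logs1:.sh.log=.log)
+	  #   TEST_LOGS = $(am__test_logs2:.test.log=.log)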
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
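+#
+# For example,
+#   split_config_file_spec ('Makefile:Makefile.top:Makefile.bot')
+# returns ('Makefile', 'Makefile.top', 'Makefile.bot'), while
+#   split_config_file_spec ('Makefile')
+# returns ('Makefile', 'Makefile.in').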
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
+
+  return ($output, @inputs);
+}
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as 'Makefile:top.in:mid.in:bot.in'.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
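+#
+# For example, for 'Makefile:top.in:mid.in:bot.in' where only 'mid.am'
+# exists, locate_am ('top.in', 'mid.in', 'bot.in') returns 'mid.in'.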
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
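+#
+# Rough example: for
+#   AC_CONFIG_FILES([Makefile lib/Makefile doc/version.texi:doc/version.in])
+# 'Makefile' and 'lib/Makefile' land in %make_list (assuming matching
+# Makefile.am files exist), while 'doc/version.texi', having no
+# 'doc/version.am', is pushed onto @other_input_files.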
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
+  @cond_stack = ();
+  my $where;
+
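+  # Given the --trace format above, each line read from the pipe looks
+  # roughly like (illustrative example, not real output):
+  #   configure.ac:12::0::AM_CONDITIONAL::DEBUG::test "x$enable_debug" = xyes
+  # i.e. "file:line", the m4 expansion depth, the macro name, and then the
+  # macro arguments, all joined by '::' as split below.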
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-arguments forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 1.6.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate version mismatch to missing.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: the return value is LANG_SUBDIR if the resulting object
+# file should be in a subdir whenever the source file is, LANG_PROCESS if
+# the file is to be dealt with, and LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
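+# As an illustration: for a yacc source the computed call is roughly
+# lang_yacc_rewrite ($dir, $base, $ext); that function (below) maps the
+# 'y' characters in the extension to 'c' and returns LANG_PROCESS, or
+# LANG_SUBDIR when the 'subdir-objects' option is in effect.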
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs,
+	      # so distribute the generated files.
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards),
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code, do not run C compiler
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
+
+# Add output rules to invoke valac and create a stamp file as a witness
+# to handle multiple outputs. This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
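+# For illustration: if a relevant YFLAGS contains '-d' and 'parser.y' is
+# turned into 'parser.c', the code below also declares 'parser.h', makes it
+# depend on 'parser.c', and distributes it unless the .y source was listed
+# in a nodist_ variable (see the DIST_SOURCE handling below).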
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
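+# For example, resolve_linker (CXXLINK => 1, F77LINK => 1) picks 'CXXLINK'
+# because C++ appears earlier in the precedence list below, and an empty
+# hash falls back to plain 'LINK'.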
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
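+# A hedged, minimal sketch of a call (the 'foo' language is hypothetical;
+# the attribute names are the Automake::Language accessors read back in
+# this file, and the real registrations for the built-in languages live
+# elsewhere in this script):
+#   register_language ('name'       => 'foo',
+#                      'extensions' => ['.foo'],
+#                      'compiler'   => 'FOOCOMPILE',
+#                      'compile'    => '$(FOO) $(AM_FOOFLAGS) $(FOOFLAGS)',
+#                      'flags'      => ['FOOFLAGS'],
+#                      'linker'     => 'FOOLINK',
+#                      'link'       => '$(FOOLD) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+#                      'lder'       => 'FOOLD',
+#                      'ld'         => '$(FOO)');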
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
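+# For example (assuming a user-supplied '.tea.c' suffix rule has been
+# registered): derive_suffix ('.tea', '.$(OBJEXT)') would follow the chain
+# '.tea' -> '.c' and return '.c', an extension automake knows how to handle.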
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
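+# Illustrative examples, assuming Automake::Condition::conditional_negate
+# swaps the _TRUE/_FALSE suffix:
+#   make_conditional_string ('',  'DEBUG') => 'DEBUG_TRUE'
+#   make_conditional_string ('!', 'DEBUG') => 'DEBUG_FALSE'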
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; callers pass TRUE to define
+# the variable unconditionally.  The arguments following $WHERE form
+# the list of values to use for the variable.  The value is pretty
+# printed in the output file.
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR, whose content is the list of file names composed of
+# the @BASENAME entries and the $EXTENSION.
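+# For example (hypothetical variable and basenames): with @bases holding
+# ('foo', 'bar'), define_files_variable ('am__foo_files', @bases, 'c', INTERNAL)
+# defines am__foo_files = foo.c bar.c.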
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution by the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of 'AM_$flag' in the link
+  # command into '${derived}_$flag' if it exists.
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
+  # If the computed command is the same as the generic command, use
+  # the generic linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a slash.
+# Might modify $LINE.
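+# Illustrative behaviour: a line ending in a backslash ("foo.o \") yields 1,
+# a plain "foo.o" line yields 0, and stray whitespace after a trailing
+# backslash is diagnosed and removed before 1 is returned; '##' comment
+# lines always yield 0.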
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
+
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format they were input.  However, POSIX compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover, nobody
+	    # would really write such long lines by hand since it is
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tool, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
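+# For example, flatten ("foo \\\n    bar\n") returns "foo bar": escaped
+# newlines are removed and runs of whitespace are collapsed and trimmed.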
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated with $KEY in %PAIRS, as used on $TOKEN
+# (which should be ?KEY? or any of the special %% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($TOKEN, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
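+#
+# Illustrative (hypothetical %PAIRS = (FOO => 'bar', COND => 0)):
+#   '%FOO%'   => 'bar'
+#   '%?COND%' => 'FALSE'
+#   '?COND?'  => '##%'  (such lines are stripped later as ##%-comments)
+#   '?!COND?' => ''     (such lines are kept)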
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., no recognition of
+# rule declarations or of make variable definitions).
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides we don't need more than two consecutive new-lines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If we are a rule, keep eating lines as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If we are a comment, eat as many comment lines as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.  $IS_AM is true iff the
+# caller is reading an Automake file (as opposed to the user's
+# Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate relationship from optional actions: the first
+	  # 'newline tab' not preceded by backslash (continuation
+	  # line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several times (e.g. for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
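+#
+# For instance (an illustrative sketch; the fragment name and transform
+# key shown here are assumptions, not taken from a real call site):
+#
+#   $output_rules .= file_contents ('data', $where, DIR => 'bin');
+#
+# would read '$libdir/am/data.am', apply the %TRANSFORM substitutions,
+# and return the resulting comments, variable definitions and rules as
+# a single string.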
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
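+#
+# For example (illustrative only), with a Makefile.am containing
+#
+#   bin_PROGRAMS = foo
+#   zardir       = $(libdir)/zar
+#   zar_PROGRAMS = bar
+#
+# am_primary_prefixes ('PROGRAMS', ...) would return ('bin', 'zar'),
+# whereas a lone 'bni_PROGRAMS' (with no 'bnidir' variable) would be
+# diagnosed as a probable misspelling.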
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A directory that is not explicitly valid is allowed if Xdir is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns a list of [$location, $value] pairs, where the $value's are the
+# values of all the where_HOW variables, and $location is the associated
+# location (the place where their parent variables were defined).
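+#
+# As an illustrative sketch (the exact call shape is an assumption): for
+# a Makefile.am containing 'bin_PROGRAMS = foo bar', a call along the
+# lines of
+#
+#   am_install_var ('progs', 'PROGRAMS', 'bin', 'noinst', 'check')
+#
+# would return ([$loc_foo, 'foo'], [$loc_bar, 'bar']), where each
+# location records where the corresponding value was defined, and any
+# user-supplied trailing $(EXEEXT) has been stripped from the values.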
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, with install-exec? (or install-data?).
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be configure time determined.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure. (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
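+#
+# For example, if 'install-sh' is found in '..', $config_aux_dir becomes
+# '..' and $am_config_aux_dir becomes '$(top_srcdir)/..'; if it is found
+# in '.' (or not found at all), $am_config_aux_dir is simply
+# '$(top_srcdir)'.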
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory of the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Verify that $FILE exists in $DIRECTORY, or install it if it is missing.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Verify that each of @FILES exists in $DIRECTORY, or install it.
+# $MYSTRICT is the strictness level at which these files become required.
+# Worker threads may queue up the action to be serialized by the master
+# if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
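+#
+# For example, require_build_directory ('foo/bar') returns
+# 'foo/bar/$(am__dirstamp)' and (the first time only) appends a rule of
+# the form
+#
+#   foo/bar/$(am__dirstamp):
+#           @$(MKDIR_P) foo/bar
+#           @: > foo/bar/$(am__dirstamp)
+#
+# to $output_rules.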
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
+
+################################################################
+
+# Push a list of files onto '@dist_common'.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warning issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations.
+  #  * $force_generation is set.
+  #  * any of the output dependencies is younger than the output
+  #  * the contents of the output are different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # The output's dependencies are split into two sets:
+  #  * dependencies which are also configure dependencies.
+  #    These do not change from one Makefile.am to the next.
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
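+#
+# For example (illustrative only): given 10 names to print, $rows ends up
+# as 3 and $rest as 2, so the first two columns hold three entries each,
+# the last two hold two, and the output (in sorted order name0..name9)
+# looks like
+#
+#   name0   name3   name6   name8
+#   name1   name4   name7   name9
+#   name2   name5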
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning setting now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
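+#
+# For illustration: with
+#
+#   AUTOMAKE_JOBS=8 automake foo/Makefile bar/Makefile
+#
+# this returns 2 (the thread count is capped at the number of input
+# files); an unset or non-numeric AUTOMAKE_JOBS yields 0, which selects
+# the serial code path.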
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles, the message queues
+  # collect all serializations needed for respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoreconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoreconf
new file mode 100755
index 0000000..944ccdd
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoreconf
@@ -0,0 +1,718 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoreconf.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoreconf - install the GNU Build System in a directory tree
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David J. MacKenzie.
+# Extended and rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+# Do not use Cwd::chdir, since it might hang.
+use Cwd 'cwd';
+use strict;
+
+## ----------- ##
+## Variables.  ##
+## ----------- ##
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [DIRECTORY]...
+
+Run `autoconf' (and `autoheader', `aclocal', `automake', `autopoint'
+(formerly `gettextize'), and `libtoolize' where appropriate)
+repeatedly to remake the GNU Build System files in specified
+DIRECTORIES and their subdirectories (defaulting to `.').
+
+By default, it only remakes those files that are older than their
+sources.  If you install new versions of the GNU Build System,
+you can make `autoreconf' remake all of the files by giving it the
+`--force' option.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don't remove temporary files
+  -f, --force              consider all files obsolete
+  -i, --install            copy missing auxiliary files
+      --no-recursive       don't rebuild sub-packages
+  -s, --symlink            with -i, install symbolic links instead of copies
+  -m, --make               when applicable, re-run ./configure && make
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY [syntax]
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variable \`WARNINGS\' is honored.  Some subtools might
+support other warning types; using \`all' is encouraged.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+The environment variables AUTOM4TE, AUTOCONF, AUTOHEADER, AUTOMAKE,
+ACLOCAL, AUTOPOINT, LIBTOOLIZE, M4, and MAKE are honored.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version = "autoreconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+# Lib files.
+my $autoconf   = $ENV{'AUTOCONF'}   || '/x86_64-apple-darwin/bin/autoconf';
+my $autoheader = $ENV{'AUTOHEADER'} || '/x86_64-apple-darwin/bin/autoheader';
+my $autom4te   = $ENV{'AUTOM4TE'}   || '/x86_64-apple-darwin/bin/autom4te';
+my $automake   = $ENV{'AUTOMAKE'}   || 'automake';
+my $aclocal    = $ENV{'ACLOCAL'}    || 'aclocal';
+my $libtoolize = $ENV{'LIBTOOLIZE'} || 'libtoolize';
+my $autopoint  = $ENV{'AUTOPOINT'}  || 'autopoint';
+my $make       = $ENV{'MAKE'}       || 'make';
+
+# --install -- as --add-missing in other tools.
+my $install = 0;
+# symlink -- when --install, use symlinks instead.
+my $symlink = 0;
+# Does aclocal support --force?
+my $aclocal_supports_force = 0;
+# Does aclocal support -Wfoo?
+my $aclocal_supports_warnings = 0;
+# Does automake support --force-missing?
+my $automake_supports_force_missing = 0;
+# Does automake support -Wfoo?
+my $automake_supports_warnings = 0;
+
+my @prepend_include;
+my @include;
+
+# List of command line warning requests.
+my @warning;
+
+# Rerun `./configure && make'?
+my $run_make = 0;
+
+# Recurse into subpackages
+my $recursive = 1;
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ("W|warnings=s"         => \@warning,
+	  'I|include=s'          => \@include,
+	  'B|prepend-include=s'	 => \@prepend_include,
+	  'i|install'            => \$install,
+	  's|symlink'            => \$symlink,
+	  'm|make'               => \$run_make,
+	  'recursive!'           => \$recursive);
+
+  # Split the warnings as a list of elements instead of a list of
+  # lists.
+  @warning = map { split /,/ } @warning;
+  parse_WARNINGS;
+  parse_warnings '--warnings', @warning;
+
+  # Even if the user specified a configure.ac, trim to get the
+  # directory, and look for configure.ac again.  Because (i) the code
+  # is simpler, and (ii) we are still able to diagnose simultaneous
+  # presence of configure.ac and configure.in.
+  @ARGV = map { /configure\.(ac|in)$/ ? dirname ($_) : $_ } @ARGV;
+  push @ARGV, '.' unless @ARGV;
+
+  if ($verbose && $debug)
+    {
+      for my $prog ($autoconf, $autoheader,
+		    $automake, $aclocal,
+		    $autopoint,
+		    $libtoolize)
+	{
+	  xsystem ("$prog --version | sed 1q >&2");
+	  print STDERR "\n";
+	}
+    }
+
+  my $aclocal_help = `$aclocal --help 2>/dev/null`;
+  my $automake_help = `$automake --help 2>/dev/null`;
+  $aclocal_supports_force = $aclocal_help =~ /--force/;
+  $aclocal_supports_warnings = $aclocal_help =~ /--warnings/;
+  $automake_supports_force_missing = $automake_help =~ /--force-missing/;
+  $automake_supports_warnings = $automake_help =~ /--warnings/;
+
+  # Dispatch autoreconf's option to the tools.
+  # --include;
+  $aclocal    .= join (' -I ', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+  $autoheader .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoheader .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+  # --install and --symlink;
+  if ($install)
+    {
+      $automake   .= ' --add-missing';
+      $automake   .= ' --copy' unless $symlink;
+      $libtoolize .= ' --copy' unless $symlink;
+    }
+  # --force;
+  if ($force)
+    {
+      $aclocal    .= ' --force'
+	if $aclocal_supports_force;
+      $autoconf   .= ' --force';
+      $autoheader .= ' --force';
+      $automake   .= ' --force-missing'
+	if $automake_supports_force_missing;
+      $autopoint  .= ' --force';
+      $libtoolize .= ' --force';
+    }
+  else
+    {
+      # The implementation of --no-force is bogus in all implementations
+      # of Automake up to 1.8, so we avoid it in these cases.  (Automake
+      # 1.8 is the first version where aclocal supports force, hence
+      # the condition.)
+      $automake .= ' --no-force'
+	if $aclocal_supports_force;
+    }
+  # --verbose --verbose or --debug;
+  if ($verbose > 1 || $debug)
+    {
+      $autoconf   .= ' --verbose';
+      $autoheader .= ' --verbose';
+      $automake   .= ' --verbose';
+      $aclocal    .= ' --verbose';
+    }
+  if ($debug)
+    {
+      $autoconf   .= ' --debug';
+      $autoheader .= ' --debug';
+      $libtoolize .= ' --debug';
+    }
+  # --warnings;
+  if (@warning)
+    {
+      my $warn = ' --warnings=' . join (',', @warning);
+      $autoconf   .= $warn;
+      $autoheader .= $warn;
+      $automake   .= $warn
+	if $automake_supports_warnings;
+      $aclocal    .= $warn
+        if $aclocal_supports_warnings;
+    }
+}
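+
+# As an illustrative sketch (assuming the installed aclocal and automake
+# advertise --force and --force-missing respectively), a command line of
+#
+#   autoreconf --install --force
+#
+# leaves the tool variables extended roughly as:
+#
+#   $automake   .= ' --add-missing --copy --force-missing'
+#   $libtoolize .= ' --copy --force'
+#   $aclocal    .= ' --force'
+#   $autoconf, $autoheader and $autopoint each get ' --force'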
+
+
+# &run_aclocal ($ACLOCAL, $FLAGS)
+# -------------------------------
+# Update aclocal.m4 as lazily as possible, as aclocal pre-1.8 always
+# overwrites aclocal.m4, hence triggers autoconf, autoheader, automake
+# etc. uselessly.  aclocal 1.8+ does not need this.
+sub run_aclocal ($$)
+{
+  my ($aclocal, $flags) = @_;
+
+  # aclocal 1.8+ does all this for free.  It can be recognized by its
+  # --force support.
+  if ($aclocal_supports_force)
+    {
+      xsystem ("$aclocal $flags");
+    }
+  else
+    {
+      xsystem ("$aclocal $flags --output=aclocal.m4t");
+      # aclocal may produce no output.
+      if (-f 'aclocal.m4t')
+	{
+	  update_file ('aclocal.m4t', 'aclocal.m4');
+	  # Make sure that the local m4 files are older than
+	  # aclocal.m4.
+	  #
+	  # Why is this not always the case?  Because we already run
+	  # aclocal at first (before tracing), which, for instance,
+	  # can find Gettext's macros in .../share/aclocal, so we may
+	  # have had the right aclocal.m4 already.  Then autopoint is
+	  # run, and installs locally these M4 files.  Then
+	  # autoreconf, via update_file, sees it is the _same_
+	  # aclocal.m4, and doesn't change its timestamp.  But later,
+	  # Automake's Makefile expresses that aclocal.m4 depends on
+	  # these local files, which are newer, so it triggers aclocal
+	  # again.
+	  #
+	  # To make sure aclocal.m4 is no older, we change the
+	  # modification times of the local M4 files to be not newer
+	  # than it.
+	  #
+	  # First, where are the local files?
+	  my $aclocal_local_dir = '.';
+	  if ($flags =~ /-I\s+(\S+)/)
+	    {
+	      $aclocal_local_dir = $1;
+	    }
+	  # All the local files newer than aclocal.m4 are to be
+	  # made not newer than it.
+	  my $aclocal_m4_mtime = mtime ('aclocal.m4');
+	  for my $file (glob ("$aclocal_local_dir/*.m4"), 'acinclude.m4')
+	    {
+	      if ($aclocal_m4_mtime < mtime ($file))
+		{
+		  debug "aging $file to be not newer than aclocal.m4";
+		  utime $aclocal_m4_mtime, $aclocal_m4_mtime, $file;
+		}
+	    }
+	}
+    }
+}
+
+# &autoreconf_current_directory
+# -----------------------------
+sub autoreconf_current_directory ()
+{
+  my $configure_ac = find_configure_ac;
+
+  # ---------------------- #
+  # Is it using Autoconf?  #
+  # ---------------------- #
+
+  my $uses_autoconf;
+  my $uses_gettext;
+  if (-f $configure_ac)
+    {
+      my $configure_ac_file = new Autom4te::XFile "< $configure_ac";
+      while ($_ = $configure_ac_file->getline)
+	{
+	  s/#.*//;
+	  s/dnl.*//;
+	  $uses_autoconf = 1 if /AC_INIT/;
+	  # See below for why we look for gettext here.
+	  $uses_gettext = 1  if /^AM_GNU_GETTEXT_VERSION/;
+	}
+    }
+  if (!$uses_autoconf)
+    {
+      verb "$configure_ac: not using Autoconf";
+      return;
+    }
+
+
+  # ------------------- #
+  # Running autopoint.  #
+  # ------------------- #
+
+  # Gettext is a bit of a problem: its macros are not necessarily
+  # visible to aclocal, so if we start with a completely stripped-down
+  # package (think of a fresh CVS checkout), running `aclocal' first
+  # will fail: the Gettext macros are missing.
+  #
+  # Therefore, we can't use the traces to decide if we use Gettext or
+  # not.  I guess that once Gettext moves to 2.5x we will be able to,
+  # but in the meantime forget it.
+  #
+  # We can only grep for AM_GNU_GETTEXT_VERSION in configure.ac.  You
+  # might think this approach is naive, and indeed it is, as it
+  # prevents one from embedding AM_GNU_GETTEXT_VERSION in another *.m4, but
+  # anyway we don't limit the generality, since... that's what
+  # autopoint does.  Actually, it is even more restrictive, as it
+  # greps for `^AM_GNU_GETTEXT_VERSION('.  We did this above, while
+  # scanning configure.ac.
+  if (!$uses_gettext)
+    {
+      verb "$configure_ac: not using Gettext";
+    }
+  elsif (!$install)
+    {
+      verb "$configure_ac: not running autopoint: --install not given";
+    }
+  else
+    {
+      xsystem_hint ("autopoint is needed because this package uses Gettext", "$autopoint");
+    }
+
+
+  # ----------------- #
+  # Running aclocal.  #
+  # ----------------- #
+
+  # Run it first: it might discover new macros to add, e.g.,
+  # AC_PROG_LIBTOOL, which we will trace later to see if Libtool is
+  # used.
+  #
+  # Always run it.  Tracking its sources for up-to-dateness is too
+  # complex and too error prone.  The best we can do is avoiding
+  # nuking the time stamp.
+  my $uses_aclocal = 1;
+
+  # Nevertheless, if aclocal.m4 exists and is not made by aclocal,
+  # don't run aclocal.
+
+  if (-f 'aclocal.m4')
+    {
+      my $aclocal_m4 = new Autom4te::XFile 'aclocal.m4';
+      $_ = $aclocal_m4->getline;
+      $uses_aclocal = 0
+	unless defined ($_) && /generated.*by aclocal/;
+    }
+
+  # If there are flags for aclocal in Makefile.am, use them.
+  my $aclocal_flags = '';
+  if ($uses_aclocal && -f 'Makefile.am')
+    {
+      my $makefile = new Autom4te::XFile 'Makefile.am';
+      while ($_ = $makefile->getline)
+	{
+	  if (/^ACLOCAL_[A-Z_]*FLAGS\s*=\s*(.*)/)
+	    {
+	      $aclocal_flags = $1;
+	      last;
+	    }
+	}
+    }
+
+  if (!$uses_aclocal)
+    {
+      verb "$configure_ac: not using aclocal";
+    }
+  else
+    {
+      # Some file systems have sub-second time stamps, and if so we may
+      # run into trouble later, after we rerun autoconf and set the
+      # time stamps of input files to be no greater than aclocal.m4,
+      # because the time-stamp-setting operation (utime) has a
+      # resolution of only 1 second.  Work around the problem by
+      # ensuring that there is at least a one-second window before the
+      # time stamp of aclocal.m4t in which no file time stamps can
+      # fall.
+      sleep 1;
+
+      run_aclocal ($aclocal, $aclocal_flags);
+    }
+
+  # We might have to rerun aclocal if Libtool (or others) imports new
+  # macros.
+  my $rerun_aclocal = 0;
+
+
+
+  # ------------------------------- #
+  # See what tools will be needed.  #
+  # ------------------------------- #
+
+  # Perform a single trace reading to avoid --force forcing a rerun
+  # between two --trace invocations, which would be useless.  If there
+  # is no AC_INIT, then we are not interested: it looks like a Cygnus
+  # thingy.
+  my $aux_dir;
+  my $uses_gettext_via_traces;
+  my $uses_libtool;
+  my $uses_libltdl;
+  my $uses_autoheader;
+  my $uses_automake;
+  my @subdir;
+  verb "$configure_ac: tracing";
+  my $traces = new Autom4te::XFile
+    ("$autoconf"
+     . join (' ',
+	     map { ' --trace=' . $_ . ':\$n::\${::}%' }
+	     # If you change this list, update the
+	     # `Autoreconf-preselections' section of autom4te.in.
+	     'AC_CONFIG_AUX_DIR',
+	     'AC_CONFIG_HEADERS',
+	     'AC_CONFIG_SUBDIRS',
+	     'AC_INIT',
+	     'AC_PROG_LIBTOOL',
+	     'LT_INIT',
+	     'LT_CONFIG_LTDL_DIR',
+	     'AM_GNU_GETTEXT',
+	     'AM_INIT_AUTOMAKE',
+	    )
+     . ' |');
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($macro, @args) = split (/::/);
+      $aux_dir = $args[0]           if $macro eq "AC_CONFIG_AUX_DIR";
+      $uses_autoconf = 1            if $macro eq "AC_INIT";
+      $uses_gettext_via_traces = 1  if $macro eq "AM_GNU_GETTEXT";
+      $uses_libtool = 1             if $macro eq "AC_PROG_LIBTOOL"
+                                       || $macro eq "LT_INIT";
+      $uses_libltdl = 1             if $macro eq "LT_CONFIG_LTDL_DIR";
+      $uses_autoheader = 1          if $macro eq "AC_CONFIG_HEADERS";
+      $uses_automake = 1            if $macro eq "AM_INIT_AUTOMAKE";
+      push @subdir, split (' ', $args[0])
+                                    if $macro eq "AC_CONFIG_SUBDIRS" && $recursive;
+    }
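+  # With the trace format `$n::${::}%' requested above, each trace line
+  # looks like, e.g. (hypothetical configure.ac):
+  #
+  #   AC_CONFIG_AUX_DIR::build-aux
+  #   AC_INIT::my-package::1.0::bugs@example.org
+  #
+  # i.e., the macro name followed by its arguments, joined with `::',
+  # which is what the split above expects.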
+
+  # The subdirs are *optional*: they may not exist.
+  foreach (@subdir)
+    {
+      if (-d)
+	{
+	  verb "$configure_ac: adding subdirectory $_ to autoreconf";
+	  autoreconf ($_);
+	}
+      else
+	{
+	  verb "$configure_ac: subdirectory $_ not present";
+	}
+    }
+
+  # Gettext consistency checks...
+  error "$configure_ac: AM_GNU_GETTEXT is used, but not AM_GNU_GETTEXT_VERSION"
+    if $uses_gettext_via_traces && ! $uses_gettext;
+  error "$configure_ac: AM_GNU_GETTEXT_VERSION is used, but not AM_GNU_GETTEXT"
+    if $uses_gettext && ! $uses_gettext_via_traces;
+
+
+  # ---------------------------- #
+  # Setting up the source tree.  #
+  # ---------------------------- #
+
+  # libtoolize, automake --add-missing etc. will drop files in the
+  # $AUX_DIR.  But these tools fail to install these files if the
+  # directory itself does not exist, which is valid: just imagine a CVS
+  # repository with hand-written code only (there is not even a need
+  # for a Makefile.am!).
+
+  if (defined $aux_dir && ! -d $aux_dir)
+    {
+      verb "$configure_ac: creating directory $aux_dir";
+      mkdir $aux_dir, 0755
+	or error "cannot create $aux_dir: $!";
+    }
+
+
+  # -------------------- #
+  # Running libtoolize.  #
+  # -------------------- #
+
+  if (!$uses_libtool)
+    {
+      verb "$configure_ac: not using Libtool";
+    }
+  elsif ($install)
+    {
+      if ($uses_libltdl)
+	{
+	  $libtoolize .= " --ltdl";
+	}
+      xsystem_hint ("libtoolize is needed because this package uses Libtool", $libtoolize);
+      $rerun_aclocal = 1;
+    }
+  else
+    {
+      verb "$configure_ac: not running libtoolize: --install not given";
+    }
+
+
+
+  # ------------------- #
+  # Rerunning aclocal.  #
+  # ------------------- #
+
+  # If we re-installed Libtool or Gettext, the macros might have changed.
+  # Automake also needs an up-to-date aclocal.m4.
+  if ($rerun_aclocal)
+    {
+      if (!$uses_aclocal)
+	{
+	  verb "$configure_ac: not using aclocal";
+	}
+      else
+	{
+	  run_aclocal ($aclocal, $aclocal_flags);
+	}
+    }
+
+
+  # ------------------ #
+  # Running autoconf.  #
+  # ------------------ #
+
+  # Don't try to be smarter than `autoconf', which does its own
+  # up-to-date checks.
+  #
+  # We prefer running autoconf before autoheader, because (i) the
+  # latter runs the former, and (ii) autoconf is stricter than
+  # autoheader.  So all in all, autoconf should give better error
+  # messages.
+  xsystem ($autoconf);
+
+
+  # -------------------- #
+  # Running autoheader.  #
+  # -------------------- #
+
+  # We now consider that if AC_CONFIG_HEADERS is used, then autoheader
+  # is used too.
+  #
+  # Just as for autoconf, the up-to-date check is performed by the tool
+  # itself.
+  #
+  # Run it before automake, since the latter checks the presence of
+  # config.h.in when it sees an AC_CONFIG_HEADERS.
+  if (!$uses_autoheader)
+    {
+      verb "$configure_ac: not using Autoheader";
+    }
+  else
+    {
+      xsystem ($autoheader);
+    }
+
+
+  # ------------------ #
+  # Running automake.  #
+  # ------------------ #
+
+  if (!$uses_automake)
+    {
+      verb "$configure_ac: not using Automake";
+    }
+  else
+    {
+      # We should always run automake, and let it decide whether it shall
+      # update the file or not.  In fact, the effect of `$force' is already
+      # included in `$automake' via `--no-force'.
+      xsystem ($automake);
+    }
+
+
+  # -------------- #
+  # Running make.  #
+  # -------------- #
+
+  if ($run_make)
+    {
+      if (!-f "config.status")
+	{
+	  verb "no config.status: cannot re-make";
+	}
+      else
+	{
+	  xsystem ("./config.status --recheck");
+	  xsystem ("./config.status");
+	  if (!-f "Makefile")
+	    {
+	      verb "no Makefile: cannot re-make";
+	    }
+	  else
+	    {
+	      xsystem ("$make");
+	    }
+	}
+    }
+}
+
+
+# &autoreconf ($DIRECTORY)
+# ------------------------
+# Reconf the $DIRECTORY.
+sub autoreconf ($)
+{
+  my ($directory) = @_;
+  my $cwd = cwd;
+
+  # The format for this message is not free: taken from Emacs, itself
+  # using GNU Make's format.
+  verb "Entering directory `$directory'";
+  chdir $directory
+    or error "cannot chdir to $directory: $!";
+
+  autoreconf_current_directory;
+
+  # The format is not free: taken from Emacs, itself using GNU Make's
+  # format.
+  verb "Leaving directory `$directory'";
+  chdir $cwd
+    or error "cannot chdir to $cwd: $!";
+}
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+# When debugging, it is convenient that all the related temporary
+# files be at the same place.
+mktmpdir ('ar');
+$ENV{'TMPDIR'} = $tmp;
+parse_args;
+
+# Autoreconf all the given configure.ac.  Unless `--no-recursive' is passed,
+# AC_CONFIG_SUBDIRS will be traversed in &autoreconf_current_directory.
+$ENV{'AUTOM4TE'} = $autom4te;
+for my $directory (@ARGV)
+  {
+    require_configure_ac ($directory);
+    autoreconf ($directory);
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoscan b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoscan
new file mode 100755
index 0000000..02dfb69
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoscan
@@ -0,0 +1,679 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoscan.in; do not edit by hand.
+
+# autoscan - Create configure.scan (a preliminary configure.ac) for a package.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
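+# The `eval ... if 0' line above is a shell/Perl polyglot: if this script
+# is started by a Bourne shell, the eval'ed `case' re-executes it under
+# Perl; when Perl itself parses the file, the trailing `if 0' turns the
+# eval into a no-op.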
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Configure_ac;
+use Autom4te::General;
+use Autom4te::FileUtils;
+use Autom4te::XFile;
+use File::Basename;
+use File::Find;
+use strict;
+
+use vars qw(@cfiles @makefiles @shfiles @subdirs %printed);
+
+# The kind of the words we are looking for.
+my @kinds = qw (function header identifier program
+		makevar library);
+
+# For each kind, the default macro.
+my %generic_macro =
+  (
+   'function'   => 'AC_CHECK_FUNCS',
+   'header'     => 'AC_CHECK_HEADERS',
+   'identifier' => 'AC_CHECK_TYPES',
+   'program'    => 'AC_CHECK_PROGS',
+   'library'    => 'AC_CHECK_LIB'
+  );
+
+my %kind_comment =
+  (
+   'function'   => 'Checks for library functions.',
+   'header'     => 'Checks for header files.',
+   'identifier' => 'Checks for typedefs, structures, and compiler characteristics.',
+   'program'    => 'Checks for programs.',
+  );
+
+# $USED{KIND}{ITEM} is the list of locations where the ITEM (of KIND) was used
+# in the user package.
+# For instance $USED{function}{alloca} is the list of `file:line' where
+# `alloca (...)' appears.
+my %used = ();
+
+# $MACRO{KIND}{ITEM} is the list of macros to use to test ITEM.
+# Initialized from lib/autoscan/*.  E.g., $MACRO{function}{alloca} contains
+# the singleton AC_FUNC_ALLOCA.  Some require several checks.
+my %macro = ();
+
+# $NEEDED_MACROS{MACRO} is an array of locations requiring MACRO.
+# E.g., $NEEDED_MACROS{AC_FUNC_ALLOC} the list of `file:line' containing
+# `alloca (...)'.
+my %needed_macros =
+  (
+   'AC_PREREQ' => [$me],
+  );
+
+my $configure_scan = 'configure.scan';
+my $log;
+
+# Autoconf and lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/x86_64-apple-darwin/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+my @prepend_include;
+my @include = ('//share/autoconf');
+
+# $help
+# -----
+$help = "Usage: $0 [OPTION]... [SRCDIR]
+
+Examine source files in the directory tree rooted at SRCDIR, or the
+current directory if none is given.  Search the source files for
+common portability problems, check for incompleteness of
+`configure.ac', and create a file `$configure_scan' which is a
+preliminary `configure.ac' for that package.
+
+  -h, --help          print this help, then exit
+  -V, --version       print version number, then exit
+  -v, --verbose       verbosely report processing
+  -d, --debug         don't remove temporary files
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $version
+# --------
+$version = "autoscan (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+
+
+
+## ------------------------ ##
+## Command line interface.  ##
+## ------------------------ ##
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ('I|include=s' => \@include,
+	  'B|prepend-include=s' => \@prepend_include);
+
+  die "$me: too many arguments
+Try `$me --help' for more information.\n"
+    if @ARGV > 1;
+
+  my $srcdir = $ARGV[0] || ".";
+
+  verb "srcdir = $srcdir";
+  chdir $srcdir || error "cannot cd to $srcdir: $!";
+}
+
+
+# init_tables ()
+# --------------
+# Put values in the tables of what to do with each token.
+sub init_tables ()
+{
+  # The data file format supports only one line of macros per function.
+  # If more than that is required for a common portability problem,
+  # a new Autoconf macro should probably be written for that case,
+  # instead of duplicating the code in lots of configure.ac files.
+  my $file = find_file ("autoscan/autoscan.list",
+			reverse (@prepend_include), @include);
+  my $table = new Autom4te::XFile "< " . open_quote ($file);
+  my $tables_are_consistent = 1;
+
+  while ($_ = $table->getline)
+    {
+      # Ignore blank lines and comments.
+      next
+	if /^\s*$/ || /^\s*\#/;
+
+      # '<kind>: <word> <macro invocation>' or...
+      # '<kind>: <word> warn: <message>'.
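+      # For instance (illustrative entries; see the autoscan.list file
+      # located above for the real ones):
+      #
+      #   function: alloca AC_FUNC_ALLOCA
+      #   header: unistd.h AC_CHECK_HEADERS
+      #   program: bison AC_PROG_YACC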
+      if (/^(\S+):\s+(\S+)\s+(\S.*)$/)
+	{
+	  my ($kind, $word, $macro) = ($1, $2, $3);
+	  error "$file:$.: invalid kind: $_"
+	    unless grep { $_ eq $kind } @kinds;
+	  push @{$macro{$kind}{$word}}, $macro;
+	}
+      else
+	{
+	  error "$file:$.: invalid definition: $_";
+	}
+    }
+
+  if ($debug)
+    {
+      foreach my $kind (@kinds)
+	{
+	  foreach my $word (sort keys %{$macro{$kind}})
+	    {
+	      print "$kind: $word: @{$macro{$kind}{$word}}\n";
+	    }
+	}
+
+    }
+}
+
+
+# used ($KIND, $WORD, [$WHERE])
+# -----------------------------
+# $WORD is used as a $KIND.
+sub used ($$;$)
+{
+  my ($kind, $word, $where) = @_;
+  $where ||= "$File::Find::name:$.";
+  if (
+      # Check for all the libraries.  But `-links' is certainly a
+      # `find' argument, and `-le', a `test' argument.
+      ($kind eq 'library' && $word !~ /^(e|inks)$/)
+      # Other than libraries are to be checked only if listed in
+      # the Autoscan library files.
+      || defined $macro{$kind}{$word}
+     )
+    {
+      push (@{$used{$kind}{$word}}, $where);
+    }
+}
+
+
+
+## ----------------------- ##
+## Scanning source files.  ##
+## ----------------------- ##
+
+
+# scan_c_file ($FILE-NAME)
+# ------------------------
+sub scan_c_file ($)
+{
+  my ($file_name) = @_;
+  push @cfiles, $File::Find::name;
+
+  # Nonzero if in a multiline comment.
+  my $in_comment = 0;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      if ($in_comment && s,^.*?\*/,,)
+	{
+	  $in_comment = 0;
+	}
+      # The whole line is inside a comment.
+      next if $in_comment;
+      # All on one line.
+      s,/\*.*?\*/,,g;
+
+      # Starting on this line.
+      if (s,/\*.*$,,)
+	{
+	  $in_comment = 1;
+	}
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*//)
+	{
+	  if (/^include\s*<([^>]*)>/)
+	    {
+	      used ('header', $1);
+	    }
+	  if (s/^(if|ifdef|ifndef|elif)\s+//)
+	    {
+	      foreach my $word (split (/\W+/))
+		{
+		  used ('identifier', $word)
+		    unless $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+		}
+	    }
+	  # Ignore other preprocessor directives.
+	  next;
+	}
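+      # For example (illustrative input), `#include <unistd.h>' is passed
+      # to &used as the header `unistd.h', and each word after `#ifdef' or
+      # `#elif' is passed as an identifier; &used only records the words
+      # known to the Autoscan word list.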
+
+      # Remove string and character constants.
+      s,\"[^\"]*\",,g;
+      s,\'[^\']*\',,g;
+
+      # Tokens in the code.
+      # Maybe we should ignore function definitions (in column 0)?
+      while (s/\b([a-zA-Z_]\w*)\s*\(/ /)
+	{
+	  used ('function', $1);
+	}
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('identifier', $1);
+	}
+    }
+
+  $file->close;
+}
+
+
+# scan_makefile($MAKEFILE-NAME)
+# -----------------------------
+sub scan_makefile ($)
+{
+  my ($file_name) = @_;
+  push @makefiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      s/#.*//;
+
+      # Variable assignments.
+      while (s/\b([a-zA-Z_]\w*)\s*=/ /)
+	{
+	  used ('makevar', $1);
+	}
+      # Be sure to catch a whole word.  For instance `lex$U.$(OBJEXT)'
+      # is a single token.  Otherwise we might believe `lex' is needed.
+      foreach my $word (split (/\s+/))
+	{
+	  # Libraries.
+	  if ($word =~ /^-l([a-zA-Z_]\w*)$/)
+	    {
+	      used ('library', $1);
+	    }
+	  # Tokens in the code.
+	  # We allow some additional characters, e.g., `+', since
+	  # autoscan/programs includes `c++'.
+	  if ($word =~ /^[a-zA-Z_][\w+]*$/)
+	    {
+	      used ('program', $word);
+	    }
+	}
+    }
+
+  $file->close;
+}
+
+
+# scan_sh_file($SHELL-SCRIPT-NAME)
+# --------------------------------
+sub scan_sh_file ($)
+{
+  my ($file_name) = @_;
+  push @shfiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments and variable references.
+      s/#.*//;
+      s/\${[^\}]*}//g;
+      s/@[^@]*@//g;
+
+      # Tokens in the code.
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('program', $1);
+	}
+    }
+
+  $file->close;
+}
+
+
+# scan_file ()
+# ------------
+# Called by &find on each file.  $_ contains the current file name within
+# the current directory of the walk-through.
+sub scan_file ()
+{
+  # Wanted only if there is no corresponding FILE.in.
+  return
+    if -f "$_.in";
+
+  # Save $_ as File::Find requires it to be preserved.
+  local $_ = $_;
+
+  # Strip a useless leading `./'.
+  $File::Find::name =~ s,^\./,,;
+
+  if ($_ ne '.' and -d $_ and
+      -f "$_/configure.in"  ||
+      -f "$_/configure.ac"  ||
+      -f "$_/configure.gnu" ||
+      -f "$_/configure")
+    {
+      $File::Find::prune = 1;
+      push @subdirs, $File::Find::name;
+    }
+  if (/\.[chlym](\.in)?$/)
+    {
+      used 'program', 'cc', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif (/\.(cc|cpp|cxx|CC|C|hh|hpp|hxx|HH|H|yy|ypp|ll|lpp)(\.in)?$/)
+    {
+      used 'program', 'c++', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif ((/^((?:GNUm|M|m)akefile)(\.in)?$/ && ! -f "$1.am")
+	 || /^(?:GNUm|M|m)akefile(\.am)?$/)
+    {
+      scan_makefile ($_);
+    }
+  elsif (/\.sh(\.in)?$/)
+    {
+      scan_sh_file ($_);
+    }
+}
+
+
+# scan_files ()
+# -------------
+# Read through the files and collect lists of tokens in them
+# that might create nonportabilities.
+sub scan_files ()
+{
+  find (\&scan_file, '.');
+
+  if ($verbose)
+    {
+      print "cfiles: @cfiles\n";
+      print "makefiles: @makefiles\n";
+      print "shfiles: @shfiles\n";
+
+      foreach my $kind (@kinds)
+	{
+	  print "\n$kind:\n";
+	  foreach my $word (sort keys %{$used{$kind}})
+	    {
+	      print "$word: @{$used{$kind}{$word}}\n";
+	    }
+	}
+    }
+}
+
+
+## ----------------------- ##
+## Output configure.scan.  ##
+## ----------------------- ##
+
+
+# output_kind ($FILE, $KIND)
+# --------------------------
+sub output_kind ($$)
+{
+  my ($file, $kind) = @_;
+  # Lists of words to be checked with the generic macro.
+  my @have;
+
+  print $file "\n# $kind_comment{$kind}\n"
+    if exists $kind_comment{$kind};
+  foreach my $word (sort keys %{$used{$kind}})
+    {
+      # Output the needed macro invocations in $configure_scan if not
+      # already printed, and remember these macros are needed.
+      foreach my $macro (@{$macro{$kind}{$word}})
+	{
+	  if ($macro =~ /^warn:\s+(.*)/)
+	    {
+	      my $message = $1;
+	      foreach my $location (@{$used{$kind}{$word}})
+		{
+		  warn "$location: warning: $message\n";
+		}
+	    }
+	  elsif (exists $generic_macro{$kind}
+	      && $macro eq $generic_macro{$kind})
+	    {
+	      push (@have, $word);
+	      push (@{$needed_macros{"$generic_macro{$kind}([$word])"}},
+		    @{$used{$kind}{$word}});
+	    }
+	  else
+	    {
+	      if (! $printed{$macro})
+		{
+		  print $file "$macro\n";
+		  $printed{$macro} = 1;
+		}
+	      push (@{$needed_macros{$macro}},
+		    @{$used{$kind}{$word}});
+	    }
+	}
+    }
+  print $file "$generic_macro{$kind}([" . join(' ', sort(@have)) . "])\n"
+    if @have;
+}
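+# For example, if the scanned sources used `memset' and `strdup', and both
+# map to the generic AC_CHECK_FUNCS in the word list, the `function' kind
+# would be rendered as a single line (illustrative output):
+#
+#   AC_CHECK_FUNCS([memset strdup])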
+
+
+# output_libraries ($FILE)
+# ------------------------
+sub output_libraries ($)
+{
+  my ($file) = @_;
+
+  print $file "\n# Checks for libraries.\n";
+  foreach my $word (sort keys %{$used{'library'}})
+    {
+      print $file "# FIXME: Replace `main' with a function in `-l$word':\n";
+      print $file "AC_CHECK_LIB([$word], [main])\n";
+    }
+}
+
+
+# output ($CONFIGURE_SCAN)
+# ------------------------
+# Print a proto configure.ac.
+sub output ($)
+{
+  my $configure_scan = shift;
+  my %unique_makefiles;
+
+  my $file = new Autom4te::XFile "> " . open_quote ($configure_scan);
+
+  print $file
+    ("#                                               -*- Autoconf -*-\n" .
+     "# Process this file with autoconf to produce a configure script.\n" .
+     "\n" .
+     "AC_PREREQ([2.68])\n" .
+     "AC_INIT([FULL-PACKAGE-NAME], [VERSION], [BUG-REPORT-ADDRESS])\n");
+  if (defined $cfiles[0])
+    {
+      print $file "AC_CONFIG_SRCDIR([$cfiles[0]])\n";
+      print $file "AC_CONFIG_HEADERS([config.h])\n";
+    }
+
+  output_kind ($file, 'program');
+  output_kind ($file, 'makevar');
+  output_libraries ($file);
+  output_kind ($file, 'header');
+  output_kind ($file, 'identifier');
+  output_kind ($file, 'function');
+
+  print $file "\n";
+  if (@makefiles)
+    {
+      # Change DIR/Makefile.in or DIR/Makefile.am to DIR/Makefile.
+      foreach my $m (@makefiles)
+	{
+	  $m =~ s/\.(?:in|am)$//;
+	  $unique_makefiles{$m}++;
+	}
+      print $file ("AC_CONFIG_FILES([",
+		   join ("\n                 ",
+			 sort keys %unique_makefiles), "])\n");
+    }
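+  # For a (hypothetical) tree with Makefile.am files in `.' and `src',
+  # this emits:
+  #
+  #   AC_CONFIG_FILES([Makefile
+  #                    src/Makefile])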
+  if (@subdirs)
+    {
+      print $file ("AC_CONFIG_SUBDIRS([",
+		   join ("\n                   ",
+			 sort @subdirs), "])\n");
+    }
+  print $file "AC_OUTPUT\n";
+
+  $file->close;
+}
+
+
+
+## --------------------------------------- ##
+## Checking the accuracy of configure.ac.  ##
+## --------------------------------------- ##
+
+
+# &check_configure_ac ($CONFIGURE_AC)
+# -----------------------------------
+# Use autoconf to check if all the suggested macros are included
+# in CONFIGURE_AC.
+sub check_configure_ac ($)
+{
+  my ($configure_ac) = @_;
+
+  # Find what needed macros are invoked in CONFIGURE_AC.
+  # I'd be very happy if someone could explain to me why sort (uniq ...)
+  # doesn't work properly: I need `uniq (sort ...)'.  --akim
+  my $trace_option =
+    join (' --trace=', '',
+	  uniq (sort (map { s/\(.*//; $_ } keys %needed_macros)));
+
+  verb "running: $autoconf $trace_option $configure_ac";
+  my $traces =
+    new Autom4te::XFile "$autoconf $trace_option $configure_ac |";
+
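+  # With autoconf's default trace format, each line typically looks like
+  # (hypothetical example):
+  #
+  #   configure.ac:12:AC_CHECK_HEADERS:stdlib.h unistd.h
+  #
+  # i.e., `file:line:macro:arguments', which the split below relies on.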
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($file, $line, $macro, @args) = split (/:/, $_);
+      if ($macro =~ /^AC_CHECK_(HEADER|FUNC|TYPE|MEMBER)S$/)
+	{
+	  # To be rigorous, we should distinguish between space- and
+	  # comma-separated arguments.  But there is no point.
+	  foreach my $word (split (/\s|,/, $args[0]))
+	    {
+	      # AC_CHECK_MEMBERS wants `struct' or `union'.
+	      if ($macro eq "AC_CHECK_MEMBERS"
+		  && $word =~ /^stat.st_/)
+		{
+		  $word = "struct " . $word;
+		}
+	      delete $needed_macros{"$macro([$word])"};
+	    }
+	}
+      else
+	{
+	  delete $needed_macros{$macro};
+	}
+    }
+
+  $traces->close;
+
+  # Report the missing macros.
+  foreach my $macro (sort keys %needed_macros)
+    {
+      warn ("$configure_ac: warning: missing $macro wanted by: "
+	    . (${$needed_macros{$macro}}[0])
+	    . "\n");
+      print $log "$me: warning: missing $macro wanted by: \n";
+      foreach my $need (@{$needed_macros{$macro}})
+	{
+	  print $log "\t$need\n";
+	}
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$log = new Autom4te::XFile "> " . open_quote ("$me.log");
+
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+my $configure_ac = find_configure_ac;
+init_tables;
+scan_files;
+output ('configure.scan');
+if (-f $configure_ac)
+  {
+    check_configure_ac ($configure_ac);
+  }
+# This close is really needed.  For some reason, probably best named
+# a bug, it seems that the dtor of $LOG is not called automatically
+# at END.  It results in a truncated file.
+$log->close;
+exit 0;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoupdate b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoupdate
new file mode 100755
index 0000000..7cc765d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/autoupdate
@@ -0,0 +1,1064 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoupdate.in; do not edit by hand.
+
+# autoupdate - modernize an Autoconf file.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David MacKenzie <djm@gnu.ai.mit.edu>.
+# Rewritten by Akim Demaille <akim@freefriends.org>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/x86_64-apple-darwin/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+# We need to find m4sugar.
+my @prepend_include;
+my @include = ('//share/autoconf');
+my $force = 0;
+# m4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]...
+
+Update each TEMPLATE-FILE if given, or `configure.ac' if present,
+or else `configure.in', to the syntax of the current version of
+Autoconf.  The original files are backed up.
+
+Operation modes:
+  -h, --help                 print this help, then exit
+  -V, --version              print version number, then exit
+  -v, --verbose              verbosely report processing
+  -d, --debug                don't remove temporary files
+  -f, --force                consider all files obsolete
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version = "autoupdate (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'f|force'             => \$force);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+
+# ----------------- #
+# Autoconf macros.  #
+# ----------------- #
+
+my (%ac_macros, %au_macros, %m4_builtins);
+
+# HANDLE_AUTOCONF_MACROS ()
+# -------------------------
+# Compute %m4_builtins, %ac_macros and %au_macros.
+sub handle_autoconf_macros ()
+{
+  # Get the builtins.
+  xsystem ("echo dumpdef | $m4 2>" . shell_quote ("$tmp/m4.defs") . " >/dev/null");
+  my $m4_defs = new Autom4te::XFile "< " . open_quote ("$tmp/m4.defs");
+  while ($_ = $m4_defs->getline)
+    {
+      $m4_builtins{$1} = 1
+	if /^(\w+):/;
+    }
+  $m4_defs->close;
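+  # `dumpdef' writes one definition per line to stderr, typically in the
+  # form `name:  definition' (e.g. `divert: <divert>' for a builtin); only
+  # the leading name is of interest here.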
+
+  my $macros = new Autom4te::XFile ("$autoconf"
+				    . " --trace AU_DEFINE:'AU:\$f:\$1'"
+				    . " --trace define:'AC:\$f:\$1'"
+				    . " --melt /dev/null |");
+  while ($_ = $macros->getline)
+    {
+      chomp;
+      my ($domain, $file, $macro) = /^(AC|AU):(.*):([^:]*)$/ or next;
+      if ($domain eq "AU")
+	{
+	  $au_macros{$macro} = 1;
+	}
+      elsif ($file =~ /(^|\/)m4sugar\/(m4sugar|version)\.m4$/)
+	{
+	  # Add the m4sugar macros to m4_builtins.
+	  $m4_builtins{$macro} = 1;
+	}
+      else
+	{
+	  # Autoconf, aclocal, and m4sh macros.
+	  $ac_macros{$macro} = 1;
+	}
+    }
+  $macros->close;
+
+
+  # Don't keep AU macros in %ac_macros.
+  delete $ac_macros{$_}
+    foreach (keys %au_macros);
+  # Don't keep M4sugar macros which are redefined by Autoconf,
+  # such as `builtin', `changequote' etc.  See autoconf/autoconf.m4.
+  delete $ac_macros{$_}
+    foreach (keys %m4_builtins);
+  error "no current Autoconf macros found"
+    unless keys %ac_macros;
+  error "no obsolete Autoconf macros found"
+    unless keys %au_macros;
+
+  if ($debug)
+    {
+      print STDERR "Current Autoconf macros:\n";
+      print STDERR join (' ', sort keys %ac_macros) . "\n\n";
+      print STDERR "Obsolete Autoconf macros:\n";
+      print STDERR join (' ', sort keys %au_macros) . "\n\n";
+    }
+
+  # ac.m4 -- autoquoting definitions of the AC macros (M4sugar excluded).
+  # unac.m4 -- undefine the AC macros.
+  my $ac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/ac.m4");
+  print $ac_m4 "# ac.m4 -- autoquoting definitions of the AC macros.\n";
+  my $unac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unac.m4");
+  print $unac_m4 "# unac.m4 -- undefine the AC macros.\n";
+  foreach (sort keys %ac_macros)
+    {
+      print $ac_m4   "_au_m4_define([$_], [m4_if(\$#, 0, [[\$0]], [[\$0(\$\@)]])])\n";
+      print $unac_m4 "_au_m4_undefine([$_])\n";
+    }
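+  # For a (hypothetical) current macro AC_FOO, the two files thus receive:
+  #
+  #   _au_m4_define([AC_FOO], [m4_if($#, 0, [[$0]], [[$0($@)]])])
+  #   _au_m4_undefine([AC_FOO])
+  #
+  # i.e., AC_FOO rewrites its own calls, keeping arguments and quotation.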
+
+  # m4save.m4 -- save the m4 builtins.
+  # unm4.m4 -- disable the m4 builtins.
+  # m4.m4 -- enable the m4 builtins.
+  my $m4save_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4save.m4");
+  print $m4save_m4 "# m4save.m4 -- save the m4 builtins.\n";
+  my $unm4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unm4.m4");
+  print $unm4_m4 "# unm4.m4 -- disable the m4 builtins.\n";
+  my $m4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4.m4");
+  print $m4_m4 "# m4.m4 -- enable the m4 builtins.\n";
+  foreach (sort keys %m4_builtins)
+    {
+      print $m4save_m4 "_au__save([$_])\n";
+      print $unm4_m4   "_au__undefine([$_])\n";
+      print $m4_m4     "_au__restore([$_])\n";
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --force" if $force;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+mktmpdir ('au');
+handle_autoconf_macros;
+
+# $au_changequote -- enable the quote `[', `]' right before any AU macro.
+my $au_changequote =
+  's/\b(' . join ('|', keys %au_macros) . ')\b/_au_m4_changequote([,])$1/g';
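+# Applied to a (hypothetical) input line using an obsolete macro OLD,
+# e.g. `OLD(1, 2)', the substitution above yields
+# `_au_m4_changequote([,])OLD(1, 2)', so quoting is reenabled right
+# before the AU macro is expanded.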
+
+# au.m4 -- definitions of the AU macros.
+xsystem ("$autoconf --trace AU_DEFINE:'_au_defun(\@<:\@\$1\@:>\@,
+\@<:\@\$2\@:>\@)' --melt /dev/null "
+	. ">" . shell_quote ("$tmp/au.m4"));
+
+
+
+## ------------------- ##
+## Process the files.  ##
+## ------------------- ##
+
+foreach my $file (@ARGV)
+  {
+    # We need an actual file.
+    if ($file eq '-')
+      {
+	$file = "$tmp/stdin";
+	system "cat >" . shell_quote ($file);
+      }
+    elsif (! -r "$file")
+      {
+	die "$me: $file: No such file or directory";
+      }
+
+    # input.m4 -- m4 program to produce the updated file.
+    # Load the values, the dispatcher, neutralize m4, and the prepared
+    # input file.
+    my $input_m4 = <<\EOF;
+      divert(-1)                                            -*- Autoconf -*-
+      changequote([,])
+
+      # Define our special macros:
+      define([_au__defn], defn([defn]))
+      define([_au__divert], defn([divert]))
+      define([_au__ifdef], defn([ifdef]))
+      define([_au__include], defn([include]))
+      define([_au___undefine], defn([undefine]))
+      define([_au__undefine], [_au__ifdef([$1], [_au___undefine([$1])])])
+      define([_au__save], [m4_ifdef([$1],
+	[m4_define([_au_$1], _m4_defn([$1]))])])
+      define([_au__restore],
+	[_au_m4_ifdef([_au_$1],
+	  [_au_m4_define([$1], _au__defn([_au_$1]))])])
+
+      # Set up m4sugar.
+      include(m4sugar/m4sugar.m4)
+
+      # Redefine __file__ to make warnings nicer; $file is replaced below.
+      m4_define([__file__], [$file])
+
+      # Redefine m4_location to fix the line number.
+      m4_define([m4_location], [__file__:m4_eval(__line__ - _au__first_line)])
+
+      # Move all the builtins into the `_au_' pseudo namespace
+      m4_include([m4save.m4])
+
+      # _au_defun(NAME, BODY)
+      # ---------------------
+      # Define NAME to BODY, plus AU activation/deactivation.
+      _au_m4_define([_au_defun],
+      [_au_m4_define([$1],
+      [_au_enable()dnl
+      $2[]dnl
+      _au_disable()])])
+
+      # Import the definition of the obsolete macros.
+      _au__include([au.m4])
+
+
+      ## ------------------------ ##
+      ## _au_enable/_au_disable.  ##
+      ## ------------------------ ##
+
+      # They work in pairs: each time an AU macro is activated, it runs
+      # _au_enable, and at its end it runs _au_disable (see _au_defun
+      # above).  AU macros might use AU macros, so enabling/disabling
+      # should happen only for the outermost AU macro.
+      #
+      # `_au_enabled' is used to this end, determining whether we really
+      # enable/disable.
+
+
+      # __au_enable
+      # -----------
+      # Reenable the builtins, m4sugar, and the autoquoting AC macros.
+      _au_m4_define([__au_enable],
+      [_au__divert(-1)
+      # Enable special characters.
+      _au_m4_changecom([#])
+
+      _au__include([m4.m4])
+      _au__include([ac.m4])
+
+      _au__divert(0)])
+
+      # _au_enable
+      # ----------
+      # Called at the beginning of all the obsolete macros.  If this is the
+      # outermost level, call __au_enable.
+      _au_m4_define([_au_enable],
+      [_au_m4_ifdef([_au_enabled],
+		 [],
+		 [__au_enable()])_au_dnl
+      _au_m4_pushdef([_au_enabled])])
+
+
+      # __au_disable
+      # ------------
+      # Disable the AC autoquoting macros, m4sugar, and m4.
+      _au_m4_define([__au_disable],
+      [_au__divert(-1)
+      _au__include([unac.m4])
+      _au__include([unm4.m4])
+
+      # Disable special characters.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au__divert(0)])
+
+      # _au_disable
+      # -----------
+      # Called at the end of all the obsolete macros.  If we are at the
+      # outermost level, call __au_disable.
+      _au_m4_define([_au_disable],
+      [_au_m4_popdef([_au_enabled])_au_dnl
+      _au_m4_ifdef([_au_enabled],
+		[],
+		[__au_disable()])])
+
+
+      ## ------------------------------- ##
+      ## Disable, and process the file.  ##
+      ## ------------------------------- ##
+      # The AC autoquoting macros are not loaded yet, hence invoking
+      # `_au_disable' would be wrong.
+      _au__include([unm4.m4])
+
+      # Disable special characters, and set the first line number.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au_m4_define(_au__first_line, _au___line__)_au__divert(0)_au_dnl
+EOF
+
+    $input_m4 =~ s/^      //mg;
+    $input_m4 =~ s/\$file/$file/g;
+
+    # prepared input -- input, but reenables the quote before each AU macro.
+    open INPUT_M4, "> " . open_quote ("$tmp/input.m4")
+       or error "cannot open: $!";
+    open FILE, "< " . open_quote ($file)
+       or error "cannot open: $!";
+    print INPUT_M4 "$input_m4";
+    while (<FILE>)
+       {
+	 eval $au_changequote;
+	 print INPUT_M4;
+       }
+    close FILE
+       or error "cannot close $file: $!";
+    close INPUT_M4
+       or error "cannot close $tmp/input.m4: $!";
+
+    # Now ask m4 to perform the update.
+    xsystem ("$m4 --include=" . shell_quote ($tmp)
+	     . join (' --include=', '', map { shell_quote ($_) } reverse (@prepend_include))
+	     . join (' --include=', '', map { shell_quote ($_) } @include)
+	     . " " . shell_quote ("$tmp/input.m4") . " > " . shell_quote ("$tmp/updated"));
+    update_file ("$tmp/updated",
+		 "$file" eq "$tmp/stdin" ? '-' : "$file");
+  }
+exit 0;
+
+
+#		  ## ---------------------------- ##
+#		  ## How `autoupdate' functions.  ##
+#		  ## ---------------------------- ##
+#
+# The task of `autoupdate' is not trivial: the biggest difficulty being
+# that you must limit the changes to the parts that really need to be
+# updated.  Finding a satisfying implementation proved to be quite hard,
+# as this is the fifth implementation of `autoupdate'.
+#
+# Below, we will use a simple example of an obsolete macro:
+#
+#     AU_DEFUN([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))])
+#     AC_DEFUN([NEW], [echo "sum($1) = $2"])
+#
+# the input file contains
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Of course the expected output is
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0], [0])
+#
+#
+# # First implementation: sed
+# # =========================
+#
+# The first implementation was only able to change the name of obsolete
+# macros.
+#
+# The file `acoldnames.m4' defined the old names based on the new names.
+# It was simple then to produce a sed script such as:
+#
+#     s/OLD/NEW/g
+#
+# Updating merely consisted in running this script on the file to
+# update.
+#
+# This scheme suffers from an obvious limitation: `autoupdate' was
+# unable to cope with new macros that just swap some of their arguments
+# compared to the old macros.  Fortunately, that was enough to upgrade
+# from Autoconf 1 to Autoconf 2.  (But I have no idea whether the
+# changes in Autoconf 2 were precisely limited by this constraint.)
+#
+#
+# # Second implementation: hooks
+# # ============================
+#
+# The version 2.15 of Autoconf brought a vast number of changes compared
+# to 2.13, so a solution was needed.  One could think of extending the
+# `sed' scripts with specialized code for complex macros.  However, this
+# approach is of course full of flaws:
+#
+# a. the Autoconf maintainers have to write these snippets, which we
+#    just don't want to do,
+#
+# b. I really don't think you'll ever manage to handle the quoting of
+#    m4 with a sed script.
+#
+# To satisfy a., let's remark that the code which implements the old
+# features in terms of the new features is exactly the code which should
+# replace the old code.
+#
+# To answer point b, as usual in the history of Autoconf, the answer, at
+# least on paper, is simple: m4 is the best tool to parse m4, so
+# let's use m4.
+#
+# Therefore the specification is:
+#
+#     I want to be able to tell Autoconf, well, m4, that the macro I
+#     am currently defining is an obsolete macro (so that the user is
+#     warned), and its code is the code to use when running autoconf,
+#     but that the very same code has to be used when running
+#     autoupdate.  To summarize, the interface I want is
+#     `AU_DEFUN(OLD-NAME, NEW-CODE)'.
+#
+#
+# Now for the technical details.
+#
+# When running autoconf, except for the warning, AU_DEFUN is basically
+# AC_DEFUN.
+#
+# When running autoupdate, we want *only* OLD-NAMEs to be expanded.
+# This obviously means that acgeneral.m4 and acspecific.m4 must not be
+# loaded.  Nonetheless, because we want to use a rich set of m4
+# features, m4sugar.m4 is needed.  Please note that the fact that
+# Autoconf's macros are not loaded is positive on two points:
+#
+# - we do get an updated `configure.ac', not a `configure'!
+#
+# - the old macros are replaced by *calls* to the new-macros, not the
+#   body of the new macros, since their body is not defined!!!
+#   (Whoa, that's really beautiful!).
+#
+# Additionally we need to disable the quotes when reading the input for
+# two reasons: first because otherwise `m4' will swallow the quotes of
+# other macros:
+#
+#     NEW([1, 2], 3)
+#     => NEW(1, 2, 3)
+#
+# and second, because we want to update the macro calls which are
+# quoted, i.e., we want
+#
+#     FOO([OLD(1, 2)])
+#     => FOO([NEW([1, 2], [3])])
+#
+# If we don't disable the quotes, only the macros called at the top
+# level would be updated.
+#
+# So, let's disable the quotes.
+#
+# Well, not quite: m4sugar.m4 still needs to use quotes for some macros.
+# Well, in this case, when running in autoupdate code, each macro first
+# reestablishes the quotes, expands itself, and disables the quotes.
+#
+# Thinking a bit more, you realize that in fact, people may use `define',
+# `ifelse' etc. in their files, and you certainly don't want to process
+# them.  Another example is `dnl': you don't want to remove the
+# comments.  You then realize you don't want exactly to import m4sugar:
+# you want to specify when it is enabled (macros active), and disabled.
+# m4sugar provides m4_disable/m4_enable to this end.
+#
+# You're getting close to it.  Now remains one task: how to handle
+# twofold definitions?
+#
+# Remember that the same AU_DEFUN must be understood in two different
+# ways, the AC way, and the AU way.
+#
+# One first solution is to check whether acgeneral.m4 was loaded.  But
+# that's definitely not cute.  Another is simply to install `hooks',
+# that is to say, to keep, in some place m4 knows about, late `define's
+# to be triggered *only* in AU mode.
+#
+# You first think of designing AU_DEFUN like this:
+#
+# 1. AC_DEFUN(OLD-NAME,
+#	      [Warn the user OLD-NAME is obsolete.
+#	       NEW-CODE])
+#
+# 2. Store for late AU binding([define(OLD_NAME,
+#				[Reestablish the quotes.
+#				 NEW-CODE
+#				 Disable the quotes.])])
+#
+# but this will not work: NEW-CODE probably uses $1, $2 etc. and these
+# guys will be replaced with the argument of `Store for late AU binding'
+# when you call it.
+#
+# I don't think there is a means to avoid this using this technology
+# (remember that $1 etc. are *always* expanded in m4).  You may also try
+# to replace them with $[1] to preserve them for a later evaluation, but
+# if `Store for late AU binding' is properly written, it will remain
+# quoted till the end...
+#
+# You have to change technology.  Since the problem is that `$1'
+# etc. should be `consumed' right away, one solution is to define now a
+# second macro, `AU_OLD-NAME', and to install a hook that binds OLD-NAME
+# to AU_OLD-NAME.  Then, autoupdate.m4 just needs to run the hooks.  By
+# the way, the same method was used in autoheader.
+#
+#
+# # Third implementation: m4 namespaces by m4sugar
+# # ==============================================
+#
+# Actually, this implementation was just a clean up of the previous
+# implementation: instead of defining hooks by hand, m4sugar was equipped
+# with `namespaces'.  What are they?
+#
+# Sometimes we want to disable some *set* of macros, and restore them
+# later.  We provide support for this via namespaces.
+#
+# There are basically three characters playing this scene: defining a
+# macro in a namespace, disabling a namespace, and restoring a namespace
+# (i.e., all the definitions it holds).
+#
+# Technically, to define a MACRO in NAMESPACE means to define the macro
+# named `NAMESPACE::MACRO' to the VALUE.  At the same time, we append
+# `undefine(NAME)' in the macro named `m4_disable(NAMESPACE)', and
+# similarly a binding of NAME to the value of `NAMESPACE::MACRO' in
+# `m4_enable(NAMESPACE)'.  These mechanisms allow us to bind the macros
+# of NAMESPACE and to unbind them at will.
+#
+# Of course this implementation is really inefficient: m4 has to grow
+# strings which can quickly become huge, which slows it significantly.
+#
+# In particular one should avoid as much as possible to use `define' for
+# temporaries.  Now that `define' has quite a complex meaning, it is an
+# expensive operation that should be limited to macros.  Use
+# `m4_define' for temporaries.
+#
+# Private copies of the macros we used in entering / exiting the m4sugar
+# namespace.  It is much more convenient than fighting with the renamed
+# version of define etc.
+#
+#
+#
+# Those two implementations suffered from serious problems:
+#
+# - namespaces were really expensive, and incurred a major performance
+#   loss on `autoconf' itself, not only `autoupdate'.  One solution
+#   would have been to limit the use of namespaces to `autoupdate', but
+#   that's again some complications on m4sugar, which really doesn't need
+#   this.  So we wanted to get rid of the namespaces.
+#
+# - since the quotes were disabled, autoupdate was sometimes making
+#   wrong guesses, for instance on:
+#
+#     foo([1, 2])
+#
+#   m4 saw 2 arguments: `[1' and `2]'.  A simple solution, somewhat
+#   fragile, is to reestablish the quotes right before all the obsolete
+#   macros, i.e., to use sed so that the previous text becomes
+#
+#     changequote([, ])foo([1, 2])
+#
+#   To this end, one wants to trace the definition of obsolete macros.
+#
+# It was there that the limitations of the namespace approach became
+# painful: because it was a complex machinery playing a lot with the
+# builtins of m4 (hence, quite fragile), tracing was almost impossible.
+#
+#
+# So this approach was dropped.
+#
+#
+# # The fourth implementation: two steps
+# # ====================================
+#
+# If you drop the uses of namespaces, you no longer can compute the
+# updated value, and replace the old call with it simultaneously.
+#
+# Obviously you will use m4 to compute the updated values, but you may
+# use some other tool to achieve the replacement.  Personally, I trust
+# nobody but m4 to parse m4, so below, m4 will perform the two tasks.
+#
+# How can m4 be used to replace *some* macro calls with newer values?
+# Well, that's dead simple: m4 should learn the definitions of obsolete
+# macros, forget its builtins, disable the quotes, and then run on the
+# input file, which amounts to doing this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# which will result in
+#
+#     dnl The Unbelievable Truth
+#     NEW(1, 2, m4_eval(1 + 2))
+#     NEW([0, 0],
+#	  0)
+#
+# Grpmh.  Two problems.  A minor problem: it would have been much better
+# to have the `m4_eval' computed, and a major problem: you lost the
+# quotation in the result.
+#
+# Let's address the big problem first.  One solution is to define any
+# modern macro to rewrite its calls with the proper quotation, thanks to
+# `$@'.  Again, tracing the `define's makes it possible to know which
+# these macros are, so your input is:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     define([NEW], [[NEW($@)]changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     changequote([, ])NEW([0, 0],
+#	  0)
+#
+# which results in
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2],[m4_eval(1 + 2)])
+#     NEW([0, 0],[0])
+#
+# Our problem is solved, i.e., the first call to `NEW' is properly
+# quoted, but we introduced another problem: we changed the layout of
+# the second call, which can be a drama in the case of huge macro calls
+# (think of `AC_TRY_RUN' for instance).  This example didn't show it,
+# but we also introduced parens to macros which did not have any:
+#
+#     AC_INIT
+#     => AC_INIT()
+#
+# No big deal for the semantics (unless the macro depends upon $#, which
+# is bad), but the users would not be happy.
+#
+# Additionally, we introduced quotes that were not there before, which is
+# OK in most cases, but could change the semantics of the file.
+#
+# Cruel dilemma: we do want the auto-quoting definition of `NEW' when
+# evaluating `OLD', but we don't when we evaluate the second `NEW'.
+# Back to namespaces?
+#
+# No.
+#
+#
+# # Second step: replacement
+# # ------------------------
+#
+# No, as announced above, we will work in two steps: in a first step we
+# compute the updated values, and in a second step we replace them.  Our
+# goal is something like this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([1, 2], [3])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., the new value of `OLD' is precomputed using the auto-quoting
+# definition of `NEW' and the m4 builtins.  We'll see how afterwards,
+# let's finish with the replacement.
+#
+# Of course the solution above is wrong: if there were other calls to
+# `OLD' with different values, we would smash them to the same value.
+# But it is quite easy to generalize the scheme above:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+#     define([OLD], [defn([OLD($@)])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., for each call to an obsolete macro, we build an array `call =>
+# value', and use a macro to dispatch these values.  This results in:
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0],
+#	  0)
+#
+# In French, we say `Youpi !', which you might roughly translate as
+# `Yippee!'.
+#
+#
+# # First step: computation
+# # -----------------------
+#
+# Let's study the anatomy of the file, and name its sections:
+#
+# prologue
+#     divert(-1)dnl
+#     changequote([, ])
+# values
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+# dispatcher
+#     define([OLD], [defn([OLD($@)])changequote()])
+# disabler
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+# input
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+#
+# # Computing the `values' section
+# # ..............................
+#
+# First we need to get the list of all the AU macro uses.  To this end,
+# first get the list of all the AU macros names by tracing `AU_DEFUN' in
+# the initialization of autoconf.  This list is computed in the file
+# `au.txt' below.
+#
+# Then use this list to trace all the AU macro uses in the input.  The
+# goal is obtain in the case of our example:
+#
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# This is the file `values.in' below.
+#
+# We want to evaluate this with only the builtins (in fact m4sugar), the
+# auto-quoting definitions of the new macros (`new.m4'), and the
+# definition of the old macros (`old.m4').  Computing these last two
+# files is easy: it's just a matter of using the right `--trace' option.
+#
+# So the content of `values.in' is:
+#
+#     include($autoconf_dir/m4sugar.m4)
+#     m4_include(new.m4)
+#     m4_include(old.m4)
+#     divert(0)dnl
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# We run m4 on it, which yields:
+#
+#     define([OLD([1],[2])],@<<@NEW([1, 2], [3])@>>@)
+#
+# Transform `@<<@' and `@>>@' into quotes and we get
+#
+#     define([OLD([1],[2])],[NEW([1, 2], [3])])
+#
+# This is `values.m4'.
+#
+#
+# # Computing the `dispatcher' section
+# # ..................................
+#
+# The `prologue', and the `disabler' are simple and need no commenting.
+#
+# To compute the `dispatcher' (`dispatch.m4'), again, it is a simple
+# matter of using the right `--trace'.
+#
+# Finally, the input is not exactly the input file, rather it is the
+# input file with the added `changequote'.  To this end, we build
+# `quote.sed'.
+#
+#
+# # Putting it all together
+# # .......................
+#
+# We build the file `input.m4' which contains:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     include(values.m4)
+#     include(dispatch.m4)
+#     undefine([dnl])
+#     undefine([eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# And we just run m4 on it.  Et voila`, Monsieur !  Mais oui, mais oui.
+#
+# Well, there are a few additional technicalities.  For instance, we
+# rely on `changequote', `ifelse' and `defn', but we don't want to
+# interpret the changequotes of the user, so we simply use another name:
+# `_au_changequote' etc.
+#
+#
+# # Failure of the fourth approach
+# # ------------------------------
+#
+# This approach is heavily based on traces, but then there is an obvious
+# problem: non-expanded code will never be seen.  In particular, the body
+# of a `define' definition is not seen, so on the input
+#
+#	  define([idem], [OLD(0, [$1])])
+#
+# autoupdate would never see the `OLD', and wouldn't have updated it.
+# Worse yet, if `idem(0)' was used later, then autoupdate sees that
+# `OLD' is used, computes the result for `OLD(0, 0)' and sets up a
+# dispatcher for `OLD'.  Since there was no computed value for `OLD(0,
+# [$1])', the dispatcher would have replaced with... nothing, leading
+# to
+#
+#	  define([idem], [])
+#
+# With some more thinking, you see that the two-step approach is wrong,
+# the namespace approach was much saner.
+#
+# But you learned a lot, in particular you realized that using traces
+# can make it possible to simulate namespaces!
+#
+#
+#
+# # The fifth implementation: m4 namespaces by files
+# # ================================================
+#
+# The fourth implementation demonstrated something unsurprising: you
+# cannot precompute, i.e., the namespace approach was the right one.
+# Still, we no longer want them, they're too expensive.  Let's have a
+# look at the way it worked.
+#
+# When updating
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# you evaluate `input.m4':
+#
+#     divert(-1)
+#     changequote([, ])
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#     ...
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# where `m4_disable' undefines the m4 and m4sugar, and disables the quotes
+# and comments:
+#
+#     define([m4_disable],
+#     [undefine([__file__])
+#     ...
+#     changecom(#)
+#     changequote()])
+#
+# `m4_enable' does the converse: reestablish quotes and comments
+# --easy--, reestablish m4sugar --easy: just load `m4sugar.m4' again-- and
+# reenable the builtins.  This latter task requires that you first save
+# the builtins.  And BTW, the definition above of `m4_disable' cannot
+# work: you undefined `changequote' before using it!  So you need to use
+# your private copies of the builtins.  Let's introduce three files for
+# this:
+#
+#  `m4save.m4'
+#    moves the m4 builtins into the `_au_' pseudo namespace,
+#  `unm4.m4'
+#    undefines the builtins,
+#  `m4.m4'
+#    restores them.
+#
+# So `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#
+#     # Import AU.
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)])
+#
+#     define([_au_disable],
+#     [# Disable m4sugar.
+#     # Disable the m4 builtins.
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Based on what we learned in the fourth implementation, we know that we
+# have to enable the quotes *before* any AU macro, and we know we need
+# to build autoquoting versions of the AC macros.  But the autoquoting
+# AC definitions must be disabled in the rest of the file, and enabled
+# inside AU macros.
+#
+# Using `autoconf --trace', it is easy to build the files
+#
+#   `ac.m4'
+#     defines the autoquoting AC fake macros,
+#   `disable.m4'
+#     undefines the m4sugar and AC autoquoting macros,
+#   `au.m4'
+#     defines the AU macros (such as `OLD' above).
+#
+# Now, `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#     # Import AU.
+#     include([au.m4])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)
+#     _au_include(ac.m4)])
+#
+#     define([_au_disable],
+#     [_au_include([disable.m4])
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     _au_changequote([, ])OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Finally, version V is ready.
+#
+# Well... almost.
+#
+# One slight problem remains: if an AU macro OUTER includes
+# an AU macro INNER, then _au_enable will be run when entering OUTER
+# and again when entering INNER (not good, but not too bad yet).  But when
+# getting out of INNER, _au_disable will disable everything while we
+# are still inside OUTER.  Badaboom.
+#
+# Therefore _au_enable and _au_disable have to be written to work in
+# pairs: each _au_enable pushdef's _au_enabled, and each _au_disable
+# popdef's _au_enabled.  And of course _au_enable and _au_disable are
+# only effective when _au_enabled is *not* defined.
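+#
+# In sketch form (the <<...>> placeholders stand for the real work):
+#
+#     define([_au_enable],
+#     [_au_ifdef([_au_enabled], [],
+#       [<<reenable quotes, comments, m4sugar, ac.m4...>>])_au_pushdef([_au_enabled])])
+#
+#     define([_au_disable],
+#     [_au_popdef([_au_enabled])_au_ifdef([_au_enabled], [],
+#       [<<disable everything again, as before>>])])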
+#
+# Finally, version V' is ready.  And there is much rejoicing.  (And I
+# have free time again.  I think.  Yeah, right.)
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/cp b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/cp
new file mode 100755
index 0000000..5f4768d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/cp
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/ifnames b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/ifnames
new file mode 100755
index 0000000..69bffb2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/ifnames
@@ -0,0 +1,153 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from ifnames.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# ifnames - print the identifiers used in C preprocessor conditionals
+
+# Copyright (C) 1994, 1995, 1999, 2000, 2001, 2002, 2003, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Reads from stdin if no files are given.
+# Writes to stdout.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>
+# and Paul Eggert <eggert@twinsun.com>.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::General;
+use Autom4te::XFile;
+use Autom4te::FileUtils;
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILE]...
+
+Scan all of the C source FILES (or the standard input, if none are
+given) and write to the standard output a sorted list of all the
+identifiers that appear in those files in `#if', `#elif', `#ifdef', or
+`#ifndef' directives.  Print each identifier on a line, followed by a
+space-separated list of the files in which that identifier occurs.
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "ifnames (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Paul Eggert.
+";
+
+
+# &parse_args ()
+# --------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ();
+}
+
+
+# %OCCURRENCE
+# -----------
+my %occurrence;
+
+
+# &scan_file ($FILE-NAME)
+# -----------------------
+sub scan_file ($)
+{
+  my ($file_name) = @_;
+  my $file = new Autom4te::XFile ("< " . open_quote ($file_name));
+  while ($_ = $file->getline)
+    {
+      # Continuation lines.
+      $_ .= $file->getline
+	while (s/\\$//);
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*(if|ifdef|ifndef|elif)\s+//)
+	{
+	  # Remove comments.  Not perfect, but close enough.
+	  s(/\*.*?\*/)();
+	  s(/\*.*)();
+	  s(//.*)();
+	  foreach my $word (split (/\W+/))
+	    {
+	      next
+		if $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+	      $occurrence{$word}{$file_name} = 1;
+	    }
+	}
+    }
+}
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+parse_args();
+foreach (@ARGV)
+  {
+    scan_file ($_);
+  }
+foreach (sort keys %occurrence)
+  {
+    print "$_ ", join (' ', sort keys %{$occurrence{$_}}), "\n";
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/libtool b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/libtool
new file mode 100755
index 0000000..13a7739
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/libtool
@@ -0,0 +1,10984 @@
+#! /bin/sh
+
+# libtool - Provide generalized library-building support services.
+# Generated automatically by config.status (libtool) 2.4.2
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+# The names of the tagged configurations supported by this script.
+available_tags="CXX F77 FC GO GCJ RC "
+
+# ### BEGIN LIBTOOL CONFIG
+
+# Which release of libtool.m4 was used?
+macro_version=2.4.2
+macro_revision=1.3337
+
+# Assembler program.
+AS="as"
+
+# DLL creation program.
+DLLTOOL="false"
+
+# Object dumper program.
+OBJDUMP="false"
+
+# Whether or not to build shared libraries.
+build_libtool_libs=yes
+
+# Whether or not to build static libraries.
+build_old_libs=yes
+
+# What type of objects to build.
+pic_mode=default
+
+# Whether or not to optimize for fast installation.
+fast_install=needless
+
+# Shell to use when invoking shell scripts.
+SHELL="/bin/sh"
+
+# An echo program that protects backslashes.
+ECHO="printf %s\\n"
+
+# The PATH separator for the build system.
+PATH_SEPARATOR=":"
+
+# The host system.
+host_alias=
+host=x86_64-apple-darwin13.4.0
+host_os=darwin13.4.0
+
+# The build system.
+build_alias=
+build=x86_64-apple-darwin13.4.0
+build_os=darwin13.4.0
+
+# A sed program that does not truncate output.
+SED="/usr/bin/sed"
+
+# Sed that helps us avoid accidentally triggering echo(1) options like -n.
+Xsed="$SED -e 1s/^X//"
+
+# A grep program that handles long lines.
+GREP="/usr/bin/grep"
+
+# An ERE matcher.
+EGREP="/usr/bin/grep -E"
+
+# A literal string matcher.
+FGREP="/usr/bin/grep -F"
+
+# A BSD- or MS-compatible name lister.
+NM="/usr/bin/nm"
+
+# Whether we need soft or hard links.
+LN_S="ln -s"
+
+# What is the maximum length of a command?
+max_cmd_len=196608
+
+# Object file suffix (normally "o").
+objext=o
+
+# Executable file suffix (normally "").
+exeext=
+
+# Whether the shell understands "unset".
+lt_unset=unset
+
+# Turn spaces into newlines.
+SP2NL="tr \\040 \\012"
+
+# Turn newlines into spaces.
+NL2SP="tr \\015\\012 \\040\\040"
+
+# Convert $build file names to $host format.
+to_host_file_cmd=func_convert_file_noop
+
+# Convert $build files to toolchain format.
+to_tool_file_cmd=func_convert_file_noop
+
+# Method to check whether dependent libraries are shared objects.
+deplibs_check_method="pass_all"
+
+# Command to use when deplibs_check_method = "file_magic".
+file_magic_cmd="\$MAGIC_CMD"
+
+# How to find potential files when deplibs_check_method = "file_magic".
+file_magic_glob=""
+
+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
+want_nocaseglob="no"
+
+# Command to associate shared and link libraries.
+sharedlib_from_linklib_cmd="printf %s\\n"
+
+# The archiver.
+AR="ar"
+
+# Flags to create an archive.
+AR_FLAGS="cru"
+
+# How to feed a file listing to the archiver.
+archiver_list_spec=""
+
+# A symbol stripping program.
+STRIP="strip"
+
+# Commands used to install an old-style archive.
+RANLIB="ranlib"
+old_postinstall_cmds="chmod 644 \$oldlib~\$RANLIB \$tool_oldlib"
+old_postuninstall_cmds=""
+
+# Whether to use a lock for old archive extraction.
+lock_old_archive_extraction=yes
+
+# A C compiler.
+LTCC="gcc"
+
+# LTCC compiler flags.
+LTCFLAGS="-g -O2"
+
+# Take the output of nm and produce a listing of raw symbols and C names.
+global_symbol_pipe="sed -n -e 's/^.*[	 ]\\([BCDEGRST][BCDEGRST]*\\)[	 ][	 ]*_\\([_A-Za-z][_A-Za-z0-9]*\\)\$/\\1 _\\2 \\2/p' | sed '/ __gnu_lto/d'"
+
+# Transform the output of nm in a proper C declaration.
+global_symbol_to_cdecl="sed -n -e 's/^T .* \\(.*\\)\$/extern int \\1();/p' -e 's/^[BCDEGRST]* .* \\(.*\\)\$/extern char \\1;/p'"
+
+# Transform the output of nm in a C name address pair.
+global_symbol_to_c_name_address="sed -n -e 's/^: \\([^ ]*\\)[ ]*\$/  {\\\"\\1\\\", (void *) 0},/p' -e 's/^[BCDEGRST]* \\([^ ]*\\) \\([^ ]*\\)\$/  {\"\\2\", (void *) \\&\\2},/p'"
+
+# Transform the output of nm in a C name address pair when lib prefix is needed.
+global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \\([^ ]*\\)[ ]*\$/  {\\\"\\1\\\", (void *) 0},/p' -e 's/^[BCDEGRST]* \\([^ ]*\\) \\(lib[^ ]*\\)\$/  {\"\\2\", (void *) \\&\\2},/p' -e 's/^[BCDEGRST]* \\([^ ]*\\) \\([^ ]*\\)\$/  {\"lib\\2\", (void *) \\&\\2},/p'"
+
+# Specify filename containing input files for $NM.
+nm_file_list_spec=""
+
+# The root in which to search for dependent libraries, and in which our libraries should be installed.
+lt_sysroot=
+
+# The name of the directory that contains temporary libtool files.
+objdir=.libs
+
+# Used to examine libraries when file_magic_cmd begins with "file".
+MAGIC_CMD=file
+
+# Must we lock files when doing compilation?
+need_locks="no"
+
+# Manifest tool.
+MANIFEST_TOOL=":"
+
+# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+DSYMUTIL="dsymutil"
+
+# Tool to change global to local symbols on Mac OS X.
+NMEDIT="nmedit"
+
+# Tool to manipulate fat objects and archives on Mac OS X.
+LIPO="lipo"
+
+# ldd/readelf like tool for Mach-O binaries on Mac OS X.
+OTOOL="otool"
+
+# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
+OTOOL64=":"
+
+# Old archive suffix (normally "a").
+libext=a
+
+# Shared library suffix (normally ".so").
+shrext_cmds="\`test .\$module = .yes && echo .so || echo .dylib\`"
+
+# The commands to extract the exported symbol list from a shared archive.
+extract_expsyms_cmds=""
+
+# Variables whose values should be saved in libtool wrapper scripts and
+# restored at link time.
+variables_saved_for_relink="PATH DYLD_LIBRARY_PATH  GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+
+# Do we need the "lib" prefix for modules?
+need_lib_prefix=no
+
+# Do we need a version for libraries?
+need_version=no
+
+# Library versioning type.
+version_type=darwin
+
+# Shared library runtime path variable.
+runpath_var=
+
+# Shared library path variable.
+shlibpath_var=DYLD_LIBRARY_PATH
+
+# Is shlibpath searched before the hard-coded library search path?
+shlibpath_overrides_runpath=yes
+
+# Format of library name prefix.
+libname_spec="lib\$name"
+
+# List of archive names.  First name is the real one, the rest are links.
+# The last name is the one that the linker finds with -lNAME
+library_names_spec="\${libname}\${release}\${major}\$shared_ext \${libname}\$shared_ext"
+
+# The coded name of the library, if different from the real name.
+soname_spec="\${libname}\${release}\${major}\$shared_ext"
+
+# Permission mode override for installation of shared libraries.
+install_override_mode=""
+
+# Command to use after installation of a shared archive.
+postinstall_cmds=""
+
+# Command to use after uninstallation of a shared archive.
+postuninstall_cmds=""
+
+# Commands used to finish a libtool library installation in a directory.
+finish_cmds=""
+
+# As "finish_cmds", except a single script fragment to be evaled but
+# not shown.
+finish_eval=""
+
+# Whether we should hardcode library paths into libraries.
+hardcode_into_libs=no
+
+# Compile-time system search path for libraries.
+sys_lib_search_path_spec="/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/clang/6.0  /usr/local/lib"
+
+# Run-time system search path for libraries.
+sys_lib_dlsearch_path_spec="/usr/local/lib /lib /usr/lib"
+
+# Whether dlopen is supported.
+dlopen_support=yes
+
+# Whether dlopen of programs is supported.
+dlopen_self=yes
+
+# Whether dlopen of statically linked programs is supported.
+dlopen_self_static=yes
+
+# Commands to strip libraries.
+old_striplib="strip -S"
+striplib="strip -x"
+
+
+# The linker used to build libraries.
+LD="/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LTCC \$LTCFLAGS -nostdlib \${wl}-r -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC="gcc"
+
+# Is the compiler the GNU compiler?
+with_gcc=yes
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=" -fno-builtin -fno-rtti -fno-exceptions"
+
+# Additional compiler flags for building library objects.
+pic_flag=" -fno-common -DPIC"
+
+# How to pass a linker flag through the compiler.
+wl="-Wl,"
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec="\`for conv in \$convenience\\\"\\\"; do test  -n \\\"\$conv\\\" && new_convenience=\\\"\$new_convenience \${wl}-force_load,\$conv\\\"; done; func_echo_all \\\"\$new_convenience\\\"\`"
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object="no"
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring \$single_module"
+archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring \$single_module \${wl}-exported_symbols_list,\$output_objdir/\${libname}-symbols.expsym"
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags"
+module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags \${wl}-exported_symbols_list,\$output_objdir/\${libname}-symbols.expsym"
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld="no"
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag="\${wl}-undefined \${wl}dynamic_lookup"
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=unsupported
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=yes
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=yes
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=immediate
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL CONFIG
+
+
+# libtool (GNU libtool) 2.4.2
+# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]... [MODE-ARG]...
+#
+# Provide generalized library-building support services.
+#
+#       --config             show all configuration variables
+#       --debug              enable verbose shell tracing
+#   -n, --dry-run            display commands without modifying any files
+#       --features           display basic configuration information and exit
+#       --mode=MODE          use operation mode MODE
+#       --preserve-dup-deps  don't remove duplicate dependency libraries
+#       --quiet, --silent    don't print informational messages
+#       --no-quiet, --no-silent
+#                            print informational messages (default)
+#       --no-warn            don't display warning messages
+#       --tag=TAG            use configuration variables from tag TAG
+#   -v, --verbose            print more informational messages than default
+#       --no-verbose         don't print the extra informational messages
+#       --version            print version information
+#   -h, --help, --help-all   print short, long, or detailed help message
+#
+# MODE must be one of the following:
+#
+#         clean              remove files from the build directory
+#         compile            compile a source file into a libtool object
+#         execute            automatically set library path, then run a program
+#         finish             complete the installation of libtool libraries
+#         install            install libraries or executables
+#         link               create a library or an executable
+#         uninstall          remove libraries from an installed directory
+#
+# MODE-ARGS vary depending on the MODE.  When passed as the first option,
+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#         host-triplet:	$host
+#         shell:		$SHELL
+#         compiler:		$LTCC
+#         compiler flags:		$LTCFLAGS
+#         linker:		$LD (gnu? $with_gnu_ld)
+#         $progname:	(GNU libtool) 2.4.2
+#         automake:	$automake_version
+#         autoconf:	$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.4.2
+TIMESTAMP=""
+package_revision=1.3337
+
+# Be Bourne compatible
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# NLS nuisances: We save the old values to restore during execute mode.
+lt_user_locale=
+lt_safe_locale=
+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+  eval "if test \"\${$lt_var+set}\" = set; then
+          save_$lt_var=\$$lt_var
+          $lt_var=C
+	  export $lt_var
+	  lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
+	  lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+	fi"
+done
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+$lt_unset CDPATH
+
+
+# Work around a backward-compatibility issue on IRIX 6.5.  On IRIX 6.4+, sh
+# is ksh, but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+} # Extended-shell func_dirname implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result="${1##*/}"
+} # Extended-shell func_basename implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+    func_basename_result="${1##*/}"
+} # Extended-shell func_dirname_and_basename implementation
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
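+# For example (illustrative):
+#   func_stripname 'lib' '.la' 'libfoo.la'
+#   # now $func_stripname_result is "foo"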
+func_stripname ()
+{
+    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+    # positional parameters, so assign one to ordinary parameter first.
+    func_stripname_result=${3}
+    func_stripname_result=${func_stripname_result#"${1}"}
+    func_stripname_result=${func_stripname_result%"${2}"}
+} # Extended-shell func_stripname implementation
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
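+# For example (illustrative):
+#   func_normal_abspath "/usr//local/./lib/../bin"
+#   # now $func_normal_abspath_result is "/usr/local/bin"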
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
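+# For example (illustrative):
+#   func_relative_path "/usr/local/lib" "/usr/local/bin"
+#   # now $func_relative_path_result is "../bin/"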
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\'-escape parameter expansions in the output of double_quote_subst
+# that were `\'-escaped in its input.  If an odd number of `\' preceded a
+# '$' in the input to double_quote_subst, that '$' was protected from
+# expansion.  Since each input `\' is now two `\'s, look for any number of
+# runs of four `\'s followed by two `\'s and then a '$': `\'-escape that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name, if it has been set.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost-first order.  Use a colon-delimited
+	# list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
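+# For example (illustrative):
+#   func_quote_for_eval 'it costs $5'
+#   # func_quote_for_eval_unquoted_result is: it costs \$5
+#   # func_quote_for_eval_result is:          "it costs \$5"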
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution, and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
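+# For example (illustrative):
+#   func_tr_sh "9foo-bar"
+#   # now $func_tr_sh_result is "_9foo_bar"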
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    func_split_short_opt_arg=${1#??}
+    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}
+} # Extended-shell func_split_short_opt implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    func_split_long_opt_name=${1%%=*}
+    func_split_long_opt_arg=${1#*=}
+} # Extended-shell func_split_long_opt implementation
+
+exit_cmd=:
+
+
+
+
+
+magic="%%%MAGIC variable%%%"
+magic_exe="%%%MAGIC EXE variable%%%"
+
+# Global variables.
+nonopt=
+preserve_args=
+lo2o="s/\\.lo\$/.${objext}/"
+o2lo="s/\\.${objext}\$/.lo/"
+extracted_archives=
+extracted_serial=0
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end.  This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+eval "${1}+=\${2}"
+} # Extended-shell func_append implementation
+
+# func_append_quoted var value
+# Quote VALUE and append to the end of shell variable VAR, separated
+# by a space.
+func_append_quoted ()
+{
+    func_quote_for_eval "${2}"
+    eval "${1}+=\\ \$func_quote_for_eval_result"
+} # Extended-shell func_append_quoted implementation
+
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+func_arith_result=$(( $* ))
+} # Extended-shell func_arith implementation
+
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+func_len_result=${#1}
+} # Extended-shell func_len implementation
+
+
+# func_lo2o object
+func_lo2o ()
+{
+    case ${1} in
+      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+      *)    func_lo2o_result=${1} ;;
+    esac
+} # Extended-shell func_lo2o implementation
+
+
+# func_xform libobj-or-source
+func_xform ()
+{
+func_xform_result=${1%.*}.lo
+} # Extended-shell func_xform implementation
+
+
+# func_fatal_configuration arg...
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+    func_error ${1+"$@"}
+    func_error "See the $PACKAGE documentation for more information."
+    func_fatal_error "Fatal configuration error."
+}
+
+
+# func_config
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+    re_begincf='^# ### BEGIN LIBTOOL'
+    re_endcf='^# ### END LIBTOOL'
+
+    # Default configuration.
+    $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+    # Now print the configurations for the tags.
+    for tagname in $taglist; do
+      $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+    done
+
+    exit $?
+}
+
+# func_features
+# Display the features supported by this script.
+func_features ()
+{
+    echo "host: $host"
+    if test "$build_libtool_libs" = yes; then
+      echo "enable shared libraries"
+    else
+      echo "disable shared libraries"
+    fi
+    if test "$build_old_libs" = yes; then
+      echo "enable static libraries"
+    else
+      echo "disable static libraries"
+    fi
+
+    exit $?
+}
+
+# func_enable_tag tagname
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag.  We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+  # Global variable:
+  tagname="$1"
+
+  re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+  re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+  sed_extractcf="/$re_begincf/,/$re_endcf/p"
+
+  # Validate tagname.
+  case $tagname in
+    *[!-_A-Za-z0-9,/]*)
+      func_fatal_error "invalid tag name: $tagname"
+      ;;
+  esac
+
+  # Don't test for the "default" C tag, as we know it's
+  # there but not specially marked.
+  case $tagname in
+    CC) ;;
+    *)
+      if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+	taglist="$taglist $tagname"
+
+	# Evaluate the configuration.  Be careful to quote the path
+	# and the sed script, to avoid splitting on whitespace, but
+	# also don't use non-portable quotes within backquotes within
+	# quotes; we have to do it in 2 steps:
+	extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+	eval "$extractedcf"
+      else
+	func_error "ignoring unknown tag $tagname"
+      fi
+      ;;
+  esac
+}
+
+# func_check_version_match
+# Ensure that the m4 macros and the libtool script come from the same
+# release of libtool.
+func_check_version_match ()
+{
+  if test "$package_revision" != "$macro_revision"; then
+    if test "$VERSION" != "$macro_version"; then
+      if test -z "$macro_version"; then
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      else
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      fi
+    else
+      cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+    fi
+
+    exit $EXIT_MISMATCH
+  fi
+}
+
+
+# Shorthand for --mode=foo, only valid as the first argument
+case $1 in
+clean|clea|cle|cl)
+  shift; set dummy --mode clean ${1+"$@"}; shift
+  ;;
+compile|compil|compi|comp|com|co|c)
+  shift; set dummy --mode compile ${1+"$@"}; shift
+  ;;
+execute|execut|execu|exec|exe|ex|e)
+  shift; set dummy --mode execute ${1+"$@"}; shift
+  ;;
+finish|finis|fini|fin|fi|f)
+  shift; set dummy --mode finish ${1+"$@"}; shift
+  ;;
+install|instal|insta|inst|ins|in|i)
+  shift; set dummy --mode install ${1+"$@"}; shift
+  ;;
+link|lin|li|l)
+  shift; set dummy --mode link ${1+"$@"}; shift
+  ;;
+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+  shift; set dummy --mode uninstall ${1+"$@"}; shift
+  ;;
+esac
+
+
+
+# Option defaults:
+opt_debug=:
+opt_dry_run=false
+opt_config=false
+opt_preserve_dup_deps=false
+opt_features=false
+opt_finish=false
+opt_help=false
+opt_help_all=false
+opt_silent=:
+opt_warning=:
+opt_verbose=:
+opt_silent=false
+opt_verbose=false
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+			;;
+      --config)
+			opt_config=:
+func_config
+			;;
+      --dlopen|-dlopen)
+			optarg="$1"
+			opt_dlopen="${opt_dlopen+$opt_dlopen
+}$optarg"
+			shift
+			;;
+      --preserve-dup-deps)
+			opt_preserve_dup_deps=:
+			;;
+      --features)
+			opt_features=:
+func_features
+			;;
+      --finish)
+			opt_finish=:
+set dummy --mode finish ${1+"$@"}; shift
+			;;
+      --help)
+			opt_help=:
+			;;
+      --help-all)
+			opt_help_all=:
+opt_help=': help-all'
+			;;
+      --mode)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_mode="$optarg"
+case $optarg in
+  # Valid mode arguments:
+  clean|compile|execute|finish|install|link|relink|uninstall) ;;
+
+  # Catch anything else as an error
+  *) func_error "invalid argument for $opt"
+     exit_cmd=exit
+     break
+     ;;
+esac
+			shift
+			;;
+      --no-silent|--no-quiet)
+			opt_silent=false
+preserve_args+=" $opt"
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+preserve_args+=" $opt"
+			;;
+      --no-verbose)
+			opt_verbose=false
+preserve_args+=" $opt"
+			;;
+      --silent|--quiet)
+			opt_silent=:
+preserve_args+=" $opt"
+        opt_verbose=false
+			;;
+      --verbose|-v)
+			opt_verbose=:
+preserve_args+=" $opt"
+opt_silent=false
+			;;
+      --tag)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_tag="$optarg"
+preserve_args+=" $opt $optarg"
+func_enable_tag "$optarg"
+			shift
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-n*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # save first non-option argument
+  if test "$#" -gt 0; then
+    nonopt="$opt"
+    shift
+  fi
+
+  # preserve --debug
+  test "$opt_debug" = : || preserve_args+=" --debug"
+
+  case $host in
+    *cygwin* | *mingw* | *pw32* | *cegcc*)
+      # don't eliminate duplications in $postdeps and $predeps
+      opt_duplicate_compiler_generated_deps=:
+      ;;
+    *)
+      opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+      ;;
+  esac
+
+  $opt_help || {
+    # Sanity checks first:
+    func_check_version_match
+
+    if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+      func_fatal_configuration "not configured to build any kind of library"
+    fi
+
+    # Darwin sucks
+    eval std_shrext=\"$shrext_cmds\"
+
+    # Only execute mode is allowed to have -dlopen flags.
+    if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
+      func_error "unrecognized option \`-dlopen'"
+      $ECHO "$help" 1>&2
+      exit $EXIT_FAILURE
+    fi
+
+    # Change the help message to a mode-specific one.
+    generic_help="$help"
+    help="Try \`$progname --help --mode=$opt_mode' for more information."
+  }
+
+
+  # Bail out if option parsing failed.
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+# func_lalib_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+    test -f "$1" &&
+      $SED -e 4q "$1" 2>/dev/null \
+        | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs.  To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway.  Works if `file' does not exist.
+func_lalib_unsafe_p ()
+{
+    lalib_p=no
+    if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+	for lalib_p_l in 1 2 3 4
+	do
+	    read lalib_p_line
+	    case "$lalib_p_line" in
+		\#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+	    esac
+	done
+	exec 0<&5 5<&-
+    fi
+    test "$lalib_p" = yes
+}
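+
+# Illustrative (comment only, not executed): both checks above look for the
+# "Generated by ... libtool" banner that generated `.la'/`.lo' files carry in
+# their first few lines; e.g. a typical archive begins
+#   # libfoo.la - a libtool library file
+#   # Generated by libtool (GNU libtool) ...
+# so `func_lalib_unsafe_p libfoo.la' succeeds (returns 0) for such a file.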
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+    func_lalib_p "$1"
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+    func_ltwrapper_exec_suffix=
+    case $1 in
+    *.exe) ;;
+    *) func_ltwrapper_exec_suffix=.exe ;;
+    esac
+    $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes file is an ltwrapper_executable
+# uses $file to determine the appropriate filename for a
+# temporary ltwrapper_script.
+func_ltwrapper_scriptname ()
+{
+    func_dirname_and_basename "$1" "" "."
+    func_stripname '' '.exe' "$func_basename_result"
+    func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+}
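+
+# Illustrative (comment only, not executed), assuming the usual objdir=.libs:
+#   func_ltwrapper_scriptname ./foo.exe
+#   # func_ltwrapper_scriptname_result -> ./.libs/foo_ltshwrapper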
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+    func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+    $opt_debug
+    save_ifs=$IFS; IFS='~'
+    for cmd in $1; do
+      IFS=$save_ifs
+      eval cmd=\"$cmd\"
+      func_show_eval "$cmd" "${2-:}"
+    done
+    IFS=$save_ifs
+}
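+
+# Illustrative (comment only, not executed): COMMANDS is a single string of
+# `~'-separated shell commands, as stored in variables like $postinstall_cmds,
+# so a call might look like
+#   func_execute_cmds 'chmod 644 $lib~$RANLIB $lib' 'exit $?'
+# Each command is expanded with eval and run through func_show_eval; the
+# second argument is evaluated only if a command fails.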
+
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)!  Also, sourcing
+# `FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+    $opt_debug
+    case $1 in
+    */* | *\\*)	. "$1" ;;
+    *)		. "./$1" ;;
+    esac
+}
+
+
+# func_resolve_sysroot PATH
+# Replace a leading = in PATH with a sysroot.  Store the result into
+# func_resolve_sysroot_result
+func_resolve_sysroot ()
+{
+  func_resolve_sysroot_result=$1
+  case $func_resolve_sysroot_result in
+  =*)
+    func_stripname '=' '' "$func_resolve_sysroot_result"
+    func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
+    ;;
+  esac
+}
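+
+# Illustrative (comment only, not executed), assuming lt_sysroot=/opt/sysroot:
+#   func_resolve_sysroot "=/usr/lib/libfoo.la"
+#   # func_resolve_sysroot_result -> /opt/sysroot/usr/lib/libfoo.la
+#   func_resolve_sysroot "/usr/lib/libbar.la"
+#   # func_resolve_sysroot_result -> /usr/lib/libbar.la  (no leading `=', unchanged)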
+
+# func_replace_sysroot PATH
+# If PATH begins with the sysroot, replace it with = and
+# store the result into func_replace_sysroot_result.
+func_replace_sysroot ()
+{
+  case "$lt_sysroot:$1" in
+  ?*:"$lt_sysroot"*)
+    func_stripname "$lt_sysroot" '' "$1"
+    func_replace_sysroot_result="=$func_stripname_result"
+    ;;
+  *)
+    # Including no sysroot.
+    func_replace_sysroot_result=$1
+    ;;
+  esac
+}
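+
+# Illustrative (comment only, not executed), the inverse transformation,
+# again assuming lt_sysroot=/opt/sysroot:
+#   func_replace_sysroot "/opt/sysroot/usr/lib"
+#   # func_replace_sysroot_result -> =/usr/lib
+#   func_replace_sysroot "/home/me/lib"
+#   # func_replace_sysroot_result -> /home/me/lib  (sysroot is not a prefix)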
+
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+    $opt_debug
+    if test -n "$available_tags" && test -z "$tagname"; then
+      CC_quoted=
+      for arg in $CC; do
+	func_append_quoted CC_quoted "$arg"
+      done
+      CC_expanded=`func_echo_all $CC`
+      CC_quoted_expanded=`func_echo_all $CC_quoted`
+      case $@ in
+      # Blanks in the command may have been stripped by the calling shell,
+      # but not from the CC environment variable when configure was run.
+      " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+      " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+      # Blanks at the start of $base_compile will cause this to fail
+      # if we don't check for them as well.
+      *)
+	for z in $available_tags; do
+	  if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+	    # Evaluate the configuration.
+	    eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+	    CC_quoted=
+	    for arg in $CC; do
+	      # Double-quote args containing other shell metacharacters.
+	      func_append_quoted CC_quoted "$arg"
+	    done
+	    CC_expanded=`func_echo_all $CC`
+	    CC_quoted_expanded=`func_echo_all $CC_quoted`
+	    case "$@ " in
+	    " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+	    " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+	      # The compiler in the base compile command matches
+	      # the one in the tagged configuration.
+	      # Assume this is the tagged configuration we want.
+	      tagname=$z
+	      break
+	      ;;
+	    esac
+	  fi
+	done
+	# If $tagname still isn't set, then no tagged configuration
+	# was found, so let the user know that the "--tag" command
+	# line option must be used.
+	if test -z "$tagname"; then
+	  func_echo "unable to infer tagged configuration"
+	  func_fatal_error "specify a tag with \`--tag'"
+#	else
+#	  func_verbose "using $tagname tagged configuration"
+	fi
+	;;
+      esac
+    fi
+}
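+
+# Illustrative (comment only, not executed): for a compile command such as
+#   libtool --mode=compile arm-linux-g++ -c foo.cpp
+# where arm-linux-g++ is not the default $CC, the tagged configurations
+# embedded at the end of this script (e.g. a CXX tag) are scanned and the
+# first one whose CC matches the command's compiler becomes $tagname.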
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+    write_libobj=${1}
+    if test "$build_libtool_libs" = yes; then
+      write_lobj=\'${2}\'
+    else
+      write_lobj=none
+    fi
+
+    if test "$build_old_libs" = yes; then
+      write_oldobj=\'${3}\'
+    else
+      write_oldobj=none
+    fi
+
+    $opt_dry_run || {
+      cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+      $MV "${write_libobj}T" "${write_libobj}"
+    }
+}
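+
+# Illustrative (comment only, not executed): with both shared and static
+# builds enabled and the usual objdir=.libs, a call like
+#   func_write_libtool_object foo.lo .libs/foo.o foo.o
+# writes a foo.lo file recording
+#   pic_object='.libs/foo.o'
+#   non_pic_object='foo.o'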
+
+
+##################################################
+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
+##################################################
+
+# func_convert_core_file_wine_to_w32 ARG
+# Helper function used by file name conversion functions when $build is *nix,
+# and $host is mingw, cygwin, or some other w32 environment. Relies on a
+# correctly configured wine environment being available, with the winepath program
+# in $build's $PATH.
+#
+# ARG is the $build file name to be converted to w32 format.
+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
+# be empty on error (or when ARG is empty)
+func_convert_core_file_wine_to_w32 ()
+{
+  $opt_debug
+  func_convert_core_file_wine_to_w32_result="$1"
+  if test -n "$1"; then
+    # Unfortunately, winepath does not exit with a non-zero error code, so we
+    # are forced to check the contents of stdout. On the other hand, if the
+    # command is not found, the shell will set an exit code of 127 and print
+    # *an error message* to stdout. So we must check for both error code of
+    # zero AND non-empty stdout, which explains the odd construction:
+    func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
+    if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
+      func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
+        $SED -e "$lt_sed_naive_backslashify"`
+    else
+      func_convert_core_file_wine_to_w32_result=
+    fi
+  fi
+}
+# end: func_convert_core_file_wine_to_w32
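+
+# Illustrative (comment only, not executed): e.g.
+#   func_convert_core_file_wine_to_w32 /home/me/foo.c
+# asks winepath for the w32 spelling of /home/me/foo.c (typically on wine's
+# Z: drive mapping of /) and leaves the backslashified result in
+# $func_convert_core_file_wine_to_w32_result, or an empty string on failure.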
+
+
+# func_convert_core_path_wine_to_w32 ARG
+# Helper function used by path conversion functions when $build is *nix, and
+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
+# configured wine environment being available, with the winepath program in $build's
+# $PATH. Assumes ARG has no leading or trailing path separator characters.
+#
+# ARG is path to be converted from $build format to win32.
+# Result is available in $func_convert_core_path_wine_to_w32_result.
+# Unconvertible file (directory) names in ARG are skipped; if no directory names
+# are convertible, then the result may be empty.
+func_convert_core_path_wine_to_w32 ()
+{
+  $opt_debug
+  # unfortunately, winepath doesn't convert paths, only file names
+  func_convert_core_path_wine_to_w32_result=""
+  if test -n "$1"; then
+    oldIFS=$IFS
+    IFS=:
+    for func_convert_core_path_wine_to_w32_f in $1; do
+      IFS=$oldIFS
+      func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
+      if test -n "$func_convert_core_file_wine_to_w32_result" ; then
+        if test -z "$func_convert_core_path_wine_to_w32_result"; then
+          func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
+        else
+          func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
+        fi
+      fi
+    done
+    IFS=$oldIFS
+  fi
+}
+# end: func_convert_core_path_wine_to_w32
+
+
+# func_cygpath ARGS...
+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used
+# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
+# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
+# (2), returns the Cygwin file name or path in func_cygpath_result (input
+# file name or path is assumed to be in w32 format, as previously converted
+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
+# or path in func_cygpath_result (input file name or path is assumed to be in
+# Cygwin format). Returns an empty string on error.
+#
+# ARGS are passed to cygpath, with the last one being the file name or path to
+# be converted.
+#
+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
+# environment variable; do not put it in $PATH.
+func_cygpath ()
+{
+  $opt_debug
+  if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
+    func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
+    if test "$?" -ne 0; then
+      # on failure, ensure result is empty
+      func_cygpath_result=
+    fi
+  else
+    func_cygpath_result=
+    func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
+  fi
+}
+#end: func_cygpath
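+
+# Illustrative (comment only, not executed), assuming LT_CYGPATH points at a
+# cygpath binary and Cygwin is rooted at c:\cygwin:
+#   func_cygpath -u 'c:\cygwin\tmp'
+#   # func_cygpath_result -> /tmp   (empty string on error)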
+
+
+# func_convert_core_msys_to_w32 ARG
+# Convert file name or path ARG from MSYS format to w32 format.  Return
+# result in func_convert_core_msys_to_w32_result.
+func_convert_core_msys_to_w32 ()
+{
+  $opt_debug
+  # awkward: cmd appends spaces to result
+  func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
+    $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+}
+#end: func_convert_core_msys_to_w32
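+
+# Illustrative (comment only, not executed): e.g.
+#   func_convert_core_msys_to_w32 /c/Users/me
+# relies on MSYS argument translation when invoking the native cmd.exe, so
+# the w32 spelling (roughly c:\Users\me, backslashified) ends up in
+# $func_convert_core_msys_to_w32_result.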
+
+
+# func_convert_file_check ARG1 ARG2
+# Verify that ARG1 (a file name in $build format) was converted to $host
+# format in ARG2. Otherwise, emit an error message, but continue (resetting
+# func_to_host_file_result to ARG1).
+func_convert_file_check ()
+{
+  $opt_debug
+  if test -z "$2" && test -n "$1" ; then
+    func_error "Could not determine host file name corresponding to"
+    func_error "  \`$1'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback:
+    func_to_host_file_result="$1"
+  fi
+}
+# end func_convert_file_check
+
+
+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
+# Verify that FROM_PATH (a path in $build format) was converted to $host
+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
+# func_to_host_file_result to a simplistic fallback value (see below).
+func_convert_path_check ()
+{
+  $opt_debug
+  if test -z "$4" && test -n "$3"; then
+    func_error "Could not determine the host path corresponding to"
+    func_error "  \`$3'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback.  This is a deliberately simplistic "conversion" and
+    # should not be "improved".  See libtool.info.
+    if test "x$1" != "x$2"; then
+      lt_replace_pathsep_chars="s|$1|$2|g"
+      func_to_host_path_result=`echo "$3" |
+        $SED -e "$lt_replace_pathsep_chars"`
+    else
+      func_to_host_path_result="$3"
+    fi
+  fi
+}
+# end func_convert_path_check
+
+
+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
+# and appending REPL if ORIG matches BACKPAT.
+func_convert_path_front_back_pathsep ()
+{
+  $opt_debug
+  case $4 in
+  $1 ) func_to_host_path_result="$3$func_to_host_path_result"
+    ;;
+  esac
+  case $4 in
+  $2 ) func_to_host_path_result+="$3"
+    ;;
+  esac
+}
+# end func_convert_path_front_back_pathsep
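+
+# Illustrative (comment only, not executed): after the body of a PATH-like
+# value has been converted, this restores a leading/trailing separator in
+# $host form; e.g. with func_to_host_path_result='C:\x;C:\y':
+#   func_convert_path_front_back_pathsep ":*" "*:" ";" ":/x:/y"
+#   # func_to_host_path_result -> ';C:\x;C:\y'   (leading `:' became `;')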
+
+
+##################################################
+# $build to $host FILE NAME CONVERSION FUNCTIONS #
+##################################################
+# invoked via `$to_host_file_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# Result will be available in $func_to_host_file_result.
+
+
+# func_to_host_file ARG
+# Converts the file name ARG from $build format to $host format. Return result
+# in func_to_host_file_result.
+func_to_host_file ()
+{
+  $opt_debug
+  $to_host_file_cmd "$1"
+}
+# end func_to_host_file
+
+
+# func_to_tool_file ARG LAZY
+# converts the file name ARG from $build format to toolchain format. Return
+# result in func_to_tool_file_result.  If the conversion in use is listed
+# in (the comma separated) LAZY, no conversion takes place.
+func_to_tool_file ()
+{
+  $opt_debug
+  case ,$2, in
+    *,"$to_tool_file_cmd",*)
+      func_to_tool_file_result=$1
+      ;;
+    *)
+      $to_tool_file_cmd "$1"
+      func_to_tool_file_result=$func_to_host_file_result
+      ;;
+  esac
+}
+# end func_to_tool_file
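+
+# Illustrative (comment only, not executed):
+#   func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+# copies ARG unchanged into func_to_tool_file_result when the configured
+# $to_tool_file_cmd is itself func_convert_file_msys_to_w32 (i.e. listed in
+# LAZY); otherwise $to_tool_file_cmd is invoked to convert it.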
+
+
+# func_convert_file_noop ARG
+# Copy ARG to func_to_host_file_result.
+func_convert_file_noop ()
+{
+  func_to_host_file_result="$1"
+}
+# end func_convert_file_noop
+
+
+# func_convert_file_msys_to_w32 ARG
+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_file_result.
+func_convert_file_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_msys_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_w32
+
+
+# func_convert_file_cygwin_to_w32 ARG
+# Convert file name ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_file_result.
+func_convert_file_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
+    # LT_CYGPATH in this case.
+    func_to_host_file_result=`cygpath -m "$1"`
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_cygwin_to_w32
+
+
+# func_convert_file_nix_to_w32 ARG
+# Convert file name ARG from *nix to w32 format.  Requires a wine environment
+# and a working winepath. Returns result in func_to_host_file_result.
+func_convert_file_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_file_wine_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_w32
+
+
+# func_convert_file_msys_to_cygwin ARG
+# Convert file name ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_file_result.
+func_convert_file_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_msys_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_cygwin
+
+
+# func_convert_file_nix_to_cygwin ARG
+# Convert file name ARG from *nix to Cygwin format.  Requires Cygwin installed
+# in a wine environment, working winepath, and LT_CYGPATH set.  Returns result
+# in func_to_host_file_result.
+func_convert_file_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
+    func_convert_core_file_wine_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_cygwin
+
+
+#############################################
+# $build to $host PATH CONVERSION FUNCTIONS #
+#############################################
+# invoked via `$to_host_path_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# The result will be available in $func_to_host_path_result.
+#
+# Path separators are also converted from $build format to $host format.  If
+# ARG begins or ends with a path separator character, it is preserved (but
+# converted to $host format) on output.
+#
+# All path conversion functions are named using the following convention:
+#   file name conversion function    : func_convert_file_X_to_Y ()
+#   path conversion function         : func_convert_path_X_to_Y ()
+# where, for any given $build/$host combination the 'X_to_Y' value is the
+# same.  If conversion functions are added for new $build/$host combinations,
+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
+# will break.
+
+
+# func_init_to_host_path_cmd
+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
+# appropriate value, based on the value of $to_host_file_cmd.
+to_host_path_cmd=
+func_init_to_host_path_cmd ()
+{
+  $opt_debug
+  if test -z "$to_host_path_cmd"; then
+    func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
+    to_host_path_cmd="func_convert_path_${func_stripname_result}"
+  fi
+}
+
+
+# func_to_host_path ARG
+# Converts the path ARG from $build format to $host format. Return result
+# in func_to_host_path_result.
+func_to_host_path ()
+{
+  $opt_debug
+  func_init_to_host_path_cmd
+  $to_host_path_cmd "$1"
+}
+# end func_to_host_path
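+
+# Illustrative (comment only, not executed): with
+# to_host_file_cmd=func_convert_file_cygwin_to_w32, func_init_to_host_path_cmd
+# derives to_host_path_cmd=func_convert_path_cygwin_to_w32, so
+#   func_to_host_path "/usr/lib:/usr/local/lib"
+# leaves a `;'-separated Windows-style path list in func_to_host_path_result.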
+
+
+# func_convert_path_noop ARG
+# Copy ARG to func_to_host_path_result.
+func_convert_path_noop ()
+{
+  func_to_host_path_result="$1"
+}
+# end func_convert_path_noop
+
+
+# func_convert_path_msys_to_w32 ARG
+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_path_result.
+func_convert_path_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from ARG.  MSYS
+    # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
+    # and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_msys_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_msys_to_w32
+
+
+# func_convert_path_cygwin_to_w32 ARG
+# Convert path ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_path_result.
+func_convert_path_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_cygwin_to_w32
+
+
+# func_convert_path_nix_to_w32 ARG
+# Convert path ARG from *nix to w32 format.  Requires a wine environment and
+# a working winepath.  Returns result in func_to_host_path_result.
+func_convert_path_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_nix_to_w32
+
+
+# func_convert_path_msys_to_cygwin ARG
+# Convert path ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_path_result.
+func_convert_path_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_msys_to_cygwin
+
+
+# func_convert_path_nix_to_cygwin ARG
+# Convert path ARG from *nix to Cygwin format.  Requires Cygwin installed in
+# a wine environment, working winepath, and LT_CYGPATH set.  Returns result in
+# func_to_host_path_result.
+func_convert_path_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from
+    # ARG. msys behavior is inconsistent here, cygpath turns them
+    # into '.;' and ';.', and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_nix_to_cygwin
+
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+    $opt_debug
+    # Get the compilation command and the source file.
+    base_compile=
+    srcfile="$nonopt"  #  always keep a non-empty value in "srcfile"
+    suppress_opt=yes
+    suppress_output=
+    arg_mode=normal
+    libobj=
+    later=
+    pie_flag=
+
+    for arg
+    do
+      case $arg_mode in
+      arg  )
+	# do not "continue".  Instead, add this to base_compile
+	lastarg="$arg"
+	arg_mode=normal
+	;;
+
+      target )
+	libobj="$arg"
+	arg_mode=normal
+	continue
+	;;
+
+      normal )
+	# Accept any command-line options.
+	case $arg in
+	-o)
+	  test -n "$libobj" && \
+	    func_fatal_error "you cannot specify \`-o' more than once"
+	  arg_mode=target
+	  continue
+	  ;;
+
+	-pie | -fpie | -fPIE)
+          pie_flag+=" $arg"
+	  continue
+	  ;;
+
+	-shared | -static | -prefer-pic | -prefer-non-pic)
+	  later+=" $arg"
+	  continue
+	  ;;
+
+	-no-suppress)
+	  suppress_opt=no
+	  continue
+	  ;;
+
+	-Xcompiler)
+	  arg_mode=arg  #  the next one goes into the "base_compile" arg list
+	  continue      #  The current "srcfile" will either be retained or
+	  ;;            #  replaced later.  I would guess that would be a bug.
+
+	-Wc,*)
+	  func_stripname '-Wc,' '' "$arg"
+	  args=$func_stripname_result
+	  lastarg=
+	  save_ifs="$IFS"; IFS=','
+	  for arg in $args; do
+	    IFS="$save_ifs"
+	    func_append_quoted lastarg "$arg"
+	  done
+	  IFS="$save_ifs"
+	  func_stripname ' ' '' "$lastarg"
+	  lastarg=$func_stripname_result
+
+	  # Add the arguments to base_compile.
+	  base_compile+=" $lastarg"
+	  continue
+	  ;;
+
+	*)
+	  # Accept the current argument as the source file.
+	  # The previous "srcfile" becomes the current argument.
+	  #
+	  lastarg="$srcfile"
+	  srcfile="$arg"
+	  ;;
+	esac  #  case $arg
+	;;
+      esac    #  case $arg_mode
+
+      # Aesthetically quote the previous argument.
+      func_append_quoted base_compile "$lastarg"
+    done # for arg
+
+    case $arg_mode in
+    arg)
+      func_fatal_error "you must specify an argument for -Xcompiler"
+      ;;
+    target)
+      func_fatal_error "you must specify a target with \`-o'"
+      ;;
+    *)
+      # Get the name of the library object.
+      test -z "$libobj" && {
+	func_basename "$srcfile"
+	libobj="$func_basename_result"
+      }
+      ;;
+    esac
+
+    # Recognize several different file suffixes.
+    # If the user specifies -o file.o, it is replaced with file.lo
+    case $libobj in
+    *.[cCFSifmso] | \
+    *.ada | *.adb | *.ads | *.asm | \
+    *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+    *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
+      func_xform "$libobj"
+      libobj=$func_xform_result
+      ;;
+    esac
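+
+    # Illustrative: either `-o foo.o' or a bare source file foo.c ends up as
+    # libobj=foo.lo here; func_xform swaps the recognized suffix for `.lo'.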
+
+    case $libobj in
+    *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+    *)
+      func_fatal_error "cannot determine name of library object from \`$libobj'"
+      ;;
+    esac
+
+    func_infer_tag $base_compile
+
+    for arg in $later; do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	continue
+	;;
+
+      -static)
+	build_libtool_libs=no
+	build_old_libs=yes
+	continue
+	;;
+
+      -prefer-pic)
+	pic_mode=yes
+	continue
+	;;
+
+      -prefer-non-pic)
+	pic_mode=no
+	continue
+	;;
+      esac
+    done
+
+    func_quote_for_eval "$libobj"
+    test "X$libobj" != "X$func_quote_for_eval_result" \
+      && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"'	 &()|`$[]' \
+      && func_warning "libobj name \`$libobj' may not contain shell special characters."
+    func_dirname_and_basename "$obj" "/" ""
+    objname="$func_basename_result"
+    xdir="$func_dirname_result"
+    lobj=${xdir}$objdir/$objname
+
+    test -z "$base_compile" && \
+      func_fatal_help "you must specify a compilation command"
+
+    # Delete any leftover library objects.
+    if test "$build_old_libs" = yes; then
+      removelist="$obj $lobj $libobj ${libobj}T"
+    else
+      removelist="$lobj $libobj ${libobj}T"
+    fi
+
+    # On Cygwin there's no "real" PIC flag so we must build both object types
+    case $host_os in
+    cygwin* | mingw* | pw32* | os2* | cegcc*)
+      pic_mode=default
+      ;;
+    esac
+    if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+      # non-PIC code in shared libraries is not supported
+      pic_mode=default
+    fi
+
+    # Calculate the filename of the output object if compiler does
+    # not support -o with -c
+    if test "$compiler_c_o" = no; then
+      output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext}
+      lockfile="$output_obj.lock"
+    else
+      output_obj=
+      need_locks=no
+      lockfile=
+    fi
+
+    # Lock this critical section if it is needed
+    # We use this script file to make the link, it avoids creating a new file
+    if test "$need_locks" = yes; then
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    elif test "$need_locks" = warn; then
+      if test -f "$lockfile"; then
+	$ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+      removelist+=" $output_obj"
+      $ECHO "$srcfile" > "$lockfile"
+    fi
+
+    $opt_dry_run || $RM $removelist
+    removelist+=" $lockfile"
+    trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+    func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+    srcfile=$func_to_tool_file_result
+    func_quote_for_eval "$srcfile"
+    qsrcfile=$func_quote_for_eval_result
+
+    # Only build a PIC object if we are building libtool libraries.
+    if test "$build_libtool_libs" = yes; then
+      # Without this assignment, base_compile gets emptied.
+      fbsd_hideous_sh_bug=$base_compile
+
+      if test "$pic_mode" != no; then
+	command="$base_compile $qsrcfile $pic_flag"
+      else
+	# Don't build PIC code
+	command="$base_compile $qsrcfile"
+      fi
+
+      func_mkdir_p "$xdir$objdir"
+
+      if test -z "$output_obj"; then
+	# Place PIC objects in $objdir
+	command+=" -o $lobj"
+      fi
+
+      func_show_eval_locale "$command"	\
+          'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed, then go on to compile the next one
+      if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+	func_show_eval '$MV "$output_obj" "$lobj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+
+      # Allow error messages only from the first compilation.
+      if test "$suppress_opt" = yes; then
+	suppress_output=' >/dev/null 2>&1'
+      fi
+    fi
+
+    # Only build a position-dependent object if we build old libraries.
+    if test "$build_old_libs" = yes; then
+      if test "$pic_mode" != yes; then
+	# Don't build PIC code
+	command="$base_compile $qsrcfile$pie_flag"
+      else
+	command="$base_compile $qsrcfile $pic_flag"
+      fi
+      if test "$compiler_c_o" = yes; then
+	command+=" -o $obj"
+      fi
+
+      # Suppress compiler output if we already did a PIC compilation.
+      command+="$suppress_output"
+      func_show_eval_locale "$command" \
+        '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) in this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed
+      if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+	func_show_eval '$MV "$output_obj" "$obj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+    fi
+
+    $opt_dry_run || {
+      func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+      # Unlock the critical section if it was locked
+      if test "$need_locks" != no; then
+	removelist=$lockfile
+        $RM "$lockfile"
+      fi
+    }
+
+    exit $EXIT_SUCCESS
+}
+
+$opt_help || {
+  test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
+}
+
+func_mode_help ()
+{
+    # We need to display help for each of the modes.
+    case $opt_mode in
+      "")
+        # Generic help is extracted from the usage comments
+        # at the start of this file.
+        func_help
+        ;;
+
+      clean)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      compile)
+      $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+  -o OUTPUT-FILE    set the output file name to OUTPUT-FILE
+  -no-suppress      do not suppress compiler output for multiple passes
+  -prefer-pic       try to build PIC objects only
+  -prefer-non-pic   try to build non-PIC objects only
+  -shared           do not build a \`.o' file suitable for static linking
+  -static           only build a \`.o' file suitable for static linking
+  -Wc,FLAG          pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a \`standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix \`.c' with the
+library object suffix, \`.lo'."
+        ;;
+
+      execute)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+  -dlopen FILE      add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to \`-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+        ;;
+
+      finish)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges.  Use
+the \`--dry-run' option if you just want to see what would be executed."
+        ;;
+
+      install)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command.  The first component should be
+either the \`install' or \`cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+  -inst-prefix-dir PREFIX-DIR  Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+        ;;
+
+      link)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+  -all-static       do not do any dynamic linking at all
+  -avoid-version    do not add a version suffix if possible
+  -bindir BINDIR    specify path to binaries directory (for systems where
+                    libraries must be found in the PATH setting at runtime)
+  -dlopen FILE      \`-dlpreopen' FILE if it cannot be dlopened at runtime
+  -dlpreopen FILE   link in FILE and add its symbols to lt_preloaded_symbols
+  -export-dynamic   allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+  -export-symbols SYMFILE
+                    try to export only the symbols listed in SYMFILE
+  -export-symbols-regex REGEX
+                    try to export only the symbols matching REGEX
+  -LLIBDIR          search LIBDIR for required installed libraries
+  -lNAME            OUTPUT-FILE requires the installed library libNAME
+  -module           build a library that can be dlopened
+  -no-fast-install  disable the fast-install mode
+  -no-install       link a not-installable executable
+  -no-undefined     declare that a library does not refer to external symbols
+  -o OUTPUT-FILE    create OUTPUT-FILE from the specified objects
+  -objectlist FILE  Use a list of object files found in FILE to specify objects
+  -precious-files-regex REGEX
+                    don't remove output files matching REGEX
+  -release RELEASE  specify package release information
+  -rpath LIBDIR     the created library will eventually be installed in LIBDIR
+  -R[ ]LIBDIR       add LIBDIR to the runtime path of programs and libraries
+  -shared           only do dynamic linking of libtool libraries
+  -shrext SUFFIX    override the standard shared library file extension
+  -static           do not do any dynamic linking of uninstalled libtool libraries
+  -static-libtool-libs
+                    do not do any dynamic linking of libtool libraries
+  -version-info CURRENT[:REVISION[:AGE]]
+                    specify library version info [each variable defaults to 0]
+  -weak LIBNAME     declare that the target provides the LIBNAME interface
+  -Wc,FLAG
+  -Xcompiler FLAG   pass compiler-specific FLAG directly to the compiler
+  -Wl,FLAG
+  -Xlinker FLAG     pass linker-specific FLAG directly to the linker
+  -XCClinker FLAG   pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with \`-') are ignored.
+
+Every other argument is treated as a filename.  Files ending in \`.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
+only library objects (\`.lo' files) may be specified, and \`-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
+using \`ar' and \`ranlib', or on Windows using \`lib'.
+
+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
+is created, otherwise an executable program is created."
+        ;;
+
+      uninstall)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      *)
+        func_fatal_help "invalid operation mode \`$opt_mode'"
+        ;;
+    esac
+
+    echo
+    $ECHO "Try \`$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+  if test "$opt_help" = :; then
+    func_mode_help
+  else
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	func_mode_help
+      done
+    } | sed -n '1p; 2,$s/^Usage:/  or: /p'
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	echo
+	func_mode_help
+      done
+    } |
+    sed '1d
+      /^When reporting/,/^Report/{
+	H
+	d
+      }
+      $x
+      /information about other modes/d
+      /more detailed .*MODE/d
+      s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+  fi
+  exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+    $opt_debug
+    # The first argument is the command name.
+    cmd="$nonopt"
+    test -z "$cmd" && \
+      func_fatal_help "you must specify a COMMAND"
+
+    # Handle -dlopen flags immediately.
+    for file in $opt_dlopen; do
+      test -f "$file" \
+	|| func_fatal_help "\`$file' is not a file"
+
+      dir=
+      case $file in
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$lib' is not a valid libtool archive"
+
+	# Read the libtool library.
+	dlname=
+	library_names=
+	func_source "$file"
+
+	# Skip this library if it cannot be dlopened.
+	if test -z "$dlname"; then
+	  # Warn if it was a shared library.
+	  test -n "$library_names" && \
+	    func_warning "\`$file' was not linked with \`-export-dynamic'"
+	  continue
+	fi
+
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+
+	if test -f "$dir/$objdir/$dlname"; then
+	  dir+="/$objdir"
+	else
+	  if test ! -f "$dir/$dlname"; then
+	    func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+	  fi
+	fi
+	;;
+
+      *.lo)
+	# Just add the directory containing the .lo file.
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+	;;
+
+      *)
+	func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
+	continue
+	;;
+      esac
+
+      # Get the absolute pathname.
+      absdir=`cd "$dir" && pwd`
+      test -n "$absdir" && dir="$absdir"
+
+      # Now add the directory to shlibpath_var.
+      if eval "test -z \"\$$shlibpath_var\""; then
+	eval "$shlibpath_var=\"\$dir\""
+      else
+	eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+      fi
+    done
+
+    # This variable tells wrapper scripts just to set shlibpath_var
+    # rather than running their programs.
+    libtool_execute_magic="$magic"
+
+    # Check if any of the arguments is a wrapper script.
+    args=
+    for file
+    do
+      case $file in
+      -* | *.la | *.lo ) ;;
+      *)
+	# Do a test to see if this is really a libtool program.
+	if func_ltwrapper_script_p "$file"; then
+	  func_source "$file"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	elif func_ltwrapper_executable_p "$file"; then
+	  func_ltwrapper_scriptname "$file"
+	  func_source "$func_ltwrapper_scriptname_result"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	fi
+	;;
+      esac
+      # Quote arguments (to preserve shell metacharacters).
+      func_append_quoted args "$file"
+    done
+
+    if test "X$opt_dry_run" = Xfalse; then
+      if test -n "$shlibpath_var"; then
+	# Export the shlibpath_var.
+	eval "export $shlibpath_var"
+      fi
+
+      # Restore saved environment variables
+      for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+      do
+	eval "if test \"\${save_$lt_var+set}\" = set; then
+                $lt_var=\$save_$lt_var; export $lt_var
+	      else
+		$lt_unset $lt_var
+	      fi"
+      done
+
+      # Now prepare to actually exec the command.
+      exec_cmd="\$cmd$args"
+    else
+      # Display what would be done.
+      if test -n "$shlibpath_var"; then
+	eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+	echo "export $shlibpath_var"
+      fi
+      $ECHO "$cmd$args"
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
+
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+    $opt_debug
+    libs=
+    libdirs=
+    admincmds=
+
+    for opt in "$nonopt" ${1+"$@"}
+    do
+      if test -d "$opt"; then
+	libdirs+=" $opt"
+
+      elif test -f "$opt"; then
+	if func_lalib_unsafe_p "$opt"; then
+	  libs+=" $opt"
+	else
+	  func_warning "\`$opt' is not a valid libtool archive"
+	fi
+
+      else
+	func_fatal_error "invalid argument \`$opt'"
+      fi
+    done
+
+    if test -n "$libs"; then
+      if test -n "$lt_sysroot"; then
+        sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
+        sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
+      else
+        sysroot_cmd=
+      fi
+
+      # Remove sysroot references
+      if $opt_dry_run; then
+        for lib in $libs; do
+          echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
+        done
+      else
+        tmpdir=`func_mktempdir`
+        for lib in $libs; do
+	  sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
+	    > $tmpdir/tmp-la
+	  mv -f $tmpdir/tmp-la $lib
+	done
+        ${RM}r "$tmpdir"
+      fi
+    fi
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      for libdir in $libdirs; do
+	if test -n "$finish_cmds"; then
+	  # Do each command in the finish commands.
+	  func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+	fi
+	if test -n "$finish_eval"; then
+	  # Do the single finish_eval.
+	  eval cmds=\"$finish_eval\"
+	  $opt_dry_run || eval "$cmds" || admincmds+="
+       $cmds"
+	fi
+      done
+    fi
+
+    # Exit here if they wanted silent mode.
+    $opt_silent && exit $EXIT_SUCCESS
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      echo "----------------------------------------------------------------------"
+      echo "Libraries have been installed in:"
+      for libdir in $libdirs; do
+	$ECHO "   $libdir"
+      done
+      echo
+      echo "If you ever happen to want to link against installed libraries"
+      echo "in a given directory, LIBDIR, you must either use libtool, and"
+      echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+      echo "flag during linking and do at least one of the following:"
+      if test -n "$shlibpath_var"; then
+	echo "   - add LIBDIR to the \`$shlibpath_var' environment variable"
+	echo "     during execution"
+      fi
+      if test -n "$runpath_var"; then
+	echo "   - add LIBDIR to the \`$runpath_var' environment variable"
+	echo "     during linking"
+      fi
+      if test -n "$hardcode_libdir_flag_spec"; then
+	libdir=LIBDIR
+	eval flag=\"$hardcode_libdir_flag_spec\"
+
+	$ECHO "   - use the \`$flag' linker flag"
+      fi
+      if test -n "$admincmds"; then
+	$ECHO "   - have your system administrator run these commands:$admincmds"
+      fi
+      if test -f /etc/ld.so.conf; then
+	echo "   - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+      fi
+      echo
+
+      echo "See any operating system documentation about shared libraries for"
+      case $host in
+	solaris2.[6789]|solaris2.1[0-9])
+	  echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+	  echo "pages."
+	  ;;
+	*)
+	  echo "more information, such as the ld(1) and ld.so(8) manual pages."
+	  ;;
+      esac
+      echo "----------------------------------------------------------------------"
+    fi
+    exit $EXIT_SUCCESS
+}
+
+test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
+
+
+# func_mode_install arg...
+func_mode_install ()
+{
+    $opt_debug
+    # There may be an optional sh(1) argument at the beginning of
+    # install_prog (especially on Windows NT).
+    if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
+       # Allow the use of GNU shtool's install command.
+       case $nonopt in *shtool*) :;; *) false;; esac; then
+      # Aesthetically quote it.
+      func_quote_for_eval "$nonopt"
+      install_prog="$func_quote_for_eval_result "
+      arg=$1
+      shift
+    else
+      install_prog=
+      arg=$nonopt
+    fi
+
+    # The real first argument should be the name of the installation program.
+    # Aesthetically quote it.
+    func_quote_for_eval "$arg"
+    install_prog+="$func_quote_for_eval_result"
+    install_shared_prog=$install_prog
+    case " $install_prog " in
+      *[\\\ /]cp\ *) install_cp=: ;;
+      *) install_cp=false ;;
+    esac
+
+    # We need to accept at least all the BSD install flags.
+    dest=
+    files=
+    opts=
+    prev=
+    install_type=
+    isdir=no
+    stripme=
+    no_mode=:
+    for arg
+    do
+      arg2=
+      if test -n "$dest"; then
+	files+=" $dest"
+	dest=$arg
+	continue
+      fi
+
+      case $arg in
+      -d) isdir=yes ;;
+      -f)
+	if $install_cp; then :; else
+	  prev=$arg
+	fi
+	;;
+      -g | -m | -o)
+	prev=$arg
+	;;
+      -s)
+	stripme=" -s"
+	continue
+	;;
+      -*)
+	;;
+      *)
+	# If the previous option needed an argument, then skip it.
+	if test -n "$prev"; then
+	  if test "x$prev" = x-m && test -n "$install_override_mode"; then
+	    arg2=$install_override_mode
+	    no_mode=false
+	  fi
+	  prev=
+	else
+	  dest=$arg
+	  continue
+	fi
+	;;
+      esac
+
+      # Aesthetically quote the argument.
+      func_quote_for_eval "$arg"
+      install_prog+=" $func_quote_for_eval_result"
+      if test -n "$arg2"; then
+	func_quote_for_eval "$arg2"
+      fi
+      install_shared_prog+=" $func_quote_for_eval_result"
+    done
+
+    test -z "$install_prog" && \
+      func_fatal_help "you must specify an install program"
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prev' option requires an argument"
+
+    if test -n "$install_override_mode" && $no_mode; then
+      if $install_cp; then :; else
+	func_quote_for_eval "$install_override_mode"
+	install_shared_prog+=" -m $func_quote_for_eval_result"
+      fi
+    fi
+
+    if test -z "$files"; then
+      if test -z "$dest"; then
+	func_fatal_help "no file or destination specified"
+      else
+	func_fatal_help "you must specify a destination"
+      fi
+    fi
+
+    # Strip any trailing slash from the destination.
+    func_stripname '' '/' "$dest"
+    dest=$func_stripname_result
+
+    # Check to see that the destination is a directory.
+    test -d "$dest" && isdir=yes
+    if test "$isdir" = yes; then
+      destdir="$dest"
+      destname=
+    else
+      func_dirname_and_basename "$dest" "" "."
+      destdir="$func_dirname_result"
+      destname="$func_basename_result"
+
+      # Not a directory, so check to see that there is only one file specified.
+      set dummy $files; shift
+      test "$#" -gt 1 && \
+	func_fatal_help "\`$dest' is not a directory"
+    fi
+    case $destdir in
+    [\\/]* | [A-Za-z]:[\\/]*) ;;
+    *)
+      for file in $files; do
+	case $file in
+	*.lo) ;;
+	*)
+	  func_fatal_help "\`$destdir' must be an absolute directory name"
+	  ;;
+	esac
+      done
+      ;;
+    esac
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    staticlibs=
+    future_libdirs=
+    current_libdirs=
+    for file in $files; do
+
+      # Do each installation.
+      case $file in
+      *.$libext)
+	# Do the static libraries later.
+	staticlibs+=" $file"
+	;;
+
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$file' is not a valid libtool archive"
+
+	library_names=
+	old_library=
+	relink_command=
+	func_source "$file"
+
+	# Add the libdir to current_libdirs if it is the destination.
+	if test "X$destdir" = "X$libdir"; then
+	  case "$current_libdirs " in
+	  *" $libdir "*) ;;
+	  *) current_libdirs+=" $libdir" ;;
+	  esac
+	else
+	  # Note the libdir as a future libdir.
+	  case "$future_libdirs " in
+	  *" $libdir "*) ;;
+	  *) future_libdirs+=" $libdir" ;;
+	  esac
+	fi
+
+	func_dirname "$file" "/" ""
+	dir="$func_dirname_result"
+	dir+="$objdir"
+
+	if test -n "$relink_command"; then
+	  # Determine the prefix the user has applied to our future dir.
+	  inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+	  # Don't allow the user to place us outside of our expected
+	  # location because this prevents finding dependent libraries that
+	  # are installed to the same prefix.
+	  # At present, this check doesn't affect Windows DLLs that
+	  # are installed into $libdir/../bin (currently, that works fine)
+	  # but it's something to keep an eye on.
+	  test "$inst_prefix_dir" = "$destdir" && \
+	    func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
+
+	  if test -n "$inst_prefix_dir"; then
+	    # Stick the inst_prefix_dir data into the link command.
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+	  else
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+	  fi
+
+	  func_warning "relinking \`$file'"
+	  func_show_eval "$relink_command" \
+	    'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
+	fi
+
+	# See the names of the shared library.
+	set dummy $library_names; shift
+	if test -n "$1"; then
+	  realname="$1"
+	  shift
+
+	  srcname="$realname"
+	  test -n "$relink_command" && srcname="$realname"T
+
+	  # Install the shared library and build the symlinks.
+	  func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+	      'exit $?'
+	  tstripme="$stripme"
+	  case $host_os in
+	  cygwin* | mingw* | pw32* | cegcc*)
+	    case $realname in
+	    *.dll.a)
+	      tstripme=""
+	      ;;
+	    esac
+	    ;;
+	  esac
+	  if test -n "$tstripme" && test -n "$striplib"; then
+	    func_show_eval "$striplib $destdir/$realname" 'exit $?'
+	  fi
+
+	  if test "$#" -gt 0; then
+	    # Delete the old symlinks, and create new ones.
+	    # Try `ln -sf' first, because the `ln' binary might depend on
+	    # the symlink we replace!  Solaris /bin/ln does not understand -f,
+	    # so we also need to try rm && ln -s.
+	    for linkname
+	    do
+	      test "$linkname" != "$realname" \
+		&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+	    done
+	  fi
+
+	  # Do each command in the postinstall commands.
+	  lib="$destdir/$realname"
+	  func_execute_cmds "$postinstall_cmds" 'exit $?'
+	fi
+
+	# Install the pseudo-library for information purposes.
+	func_basename "$file"
+	name="$func_basename_result"
+	instname="$dir/$name"i
+	func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+	# Maybe install the static library, too.
+	test -n "$old_library" && staticlibs+=" $dir/$old_library"
+	;;
+
+      *.lo)
+	# Install (i.e. copy) a libtool object.
+
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# Deduce the name of the destination old-style object file.
+	case $destfile in
+	*.lo)
+	  func_lo2o "$destfile"
+	  staticdest=$func_lo2o_result
+	  ;;
+	*.$objext)
+	  staticdest="$destfile"
+	  destfile=
+	  ;;
+	*)
+	  func_fatal_help "cannot copy a libtool object to \`$destfile'"
+	  ;;
+	esac
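+	# Added note (illustrative, not part of the upstream libtool sources):
+	# func_lo2o maps a libtool object name such as foo.lo to its old-style
+	# counterpart foo.o, which is what the build_old_libs branch below
+	# installs alongside the .lo.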
+
+	# Install the libtool object if requested.
+	test -n "$destfile" && \
+	  func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+	# Install the old object if enabled.
+	if test "$build_old_libs" = yes; then
+	  # Deduce the name of the old-style object file.
+	  func_lo2o "$file"
+	  staticobj=$func_lo2o_result
+	  func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+	fi
+	exit $EXIT_SUCCESS
+	;;
+
+      *)
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# If the file is missing, and there is a .exe on the end, strip it
+	# because it is most likely a libtool script we actually want to
+	# install
+	stripped_ext=""
+	case $file in
+	  *.exe)
+	    if test ! -f "$file"; then
+	      func_stripname '' '.exe' "$file"
+	      file=$func_stripname_result
+	      stripped_ext=".exe"
+	    fi
+	    ;;
+	esac
+
+	# Do a test to see if this is really a libtool program.
+	case $host in
+	*cygwin* | *mingw*)
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      wrapper=$func_ltwrapper_scriptname_result
+	    else
+	      func_stripname '' '.exe' "$file"
+	      wrapper=$func_stripname_result
+	    fi
+	    ;;
+	*)
+	    wrapper=$file
+	    ;;
+	esac
+	if func_ltwrapper_script_p "$wrapper"; then
+	  notinst_deplibs=
+	  relink_command=
+
+	  func_source "$wrapper"
+
+	  # Check the variables that should have been set.
+	  test -z "$generated_by_libtool_version" && \
+	    func_fatal_error "invalid libtool wrapper script \`$wrapper'"
+
+	  finalize=yes
+	  for lib in $notinst_deplibs; do
+	    # Check to see that each library is installed.
+	    libdir=
+	    if test -f "$lib"; then
+	      func_source "$lib"
+	    fi
+	    libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test
+	    if test -n "$libdir" && test ! -f "$libfile"; then
+	      func_warning "\`$lib' has not been installed in \`$libdir'"
+	      finalize=no
+	    fi
+	  done
+
+	  relink_command=
+	  func_source "$wrapper"
+
+	  outputname=
+	  if test "$fast_install" = no && test -n "$relink_command"; then
+	    $opt_dry_run || {
+	      if test "$finalize" = yes; then
+	        tmpdir=`func_mktempdir`
+		func_basename "$file$stripped_ext"
+		file="$func_basename_result"
+	        outputname="$tmpdir/$file"
+	        # Replace the output file specification.
+	        relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+	        $opt_silent || {
+	          func_quote_for_expand "$relink_command"
+		  eval "func_echo $func_quote_for_expand_result"
+	        }
+	        if eval "$relink_command"; then :
+	          else
+		  func_error "error: relink \`$file' with the above command before installing it"
+		  $opt_dry_run || ${RM}r "$tmpdir"
+		  continue
+	        fi
+	        file="$outputname"
+	      else
+	        func_warning "cannot relink \`$file'"
+	      fi
+	    }
+	  else
+	    # Install the binary that we compiled earlier.
+	    file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+	  fi
+	fi
+
+	# remove .exe since cygwin /usr/bin/install will append another
+	# one anyway
+	case $install_prog,$host in
+	*/usr/bin/install*,*cygwin*)
+	  case $file:$destfile in
+	  *.exe:*.exe)
+	    # this is ok
+	    ;;
+	  *.exe:*)
+	    destfile=$destfile.exe
+	    ;;
+	  *:*.exe)
+	    func_stripname '' '.exe' "$destfile"
+	    destfile=$func_stripname_result
+	    ;;
+	  esac
+	  ;;
+	esac
+	func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+	$opt_dry_run || if test -n "$outputname"; then
+	  ${RM}r "$tmpdir"
+	fi
+	;;
+      esac
+    done
+
+    for file in $staticlibs; do
+      func_basename "$file"
+      name="$func_basename_result"
+
+      # Set up the ranlib parameters.
+      oldlib="$destdir/$name"
+      func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+      tool_oldlib=$func_to_tool_file_result
+
+      func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+      if test -n "$stripme" && test -n "$old_striplib"; then
+	func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
+      fi
+
+      # Do each command in the postinstall commands.
+      func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+    done
+
+    test -n "$future_libdirs" && \
+      func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+    if test -n "$current_libdirs"; then
+      # Maybe just do a dry run.
+      $opt_dry_run && current_libdirs=" -n$current_libdirs"
+      exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+    else
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = install && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+    $opt_debug
+    my_outputname="$1"
+    my_originator="$2"
+    my_pic_p="${3-no}"
+    my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
+    my_dlsyms=
+
+    if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+      if test -n "$NM" && test -n "$global_symbol_pipe"; then
+	my_dlsyms="${my_outputname}S.c"
+      else
+	func_error "not configured to extract global symbols from dlpreopened files"
+      fi
+    fi
+
+    if test -n "$my_dlsyms"; then
+      case $my_dlsyms in
+      "") ;;
+      *.c)
+	# Discover the nlist of each of the dlfiles.
+	nlist="$output_objdir/${my_outputname}.nm"
+
+	func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+	# Parse the name list into a source file.
+	func_verbose "creating $output_objdir/$my_dlsyms"
+
+	$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* External symbol declarations for the compiler. */\
+"
+
+	if test "$dlself" = yes; then
+	  func_verbose "generating symbol list for \`$output'"
+
+	  $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
+
+	  # Add our own program objects to the symbol list.
+	  progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	  for progfile in $progfiles; do
+	    func_to_tool_file "$progfile" func_convert_file_msys_to_w32
+	    func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
+	    $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+	  done
+
+	  if test -n "$exclude_expsyms"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  if test -n "$export_symbols_regex"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  # Prepare the list of exported symbols
+	  if test -z "$export_symbols"; then
+	    export_symbols="$output_objdir/$outputname.exp"
+	    $opt_dry_run || {
+	      $RM $export_symbols
+	      eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+	      case $host in
+	      *cygwin* | *mingw* | *cegcc* )
+                eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+                eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+	        ;;
+	      esac
+	    }
+	  else
+	    $opt_dry_run || {
+	      eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+	      eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	      case $host in
+	        *cygwin* | *mingw* | *cegcc* )
+	          eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+	          eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+	          ;;
+	      esac
+	    }
+	  fi
+	fi
+
+	for dlprefile in $dlprefiles; do
+	  func_verbose "extracting global C symbols from \`$dlprefile'"
+	  func_basename "$dlprefile"
+	  name="$func_basename_result"
+          case $host in
+	    *cygwin* | *mingw* | *cegcc* )
+	      # if an import library, we need to obtain dlname
+	      if func_win32_import_lib_p "$dlprefile"; then
+	        func_tr_sh "$dlprefile"
+	        eval "curr_lafile=\$libfile_$func_tr_sh_result"
+	        dlprefile_dlbasename=""
+	        if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
+	          # Use subshell, to avoid clobbering current variable values
+	          dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
+	          if test -n "$dlprefile_dlname" ; then
+	            func_basename "$dlprefile_dlname"
+	            dlprefile_dlbasename="$func_basename_result"
+	          else
+	            # no lafile. user explicitly requested -dlpreopen <import library>.
+	            $sharedlib_from_linklib_cmd "$dlprefile"
+	            dlprefile_dlbasename=$sharedlib_from_linklib_result
+	          fi
+	        fi
+	        $opt_dry_run || {
+	          if test -n "$dlprefile_dlbasename" ; then
+	            eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
+	          else
+	            func_warning "Could not compute DLL name from $name"
+	            eval '$ECHO ": $name " >> "$nlist"'
+	          fi
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
+	            $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
+	        }
+	      else # not an import lib
+	        $opt_dry_run || {
+	          eval '$ECHO ": $name " >> "$nlist"'
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	        }
+	      fi
+	    ;;
+	    *)
+	      $opt_dry_run || {
+	        eval '$ECHO ": $name " >> "$nlist"'
+	        func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	        eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	      }
+	    ;;
+          esac
+	done
+
+	$opt_dry_run || {
+	  # Make sure we have at least an empty file.
+	  test -f "$nlist" || : > "$nlist"
+
+	  if test -n "$exclude_expsyms"; then
+	    $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+	    $MV "$nlist"T "$nlist"
+	  fi
+
+	  # Try sorting and uniquifying the output.
+	  if $GREP -v "^: " < "$nlist" |
+	      if sort -k 3 </dev/null >/dev/null 2>&1; then
+		sort -k 3
+	      else
+		sort +2
+	      fi |
+	      uniq > "$nlist"S; then
+	    :
+	  else
+	    $GREP -v "^: " < "$nlist" > "$nlist"S
+	  fi
+
+	  if test -f "$nlist"S; then
+	    eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+	  else
+	    echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+	  fi
+
+	  echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols.  */
+typedef struct {
+  const char *name;
+  void *address;
+} lt_dlsymlist;
+extern LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];
+LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{\
+  { \"$my_originator\", (void *) 0 },"
+
+	  case $need_lib_prefix in
+	  no)
+	    eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  *)
+	    eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  esac
+	  echo >> "$output_objdir/$my_dlsyms" "\
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+	} # !$opt_dry_run
+
+	pic_flag_for_symtable=
+	case "$compile_command " in
+	*" -static "*) ;;
+	*)
+	  case $host in
+	  # compiling the symbol table file with pic_flag works around
+	  # a FreeBSD bug that causes programs to crash when -lm is
+	  # linked before any other PIC object.  But we must not use
+	  # pic_flag when linking with -static.  The problem exists in
+	  # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+	  *-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+	    pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+	  *-*-hpux*)
+	    pic_flag_for_symtable=" $pic_flag"  ;;
+	  *)
+	    if test "X$my_pic_p" != Xno; then
+	      pic_flag_for_symtable=" $pic_flag"
+	    fi
+	    ;;
+	  esac
+	  ;;
+	esac
+	symtab_cflags=
+	for arg in $LTCFLAGS; do
+	  case $arg in
+	  -pie | -fpie | -fPIE) ;;
+	  *) symtab_cflags+=" $arg" ;;
+	  esac
+	done
+
+	# Now compile the dynamic symbol file.
+	func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+	# Clean up the generated files.
+	func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
+
+	# Transform the symbol file into the correct name.
+	symfileobj="$output_objdir/${my_outputname}S.$objext"
+	case $host in
+	*cygwin* | *mingw* | *cegcc* )
+	  if test -f "$output_objdir/$my_outputname.def"; then
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	  else
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  fi
+	  ;;
+	*)
+	  compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  ;;
+	esac
+	;;
+      *)
+	func_fatal_error "unknown suffix for \`$my_dlsyms'"
+	;;
+      esac
+    else
+      # We keep going just in case the user didn't refer to
+      # lt_preloaded_symbols.  The linker will fail if global_symbol_pipe
+      # really was required.
+
+      # Nullify the symbol file.
+      compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+      finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+    fi
+}
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, it also deals with 64-bit binaries.
+func_win32_libid ()
+{
+  $opt_debug
+  win32_libid_type="unknown"
+  win32_fileres=`file -L $1 2>/dev/null`
+  case $win32_fileres in
+  *ar\ archive\ import\ library*) # definitely import
+    win32_libid_type="x86 archive import"
+    ;;
+  *ar\ archive*) # could be an import, or static
+    # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+    if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+       $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+      func_to_tool_file "$1" func_convert_file_msys_to_w32
+      win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+	$SED -n -e '
+	    1,100{
+		/ I /{
+		    s,.*,import,
+		    p
+		    q
+		}
+	    }'`
+      case $win32_nmres in
+      import*)  win32_libid_type="x86 archive import";;
+      *)        win32_libid_type="x86 archive static";;
+      esac
+    fi
+    ;;
+  *DLL*)
+    win32_libid_type="x86 DLL"
+    ;;
+  *executable*) # but shell scripts are "executable" too...
+    case $win32_fileres in
+    *MS\ Windows\ PE\ Intel*)
+      win32_libid_type="x86 DLL"
+      ;;
+    esac
+    ;;
+  esac
+  $ECHO "$win32_libid_type"
+}
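+# Illustrative usage (not part of the upstream libtool sources; the file
+# names are hypothetical):
+#   func_win32_libid /usr/lib/libfoo.dll.a   # echoes e.g. "x86 archive import"
+#   func_win32_libid cygfoo-1.dll            # echoes "x86 DLL"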
+
+# func_cygming_dll_for_implib ARG
+#
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib ()
+{
+  $opt_debug
+  sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
+}
+
+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
+#
+# This is the core of a fallback implementation of a
+# platform-specific function to extract the name of the
+# DLL associated with the specified import library LIBNAME.
+#
+# SECTION_NAME is either .idata$6 or .idata$7, depending
+# on the platform and compiler that created the implib.
+#
+# Echoes the name of the DLL associated with the
+# specified import library.
+func_cygming_dll_for_implib_fallback_core ()
+{
+  $opt_debug
+  match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
+  $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
+    $SED '/^Contents of section '"$match_literal"':/{
+      # Place marker at beginning of archive member dllname section
+      s/.*/====MARK====/
+      p
+      d
+    }
+    # These lines can sometimes be longer than 43 characters, but
+    # are always uninteresting
+    /:[	 ]*file format pe[i]\{,1\}-/d
+    /^In archive [^:]*:/d
+    # Ensure marker is printed
+    /^====MARK====/p
+    # Remove all lines with less than 43 characters
+    /^.\{43\}/!d
+    # From remaining lines, remove first 43 characters
+    s/^.\{43\}//' |
+    $SED -n '
+      # Join marker and all lines until next marker into a single line
+      /^====MARK====/ b para
+      H
+      $ b para
+      b
+      :para
+      x
+      s/\n//g
+      # Remove the marker
+      s/^====MARK====//
+      # Remove trailing dots and whitespace
+      s/[\. \t]*$//
+      # Print
+      /./p' |
+    # we now have a list, one entry per line, of the stringified
+    # contents of the appropriate section of all members of the
+    # archive which possess that section. Heuristic: eliminate
+    # all those which have a first or second character that is
+    # a '.' (that is, objdump's representation of an unprintable
+    # character.) This should work for all archives with less than
+    # 0x302f exports -- but will fail for DLLs whose name actually
+    # begins with a literal '.' or a single character followed by
+    # a '.'.
+    #
+    # Of those that remain, print the first one.
+    $SED -e '/^\./d;/^.\./d;q'
+}
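+# Illustrative invocation (not part of the upstream libtool sources; the
+# library name is hypothetical):
+#   func_cygming_dll_for_implib_fallback_core '.idata$7' libfoo.dll.a
+# echoes the DLL name recorded in a binutils-style import library, e.g.
+# cygfoo-1.dll; MS-generated import libraries keep the name in .idata$6
+# instead, as the fallback caller below selects.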
+
+# func_cygming_gnu_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is a GNU/binutils-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_gnu_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
+  test -n "$func_cygming_gnu_implib_tmp"
+}
+
+# func_cygming_ms_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is an MS-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_ms_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
+  test -n "$func_cygming_ms_implib_tmp"
+}
+
+# func_cygming_dll_for_implib_fallback ARG
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+#
+# This fallback implementation is for use when $DLLTOOL
+# does not support the --identify-strict option.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib_fallback ()
+{
+  $opt_debug
+  if func_cygming_gnu_implib_p "$1" ; then
+    # binutils import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
+  elif func_cygming_ms_implib_p "$1" ; then
+    # ms-generated import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
+  else
+    # unknown
+    sharedlib_from_linklib_result=""
+  fi
+}
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+    $opt_debug
+    f_ex_an_ar_dir="$1"; shift
+    f_ex_an_ar_oldlib="$1"
+    if test "$lock_old_archive_extraction" = yes; then
+      lockfile=$f_ex_an_ar_oldlib.lock
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    fi
+    func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+		   'stat=$?; rm -f "$lockfile"; exit $stat'
+    if test "$lock_old_archive_extraction" = yes; then
+      $opt_dry_run || rm -f "$lockfile"
+    fi
+    if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+     :
+    else
+      func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+    fi
+}
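+# Illustrative usage (not part of the upstream libtool sources; the archive
+# path is hypothetical):
+#   func_extract_an_archive "$my_xdir" /path/to/libfoo.a
+# unpacks every member of libfoo.a into $my_xdir with "$AR x" and aborts if
+# two members share the same object name.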
+
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+    $opt_debug
+    my_gentop="$1"; shift
+    my_oldlibs=${1+"$@"}
+    my_oldobjs=""
+    my_xlib=""
+    my_xabs=""
+    my_xdir=""
+
+    for my_xlib in $my_oldlibs; do
+      # Extract the objects.
+      case $my_xlib in
+	[\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
+	*) my_xabs=`pwd`"/$my_xlib" ;;
+      esac
+      func_basename "$my_xlib"
+      my_xlib="$func_basename_result"
+      my_xlib_u=$my_xlib
+      while :; do
+        case " $extracted_archives " in
+	*" $my_xlib_u "*)
+	  func_arith $extracted_serial + 1
+	  extracted_serial=$func_arith_result
+	  my_xlib_u=lt$extracted_serial-$my_xlib ;;
+	*) break ;;
+	esac
+      done
+      extracted_archives="$extracted_archives $my_xlib_u"
+      my_xdir="$my_gentop/$my_xlib_u"
+
+      func_mkdir_p "$my_xdir"
+
+      case $host in
+      *-darwin*)
+	func_verbose "Extracting $my_xabs"
+	# Do not bother doing anything if just a dry run
+	$opt_dry_run || {
+	  darwin_orig_dir=`pwd`
+	  cd $my_xdir || exit $?
+	  darwin_archive=$my_xabs
+	  darwin_curdir=`pwd`
+	  darwin_base_archive=`basename "$darwin_archive"`
+	  darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+	  if test -n "$darwin_arches"; then
+	    darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+	    darwin_arch=
+	    func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+	    for darwin_arch in  $darwin_arches ; do
+	      func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
+	      cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      func_extract_an_archive "`pwd`" "${darwin_base_archive}"
+	      cd "$darwin_curdir"
+	      $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
+	    done # $darwin_arches
+            ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+	    darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
+	    darwin_file=
+	    darwin_files=
+	    for darwin_file in $darwin_filelist; do
+	      darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+	      $LIPO -create -output "$darwin_file" $darwin_files
+	    done # $darwin_filelist
+	    $RM -rf unfat-$$
+	    cd "$darwin_orig_dir"
+	  else
+	    cd $darwin_orig_dir
+	    func_extract_an_archive "$my_xdir" "$my_xabs"
+	  fi # $darwin_arches
+	} # !$opt_dry_run
+	;;
+      *)
+        func_extract_an_archive "$my_xdir" "$my_xabs"
+	;;
+      esac
+      my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+    done
+
+    func_extract_archives_result="$my_oldobjs"
+}
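+# Illustrative usage (not part of the upstream libtool sources; the archive
+# names are hypothetical):
+#   func_extract_archives "$gentop" libfoo.a libbar.a
+#   oldobjs="$oldobjs $func_extract_archives_result"
+# Each archive is unpacked into its own subdirectory of the first argument,
+# and the collected object list is returned in $func_extract_archives_result.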
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw
+# wrapper executable.  Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take.  If 'yes', then the emitted script
+# will assume that the directory in which it is stored is
+# the $objdir directory.  This is a cygwin/mingw-specific
+# behavior.
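+#
+# Added note (not upstream text): func_emit_cwrapperexe_src below pipes
+# "func_emit_wrapper yes" through sed so the script can be embedded, line by
+# line, inside the cygwin/mingw wrapper executable's lt_dump_script().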
+func_emit_wrapper ()
+{
+	func_emit_wrapper_arg1=${1-no}
+
+	$ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+  # install mode needs the following variables:
+  generated_by_libtool_version='$macro_version'
+  notinst_deplibs='$notinst_deplibs'
+else
+  # When we are sourced in execute mode, \$file and \$ECHO are already set.
+  if test \"\$libtool_execute_magic\" != \"$magic\"; then
+    file=\"\$0\""
+
+    qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
+    $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+    ECHO=\"$qECHO\"
+  fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) are identical between the wrapper
+# /script/ and the wrapper /executable/ which is used only on
+# windows platforms, and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options which match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
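+#
+# Added note, not from upstream libtool: for example, running the wrapped
+# program as  ./foo --lt-debug arg1  makes the wrapper print a debug banner
+# and the computed argv to stderr before the real program is executed.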
+lt_option_debug=
+func_parse_lt_options ()
+{
+  lt_script_arg0=\$0
+  shift
+  for lt_opt
+  do
+    case \"\$lt_opt\" in
+    --lt-debug) lt_option_debug=1 ;;
+    --lt-dump-script)
+        lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+        test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+        lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+        cat \"\$lt_dump_D/\$lt_dump_F\"
+        exit 0
+      ;;
+    --lt-*)
+        \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+        exit 1
+      ;;
+    esac
+  done
+
+  # Print the debug banner immediately:
+  if test -n \"\$lt_option_debug\"; then
+    echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
+  fi
+}
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+  lt_dump_args_N=1;
+  for lt_arg
+  do
+    \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
+    lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+  done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+  case $host in
+  # Backslashes separate directories on plain windows
+  *-*-mingw | *-*-os2* | *-cegcc*)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+    ;;
+
+  *)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+    ;;
+  esac
+  $ECHO "\
+      \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+      exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+  case \" \$* \" in
+  *\\ --lt-*)
+    for lt_wr_arg
+    do
+      case \$lt_wr_arg in
+      --lt-*) ;;
+      *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+      esac
+      shift
+    done ;;
+  esac
+  func_exec_program_core \${1+\"\$@\"}
+}
+
+  # Parse options
+  func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+  # Find the directory that this script lives in.
+  thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+  test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+  # Follow symbolic links until we get to the real thisdir.
+  file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+  while test -n \"\$file\"; do
+    destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+    # If there was a directory component, then change thisdir.
+    if test \"x\$destdir\" != \"x\$file\"; then
+      case \"\$destdir\" in
+      [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+      *) thisdir=\"\$thisdir/\$destdir\" ;;
+      esac
+    fi
+
+    file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+    file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+  done
+
+  # Usually 'no', except on cygwin/mingw when embedded into
+  # the cwrapper.
+  WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+  if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+    # special case for '.'
+    if test \"\$thisdir\" = \".\"; then
+      thisdir=\`pwd\`
+    fi
+    # remove .libs from thisdir
+    case \"\$thisdir\" in
+    *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+    $objdir )   thisdir=. ;;
+    esac
+  fi
+
+  # Try to get the absolute directory name.
+  absdir=\`cd \"\$thisdir\" && pwd\`
+  test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+	if test "$fast_install" = yes; then
+	  $ECHO "\
+  program=lt-'$outputname'$exeext
+  progdir=\"\$thisdir/$objdir\"
+
+  if test ! -f \"\$progdir/\$program\" ||
+     { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
+       test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+    file=\"\$\$-\$program\"
+
+    if test ! -d \"\$progdir\"; then
+      $MKDIR \"\$progdir\"
+    else
+      $RM \"\$progdir/\$file\"
+    fi"
+
+	  $ECHO "\
+
+    # relink executable if necessary
+    if test -n \"\$relink_command\"; then
+      if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+      else
+	$ECHO \"\$relink_command_output\" >&2
+	$RM \"\$progdir/\$file\"
+	exit 1
+      fi
+    fi
+
+    $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+    { $RM \"\$progdir/\$program\";
+      $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+    $RM \"\$progdir/\$file\"
+  fi"
+	else
+	  $ECHO "\
+  program='$outputname'
+  progdir=\"\$thisdir/$objdir\"
+"
+	fi
+
+	$ECHO "\
+
+  if test -f \"\$progdir/\$program\"; then"
+
+	# Fix the DLL searchpath if we need to.  Do this before prepending
+	# to shlibpath, because on Windows, both are PATH and uninstalled
+	# libraries must come first.
+	if test -n "$dllsearchpath"; then
+	  $ECHO "\
+    # Add the dll search path components to the executable PATH
+    PATH=$dllsearchpath:\$PATH
+"
+	fi
+
+	# Export our shlibpath_var if we have one.
+	if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+	  $ECHO "\
+    # Add our own library path to $shlibpath_var
+    $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+    # Some systems cannot cope with colon-terminated $shlibpath_var
+    # The second colon is a workaround for a bug in BeOS R4 sed
+    $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+    export $shlibpath_var
+"
+	fi
+
+	$ECHO "\
+    if test \"\$libtool_execute_magic\" != \"$magic\"; then
+      # Run the actual program with our arguments.
+      func_exec_program \${1+\"\$@\"}
+    fi
+  else
+    # The program doesn't exist.
+    \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
+    \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+    \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+    exit 1
+  fi
+fi\
+"
+}
+
+
+# func_emit_cwrapperexe_src
+# emit the source code for a wrapper executable on stdout
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+	cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+   Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+
+   The $output program cannot be directly executed until all the libtool
+   libraries that it depends on are installed.
+
+   This wrapper executable should never be moved out of the build directory.
+   If it is, it will not operate correctly.
+*/
+EOF
+	    cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _MSC_VER
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+#  include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+/* declarations of non-ANSI functions */
+#if defined(__MINGW32__)
+# ifdef __STRICT_ANSI__
+int _putenv (const char *);
+# endif
+#elif defined(__CYGWIN__)
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined (other platforms) ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined(_MSC_VER)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+# define S_IXUSR _S_IEXEC
+# ifndef _INTPTR_T_DEFINED
+#  define _INTPTR_T_DEFINED
+#  define intptr_t int
+# endif
+#elif defined(__MINGW32__)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+#elif defined(__CYGWIN__)
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined (other platforms) ... */
+#endif
+
+#if defined(PATH_MAX)
+# define LT_PATHMAX PATH_MAX
+#elif defined(MAXPATHLEN)
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
+  defined (__OS2__)
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+#  define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+#  define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+	(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num)      ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+  if (stale) { free ((void *) stale); stale = 0; } \
+} while (0)
+
+#if defined(LT_DEBUGWRAPPER)
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+	    cat <<EOF
+volatile const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+	    if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+              func_to_host_path "$temp_rpath"
+	      cat <<EOF
+const char * LIB_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * LIB_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test -n "$dllsearchpath"; then
+              func_to_host_path "$dllsearchpath:"
+	      cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test "$fast_install" = yes; then
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+	    else
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+	    fi
+
+
+	    cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX         "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt       = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt            = LTWRAPPER_OPTION_PREFIX "debug";
+
+int
+main (int argc, char *argv[])
+{
+  char **newargz;
+  int  newargc;
+  char *tmp_pathspec;
+  char *actual_cwrapper_path;
+  char *actual_cwrapper_name;
+  char *target_name;
+  char *lt_argv_zero;
+  intptr_t rval = 127;
+
+  int i;
+
+  program_name = (char *) xstrdup (base_name (argv[0]));
+  newargz = XMALLOC (char *, argc + 1);
+
+  /* very simple arg parsing; don't want to rely on getopt
+   * also, copy all non-cwrapper options to newargz, except
+   * argv[0], which is handled differently
+   */
+  newargc=0;
+  for (i = 1; i < argc; i++)
+    {
+      if (strcmp (argv[i], dumpscript_opt) == 0)
+	{
+EOF
+	    case "$host" in
+	      *mingw* | *cygwin* )
+		# make stdout use "unix" line endings
+		echo "          setmode(1,_O_BINARY);"
+		;;
+	      esac
+
+	    cat <<"EOF"
+	  lt_dump_script (stdout);
+	  return 0;
+	}
+      if (strcmp (argv[i], debug_opt) == 0)
+	{
+          lt_debug = 1;
+          continue;
+	}
+      if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
+        {
+          /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+             namespace, but it is not one of the ones we know about and
+             have already dealt with, above (including dump-script), then
+             report an error. Otherwise, targets might begin to believe
+             they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+             namespace. The first time any user complains about this, we'll
+             need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+             or a configure.ac-settable value.
+           */
+          lt_fatal (__FILE__, __LINE__,
+		    "unrecognized %s option: '%s'",
+                    ltwrapper_option_prefix, argv[i]);
+        }
+      /* otherwise ... */
+      newargz[++newargc] = xstrdup (argv[i]);
+    }
+  newargz[++newargc] = NULL;
+
+EOF
+	    cat <<EOF
+  /* The GNU banner must be the first non-error debug message */
+  lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
+EOF
+	    cat <<"EOF"
+  lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+  tmp_pathspec = find_executable (argv[0]);
+  if (tmp_pathspec == NULL)
+    lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (before symlink chase) at: %s\n",
+		  tmp_pathspec);
+
+  actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (after symlink chase) at: %s\n",
+		  actual_cwrapper_path);
+  XFREE (tmp_pathspec);
+
+  actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+  strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+  /* wrapper name transforms */
+  strendzap (actual_cwrapper_name, ".exe");
+  tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+  XFREE (actual_cwrapper_name);
+  actual_cwrapper_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  /* target_name transforms -- use actual target program name; might have lt- prefix */
+  target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+  strendzap (target_name, ".exe");
+  tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+  XFREE (target_name);
+  target_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(main) libtool target name: %s\n",
+		  target_name);
+EOF
+
+	    cat <<EOF
+  newargz[0] =
+    XMALLOC (char, (strlen (actual_cwrapper_path) +
+		    strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+  strcpy (newargz[0], actual_cwrapper_path);
+  strcat (newargz[0], "$objdir");
+  strcat (newargz[0], "/");
+EOF
+
+	    cat <<"EOF"
+  /* stop here, and copy so we don't have to do this twice */
+  tmp_pathspec = xstrdup (newargz[0]);
+
+  /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+  strcat (newargz[0], actual_cwrapper_name);
+
+  /* DO want the lt- prefix here if it exists, so use target_name */
+  lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+  XFREE (tmp_pathspec);
+  tmp_pathspec = NULL;
+EOF
+
+	    case $host_os in
+	      mingw*)
+	    cat <<"EOF"
+  {
+    char* p;
+    while ((p = strchr (newargz[0], '\\')) != NULL)
+      {
+	*p = '/';
+      }
+    while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+      {
+	*p = '/';
+      }
+  }
+EOF
+	    ;;
+	    esac
+
+	    cat <<"EOF"
+  XFREE (target_name);
+  XFREE (actual_cwrapper_path);
+  XFREE (actual_cwrapper_name);
+
+  lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+  lt_setenv ("DUALCASE", "1");  /* for MKS sh */
+  /* Update the DLL searchpath.  EXE_PATH_VALUE ($dllsearchpath) must
+     be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
+     because on Windows, both *_VARNAMEs are PATH but uninstalled
+     libraries must come first. */
+  lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+  lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+  lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+		  nonnull (lt_argv_zero));
+  for (i = 0; i < newargc; i++)
+    {
+      lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+		      i, nonnull (newargz[i]));
+    }
+
+EOF
+
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+  /* execv doesn't actually work on mingw as expected on unix */
+  newargz = prepare_spawn (newargz);
+  rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+  if (rval == -1)
+    {
+      /* failed to start process */
+      lt_debugprintf (__FILE__, __LINE__,
+		      "(main) failed to launch target \"%s\": %s\n",
+		      lt_argv_zero, nonnull (strerror (errno)));
+      return 127;
+    }
+  return rval;
+EOF
+		;;
+	      *)
+		cat <<"EOF"
+  execv (lt_argv_zero, newargz);
+  return rval; /* =127, but avoids unused variable warning */
+EOF
+		;;
+	    esac
+
+	    cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+  void *p = (void *) malloc (num);
+  if (!p)
+    lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+  return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+  return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+			  string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+  const char *base;
+
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  /* Skip over the disk name in MSDOS pathnames. */
+  if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+    name += 2;
+#endif
+
+  for (base = name; *name; name++)
+    if (IS_DIR_SEPARATOR (*name))
+      base = name + 1;
+  return base;
+}
+
+int
+check_executable (const char *path)
+{
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if ((stat (path, &st) >= 0)
+      && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+    return 1;
+  else
+    return 0;
+}
+
+int
+make_executable (const char *path)
+{
+  int rval = 0;
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if (stat (path, &st) >= 0)
+    {
+      rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+    }
+  return rval;
+}
+
+/* Searches for the full path of the wrapper.  Returns
+   newly allocated full path name if found, NULL otherwise
+   Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+  int has_slash = 0;
+  const char *p;
+  const char *p_next;
+  /* static buffer for getcwd */
+  char tmp[LT_PATHMAX + 1];
+  int tmp_len;
+  char *concat_name;
+
+  lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+                  nonempty (wrapper));
+
+  if ((wrapper == NULL) || (*wrapper == '\0'))
+    return NULL;
+
+  /* Absolute path? */
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+    {
+      concat_name = xstrdup (wrapper);
+      if (check_executable (concat_name))
+	return concat_name;
+      XFREE (concat_name);
+    }
+  else
+    {
+#endif
+      if (IS_DIR_SEPARATOR (wrapper[0]))
+	{
+	  concat_name = xstrdup (wrapper);
+	  if (check_executable (concat_name))
+	    return concat_name;
+	  XFREE (concat_name);
+	}
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+    }
+#endif
+
+  for (p = wrapper; *p; p++)
+    if (*p == '/')
+      {
+	has_slash = 1;
+	break;
+      }
+  if (!has_slash)
+    {
+      /* no slashes; search PATH */
+      const char *path = getenv ("PATH");
+      if (path != NULL)
+	{
+	  for (p = path; *p; p = p_next)
+	    {
+	      const char *q;
+	      size_t p_len;
+	      for (q = p; *q; q++)
+		if (IS_PATH_SEPARATOR (*q))
+		  break;
+	      p_len = q - p;
+	      p_next = (*q == '\0' ? q : q + 1);
+	      if (p_len == 0)
+		{
+		  /* empty path: current directory */
+		  if (getcwd (tmp, LT_PATHMAX) == NULL)
+		    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+                              nonnull (strerror (errno)));
+		  tmp_len = strlen (tmp);
+		  concat_name =
+		    XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, tmp, tmp_len);
+		  concat_name[tmp_len] = '/';
+		  strcpy (concat_name + tmp_len + 1, wrapper);
+		}
+	      else
+		{
+		  concat_name =
+		    XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, p, p_len);
+		  concat_name[p_len] = '/';
+		  strcpy (concat_name + p_len + 1, wrapper);
+		}
+	      if (check_executable (concat_name))
+		return concat_name;
+	      XFREE (concat_name);
+	    }
+	}
+      /* not found in PATH; assume curdir */
+    }
+  /* Relative path | not found in path: prepend cwd */
+  if (getcwd (tmp, LT_PATHMAX) == NULL)
+    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+              nonnull (strerror (errno)));
+  tmp_len = strlen (tmp);
+  concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+  memcpy (concat_name, tmp, tmp_len);
+  concat_name[tmp_len] = '/';
+  strcpy (concat_name + tmp_len + 1, wrapper);
+
+  if (check_executable (concat_name))
+    return concat_name;
+  XFREE (concat_name);
+  return NULL;
+}
+
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+  return xstrdup (pathspec);
+#else
+  char buf[LT_PATHMAX];
+  struct stat s;
+  char *tmp_pathspec = xstrdup (pathspec);
+  char *p;
+  int has_symlinks = 0;
+  while (strlen (tmp_pathspec) && !has_symlinks)
+    {
+      lt_debugprintf (__FILE__, __LINE__,
+		      "checking path component for symlinks: %s\n",
+		      tmp_pathspec);
+      if (lstat (tmp_pathspec, &s) == 0)
+	{
+	  if (S_ISLNK (s.st_mode) != 0)
+	    {
+	      has_symlinks = 1;
+	      break;
+	    }
+
+	  /* search backwards for last DIR_SEPARATOR */
+	  p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+	  while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    p--;
+	  if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    {
+	      /* no more DIR_SEPARATORS left */
+	      break;
+	    }
+	  *p = '\0';
+	}
+      else
+	{
+	  lt_fatal (__FILE__, __LINE__,
+		    "error accessing file \"%s\": %s",
+		    tmp_pathspec, nonnull (strerror (errno)));
+	}
+    }
+  XFREE (tmp_pathspec);
+
+  if (!has_symlinks)
+    {
+      return xstrdup (pathspec);
+    }
+
+  tmp_pathspec = realpath (pathspec, buf);
+  if (tmp_pathspec == 0)
+    {
+      lt_fatal (__FILE__, __LINE__,
+		"could not follow symlinks for %s", pathspec);
+    }
+  return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+  size_t len, patlen;
+
+  assert (str != NULL);
+  assert (pat != NULL);
+
+  len = strlen (str);
+  patlen = strlen (pat);
+
+  if (patlen <= len)
+    {
+      str += len - patlen;
+      if (strcmp (str, pat) == 0)
+	*str = '\0';
+    }
+  return str;
+}
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+  va_list args;
+  if (lt_debug)
+    {
+      (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+      va_start (args, fmt);
+      (void) vfprintf (stderr, fmt, args);
+      va_end (args);
+    }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+	       int line, const char *mode,
+	       const char *message, va_list ap)
+{
+  fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+  vfprintf (stderr, message, ap);
+  fprintf (stderr, ".\n");
+
+  if (exit_status >= 0)
+    exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+  va_list ap;
+  va_start (ap, message);
+  lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+  va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+  return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+  return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_setenv) setting '%s' to '%s'\n",
+                  nonnull (name), nonnull (value));
+  {
+#ifdef HAVE_SETENV
+    /* always make a copy, for consistency with !HAVE_SETENV */
+    char *str = xstrdup (value);
+    setenv (name, str, 1);
+#else
+    int len = strlen (name) + 1 + strlen (value) + 1;
+    char *str = XMALLOC (char, len);
+    sprintf (str, "%s=%s", name, value);
+    if (putenv (str) != EXIT_SUCCESS)
+      {
+        XFREE (str);
+      }
+#endif
+  }
+}
+
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+  char *new_value;
+  if (orig_value && *orig_value)
+    {
+      int orig_value_len = strlen (orig_value);
+      int add_len = strlen (add);
+      new_value = XMALLOC (char, add_len + orig_value_len + 1);
+      if (to_end)
+        {
+          strcpy (new_value, orig_value);
+          strcpy (new_value + orig_value_len, add);
+        }
+      else
+        {
+          strcpy (new_value, add);
+          strcpy (new_value + add_len, orig_value);
+        }
+    }
+  else
+    {
+      new_value = xstrdup (add);
+    }
+  return new_value;
+}
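+/* Illustrative behaviour (not part of the upstream libtool sources):
+     lt_extend_str ("/usr/lib", ":/opt/lib", 1)  returns "/usr/lib:/opt/lib"
+     lt_extend_str ("/usr/lib", "/opt/lib:", 0)  returns "/opt/lib:/usr/lib"
+     lt_extend_str (NULL,       "/opt/lib",  1)  returns "/opt/lib"          */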
+
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      /* some systems can't cope with a ':'-terminated path #' */
+      int len = strlen (new_value);
+      while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+        {
+          new_value[len-1] = '\0';
+        }
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+EOF
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+   Note that spawn() does not by itself call the command interpreter
+     (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+      ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+         GetVersionEx(&v);
+         v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+      }) ? "cmd.exe" : "command.com").
+   Instead it simply concatenates the arguments, separated by ' ', and calls
+   CreateProcess().  We must quote the arguments since Win32 CreateProcess()
+   interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+   special way:
+   - Space and tab are interpreted as delimiters. They are not treated as
+     delimiters if they are surrounded by double quotes: "...".
+   - Unescaped double quotes are removed from the input. Their only effect is
+     that within double quotes, space and tab are treated like normal
+     characters.
+   - Backslashes not followed by double quotes are not special.
+   - But 2*n+1 backslashes followed by a double quote become
+     n backslashes followed by a double quote (n >= 0):
+       \" -> "
+       \\\" -> \"
+       \\\\\" -> \\"
+ */
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+  size_t argc;
+  char **new_argv;
+  size_t i;
+
+  /* Count number of arguments.  */
+  for (argc = 0; argv[argc] != NULL; argc++)
+    ;
+
+  /* Allocate new argument vector.  */
+  new_argv = XMALLOC (char *, argc + 1);
+
+  /* Put quoted arguments into the new argument vector.  */
+  for (i = 0; i < argc; i++)
+    {
+      const char *string = argv[i];
+
+      if (string[0] == '\0')
+	new_argv[i] = xstrdup ("\"\"");
+      else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+	{
+	  int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+	  size_t length;
+	  unsigned int backslashes;
+	  const char *s;
+	  char *quoted_string;
+	  char *p;
+
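+	  /* First pass: compute how long the quoted argument will be.  */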
+	  length = 0;
+	  backslashes = 0;
+	  if (quote_around)
+	    length++;
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		length += backslashes + 1;
+	      length++;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    length += backslashes + 1;
+
+	  quoted_string = XMALLOC (char, length + 1);
+
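+	  /* Second pass: build the quoted copy, escaping embedded double
+	     quotes (and the backslashes that immediately precede them).  */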
+	  p = quoted_string;
+	  backslashes = 0;
+	  if (quote_around)
+	    *p++ = '"';
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		{
+		  unsigned int j;
+		  for (j = backslashes + 1; j > 0; j--)
+		    *p++ = '\\';
+		}
+	      *p++ = c;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    {
+	      unsigned int j;
+	      for (j = backslashes; j > 0; j--)
+		*p++ = '\\';
+	      *p++ = '"';
+	    }
+	  *p = '\0';
+
+	  new_argv[i] = quoted_string;
+	}
+      else
+	new_argv[i] = (char *) string;
+    }
+  new_argv[argc] = NULL;
+
+  return new_argv;
+}
+EOF
+		;;
+	    esac
+
+            cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
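+	    # Convert the output of func_emit_wrapper into a series of C
+	    # fputs() calls: split lines longer than 80 columns, escape
+	    # backslashes and double quotes, and end each chunk with a
+	    # literal \n.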
+	    func_emit_wrapper yes |
+	      $SED -n -e '
+s/^\(.\{79\}\)\(..*\)/\1\
+\2/
+h
+s/\([\\"]\)/\\\1/g
+s/$/\\n/
+s/\([^\n]*\).*/  fputs ("\1", f);/p
+g
+D'
+            cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+    $opt_debug
+    case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+    *import*) : ;;
+    *) false ;;
+    esac
+}
+
+# func_mode_link arg...
+func_mode_link ()
+{
+    $opt_debug
+    case $host in
+    *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+      # It is impossible to link a dll without this setting, and
+      # we shouldn't force the makefile maintainer to figure out
+      # which system we are compiling for in order to pass an extra
+      # flag for every libtool invocation.
+      # allow_undefined=no
+
+      # FIXME: Unfortunately, there are problems with the above when trying
+      # to make a dll which has undefined symbols, in which case not
+      # even a static library is built.  For now, we need to specify
+      # -no-undefined on the libtool link line when we can be certain
+      # that all symbols are satisfied, otherwise we get a static library.
+      allow_undefined=yes
+      ;;
+    *)
+      allow_undefined=yes
+      ;;
+    esac
+    libtool_args=$nonopt
+    base_compile="$nonopt $@"
+    compile_command=$nonopt
+    finalize_command=$nonopt
+
+    compile_rpath=
+    finalize_rpath=
+    compile_shlibpath=
+    finalize_shlibpath=
+    convenience=
+    old_convenience=
+    deplibs=
+    old_deplibs=
+    compiler_flags=
+    linker_flags=
+    dllsearchpath=
+    lib_search_path=`pwd`
+    inst_prefix_dir=
+    new_inherited_linker_flags=
+
+    avoid_version=no
+    bindir=
+    dlfiles=
+    dlprefiles=
+    dlself=no
+    export_dynamic=no
+    export_symbols=
+    export_symbols_regex=
+    generated=
+    libobjs=
+    ltlibs=
+    module=no
+    no_install=no
+    objs=
+    non_pic_objects=
+    precious_files_regex=
+    prefer_static_libs=no
+    preload=no
+    prev=
+    prevarg=
+    release=
+    rpath=
+    xrpath=
+    perm_rpath=
+    temp_rpath=
+    thread_safe=no
+    vinfo=
+    vinfo_number=no
+    weak_libs=
+    single_module="${wl}-single_module"
+    func_infer_tag $base_compile
+
+    # We need to know -static, to get the right output filenames.
+    for arg
+    do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	break
+	;;
+      -all-static | -static | -static-libtool-libs)
+	case $arg in
+	-all-static)
+	  if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
+	    func_warning "complete static linking is impossible in this configuration"
+	  fi
+	  if test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	-static)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=built
+	  ;;
+	-static-libtool-libs)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	esac
+	build_libtool_libs=no
+	build_old_libs=yes
+	break
+	;;
+      esac
+    done
+
+    # See if our shared archives depend on static archives.
+    test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+    # Go through the arguments, transforming them on the way.
+    while test "$#" -gt 0; do
+      arg="$1"
+      shift
+      func_quote_for_eval "$arg"
+      qarg=$func_quote_for_eval_unquoted_result
+      libtool_args+=" $func_quote_for_eval_result"
+
+      # If the previous option needs an argument, assign it.
+      if test -n "$prev"; then
+	case $prev in
+	output)
+	  compile_command+=" @OUTPUT@"
+	  finalize_command+=" @OUTPUT@"
+	  ;;
+	esac
+
+	case $prev in
+	bindir)
+	  bindir="$arg"
+	  prev=
+	  continue
+	  ;;
+	dlfiles|dlprefiles)
+	  if test "$preload" = no; then
+	    # Add the symbol object into the linking commands.
+	    compile_command+=" @SYMFILE@"
+	    finalize_command+=" @SYMFILE@"
+	    preload=yes
+	  fi
+	  case $arg in
+	  *.la | *.lo) ;;  # We handle these cases below.
+	  force)
+	    if test "$dlself" = no; then
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  self)
+	    if test "$prev" = dlprefiles; then
+	      dlself=yes
+	    elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
+	      dlself=yes
+	    else
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  *)
+	    if test "$prev" = dlfiles; then
+	      dlfiles+=" $arg"
+	    else
+	      dlprefiles+=" $arg"
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  esac
+	  ;;
+	expsyms)
+	  export_symbols="$arg"
+	  test -f "$arg" \
+	    || func_fatal_error "symbol file \`$arg' does not exist"
+	  prev=
+	  continue
+	  ;;
+	expsyms_regex)
+	  export_symbols_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	framework)
+	  case $host in
+	    *-*-darwin*)
+	      case "$deplibs " in
+		*" $qarg.ltframework "*) ;;
+		*) deplibs+=" $qarg.ltframework" # this is fixed later
+		   ;;
+	      esac
+	      ;;
+	  esac
+	  prev=
+	  continue
+	  ;;
+	inst_prefix)
+	  inst_prefix_dir="$arg"
+	  prev=
+	  continue
+	  ;;
+	objectlist)
+	  if test -f "$arg"; then
+	    save_arg=$arg
+	    moreargs=
+	    for fil in `cat "$save_arg"`
+	    do
+#	      moreargs+=" $fil"
+	      arg=$fil
+	      # A libtool-controlled object.
+
+	      # Check to see that this really is a libtool object.
+	      if func_lalib_unsafe_p "$arg"; then
+		pic_object=
+		non_pic_object=
+
+		# Read the .lo file
+		func_source "$arg"
+
+		if test -z "$pic_object" ||
+		   test -z "$non_pic_object" ||
+		   test "$pic_object" = none &&
+		   test "$non_pic_object" = none; then
+		  func_fatal_error "cannot find name of object for \`$arg'"
+		fi
+
+		# Extract subdirectory from the argument.
+		func_dirname "$arg" "/" ""
+		xdir="$func_dirname_result"
+
+		if test "$pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  pic_object="$xdir$pic_object"
+
+		  if test "$prev" = dlfiles; then
+		    if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		      dlfiles+=" $pic_object"
+		      prev=
+		      continue
+		    else
+		      # If libtool objects are unsupported, then we need to preload.
+		      prev=dlprefiles
+		    fi
+		  fi
+
+		  # CHECK ME:  I think I busted this.  -Ossama
+		  if test "$prev" = dlprefiles; then
+		    # Preload the old-style object.
+		    dlprefiles+=" $pic_object"
+		    prev=
+		  fi
+
+		  # A PIC object.
+		  libobjs+=" $pic_object"
+		  arg="$pic_object"
+		fi
+
+		# Non-PIC object.
+		if test "$non_pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  non_pic_object="$xdir$non_pic_object"
+
+		  # A standard non-PIC object
+		  non_pic_objects+=" $non_pic_object"
+		  if test -z "$pic_object" || test "$pic_object" = none ; then
+		    arg="$non_pic_object"
+		  fi
+		else
+		  # If the PIC object exists, use it instead.
+		  # $xdir was prepended to $pic_object above.
+		  non_pic_object="$pic_object"
+		  non_pic_objects+=" $non_pic_object"
+		fi
+	      else
+		# Only an error if not doing a dry-run.
+		if $opt_dry_run; then
+		  # Extract subdirectory from the argument.
+		  func_dirname "$arg" "/" ""
+		  xdir="$func_dirname_result"
+
+		  func_lo2o "$arg"
+		  pic_object=$xdir$objdir/$func_lo2o_result
+		  non_pic_object=$xdir$func_lo2o_result
+		  libobjs+=" $pic_object"
+		  non_pic_objects+=" $non_pic_object"
+	        else
+		  func_fatal_error "\`$arg' is not a valid libtool object"
+		fi
+	      fi
+	    done
+	  else
+	    func_fatal_error "link input file \`$arg' does not exist"
+	  fi
+	  arg=$save_arg
+	  prev=
+	  continue
+	  ;;
+	precious_regex)
+	  precious_files_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	release)
+	  release="-$arg"
+	  prev=
+	  continue
+	  ;;
+	rpath | xrpath)
+	  # We need an absolute path.
+	  case $arg in
+	  [\\/]* | [A-Za-z]:[\\/]*) ;;
+	  *)
+	    func_fatal_error "only absolute run-paths are allowed"
+	    ;;
+	  esac
+	  if test "$prev" = rpath; then
+	    case "$rpath " in
+	    *" $arg "*) ;;
+	    *) rpath+=" $arg" ;;
+	    esac
+	  else
+	    case "$xrpath " in
+	    *" $arg "*) ;;
+	    *) xrpath+=" $arg" ;;
+	    esac
+	  fi
+	  prev=
+	  continue
+	  ;;
+	shrext)
+	  shrext_cmds="$arg"
+	  prev=
+	  continue
+	  ;;
+	weak)
+	  weak_libs+=" $arg"
+	  prev=
+	  continue
+	  ;;
+	xcclinker)
+	  linker_flags+=" $qarg"
+	  compiler_flags+=" $qarg"
+	  prev=
+	  compile_command+=" $qarg"
+	  finalize_command+=" $qarg"
+	  continue
+	  ;;
+	xcompiler)
+	  compiler_flags+=" $qarg"
+	  prev=
+	  compile_command+=" $qarg"
+	  finalize_command+=" $qarg"
+	  continue
+	  ;;
+	xlinker)
+	  linker_flags+=" $qarg"
+	  compiler_flags+=" $wl$qarg"
+	  prev=
+	  compile_command+=" $wl$qarg"
+	  finalize_command+=" $wl$qarg"
+	  continue
+	  ;;
+	*)
+	  eval "$prev=\"\$arg\""
+	  prev=
+	  continue
+	  ;;
+	esac
+      fi # test -n "$prev"
+
+      prevarg="$arg"
+
+      case $arg in
+      -all-static)
+	if test -n "$link_static_flag"; then
+	  # See comment for -static flag below, for more details.
+	  compile_command+=" $link_static_flag"
+	  finalize_command+=" $link_static_flag"
+	fi
+	continue
+	;;
+
+      -allow-undefined)
+	# FIXME: remove this flag sometime in the future.
+	func_fatal_error "\`-allow-undefined' must not be used because it is the default"
+	;;
+
+      -avoid-version)
+	avoid_version=yes
+	continue
+	;;
+
+      -bindir)
+	prev=bindir
+	continue
+	;;
+
+      -dlopen)
+	prev=dlfiles
+	continue
+	;;
+
+      -dlpreopen)
+	prev=dlprefiles
+	continue
+	;;
+
+      -export-dynamic)
+	export_dynamic=yes
+	continue
+	;;
+
+      -export-symbols | -export-symbols-regex)
+	if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+	  func_fatal_error "more than one -export-symbols or -export-symbols-regex argument is not allowed"
+	fi
+	if test "X$arg" = "X-export-symbols"; then
+	  prev=expsyms
+	else
+	  prev=expsyms_regex
+	fi
+	continue
+	;;
+
+      -framework)
+	prev=framework
+	continue
+	;;
+
+      -inst-prefix-dir)
+	prev=inst_prefix
+	continue
+	;;
+
+      # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+      # so, if we see these flags be careful not to treat them like -L
+      -L[A-Z][A-Z]*:*)
+	case $with_gcc/$host in
+	no/*-*-irix* | /*-*-irix*)
+	  compile_command+=" $arg"
+	  finalize_command+=" $arg"
+	  ;;
+	esac
+	continue
+	;;
+
+      -L*)
+	func_stripname "-L" '' "$arg"
+	if test -z "$func_stripname_result"; then
+	  if test "$#" -gt 0; then
+	    func_fatal_error "require no space between \`-L' and \`$1'"
+	  else
+	    func_fatal_error "need path for \`-L' option"
+	  fi
+	fi
+	func_resolve_sysroot "$func_stripname_result"
+	dir=$func_resolve_sysroot_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	*)
+	  absdir=`cd "$dir" && pwd`
+	  test -z "$absdir" && \
+	    func_fatal_error "cannot determine absolute directory name of \`$dir'"
+	  dir="$absdir"
+	  ;;
+	esac
+	case "$deplibs " in
+	*" -L$dir "* | *" $arg "*)
+	  # Will only happen for absolute or sysroot arguments
+	  ;;
+	*)
+	  # Preserve sysroot, but never include relative directories
+	  case $dir in
+	    [\\/]* | [A-Za-z]:[\\/]* | =*) deplibs+=" $arg" ;;
+	    *) deplibs+=" -L$dir" ;;
+	  esac
+	  lib_search_path+=" $dir"
+	  ;;
+	esac
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$dir:"*) ;;
+	  ::) dllsearchpath=$dir;;
+	  *) dllsearchpath+=":$dir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath+=":$testbindir";;
+	  esac
+	  ;;
+	esac
+	continue
+	;;
+
+      -l*)
+	if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # These systems don't actually have a C or math library (as such)
+	    continue
+	    ;;
+	  *-*-os2*)
+	    # These systems don't actually have a C library (as such)
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C and math libraries are in the System framework
+	    deplibs+=" System.ltframework"
+	    continue
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  esac
+	elif test "X$arg" = "X-lc_r"; then
+	 case $host in
+	 *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	   # Do not include libc_r directly, use -pthread flag.
+	   continue
+	   ;;
+	 esac
+	fi
+	deplibs+=" $arg"
+	continue
+	;;
+
+      -module)
+	module=yes
+	continue
+	;;
+
+      # Tru64 UNIX uses -model [arg] to determine the layout of C++
+      # classes, name mangling, and exception handling.
+      # Darwin uses the -arch flag to determine output architecture.
+      -model|-arch|-isysroot|--sysroot)
+	compiler_flags+=" $arg"
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+	prev=xcompiler
+	continue
+	;;
+
+      -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+      |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	compiler_flags+=" $arg"
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+	case "$new_inherited_linker_flags " in
+	    *" $arg "*) ;;
+	    * ) new_inherited_linker_flags+=" $arg" ;;
+	esac
+	continue
+	;;
+
+      -multi_module)
+	single_module="${wl}-multi_module"
+	continue
+	;;
+
+      -no-fast-install)
+	fast_install=no
+	continue
+	;;
+
+      -no-install)
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+	  # The PATH hackery in wrapper scripts is required on Windows
+	  # and Darwin in order for the loader to find any dlls it needs.
+	  func_warning "\`-no-install' is ignored for $host"
+	  func_warning "assuming \`-no-fast-install' instead"
+	  fast_install=no
+	  ;;
+	*) no_install=yes ;;
+	esac
+	continue
+	;;
+
+      -no-undefined)
+	allow_undefined=no
+	continue
+	;;
+
+      -objectlist)
+	prev=objectlist
+	continue
+	;;
+
+      -o) prev=output ;;
+
+      -precious-files-regex)
+	prev=precious_regex
+	continue
+	;;
+
+      -release)
+	prev=release
+	continue
+	;;
+
+      -rpath)
+	prev=rpath
+	continue
+	;;
+
+      -R)
+	prev=xrpath
+	continue
+	;;
+
+      -R*)
+	func_stripname '-R' '' "$arg"
+	dir=$func_stripname_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	=*)
+	  func_stripname '=' '' "$dir"
+	  dir=$lt_sysroot$func_stripname_result
+	  ;;
+	*)
+	  func_fatal_error "only absolute run-paths are allowed"
+	  ;;
+	esac
+	case "$xrpath " in
+	*" $dir "*) ;;
+	*) xrpath+=" $dir" ;;
+	esac
+	continue
+	;;
+
+      -shared)
+	# The effects of -shared are defined in a previous loop.
+	continue
+	;;
+
+      -shrext)
+	prev=shrext
+	continue
+	;;
+
+      -static | -static-libtool-libs)
+	# The effects of -static are defined in a previous loop.
+	# We used to do the same as -all-static on platforms that
+	# didn't have a PIC flag, but the assumption that the effects
+	# would be equivalent was wrong.  It would break on at least
+	# Digital Unix and AIX.
+	continue
+	;;
+
+      -thread-safe)
+	thread_safe=yes
+	continue
+	;;
+
+      -version-info)
+	prev=vinfo
+	continue
+	;;
+
+      -version-number)
+	prev=vinfo
+	vinfo_number=yes
+	continue
+	;;
+
+      -weak)
+        prev=weak
+	continue
+	;;
+
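+      # -Wc,FLAG[,FLAG...] passes the comma-separated flags through to the compiler.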
+      -Wc,*)
+	func_stripname '-Wc,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg+=" $func_quote_for_eval_result"
+	  compiler_flags+=" $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
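+      # -Wl,FLAG[,FLAG...] passes the comma-separated flags through to the linker
+      # (each flag is prefixed with $wl when handed to the compiler driver).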
+      -Wl,*)
+	func_stripname '-Wl,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg+=" $wl$func_quote_for_eval_result"
+	  compiler_flags+=" $wl$func_quote_for_eval_result"
+	  linker_flags+=" $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Xcompiler)
+	prev=xcompiler
+	continue
+	;;
+
+      -Xlinker)
+	prev=xlinker
+	continue
+	;;
+
+      -XCClinker)
+	prev=xcclinker
+	continue
+	;;
+
+      # -msg_* for osf cc
+      -msg_*)
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      # Flags to be passed through unchanged, with rationale:
+      # -64, -mips[0-9]      enable 64-bit mode for the SGI compiler
+      # -r[0-9][0-9]*        specify processor for the SGI compiler
+      # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+      # +DA*, +DD*           enable 64-bit mode for the HP compiler
+      # -q*                  compiler args for the IBM compiler
+      # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+      # -F/path              path to uninstalled frameworks, gcc on darwin
+      # -p, -pg, --coverage, -fprofile-*  profiling flags for GCC
+      # @file                GCC response files
+      # -tp=*                Portland pgcc target processor selection
+      # --sysroot=*          for sysroot support
+      # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+      -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+      -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
+      -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+        compile_command+=" $arg"
+        finalize_command+=" $arg"
+        compiler_flags+=" $arg"
+        continue
+        ;;
+
+      # Some other compiler flag.
+      -* | +*)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      *.$objext)
+	# A standard object.
+	objs+=" $arg"
+	;;
+
+      *.lo)
+	# A libtool-controlled object.
+
+	# Check to see that this really is a libtool object.
+	if func_lalib_unsafe_p "$arg"; then
+	  pic_object=
+	  non_pic_object=
+
+	  # Read the .lo file
+	  func_source "$arg"
+
+	  if test -z "$pic_object" ||
+	     test -z "$non_pic_object" ||
+	     test "$pic_object" = none &&
+	     test "$non_pic_object" = none; then
+	    func_fatal_error "cannot find name of object for \`$arg'"
+	  fi
+
+	  # Extract subdirectory from the argument.
+	  func_dirname "$arg" "/" ""
+	  xdir="$func_dirname_result"
+
+	  if test "$pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    pic_object="$xdir$pic_object"
+
+	    if test "$prev" = dlfiles; then
+	      if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		dlfiles+=" $pic_object"
+		prev=
+		continue
+	      else
+		# If libtool objects are unsupported, then we need to preload.
+		prev=dlprefiles
+	      fi
+	    fi
+
+	    # CHECK ME:  I think I busted this.  -Ossama
+	    if test "$prev" = dlprefiles; then
+	      # Preload the old-style object.
+	      dlprefiles+=" $pic_object"
+	      prev=
+	    fi
+
+	    # A PIC object.
+	    libobjs+=" $pic_object"
+	    arg="$pic_object"
+	  fi
+
+	  # Non-PIC object.
+	  if test "$non_pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    non_pic_object="$xdir$non_pic_object"
+
+	    # A standard non-PIC object
+	    non_pic_objects+=" $non_pic_object"
+	    if test -z "$pic_object" || test "$pic_object" = none ; then
+	      arg="$non_pic_object"
+	    fi
+	  else
+	    # If the PIC object exists, use it instead.
+	    # $xdir was prepended to $pic_object above.
+	    non_pic_object="$pic_object"
+	    non_pic_objects+=" $non_pic_object"
+	  fi
+	else
+	  # Only an error if not doing a dry-run.
+	  if $opt_dry_run; then
+	    # Extract subdirectory from the argument.
+	    func_dirname "$arg" "/" ""
+	    xdir="$func_dirname_result"
+
+	    func_lo2o "$arg"
+	    pic_object=$xdir$objdir/$func_lo2o_result
+	    non_pic_object=$xdir$func_lo2o_result
+	    libobjs+=" $pic_object"
+	    non_pic_objects+=" $non_pic_object"
+	  else
+	    func_fatal_error "\`$arg' is not a valid libtool object"
+	  fi
+	fi
+	;;
+
+      *.$libext)
+	# An archive.
+	deplibs+=" $arg"
+	old_deplibs+=" $arg"
+	continue
+	;;
+
+      *.la)
+	# A libtool-controlled library.
+
+	func_resolve_sysroot "$arg"
+	if test "$prev" = dlfiles; then
+	  # This library was specified with -dlopen.
+	  dlfiles+=" $func_resolve_sysroot_result"
+	  prev=
+	elif test "$prev" = dlprefiles; then
+	  # The library was specified with -dlpreopen.
+	  dlprefiles+=" $func_resolve_sysroot_result"
+	  prev=
+	else
+	  deplibs+=" $func_resolve_sysroot_result"
+	fi
+	continue
+	;;
+
+      # Some other compiler argument.
+      *)
+	# Unknown arguments in both finalize_command and compile_command need
+	# to be aesthetically quoted because they are evaled later.
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+      esac # arg
+
+      # Now actually substitute the argument into the commands.
+      if test -n "$arg"; then
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+      fi
+    done # argument parsing loop
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prevarg' option requires an argument"
+
+    if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+      eval arg=\"$export_dynamic_flag_spec\"
+      compile_command+=" $arg"
+      finalize_command+=" $arg"
+    fi
+
+    oldlibs=
+    # calculate the name of the file, without its directory
+    func_basename "$output"
+    outputname="$func_basename_result"
+    libobjs_save="$libobjs"
+
+    if test -n "$shlibpath_var"; then
+      # get the directories listed in $shlibpath_var
+      eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\`
+    else
+      shlib_search_path=
+    fi
+    eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+    eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+    func_dirname "$output" "/" ""
+    output_objdir="$func_dirname_result$objdir"
+    func_to_tool_file "$output_objdir/"
+    tool_output_objdir=$func_to_tool_file_result
+    # Create the object directory.
+    func_mkdir_p "$output_objdir"
+
+    # Determine the type of output
+    case $output in
+    "")
+      func_fatal_help "you must specify an output file"
+      ;;
+    *.$libext) linkmode=oldlib ;;
+    *.lo | *.$objext) linkmode=obj ;;
+    *.la) linkmode=lib ;;
+    *) linkmode=prog ;; # Anything else should be a program.
+    esac
+
+    specialdeplibs=
+
+    libs=
+    # Find all interdependent deplibs by searching for libraries
+    # that are linked more than once (e.g. -la -lb -la)
+    for deplib in $deplibs; do
+      if $opt_preserve_dup_deps ; then
+	case "$libs " in
+	*" $deplib "*) specialdeplibs+=" $deplib" ;;
+	esac
+      fi
+      libs+=" $deplib"
+    done
+
+    if test "$linkmode" = lib; then
+      libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+      # Compute libraries that are listed more than once in $predeps
+      # $postdeps and mark them as special (i.e., whose duplicates are
+      # not to be eliminated).
+      pre_post_deps=
+      if $opt_duplicate_compiler_generated_deps; then
+	for pre_post_dep in $predeps $postdeps; do
+	  case "$pre_post_deps " in
+	  *" $pre_post_dep "*) specialdeplibs+=" $pre_post_deps" ;;
+	  esac
+	  pre_post_deps+=" $pre_post_dep"
+	done
+      fi
+      pre_post_deps=
+    fi
+
+    deplibs=
+    newdependency_libs=
+    newlib_search_path=
+    need_relink=no # whether we're linking any uninstalled libtool libraries
+    notinst_deplibs= # not-installed libtool libraries
+    notinst_path= # paths that contain not-installed libtool libraries
+
+    case $linkmode in
+    lib)
+	passes="conv dlpreopen link"
+	for file in $dlfiles $dlprefiles; do
+	  case $file in
+	  *.la) ;;
+	  *)
+	    func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
+	    ;;
+	  esac
+	done
+	;;
+    prog)
+	compile_deplibs=
+	finalize_deplibs=
+	alldeplibs=no
+	newdlfiles=
+	newdlprefiles=
+	passes="conv scan dlopen dlpreopen link"
+	;;
+    *)  passes="conv"
+	;;
+    esac
+
+    for pass in $passes; do
+      # The preopen pass in lib mode reverses $deplibs; put it back here
+      # so that -L comes before libs that need it for instance...
+      if test "$linkmode,$pass" = "lib,link"; then
+	## FIXME: Find the place where the list is rebuilt in the wrong
+	##        order, and fix it there properly
+        tmp_deplibs=
+	for deplib in $deplibs; do
+	  tmp_deplibs="$deplib $tmp_deplibs"
+	done
+	deplibs="$tmp_deplibs"
+      fi
+
+      if test "$linkmode,$pass" = "lib,link" ||
+	 test "$linkmode,$pass" = "prog,scan"; then
+	libs="$deplibs"
+	deplibs=
+      fi
+      if test "$linkmode" = prog; then
+	case $pass in
+	dlopen) libs="$dlfiles" ;;
+	dlpreopen) libs="$dlprefiles" ;;
+	link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+	esac
+      fi
+      if test "$linkmode,$pass" = "lib,dlpreopen"; then
+	# Collect and forward deplibs of preopened libtool libs
+	for lib in $dlprefiles; do
+	  # Ignore non-libtool-libs
+	  dependency_libs=
+	  func_resolve_sysroot "$lib"
+	  case $lib in
+	  *.la)	func_source "$func_resolve_sysroot_result" ;;
+	  esac
+
+	  # Collect preopened libtool deplibs, except any this library
+	  # has declared as weak libs
+	  for deplib in $dependency_libs; do
+	    func_basename "$deplib"
+            deplib_base=$func_basename_result
+	    case " $weak_libs " in
+	    *" $deplib_base "*) ;;
+	    *) deplibs+=" $deplib" ;;
+	    esac
+	  done
+	done
+	libs="$dlprefiles"
+      fi
+      if test "$pass" = dlopen; then
+	# Collect dlpreopened libraries
+	save_deplibs="$deplibs"
+	deplibs=
+      fi
+
+      for deplib in $libs; do
+	lib=
+	found=no
+	case $deplib in
+	-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+        |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    compiler_flags+=" $deplib"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags+=" $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-l*)
+	  if test "$linkmode" != lib && test "$linkmode" != prog; then
+	    func_warning "\`-l' is ignored for archives/objects"
+	    continue
+	  fi
+	  func_stripname '-l' '' "$deplib"
+	  name=$func_stripname_result
+	  if test "$linkmode" = lib; then
+	    searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+	  else
+	    searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+	  fi
+	  for searchdir in $searchdirs; do
+	    for search_ext in .la $std_shrext .so .a; do
+	      # Search the libtool library
+	      lib="$searchdir/lib${name}${search_ext}"
+	      if test -f "$lib"; then
+		if test "$search_ext" = ".la"; then
+		  found=yes
+		else
+		  found=no
+		fi
+		break 2
+	      fi
+	    done
+	  done
+	  if test "$found" != yes; then
+	    # deplib doesn't seem to be a libtool library
+	    if test "$linkmode,$pass" = "prog,link"; then
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      deplibs="$deplib $deplibs"
+	      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    continue
+	  else # deplib is a libtool library
+	    # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
+	    # We need to do some special things here, and not later.
+	    if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	      case " $predeps $postdeps " in
+	      *" $deplib "*)
+		if func_lalib_p "$lib"; then
+		  library_names=
+		  old_library=
+		  func_source "$lib"
+		  for l in $old_library $library_names; do
+		    ll="$l"
+		  done
+		  if test "X$ll" = "X$old_library" ; then # only static version available
+		    found=no
+		    func_dirname "$lib" "" "."
+		    ladir="$func_dirname_result"
+		    lib=$ladir/$old_library
+		    if test "$linkmode,$pass" = "prog,link"; then
+		      compile_deplibs="$deplib $compile_deplibs"
+		      finalize_deplibs="$deplib $finalize_deplibs"
+		    else
+		      deplibs="$deplib $deplibs"
+		      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+		    fi
+		    continue
+		  fi
+		fi
+		;;
+	      *) ;;
+	      esac
+	    fi
+	  fi
+	  ;; # -l
+	*.ltframework)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    deplibs="$deplib $deplibs"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags+=" $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-L*)
+	  case $linkmode in
+	  lib)
+	    deplibs="$deplib $deplibs"
+	    test "$pass" = conv && continue
+	    newdependency_libs="$deplib $newdependency_libs"
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    newlib_search_path+=" $func_resolve_sysroot_result"
+	    ;;
+	  prog)
+	    if test "$pass" = conv; then
+	      deplibs="$deplib $deplibs"
+	      continue
+	    fi
+	    if test "$pass" = scan; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    newlib_search_path+=" $func_resolve_sysroot_result"
+	    ;;
+	  *)
+	    func_warning "\`-L' is ignored for archives/objects"
+	    ;;
+	  esac # linkmode
+	  continue
+	  ;; # -L
+	-R*)
+	  if test "$pass" = link; then
+	    func_stripname '-R' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    dir=$func_resolve_sysroot_result
+	    # Make sure the xrpath contains only unique directories.
+	    case "$xrpath " in
+	    *" $dir "*) ;;
+	    *) xrpath+=" $dir" ;;
+	    esac
+	  fi
+	  deplibs="$deplib $deplibs"
+	  continue
+	  ;;
+	*.la)
+	  func_resolve_sysroot "$deplib"
+	  lib=$func_resolve_sysroot_result
+	  ;;
+	*.$libext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	    continue
+	  fi
+	  case $linkmode in
+	  lib)
+	    # Linking convenience modules into shared libraries is allowed,
+	    # but linking other static libraries is non-portable.
+	    case " $dlpreconveniencelibs " in
+	    *" $deplib "*) ;;
+	    *)
+	      valid_a_lib=no
+	      case $deplibs_check_method in
+		match_pattern*)
+		  set dummy $deplibs_check_method; shift
+		  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+		  if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+		    | $EGREP "$match_pattern_regex" > /dev/null; then
+		    valid_a_lib=yes
+		  fi
+		;;
+		pass_all)
+		  valid_a_lib=yes
+		;;
+	      esac
+	      if test "$valid_a_lib" != yes; then
+		echo
+		$ECHO "*** Warning: Trying to link with static lib archive $deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because the file extension .$libext of this argument makes me believe"
+		echo "*** that it is just a static archive that I should not use here."
+	      else
+		echo
+		$ECHO "*** Warning: Linking the shared library $output against the"
+		$ECHO "*** static library $deplib is not portable!"
+		deplibs="$deplib $deplibs"
+	      fi
+	      ;;
+	    esac
+	    continue
+	    ;;
+	  prog)
+	    if test "$pass" != link; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    continue
+	    ;;
+	  esac # linkmode
+	  ;; # *.$libext
+	*.lo | *.$objext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	  elif test "$linkmode" = prog; then
+	    if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+	      # If there is no dlopen support or we're linking statically,
+	      # we need to preload.
+	      newdlprefiles+=" $deplib"
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      newdlfiles+=" $deplib"
+	    fi
+	  fi
+	  continue
+	  ;;
+	%DEPLIBS%)
+	  alldeplibs=yes
+	  continue
+	  ;;
+	esac # case $deplib
+
+	if test "$found" = yes || test -f "$lib"; then :
+	else
+	  func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
+	fi
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$lib" \
+	  || func_fatal_error "\`$lib' is not a valid libtool archive"
+
+	func_dirname "$lib" "" "."
+	ladir="$func_dirname_result"
+
+	dlname=
+	dlopen=
+	dlpreopen=
+	libdir=
+	library_names=
+	old_library=
+	inherited_linker_flags=
+	# If the library was installed with an old release of libtool,
+	# it will not redefine variables installed, or shouldnotlink
+	installed=yes
+	shouldnotlink=no
+	avoidtemprpath=
+
+
+	# Read the .la file
+	func_source "$lib"
+
+	# Convert "-framework foo" to "foo.ltframework"
+	if test -n "$inherited_linker_flags"; then
+	  tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+	  for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+	    case " $new_inherited_linker_flags " in
+	      *" $tmp_inherited_linker_flag "*) ;;
+	      *) new_inherited_linker_flags+=" $tmp_inherited_linker_flag";;
+	    esac
+	  done
+	fi
+	dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	if test "$linkmode,$pass" = "lib,link" ||
+	   test "$linkmode,$pass" = "prog,scan" ||
+	   { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+	  test -n "$dlopen" && dlfiles+=" $dlopen"
+	  test -n "$dlpreopen" && dlprefiles+=" $dlpreopen"
+	fi
+
+	if test "$pass" = conv; then
+	  # Only check for convenience libraries
+	  deplibs="$lib $deplibs"
+	  if test -z "$libdir"; then
+	    if test -z "$old_library"; then
+	      func_fatal_error "cannot find name of link library for \`$lib'"
+	    fi
+	    # It is a libtool convenience library, so add in its objects.
+	    convenience+=" $ladir/$objdir/$old_library"
+	    old_convenience+=" $ladir/$objdir/$old_library"
+	  elif test "$linkmode" != prog && test "$linkmode" != lib; then
+	    func_fatal_error "\`$lib' is not a convenience library"
+	  fi
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    deplibs="$deplib $deplibs"
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs+=" $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $deplib"
+	  done
+	  continue
+	fi # $pass = conv
+
+
+	# Get the name of the library we link against.
+	linklib=
+	if test -n "$old_library" &&
+	   { test "$prefer_static_libs" = yes ||
+	     test "$prefer_static_libs,$installed" = "built,no"; }; then
+	  linklib=$old_library
+	else
+	  for l in $old_library $library_names; do
+	    linklib="$l"
+	  done
+	fi
+	if test -z "$linklib"; then
+	  func_fatal_error "cannot find name of link library for \`$lib'"
+	fi
+
+	# This library was specified with -dlopen.
+	if test "$pass" = dlopen; then
+	  if test -z "$libdir"; then
+	    func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
+	  fi
+	  if test -z "$dlname" ||
+	     test "$dlopen_support" != yes ||
+	     test "$build_libtool_libs" = no; then
+	    # If there is no dlname, no dlopen support or we're linking
+	    # statically, we need to preload.  We also need to preload any
+	    # dependent libraries so libltdl's deplib preloader doesn't
+	    # bomb out in the load deplibs phase.
+	    dlprefiles+=" $lib $dependency_libs"
+	  else
+	    newdlfiles+=" $lib"
+	  fi
+	  continue
+	fi # $pass = dlopen
+
+	# We need an absolute path.
+	case $ladir in
+	[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
+	*)
+	  abs_ladir=`cd "$ladir" && pwd`
+	  if test -z "$abs_ladir"; then
+	    func_warning "cannot determine absolute directory name of \`$ladir'"
+	    func_warning "passing it literally to the linker, although it might fail"
+	    abs_ladir="$ladir"
+	  fi
+	  ;;
+	esac
+	func_basename "$lib"
+	laname="$func_basename_result"
+
+	# Find the relevant object directory and library name.
+	if test "X$installed" = Xyes; then
+	  if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    func_warning "library \`$lib' was moved."
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    libdir="$abs_ladir"
+	  else
+	    dir="$lt_sysroot$libdir"
+	    absdir="$lt_sysroot$libdir"
+	  fi
+	  test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+	else
+	  if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    # Remove this search path later
+	    notinst_path+=" $abs_ladir"
+	  else
+	    dir="$ladir/$objdir"
+	    absdir="$abs_ladir/$objdir"
+	    # Remove this search path later
+	    notinst_path+=" $abs_ladir"
+	  fi
+	fi # $installed = yes
+	func_stripname 'lib' '.la' "$laname"
+	name=$func_stripname_result
+
+	# This library was specified with -dlpreopen.
+	if test "$pass" = dlpreopen; then
+	  if test -z "$libdir" && test "$linkmode" = prog; then
+	    func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+	  fi
+	  case "$host" in
+	    # special handling for platforms with PE-DLLs.
+	    *cygwin* | *mingw* | *cegcc* )
+	      # Linker will automatically link against shared library if both
+	      # static and shared are present.  Therefore, ensure we extract
+	      # symbols from the import library if a shared library is present
+	      # (otherwise, the dlopen module name will be incorrect).  We do
+	      # this by putting the import library name into $newdlprefiles.
+	      # We recover the dlopen module name by 'saving' the la file
+	      # name in a special purpose variable, and (later) extracting the
+	      # dlname from the la file.
+	      if test -n "$dlname"; then
+	        func_tr_sh "$dir/$linklib"
+	        eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
+	        newdlprefiles+=" $dir/$linklib"
+	      else
+	        newdlprefiles+=" $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          dlpreconveniencelibs+=" $dir/$old_library"
+	      fi
+	    ;;
+	    * )
+	      # Prefer using a static library (so that no silly _DYNAMIC symbols
+	      # are required to link).
+	      if test -n "$old_library"; then
+	        newdlprefiles+=" $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          dlpreconveniencelibs+=" $dir/$old_library"
+	      # Otherwise, use the dlname, so that lt_dlopen finds it.
+	      elif test -n "$dlname"; then
+	        newdlprefiles+=" $dir/$dlname"
+	      else
+	        newdlprefiles+=" $dir/$linklib"
+	      fi
+	    ;;
+	  esac
+	fi # $pass = dlpreopen
+
+	if test -z "$libdir"; then
+	  # Link the convenience library
+	  if test "$linkmode" = lib; then
+	    deplibs="$dir/$old_library $deplibs"
+	  elif test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$dir/$old_library $compile_deplibs"
+	    finalize_deplibs="$dir/$old_library $finalize_deplibs"
+	  else
+	    deplibs="$lib $deplibs" # used for prog,scan pass
+	  fi
+	  continue
+	fi
+
+
+	if test "$linkmode" = prog && test "$pass" != link; then
+	  newlib_search_path+=" $ladir"
+	  deplibs="$lib $deplibs"
+
+	  linkalldeplibs=no
+	  if test "$link_all_deplibs" != no || test -z "$library_names" ||
+	     test "$build_libtool_libs" = no; then
+	    linkalldeplibs=yes
+	  fi
+
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    case $deplib in
+	    -L*) func_stripname '-L' '' "$deplib"
+	         func_resolve_sysroot "$func_stripname_result"
+	         newlib_search_path+=" $func_resolve_sysroot_result"
+		 ;;
+	    esac
+	    # Need to link against all dependency_libs?
+	    if test "$linkalldeplibs" = yes; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      # Need to hardcode shared library paths
+	      # or/and link against static libraries
+	      newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs+=" $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $deplib"
+	  done # for deplib
+	  continue
+	fi # $linkmode = prog...
+
+	if test "$linkmode,$pass" = "prog,link"; then
+	  if test -n "$library_names" &&
+	     { { test "$prefer_static_libs" = no ||
+	         test "$prefer_static_libs,$installed" = "built,yes"; } ||
+	       test -z "$old_library"; }; then
+	    # We need to hardcode the library path
+	    if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
+	      # Make sure the rpath contains only unique directories.
+	      case "$temp_rpath:" in
+	      *"$absdir:"*) ;;
+	      *) temp_rpath+="$absdir:" ;;
+	      esac
+	    fi
+
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath+=" $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath+=" $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi # $linkmode,$pass = prog,link...
+
+	  if test "$alldeplibs" = yes &&
+	     { test "$deplibs_check_method" = pass_all ||
+	       { test "$build_libtool_libs" = yes &&
+		 test -n "$library_names"; }; }; then
+	    # We only need to search for static libraries
+	    continue
+	  fi
+	fi
+
+	link_static=no # Whether the deplib will be linked statically
+	use_static_libs=$prefer_static_libs
+	if test "$use_static_libs" = built && test "$installed" = yes; then
+	  use_static_libs=no
+	fi
+	if test -n "$library_names" &&
+	   { test "$use_static_libs" = no || test -z "$old_library"; }; then
+	  case $host in
+	  *cygwin* | *mingw* | *cegcc*)
+	      # No point in relinking DLLs because paths are not encoded
+	      notinst_deplibs+=" $lib"
+	      need_relink=no
+	    ;;
+	  *)
+	    if test "$installed" = no; then
+	      notinst_deplibs+=" $lib"
+	      need_relink=yes
+	    fi
+	    ;;
+	  esac
+	  # This is a shared library
+
+	  # Warn about portability, can't link against -module's on some
+	  # systems (darwin).  Don't bleat about dlopened modules though!
+	  dlopenmodule=""
+	  for dlpremoduletest in $dlprefiles; do
+	    if test "X$dlpremoduletest" = "X$lib"; then
+	      dlopenmodule="$dlpremoduletest"
+	      break
+	    fi
+	  done
+	  if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
+	    echo
+	    if test "$linkmode" = prog; then
+	      $ECHO "*** Warning: Linking the executable $output against the loadable module"
+	    else
+	      $ECHO "*** Warning: Linking the shared library $output against the loadable module"
+	    fi
+	    $ECHO "*** $linklib is not portable!"
+	  fi
+	  if test "$linkmode" = lib &&
+	     test "$hardcode_into_libs" = yes; then
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath+=" $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath+=" $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi
+
+	  if test -n "$old_archive_from_expsyms_cmds"; then
+	    # figure out the soname
+	    set dummy $library_names
+	    shift
+	    realname="$1"
+	    shift
+	    libname=`eval "\\$ECHO \"$libname_spec\""`
+	    # use dlname if we got it. it's perfectly good, no?
+	    if test -n "$dlname"; then
+	      soname="$dlname"
+	    elif test -n "$soname_spec"; then
+	      # bleh windows
+	      case $host in
+	      *cygwin* | mingw* | *cegcc*)
+	        func_arith $current - $age
+		major=$func_arith_result
+		versuffix="-$major"
+		;;
+	      esac
+	      eval soname=\"$soname_spec\"
+	    else
+	      soname="$realname"
+	    fi
+
+	    # Make a new name for the extract_expsyms_cmds to use
+	    soroot="$soname"
+	    func_basename "$soroot"
+	    soname="$func_basename_result"
+	    func_stripname 'lib' '.dll' "$soname"
+	    newlib=libimp-$func_stripname_result.a
+
+	    # If the library has no export list, then create one now
+	    if test -f "$output_objdir/$soname-def"; then :
+	    else
+	      func_verbose "extracting exported symbol list from \`$soname'"
+	      func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+	    fi
+
+	    # Create $newlib
+	    if test -f "$output_objdir/$newlib"; then :; else
+	      func_verbose "generating import library for \`$soname'"
+	      func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+	    fi
+	    # make sure the library variables are pointing to the new library
+	    dir=$output_objdir
+	    linklib=$newlib
+	  fi # test -n "$old_archive_from_expsyms_cmds"
+
+	  if test "$linkmode" = prog || test "$opt_mode" != relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    lib_linked=yes
+	    case $hardcode_action in
+	    immediate | unsupported)
+	      if test "$hardcode_direct" = no; then
+		add="$dir/$linklib"
+		case $host in
+		  *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
+		  *-*-sysv4*uw2*) add_dir="-L$dir" ;;
+		  *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+		    *-*-unixware7*) add_dir="-L$dir" ;;
+		  *-*-darwin* )
+		    # if the lib is a (non-dlopened) module then we can not
+		    # link against it, someone is ignoring the earlier warnings
+		    if /usr/bin/file -L $add 2> /dev/null |
+			 $GREP ": [^:]* bundle" >/dev/null ; then
+		      if test "X$dlopenmodule" != "X$lib"; then
+			$ECHO "*** Warning: lib $linklib is a module, not a shared library"
+			if test -z "$old_library" ; then
+			  echo
+			  echo "*** And there doesn't seem to be a static archive available"
+			  echo "*** The link will probably fail, sorry"
+			else
+			  add="$dir/$old_library"
+			fi
+		      elif test -n "$old_library"; then
+			add="$dir/$old_library"
+		      fi
+		    fi
+		esac
+	      elif test "$hardcode_minus_L" = no; then
+		case $host in
+		*-*-sunos*) add_shlibpath="$dir" ;;
+		esac
+		add_dir="-L$dir"
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = no; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    relink)
+	      if test "$hardcode_direct" = yes &&
+	         test "$hardcode_direct_absolute" = no; then
+		add="$dir/$linklib"
+	      elif test "$hardcode_minus_L" = yes; then
+		add_dir="-L$absdir"
+		# Try looking first in the location we're being installed to.
+		if test -n "$inst_prefix_dir"; then
+		  case $libdir in
+		    [\\/]*)
+		      add_dir+=" -L$inst_prefix_dir$libdir"
+		      ;;
+		  esac
+		fi
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = yes; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    *) lib_linked=no ;;
+	    esac
+
+	    if test "$lib_linked" != yes; then
+	      func_fatal_configuration "unsupported hardcode properties"
+	    fi
+
+	    if test -n "$add_shlibpath"; then
+	      case :$compile_shlibpath: in
+	      *":$add_shlibpath:"*) ;;
+	      *) compile_shlibpath+="$add_shlibpath:" ;;
+	      esac
+	    fi
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+	      test -n "$add" && compile_deplibs="$add $compile_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	      if test "$hardcode_direct" != yes &&
+		 test "$hardcode_minus_L" != yes &&
+		 test "$hardcode_shlibpath_var" = yes; then
+		case :$finalize_shlibpath: in
+		*":$libdir:"*) ;;
+		*) finalize_shlibpath+="$libdir:" ;;
+		esac
+	      fi
+	    fi
+	  fi
+
+	  if test "$linkmode" = prog || test "$opt_mode" = relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    # Finalize command for both is simple: just hardcode it.
+	    if test "$hardcode_direct" = yes &&
+	       test "$hardcode_direct_absolute" = no; then
+	      add="$libdir/$linklib"
+	    elif test "$hardcode_minus_L" = yes; then
+	      add_dir="-L$libdir"
+	      add="-l$name"
+	    elif test "$hardcode_shlibpath_var" = yes; then
+	      case :$finalize_shlibpath: in
+	      *":$libdir:"*) ;;
+	      *) finalize_shlibpath+="$libdir:" ;;
+	      esac
+	      add="-l$name"
+	    elif test "$hardcode_automatic" = yes; then
+	      if test -n "$inst_prefix_dir" &&
+		 test -f "$inst_prefix_dir$libdir/$linklib" ; then
+		add="$inst_prefix_dir$libdir/$linklib"
+	      else
+		add="$libdir/$linklib"
+	      fi
+	    else
+	      # We cannot seem to hardcode it, guess we'll fake it.
+	      add_dir="-L$libdir"
+	      # Try looking first in the location we're being installed to.
+	      if test -n "$inst_prefix_dir"; then
+		case $libdir in
+		  [\\/]*)
+		    add_dir+=" -L$inst_prefix_dir$libdir"
+		    ;;
+		esac
+	      fi
+	      add="-l$name"
+	    fi
+
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+	      test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	    fi
+	  fi
+	elif test "$linkmode" = prog; then
+	  # Here we assume that one of hardcode_direct or hardcode_minus_L
+	  # is not unsupported.  This is valid on all known static and
+	  # shared platforms.
+	  if test "$hardcode_direct" != unsupported; then
+	    test -n "$old_library" && linklib="$old_library"
+	    compile_deplibs="$dir/$linklib $compile_deplibs"
+	    finalize_deplibs="$dir/$linklib $finalize_deplibs"
+	  else
+	    compile_deplibs="-l$name -L$dir $compile_deplibs"
+	    finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+	  fi
+	elif test "$build_libtool_libs" = yes; then
+	  # Not a shared library
+	  if test "$deplibs_check_method" != pass_all; then
+	    # We're trying to link a shared library against a static one
+	    # but the system doesn't support it.
+
+	    # Just print a warning and add the library to dependency_libs so
+	    # that the program can be linked against the static library.
+	    echo
+	    $ECHO "*** Warning: This system can not link to static lib archive $lib."
+	    echo "*** I have the capability to make that library automatically link in when"
+	    echo "*** you link to this library.  But I can only do this if you have a"
+	    echo "*** shared version of the library, which you do not appear to have."
+	    if test "$module" = yes; then
+	      echo "*** But as you try to build a module library, libtool will still create "
+	      echo "*** a static module, that should work as long as the dlopening application"
+	      echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
+	      if test -z "$global_symbol_pipe"; then
+		echo
+		echo "*** However, this would only work if libtool was able to extract symbol"
+		echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+		echo "*** not find such a program.  So, this module is probably useless."
+		echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	      fi
+	      if test "$build_old_libs" = no; then
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  else
+	    deplibs="$dir/$old_library $deplibs"
+	    link_static=yes
+	  fi
+	fi # link shared/static library?
+
+	if test "$linkmode" = lib; then
+	  if test -n "$dependency_libs" &&
+	     { test "$hardcode_into_libs" != yes ||
+	       test "$build_old_libs" = yes ||
+	       test "$link_static" = yes; }; then
+	    # Extract -R from dependency_libs
+	    temp_deplibs=
+	    for libdir in $dependency_libs; do
+	      case $libdir in
+	      -R*) func_stripname '-R' '' "$libdir"
+	           temp_xrpath=$func_stripname_result
+		   case " $xrpath " in
+		   *" $temp_xrpath "*) ;;
+		   *) xrpath+=" $temp_xrpath";;
+		   esac;;
+	      *) temp_deplibs+=" $libdir";;
+	      esac
+	    done
+	    dependency_libs="$temp_deplibs"
+	  fi
+
+	  newlib_search_path+=" $absdir"
+	  # Link against this library
+	  test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+	  # ... and its dependency_libs
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    newdependency_libs="$deplib $newdependency_libs"
+	    case $deplib in
+              -L*) func_stripname '-L' '' "$deplib"
+                   func_resolve_sysroot "$func_stripname_result";;
+              *) func_resolve_sysroot "$deplib" ;;
+            esac
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $func_resolve_sysroot_result "*)
+                specialdeplibs+=" $func_resolve_sysroot_result" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $func_resolve_sysroot_result"
+	  done
+
+	  if test "$link_all_deplibs" != no; then
+	    # Add the search paths of all dependency libraries
+	    for deplib in $dependency_libs; do
+	      path=
+	      case $deplib in
+	      -L*) path="$deplib" ;;
+	      *.la)
+	        func_resolve_sysroot "$deplib"
+	        deplib=$func_resolve_sysroot_result
+	        func_dirname "$deplib" "" "."
+		dir=$func_dirname_result
+		# We need an absolute path.
+		case $dir in
+		[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+		*)
+		  absdir=`cd "$dir" && pwd`
+		  if test -z "$absdir"; then
+		    func_warning "cannot determine absolute directory name of \`$dir'"
+		    absdir="$dir"
+		  fi
+		  ;;
+		esac
+		if $GREP "^installed=no" $deplib > /dev/null; then
+		case $host in
+		*-*-darwin*)
+		  depdepl=
+		  eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+		  if test -n "$deplibrary_names" ; then
+		    for tmp in $deplibrary_names ; do
+		      depdepl=$tmp
+		    done
+		    if test -f "$absdir/$objdir/$depdepl" ; then
+		      depdepl="$absdir/$objdir/$depdepl"
+		      darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+                      if test -z "$darwin_install_name"; then
+                          darwin_install_name=`${OTOOL64} -L $depdepl  | awk '{if (NR == 2) {print $1;exit}}'`
+                      fi
+		      compiler_flags+=" ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+		      linker_flags+=" -dylib_file ${darwin_install_name}:${depdepl}"
+		      path=
+		    fi
+		  fi
+		  ;;
+		*)
+		  path="-L$absdir/$objdir"
+		  ;;
+		esac
+		else
+		  eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+		  test -z "$libdir" && \
+		    func_fatal_error "\`$deplib' is not a valid libtool archive"
+		  test "$absdir" != "$libdir" && \
+		    func_warning "\`$deplib' seems to be moved"
+
+		  path="-L$absdir"
+		fi
+		;;
+	      esac
+	      case " $deplibs " in
+	      *" $path "*) ;;
+	      *) deplibs="$path $deplibs" ;;
+	      esac
+	    done
+	  fi # link_all_deplibs != no
+	fi # linkmode = lib
+      done # for deplib in $libs
+      if test "$pass" = link; then
+	if test "$linkmode" = "prog"; then
+	  compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+	  finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+	else
+	  compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	fi
+      fi
+      dependency_libs="$newdependency_libs"
+      if test "$pass" = dlpreopen; then
+	# Link the dlpreopened libraries before other libraries
+	for deplib in $save_deplibs; do
+	  deplibs="$deplib $deplibs"
+	done
+      fi
+      if test "$pass" != dlopen; then
+	if test "$pass" != conv; then
+	  # Make sure lib_search_path contains only unique directories.
+	  lib_search_path=
+	  for dir in $newlib_search_path; do
+	    case "$lib_search_path " in
+	    *" $dir "*) ;;
+	    *) lib_search_path+=" $dir" ;;
+	    esac
+	  done
+	  newlib_search_path=
+	fi
+
+	if test "$linkmode,$pass" != "prog,link"; then
+	  vars="deplibs"
+	else
+	  vars="compile_deplibs finalize_deplibs"
+	fi
+	for var in $vars dependency_libs; do
+	  # Add libraries to $var in reverse order
+	  eval tmp_libs=\"\$$var\"
+	  new_libs=
+	  for deplib in $tmp_libs; do
+	    # FIXME: Pedantically, this is the right thing to do, so
+	    #        that some nasty dependency loop isn't accidentally
+	    #        broken:
+	    #new_libs="$deplib $new_libs"
+	    # Pragmatically, this seems to cause very few problems in
+	    # practice:
+	    case $deplib in
+	    -L*) new_libs="$deplib $new_libs" ;;
+	    -R*) ;;
+	    *)
+	      # And here is the reason: when a library appears more
+	      # than once as an explicit dependence of a library, or
+	      # is implicitly linked in more than once by the
+	      # compiler, it is considered special, and multiple
+	      # occurrences thereof are not removed.  Compare this
+	      # with having the same library being listed as a
+	      # dependency of multiple other libraries: in this case,
+	      # we know (pedantically, we assume) the library does not
+	      # need to be listed more than once, so we keep only the
+	      # last copy.  This is not always right, but it is rare
+	      # enough that we require users that really mean to play
+	      # such unportable linking tricks to link the library
+	      # using -Wl,-lname, so that libtool does not consider it
+	      # for duplicate removal.
+	      case " $specialdeplibs " in
+	      *" $deplib "*) new_libs="$deplib $new_libs" ;;
+	      *)
+		case " $new_libs " in
+		*" $deplib "*) ;;
+		*) new_libs="$deplib $new_libs" ;;
+		esac
+		;;
+	      esac
+	      ;;
+	    esac
+	  done
+	  tmp_libs=
+	  for deplib in $new_libs; do
+	    case $deplib in
+	    -L*)
+	      case " $tmp_libs " in
+	      *" $deplib "*) ;;
+	      *) tmp_libs+=" $deplib" ;;
+	      esac
+	      ;;
+	    *) tmp_libs+=" $deplib" ;;
+	    esac
+	  done
+	  eval $var=\"$tmp_libs\"
+	done # for var
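+	# For example (hypothetical input): if the list held "-lfoo -lbar -lfoo"
+	# and -lfoo is not in $specialdeplibs, only the last -lfoo survives the
+	# reverse pass above and the result is "-lbar -lfoo"; a library listed
+	# in $specialdeplibs keeps every occurrence.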
+      fi
+      # Last step: remove runtime libs from dependency_libs
+      # (they stay in deplibs)
+      tmp_libs=
+      for i in $dependency_libs ; do
+	case " $predeps $postdeps $compiler_lib_search_path " in
+	*" $i "*)
+	  i=""
+	  ;;
+	esac
+	if test -n "$i" ; then
+	  tmp_libs+=" $i"
+	fi
+      done
+      dependency_libs=$tmp_libs
+    done # for pass
+    if test "$linkmode" = prog; then
+      dlfiles="$newdlfiles"
+    fi
+    if test "$linkmode" = prog || test "$linkmode" = lib; then
+      dlprefiles="$newdlprefiles"
+    fi
+
+    case $linkmode in
+    oldlib)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for archives"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for archives" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for archives"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for archives"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info/-version-number' is ignored for archives"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for archives"
+
+      test -n "$export_symbols$export_symbols_regex" && \
+	func_warning "\`-export-symbols' is ignored for archives"
+
+      # Now set the variables for building old libraries.
+      build_libtool_libs=no
+      oldlibs="$output"
+      objs+="$old_deplibs"
+      ;;
+
+    lib)
+      # Make sure we only generate libraries of the form `libNAME.la'.
+      case $outputname in
+      lib*)
+	func_stripname 'lib' '.la' "$outputname"
+	name=$func_stripname_result
+	eval shared_ext=\"$shrext_cmds\"
+	eval libname=\"$libname_spec\"
+	;;
+      *)
+	test "$module" = no && \
+	  func_fatal_help "libtool library \`$output' must begin with \`lib'"
+
+	if test "$need_lib_prefix" != no; then
+	  # Add the "lib" prefix for modules if required
+	  func_stripname '' '.la' "$outputname"
+	  name=$func_stripname_result
+	  eval shared_ext=\"$shrext_cmds\"
+	  eval libname=\"$libname_spec\"
+	else
+	  func_stripname '' '.la' "$outputname"
+	  libname=$func_stripname_result
+	fi
+	;;
+      esac
+
+      if test -n "$objs"; then
+	if test "$deplibs_check_method" != pass_all; then
+	  func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
+	else
+	  echo
+	  $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+	  $ECHO "*** objects $objs is not portable!"
+	  libobjs+=" $objs"
+	fi
+      fi
+
+      test "$dlself" != no && \
+	func_warning "\`-dlopen self' is ignored for libtool libraries"
+
+      set dummy $rpath
+      shift
+      test "$#" -gt 1 && \
+	func_warning "ignoring multiple \`-rpath's for a libtool library"
+
+      install_libdir="$1"
+
+      oldlibs=
+      if test -z "$rpath"; then
+	if test "$build_libtool_libs" = yes; then
+	  # Building a libtool convenience library.
+	  # Some compilers have problems with a `.al' extension so
+	  # convenience libraries should have the same extension that an
+	  # archive normally would.
+	  oldlibs="$output_objdir/$libname.$libext $oldlibs"
+	  build_libtool_libs=convenience
+	  build_old_libs=yes
+	fi
+
+	test -n "$vinfo" && \
+	  func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
+
+	test -n "$release" && \
+	  func_warning "\`-release' is ignored for convenience libraries"
+      else
+
+	# Parse the version information argument.
+	save_ifs="$IFS"; IFS=':'
+	set dummy $vinfo 0 0 0
+	shift
+	IFS="$save_ifs"
+
+	test -n "$7" && \
+	  func_fatal_help "too many parameters to \`-version-info'"
+
+	# convert absolute version numbers to libtool ages
+	# this retains compatibility with .la files and attempts
+	# to make the code below a bit more comprehensible
+
+	case $vinfo_number in
+	yes)
+	  number_major="$1"
+	  number_minor="$2"
+	  number_revision="$3"
+	  #
+	  # There are really only two kinds -- those that
+	  # use the current revision as the major version
+	  # and those that subtract age and use age as
+	  # a minor version.  But, then there is irix
+	  # which has an extra 1 added just for fun
+	  #
+	  case $version_type in
+	  # correct linux to gnu/linux during the next big refactor
+	  darwin|linux|osf|windows|none)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_revision"
+	    ;;
+	  freebsd-aout|freebsd-elf|qnx|sunos)
+	    current="$number_major"
+	    revision="$number_minor"
+	    age="0"
+	    ;;
+	  irix|nonstopux)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_minor"
+	    lt_irix_increment=no
+	    ;;
+	  esac
+	  ;;
+	no)
+	  current="$1"
+	  revision="$2"
+	  age="$3"
+	  ;;
+	esac
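+	# For example (hypothetical values): "-version-number 3:2:1" on a
+	# linux host converts to current=3+2=5, age=2, revision=1, while the
+	# same argument on sunos gives current=3, revision=2, age=0.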
+
+	# Check that each of the things are valid numbers.
+	case $current in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "CURRENT \`$current' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $revision in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "REVISION \`$revision' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $age in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "AGE \`$age' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	if test "$age" -gt "$current"; then
+	  func_error "AGE \`$age' is greater than the current interface number \`$current'"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	fi
+
+	# Calculate the version variables.
+	major=
+	versuffix=
+	verstring=
+	case $version_type in
+	none) ;;
+
+	darwin)
+	  # Like Linux, but with the current version available in
+	  # verstring for coding it into the library header
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  # Darwin ld doesn't like 0 for these options...
+	  func_arith $current + 1
+	  minor_current=$func_arith_result
+	  xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
+	  verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+	  ;;
+
+	freebsd-aout)
+	  major=".$current"
+	  versuffix=".$current.$revision";
+	  ;;
+
+	freebsd-elf)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	irix | nonstopux)
+	  if test "X$lt_irix_increment" = "Xno"; then
+	    func_arith $current - $age
+	  else
+	    func_arith $current - $age + 1
+	  fi
+	  major=$func_arith_result
+
+	  case $version_type in
+	    nonstopux) verstring_prefix=nonstopux ;;
+	    *)         verstring_prefix=sgi ;;
+	  esac
+	  verstring="$verstring_prefix$major.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$revision
+	  while test "$loop" -ne 0; do
+	    func_arith $revision - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring_prefix$major.$iface:$verstring"
+	  done
+
+	  # Before this point, $major must not contain `.'.
+	  major=.$major
+	  versuffix="$major.$revision"
+	  ;;
+
+	linux) # correct to gnu/linux during the next big refactor
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  ;;
+
+	osf)
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix=".$current.$age.$revision"
+	  verstring="$current.$age.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$age
+	  while test "$loop" -ne 0; do
+	    func_arith $current - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring:${iface}.0"
+	  done
+
+	  # Make executables depend on our current version.
+	  verstring+=":${current}.0"
+	  ;;
+
+	qnx)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	sunos)
+	  major=".$current"
+	  versuffix=".$current.$revision"
+	  ;;
+
+	windows)
+	  # Use '-' rather than '.', since we only want one
+	  # extension on DOS 8.3 filesystems.
+	  func_arith $current - $age
+	  major=$func_arith_result
+	  versuffix="-$major"
+	  ;;
+
+	*)
+	  func_fatal_configuration "unknown library version type \`$version_type'"
+	  ;;
+	esac
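+	# Worked example (hypothetical values): "-version-info 5:2:3" with
+	# version_type=linux gives major=.2 and versuffix=.2.3.2, so the shared
+	# library is typically named libNAME.so.2.3.2; with version_type=windows
+	# the same input gives versuffix=-2, i.e. a libNAME-2.dll style name.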
+
+	# Clear the version info if we defaulted, and they specified a release.
+	if test -z "$vinfo" && test -n "$release"; then
+	  major=
+	  case $version_type in
+	  darwin)
+	    # we can't check for "0.0" in archive_cmds due to quoting
+	    # problems, so we reset it completely
+	    verstring=
+	    ;;
+	  *)
+	    verstring="0.0"
+	    ;;
+	  esac
+	  if test "$need_version" = no; then
+	    versuffix=
+	  else
+	    versuffix=".0.0"
+	  fi
+	fi
+
+	# Remove version info from name if versioning should be avoided
+	if test "$avoid_version" = yes && test "$need_version" = no; then
+	  major=
+	  versuffix=
+	  verstring=""
+	fi
+
+	# Check to see if the archive will have undefined symbols.
+	if test "$allow_undefined" = yes; then
+	  if test "$allow_undefined_flag" = unsupported; then
+	    func_warning "undefined symbols not allowed in $host shared libraries"
+	    build_libtool_libs=no
+	    build_old_libs=yes
+	  fi
+	else
+	  # Don't allow undefined symbols.
+	  allow_undefined_flag="$no_undefined_flag"
+	fi
+
+      fi
+
+      func_generate_dlsyms "$libname" "$libname" "yes"
+      libobjs+=" $symfileobj"
+      test "X$libobjs" = "X " && libobjs=
+
+      if test "$opt_mode" != relink; then
+	# Remove our outputs, but don't remove object files since they
+	# may have been created when compiling PIC objects.
+	removelist=
+	tempremovelist=`$ECHO "$output_objdir/*"`
+	for p in $tempremovelist; do
+	  case $p in
+	    *.$objext | *.gcno)
+	       ;;
+	    $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
+	       if test "X$precious_files_regex" != "X"; then
+		 if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+		 then
+		   continue
+		 fi
+	       fi
+	       removelist+=" $p"
+	       ;;
+	    *) ;;
+	  esac
+	done
+	test -n "$removelist" && \
+	  func_show_eval "${RM}r \$removelist"
+      fi
+
+      # Now set the variables for building old libraries.
+      if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+	oldlibs+=" $output_objdir/$libname.$libext"
+
+	# Transform .lo files to .o files.
+	oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+      fi
+
+      # Eliminate all temporary directories.
+      #for path in $notinst_path; do
+      #	lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+      #	deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+      #	dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+      #done
+
+      if test -n "$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	temp_xrpath=
+	for libdir in $xrpath; do
+	  func_replace_sysroot "$libdir"
+	  temp_xrpath+=" -R$func_replace_sysroot_result"
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath+=" $libdir" ;;
+	  esac
+	done
+	if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+	  dependency_libs="$temp_xrpath $dependency_libs"
+	fi
+      fi
+
+      # Make sure dlfiles contains only unique files that won't be dlpreopened
+      old_dlfiles="$dlfiles"
+      dlfiles=
+      for lib in $old_dlfiles; do
+	case " $dlprefiles $dlfiles " in
+	*" $lib "*) ;;
+	*) dlfiles+=" $lib" ;;
+	esac
+      done
+
+      # Make sure dlprefiles contains only unique files
+      old_dlprefiles="$dlprefiles"
+      dlprefiles=
+      for lib in $old_dlprefiles; do
+	case "$dlprefiles " in
+	*" $lib "*) ;;
+	*) dlprefiles+=" $lib" ;;
+	esac
+      done
+
+      if test "$build_libtool_libs" = yes; then
+	if test -n "$rpath"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # these systems don't actually have a c library (as such)!
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C library is in the System framework
+	    deplibs+=" System.ltframework"
+	    ;;
+	  *-*-netbsd*)
+	    # Don't link with libc until the a.out ld.so is fixed.
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    ;;
+	  *)
+	    # Add libc to deplibs on all other systems if necessary.
+	    if test "$build_libtool_need_lc" = "yes"; then
+	      deplibs+=" -lc"
+	    fi
+	    ;;
+	  esac
+	fi
+
+	# Transform deplibs into only deplibs that can be linked in shared.
+	name_save=$name
+	libname_save=$libname
+	release_save=$release
+	versuffix_save=$versuffix
+	major_save=$major
+	# I'm not sure if I'm treating the release correctly.  I think
+	# release should show up in the -l (ie -lgmp5) so we don't want to
+	# add it in twice.  Is that correct?
+	release=""
+	versuffix=""
+	major=""
+	newdeplibs=
+	droppeddeps=no
+	case $deplibs_check_method in
+	pass_all)
+	  # Don't check for shared/static.  Everything works.
+	  # This might be a little naive.  We might want to check
+	  # whether the library exists or not.  But this is on
+	  # osf3 & osf4 and I'm not really sure... Just
+	  # implementing what was already the behavior.
+	  newdeplibs=$deplibs
+	  ;;
+	test_compile)
+	  # This code stresses the "libraries are programs" paradigm to its
+	  # limits. Maybe even breaks it.  We compile a program, linking it
+	  # against the deplibs as a proxy for the library.  Then we can check
+	  # whether they linked in statically or dynamically with ldd.
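+	  # For example (hypothetical library): if deplibs contains -lbz2 and
+	  # `ldd conftest` lists a matching libbz2 shared object, -lbz2 is kept
+	  # in newdeplibs; otherwise it is dropped with a warning below.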
+	  $opt_dry_run || $RM conftest.c
+	  cat > conftest.c <<EOF
+	  int main() { return 0; }
+EOF
+	  $opt_dry_run || $RM conftest
+	  if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
+	    ldd_output=`ldd conftest`
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		  case " $predeps $postdeps " in
+		  *" $i "*)
+		    newdeplibs+=" $i"
+		    i=""
+		    ;;
+		  esac
+		fi
+		if test -n "$i" ; then
+		  libname=`eval "\\$ECHO \"$libname_spec\""`
+		  deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		  set dummy $deplib_matches; shift
+		  deplib_match=$1
+		  if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		    newdeplibs+=" $i"
+		  else
+		    droppeddeps=yes
+		    echo
+		    $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		    echo "*** I have the capability to make that library automatically link in when"
+		    echo "*** you link to this library.  But I can only do this if you have a"
+		    echo "*** shared version of the library, which I believe you do not have"
+		    echo "*** because a test_compile did reveal that the linker did not use it for"
+		    echo "*** its dynamic dependency list that programs get resolved with at runtime."
+		  fi
+		fi
+		;;
+	      *)
+		newdeplibs+=" $i"
+		;;
+	      esac
+	    done
+	  else
+	    # Error occurred in the first compile.  Let's try to salvage
+	    # the situation: Compile a separate program for each library.
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		$opt_dry_run || $RM conftest
+		if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
+		  ldd_output=`ldd conftest`
+		  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		    case " $predeps $postdeps " in
+		    *" $i "*)
+		      newdeplibs+=" $i"
+		      i=""
+		      ;;
+		    esac
+		  fi
+		  if test -n "$i" ; then
+		    libname=`eval "\\$ECHO \"$libname_spec\""`
+		    deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		    set dummy $deplib_matches; shift
+		    deplib_match=$1
+		    if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		      newdeplibs+=" $i"
+		    else
+		      droppeddeps=yes
+		      echo
+		      $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		      echo "*** I have the capability to make that library automatically link in when"
+		      echo "*** you link to this library.  But I can only do this if you have a"
+		      echo "*** shared version of the library, which you do not appear to have"
+		      echo "*** because a test_compile did reveal that the linker did not use this one"
+		      echo "*** as a dynamic dependency that programs can get resolved with at runtime."
+		    fi
+		  fi
+		else
+		  droppeddeps=yes
+		  echo
+		  $ECHO "*** Warning!  Library $i is needed by this library but I was not able to"
+		  echo "*** make it link in!  You will probably need to install it or some"
+		  echo "*** library that it depends on before this library will be fully"
+		  echo "*** functional.  Installing it before continuing would be even better."
+		fi
+		;;
+	      *)
+		newdeplibs+=" $i"
+		;;
+	      esac
+	    done
+	  fi
+	  ;;
+	file_magic*)
+	  set dummy $deplibs_check_method; shift
+	  file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs+=" $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		if test -n "$file_magic_glob"; then
+		  libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
+		else
+		  libnameglob=$libname
+		fi
+		test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  if test "$want_nocaseglob" = yes; then
+		    shopt -s nocaseglob
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		    $nocaseglob
+		  else
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		  fi
+		  for potent_lib in $potential_libs; do
+		      # Follow soft links.
+		      if ls -lLd "$potent_lib" 2>/dev/null |
+			 $GREP " -> " >/dev/null; then
+			continue
+		      fi
+		      # The statement above tries to avoid entering an
+		      # endless loop below, in case of cyclic links.
+		      # We might still enter an endless loop, since a link
+		      # loop can be closed while we follow links,
+		      # but so what?
+		      potlib="$potent_lib"
+		      while test -h "$potlib" 2>/dev/null; do
+			potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
+			case $potliblink in
+			[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
+			*) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+			esac
+		      done
+		      if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+			 $SED -e 10q |
+			 $EGREP "$file_magic_regex" > /dev/null; then
+			newdeplibs+=" $a_deplib"
+			a_deplib=""
+			break 2
+		      fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a file magic. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs+=" $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	match_pattern*)
+	  set dummy $deplibs_check_method; shift
+	  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs+=" $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+		  for potent_lib in $potential_libs; do
+		    potlib="$potent_lib" # see symlink-check above in file_magic test
+		    if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+		       $EGREP "$match_pattern_regex" > /dev/null; then
+		      newdeplibs+=" $a_deplib"
+		      a_deplib=""
+		      break 2
+		    fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a regex pattern. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs+=" $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	none | unknown | *)
+	  newdeplibs=""
+	  tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+	  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	    for i in $predeps $postdeps ; do
+	      # can't use Xsed below, because $i might contain '/'
+	      tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"`
+	    done
+	  fi
+	  case $tmp_deplibs in
+	  *[!\	\ ]*)
+	    echo
+	    if test "X$deplibs_check_method" = "Xnone"; then
+	      echo "*** Warning: inter-library dependencies are not supported in this platform."
+	    else
+	      echo "*** Warning: inter-library dependencies are not known to be supported."
+	    fi
+	    echo "*** All declared inter-library dependencies are being dropped."
+	    droppeddeps=yes
+	    ;;
+	  esac
+	  ;;
+	esac
+	versuffix=$versuffix_save
+	major=$major_save
+	release=$release_save
+	libname=$libname_save
+	name=$name_save
+
+	case $host in
+	*-*-rhapsody* | *-*-darwin1.[012])
+	  # On Rhapsody replace the C library with the System framework
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+	  ;;
+	esac
+
+	if test "$droppeddeps" = yes; then
+	  if test "$module" = yes; then
+	    echo
+	    echo "*** Warning: libtool could not satisfy all declared inter-library"
+	    $ECHO "*** dependencies of module $libname.  Therefore, libtool will create"
+	    echo "*** a static module, that should work as long as the dlopening"
+	    echo "*** application is linked with the -dlopen flag."
+	    if test -z "$global_symbol_pipe"; then
+	      echo
+	      echo "*** However, this would only work if libtool was able to extract symbol"
+	      echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+	      echo "*** not find such a program.  So, this module is probably useless."
+	      echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	    fi
+	    if test "$build_old_libs" = no; then
+	      oldlibs="$output_objdir/$libname.$libext"
+	      build_libtool_libs=module
+	      build_old_libs=yes
+	    else
+	      build_libtool_libs=no
+	    fi
+	  else
+	    echo "*** The inter-library dependencies that have been dropped here will be"
+	    echo "*** automatically added whenever a program is linked with this library"
+	    echo "*** or is declared to -dlopen it."
+
+	    if test "$allow_undefined" = no; then
+	      echo
+	      echo "*** Since this library must not contain undefined symbols,"
+	      echo "*** because either the platform does not support them or"
+	      echo "*** it was explicitly requested with -no-undefined,"
+	      echo "*** libtool will only create a static version of it."
+	      if test "$build_old_libs" = no; then
+		oldlibs="$output_objdir/$libname.$libext"
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  fi
+	fi
+	# Done checking deplibs!
+	deplibs=$newdeplibs
+      fi
+      # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+      case $host in
+	*-*-darwin*)
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  ;;
+      esac
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs+=" -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs+=" $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs+=" $deplib" ;;
+	esac
+      done
+      deplibs="$new_libs"
+
+      # All the library-specific variables (install_libdir is set above).
+      library_names=
+      old_library=
+      dlname=
+
+      # Test again, we may have decided not to build it any more
+      if test "$build_libtool_libs" = yes; then
+	# Remove ${wl} instances when linking with ld.
+	# FIXME: should test the right _cmds variable.
+	case $archive_cmds in
+	  *\$LD\ *) wl= ;;
+        esac
+	if test "$hardcode_into_libs" = yes; then
+	  # Hardcode the library paths
+	  hardcode_libdirs=
+	  dep_rpath=
+	  rpath="$finalize_rpath"
+	  test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+	  for libdir in $rpath; do
+	    if test -n "$hardcode_libdir_flag_spec"; then
+	      if test -n "$hardcode_libdir_separator"; then
+		func_replace_sysroot "$libdir"
+		libdir=$func_replace_sysroot_result
+		if test -z "$hardcode_libdirs"; then
+		  hardcode_libdirs="$libdir"
+		else
+		  # Just accumulate the unique libdirs.
+		  case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+		  *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		    ;;
+		  *)
+		    hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		    ;;
+		  esac
+		fi
+	      else
+		eval flag=\"$hardcode_libdir_flag_spec\"
+		dep_rpath+=" $flag"
+	      fi
+	    elif test -n "$runpath_var"; then
+	      case "$perm_rpath " in
+	      *" $libdir "*) ;;
+	      *) perm_rpath+=" $libdir" ;;
+	      esac
+	    fi
+	  done
+	  # Substitute the hardcoded libdirs into the rpath.
+	  if test -n "$hardcode_libdir_separator" &&
+	     test -n "$hardcode_libdirs"; then
+	    libdir="$hardcode_libdirs"
+	    eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
+	  fi
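+	  # Illustration (hypothetical values): with hardcode_libdir_separator=":"
+	  # and rpath entries /usr/local/lib and /opt/foo/lib, hardcode_libdirs
+	  # becomes "/usr/local/lib:/opt/foo/lib", and a flag spec such as
+	  # "${wl}-rpath ${wl}$libdir" then expands to one combined rpath flag.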
+	  if test -n "$runpath_var" && test -n "$perm_rpath"; then
+	    # We should set the runpath_var.
+	    rpath=
+	    for dir in $perm_rpath; do
+	      rpath+="$dir:"
+	    done
+	    eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+	  fi
+	  test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+	fi
+
+	shlibpath="$finalize_shlibpath"
+	test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+	if test -n "$shlibpath"; then
+	  eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+	fi
+
+	# Get the real and link names of the library.
+	eval shared_ext=\"$shrext_cmds\"
+	eval library_names=\"$library_names_spec\"
+	set dummy $library_names
+	shift
+	realname="$1"
+	shift
+
+	if test -n "$soname_spec"; then
+	  eval soname=\"$soname_spec\"
+	else
+	  soname="$realname"
+	fi
+	if test -z "$dlname"; then
+	  dlname=$soname
+	fi
+
+	lib="$output_objdir/$realname"
+	linknames=
+	for link
+	do
+	  linknames+=" $link"
+	done
+
+	# Use standard objects if they are pic
+	test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	test "X$libobjs" = "X " && libobjs=
+
+	delfiles=
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+	  export_symbols="$output_objdir/$libname.uexp"
+	  delfiles+=" $export_symbols"
+	fi
+
+	orig_export_symbols=
+	case $host_os in
+	cygwin* | mingw* | cegcc*)
+	  if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+	    # exporting using user supplied symfile
+	    if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
+	      # and it's NOT already a .def file. Must figure out
+	      # which of the given symbols are data symbols and tag
+	      # them as such. So, trigger use of export_symbols_cmds.
+	      # export_symbols gets reassigned inside the "prepare
+	      # the list of exported symbols" if statement, so the
+	      # include_expsyms logic still works.
+	      orig_export_symbols="$export_symbols"
+	      export_symbols=
+	      always_export_symbols=yes
+	    fi
+	  fi
+	  ;;
+	esac
+
+	# Prepare the list of exported symbols
+	if test -z "$export_symbols"; then
+	  if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
+	    func_verbose "generating symbol list for \`$libname.la'"
+	    export_symbols="$output_objdir/$libname.exp"
+	    $opt_dry_run || $RM $export_symbols
+	    cmds=$export_symbols_cmds
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd1 in $cmds; do
+	      IFS="$save_ifs"
+	      # Take the normal branch if the nm_file_list_spec branch
+	      # doesn't work or if tool conversion is not needed.
+	      case $nm_file_list_spec~$to_tool_file_cmd in
+		*~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
+		  try_normal_branch=yes
+		  eval cmd=\"$cmd1\"
+		  func_len " $cmd"
+		  len=$func_len_result
+		  ;;
+		*)
+		  try_normal_branch=no
+		  ;;
+	      esac
+	      if test "$try_normal_branch" = yes \
+		 && { test "$len" -lt "$max_cmd_len" \
+		      || test "$max_cmd_len" -le -1; }
+	      then
+		func_show_eval "$cmd" 'exit $?'
+		skipped_export=false
+	      elif test -n "$nm_file_list_spec"; then
+		func_basename "$output"
+		output_la=$func_basename_result
+		save_libobjs=$libobjs
+		save_output=$output
+		output=${output_objdir}/${output_la}.nm
+		func_to_tool_file "$output"
+		libobjs=$nm_file_list_spec$func_to_tool_file_result
+		delfiles+=" $output"
+		func_verbose "creating $NM input file list: $output"
+		for obj in $save_libobjs; do
+		  func_to_tool_file "$obj"
+		  $ECHO "$func_to_tool_file_result"
+		done > "$output"
+		eval cmd=\"$cmd1\"
+		func_show_eval "$cmd" 'exit $?'
+		output=$save_output
+		libobjs=$save_libobjs
+		skipped_export=false
+	      else
+		# The command line is too long to execute in one step.
+		func_verbose "using reloadable object file for export list..."
+		skipped_export=:
+		# Break out early, otherwise skipped_export may be
+		# set to false by a later but shorter cmd.
+		break
+	      fi
+	    done
+	    IFS="$save_ifs"
+	    if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+	fi
+
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  tmp_export_symbols="$export_symbols"
+	  test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	  $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	fi
+
+	if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+	  # The given exports_symbols file has to be filtered, so filter it.
+	  func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	  # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	  # 's' commands which not all seds can handle. GNU sed should be fine
+	  # though. Also, the filter scales superlinearly with the number of
+	  # global variables. join(1) would be nice here, but unfortunately
+	  # isn't a blessed tool.
+	  $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	  delfiles+=" $export_symbols $output_objdir/$libname.filter"
+	  export_symbols=$output_objdir/$libname.def
+	  $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	fi
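+	# For example (hypothetical symbol): an export-file line "my_var DATA"
+	# becomes the sed command "s|^my_var$|my_var DATA|", which re-tags
+	# my_var as data in the original symbol list.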
+
+	tmp_deplibs=
+	for test_deplib in $deplibs; do
+	  case " $convenience " in
+	  *" $test_deplib "*) ;;
+	  *)
+	    tmp_deplibs+=" $test_deplib"
+	    ;;
+	  esac
+	done
+	deplibs="$tmp_deplibs"
+
+	if test -n "$convenience"; then
+	  if test -n "$whole_archive_flag_spec" &&
+	    test "$compiler_needs_object" = yes &&
+	    test -z "$libobjs"; then
+	    # extract the archives, so we have objects to list.
+	    # TODO: could optimize this to just extract one archive.
+	    whole_archive_flag_spec=
+	  fi
+	  if test -n "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  else
+	    gentop="$output_objdir/${outputname}x"
+	    generated+=" $gentop"
+
+	    func_extract_archives $gentop $convenience
+	    libobjs+=" $func_extract_archives_result"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	fi
+
+	if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+	  eval flag=\"$thread_safe_flag_spec\"
+	  linker_flags+=" $flag"
+	fi
+
+	# Make a backup of the uninstalled library when relinking
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+	fi
+
+	# Do each of the archive commands.
+	if test "$module" = yes && test -n "$module_cmds" ; then
+	  if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	    eval test_cmds=\"$module_expsym_cmds\"
+	    cmds=$module_expsym_cmds
+	  else
+	    eval test_cmds=\"$module_cmds\"
+	    cmds=$module_cmds
+	  fi
+	else
+	  if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	    eval test_cmds=\"$archive_expsym_cmds\"
+	    cmds=$archive_expsym_cmds
+	  else
+	    eval test_cmds=\"$archive_cmds\"
+	    cmds=$archive_cmds
+	  fi
+	fi
+
+	if test "X$skipped_export" != "X:" &&
+	   func_len " $test_cmds" &&
+	   len=$func_len_result &&
+	   test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  :
+	else
+	  # The command line is too long to link in one step, link piecewise
+	  # or, if using GNU ld and skipped_export is not :, use a linker
+	  # script.
+
+	  # Save the value of $output and $libobjs because we want to
+	  # use them later.  If we have whole_archive_flag_spec, we
+	  # want to use save_libobjs as it was before
+	  # whole_archive_flag_spec was expanded, because we can't
+	  # assume the linker understands whole_archive_flag_spec.
+	  # This may have to be revisited, in case too many
+	  # convenience libraries get linked in and end up exceeding
+	  # the spec.
+	  if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	  fi
+	  save_output=$output
+	  func_basename "$output"
+	  output_la=$func_basename_result
+
+	  # Clear the reloadable object creation command queue and
+	  # initialize k to one.
+	  test_cmds=
+	  concat_cmds=
+	  objlist=
+	  last_robj=
+	  k=1
+
+	  if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
+	    output=${output_objdir}/${output_la}.lnkscript
+	    func_verbose "creating GNU ld script: $output"
+	    echo 'INPUT (' > $output
+	    for obj in $save_libobjs
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    echo ')' >> $output
+	    delfiles+=" $output"
+	    func_to_tool_file "$output"
+	    output=$func_to_tool_file_result
+	  elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+	    output=${output_objdir}/${output_la}.lnk
+	    func_verbose "creating linker input file list: $output"
+	    : > $output
+	    set x $save_libobjs
+	    shift
+	    firstobj=
+	    if test "$compiler_needs_object" = yes; then
+	      firstobj="$1 "
+	      shift
+	    fi
+	    for obj
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    delfiles+=" $output"
+	    func_to_tool_file "$output"
+	    output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+	  else
+	    if test -n "$save_libobjs"; then
+	      func_verbose "creating reloadable object files..."
+	      output=$output_objdir/$output_la-${k}.$objext
+	      eval test_cmds=\"$reload_cmds\"
+	      func_len " $test_cmds"
+	      len0=$func_len_result
+	      len=$len0
+
+	      # Loop over the list of objects to be linked.
+	      for obj in $save_libobjs
+	      do
+		func_len " $obj"
+		func_arith $len + $func_len_result
+		len=$func_arith_result
+		if test "X$objlist" = X ||
+		   test "$len" -lt "$max_cmd_len"; then
+		  objlist+=" $obj"
+		else
+		  # The command $test_cmds is almost too long, add a
+		  # command to the queue.
+		  if test "$k" -eq 1 ; then
+		    # The first file doesn't have a previous command to add.
+		    reload_objs=$objlist
+		    eval concat_cmds=\"$reload_cmds\"
+		  else
+		    # All subsequent reloadable object files will link in
+		    # the last one created.
+		    reload_objs="$objlist $last_robj"
+		    eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+		  fi
+		  last_robj=$output_objdir/$output_la-${k}.$objext
+		  func_arith $k + 1
+		  k=$func_arith_result
+		  output=$output_objdir/$output_la-${k}.$objext
+		  objlist=" $obj"
+		  func_len " $last_robj"
+		  func_arith $len0 + $func_len_result
+		  len=$func_arith_result
+		fi
+	      done
+	      # Handle the remaining objects by creating one last
+	      # reloadable object file.  All subsequent reloadable object
+	      # files will link in the last one created.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      reload_objs="$objlist $last_robj"
+	      eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+	      if test -n "$last_robj"; then
+	        eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+	      fi
+	      delfiles+=" $output"
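+	      # Illustration (hypothetical sizes): if max_cmd_len only allowed a
+	      # few objects per command, the loop above would produce e.g.
+	      # libNAME.la-1.o from the first batch, then libNAME.la-2.o built
+	      # from the second batch plus libNAME.la-1.o, and so on, leaving
+	      # the last reloadable object in $output for the final link.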
+
+	    else
+	      output=
+	    fi
+
+	    if ${skipped_export-false}; then
+	      func_verbose "generating symbol list for \`$libname.la'"
+	      export_symbols="$output_objdir/$libname.exp"
+	      $opt_dry_run || $RM $export_symbols
+	      libobjs=$output
+	      # Append the command to create the export file.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+	      if test -n "$last_robj"; then
+		eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+	      fi
+	    fi
+
+	    test -n "$save_libobjs" &&
+	      func_verbose "creating a temporary reloadable object file: $output"
+
+	    # Loop through the commands generated above and execute them.
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd in $concat_cmds; do
+	      IFS="$save_ifs"
+	      $opt_silent || {
+		  func_quote_for_expand "$cmd"
+		  eval "func_echo $func_quote_for_expand_result"
+	      }
+	      $opt_dry_run || eval "$cmd" || {
+		lt_exit=$?
+
+		# Restore the uninstalled library and exit
+		if test "$opt_mode" = relink; then
+		  ( cd "$output_objdir" && \
+		    $RM "${realname}T" && \
+		    $MV "${realname}U" "$realname" )
+		fi
+
+		exit $lt_exit
+	      }
+	    done
+	    IFS="$save_ifs"
+
+	    if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+
+          if ${skipped_export-false}; then
+	    if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	      tmp_export_symbols="$export_symbols"
+	      test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	      $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	    fi
+
+	    if test -n "$orig_export_symbols"; then
+	      # The given exports_symbols file has to be filtered, so filter it.
+	      func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	      # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	      # 's' commands which not all seds can handle. GNU sed should be fine
+	      # though. Also, the filter scales superlinearly with the number of
+	      # global variables. join(1) would be nice here, but unfortunately
+	      # isn't a blessed tool.
+	      $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	      delfiles+=" $export_symbols $output_objdir/$libname.filter"
+	      export_symbols=$output_objdir/$libname.def
+	      $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	    fi
+	  fi
+
+	  libobjs=$output
+	  # Restore the value of output.
+	  output=$save_output
+
+	  if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	  # Expand the library linking commands again to reset the
+	  # value of $libobjs for piecewise linking.
+
+	  # Do each of the archive commands.
+	  if test "$module" = yes && test -n "$module_cmds" ; then
+	    if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	      cmds=$module_expsym_cmds
+	    else
+	      cmds=$module_cmds
+	    fi
+	  else
+	    if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	      cmds=$archive_expsym_cmds
+	    else
+	      cmds=$archive_cmds
+	    fi
+	  fi
+	fi
+
+	if test -n "$delfiles"; then
+	  # Append the command to remove temporary files to $cmds.
+	  eval cmds=\"\$cmds~\$RM $delfiles\"
+	fi
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  libobjs+=" $func_extract_archives_result"
+	  test "X$libobjs" = "X " && libobjs=
+	fi
+
+	save_ifs="$IFS"; IFS='~'
+	for cmd in $cmds; do
+	  IFS="$save_ifs"
+	  eval cmd=\"$cmd\"
+	  $opt_silent || {
+	    func_quote_for_expand "$cmd"
+	    eval "func_echo $func_quote_for_expand_result"
+	  }
+	  $opt_dry_run || eval "$cmd" || {
+	    lt_exit=$?
+
+	    # Restore the uninstalled library and exit
+	    if test "$opt_mode" = relink; then
+	      ( cd "$output_objdir" && \
+	        $RM "${realname}T" && \
+		$MV "${realname}U" "$realname" )
+	    fi
+
+	    exit $lt_exit
+	  }
+	done
+	IFS="$save_ifs"
+
+	# Restore the uninstalled library and exit
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+	  if test -n "$convenience"; then
+	    if test -z "$whole_archive_flag_spec"; then
+	      func_show_eval '${RM}r "$gentop"'
+	    fi
+	  fi
+
+	  exit $EXIT_SUCCESS
+	fi
+
+	# Create links to the real library.
+	for linkname in $linknames; do
+	  if test "$realname" != "$linkname"; then
+	    func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+	  fi
+	done
+
+	# If -module or -export-dynamic was specified, set the dlname.
+	if test "$module" = yes || test "$export_dynamic" = yes; then
+	  # On all known operating systems, these are identical.
+	  dlname="$soname"
+	fi
+      fi
+      ;;
+
+    obj)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for objects"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for objects" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for objects"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for objects"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for objects"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for objects"
+
+      case $output in
+      *.lo)
+	test -n "$objs$old_deplibs" && \
+	  func_fatal_error "cannot build library object \`$output' from non-libtool objects"
+
+	libobj=$output
+	func_lo2o "$libobj"
+	obj=$func_lo2o_result
+	;;
+      *)
+	libobj=
+	obj="$output"
+	;;
+      esac
+
+      # Delete the old objects.
+      $opt_dry_run || $RM $obj $libobj
+
+      # Objects from convenience libraries.  This assumes
+      # single-version convenience libraries.  Whenever we create
+      # different ones for PIC/non-PIC, we'll have to duplicate
+      # the extraction.
+      reload_conv_objs=
+      gentop=
+      # reload_cmds runs $LD directly, so let us get rid of
+      # -Wl from whole_archive_flag_spec and hope we can get by with
+      # turning commas into spaces.
+      wl=
+
+      if test -n "$convenience"; then
+	if test -n "$whole_archive_flag_spec"; then
+	  eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+	  reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+	else
+	  gentop="$output_objdir/${obj}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $convenience
+	  reload_conv_objs="$reload_objs $func_extract_archives_result"
+	fi
+      fi
+
+      # If we're not building shared, we need to use non_pic_objs
+      test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
+
+      # Create the old-style object.
+      reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+      output="$obj"
+      func_execute_cmds "$reload_cmds" 'exit $?'
+
+      # Exit if we aren't doing a library object file.
+      if test -z "$libobj"; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$build_libtool_libs" != yes; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	# Create an invalid libtool object if no PIC, so that we don't
+	# accidentally link it into a program.
+	# $show "echo timestamp > $libobj"
+	# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+	exit $EXIT_SUCCESS
+      fi
+
+      if test -n "$pic_flag" || test "$pic_mode" != default; then
+	# Only do commands if we really have different PIC objects.
+	reload_objs="$libobjs $reload_conv_objs"
+	output="$libobj"
+	func_execute_cmds "$reload_cmds" 'exit $?'
+      fi
+
+      if test -n "$gentop"; then
+	func_show_eval '${RM}r "$gentop"'
+      fi
+
+      exit $EXIT_SUCCESS
+      ;;
+
+    prog)
+      case $host in
+	*cygwin*) func_stripname '' '.exe' "$output"
+	          output=$func_stripname_result.exe;;
+      esac
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for programs"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for programs"
+
+      test "$preload" = yes \
+        && test "$dlopen_support" = unknown \
+	&& test "$dlopen_self" = unknown \
+	&& test "$dlopen_self_static" = unknown && \
+	  func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+      case $host in
+      *-*-rhapsody* | *-*-darwin1.[012])
+	# On Rhapsody replace the C library with the System framework
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	;;
+      esac
+
+      case $host in
+      *-*-darwin*)
+	# Don't allow lazy linking; it breaks C++ global constructors,
+	# but it is supposedly fixed on 10.4 or later (yay!).
+	if test "$tagname" = CXX ; then
+	  case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+	    10.[0123])
+	      compile_command+=" ${wl}-bind_at_load"
+	      finalize_command+=" ${wl}-bind_at_load"
+	    ;;
+	  esac
+	fi
+	# Time to change all our "foo.ltframework" stuff back to "-framework foo"
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	;;
+      esac
+
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $compile_deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs+=" -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $compile_deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs+=" $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs+=" $deplib" ;;
+	esac
+      done
+      compile_deplibs="$new_libs"
+
+
+      compile_command+=" $compile_deplibs"
+      finalize_command+=" $finalize_deplibs"
+
+      if test -n "$rpath$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	for libdir in $rpath $xrpath; do
+	  # This is the magic to use -rpath.
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath+=" $libdir" ;;
+	  esac
+	done
+      fi
+
+      # Now hardcode the library paths
+      rpath=
+      hardcode_libdirs=
+      for libdir in $compile_rpath $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath+=" $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) perm_rpath+=" $libdir" ;;
+	  esac
+	fi
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
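+	  # DLLs are conventionally installed in a sibling bin directory, so
+	  # derive a candidate bin directory by rewriting a trailing /lib to
+	  # /bin ('*' is the sed delimiter here) and add both directories to
+	  # $dllsearchpath if they are not already present.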
+	  testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$libdir:"*) ;;
+	  ::) dllsearchpath=$libdir;;
+	  *) dllsearchpath+=":$libdir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath+=":$testbindir";;
+	  esac
+	  ;;
+	esac
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      compile_rpath="$rpath"
+
+      rpath=
+      hardcode_libdirs=
+      for libdir in $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath+=" $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$finalize_perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_perm_rpath+=" $libdir" ;;
+	  esac
+	fi
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      finalize_rpath="$rpath"
+
+      if test -n "$libobjs" && test "$build_old_libs" = yes; then
+	# Transform all the library objects into standard objects.
+	compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+      fi
+
+      func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
+
+      # template prelinking step
+      if test -n "$prelink_cmds"; then
+	func_execute_cmds "$prelink_cmds" 'exit $?'
+      fi
+
+      wrappers_required=yes
+      case $host in
+      *cegcc* | *mingw32ce*)
+        # Disable wrappers for cegcc and mingw32ce hosts; we are cross-compiling anyway.
+        wrappers_required=no
+        ;;
+      *cygwin* | *mingw* )
+        if test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      *)
+        if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      esac
+      if test "$wrappers_required" = no; then
+	# Replace the output file specification.
+	compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	link_command="$compile_command$compile_rpath"
+
+	# We have no uninstalled library dependencies, so finalize right now.
+	exit_status=0
+	func_show_eval "$link_command" 'exit_status=$?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	# Delete the generated files.
+	if test -f "$output_objdir/${outputname}S.${objext}"; then
+	  func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+	fi
+
+	exit $exit_status
+      fi
+
+      if test -n "$compile_shlibpath$finalize_shlibpath"; then
+	compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+      fi
+      if test -n "$finalize_shlibpath"; then
+	finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+      fi
+
+      compile_var=
+      finalize_var=
+      if test -n "$runpath_var"; then
+	if test -n "$perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $perm_rpath; do
+	    rpath+="$dir:"
+	  done
+	  compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+	if test -n "$finalize_perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $finalize_perm_rpath; do
+	    rpath+="$dir:"
+	  done
+	  finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+      fi
+
+      if test "$no_install" = yes; then
+	# We don't need to create a wrapper script.
+	link_command="$compile_var$compile_command$compile_rpath"
+	# Replace the output file specification.
+	link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	# Delete the old output file.
+	$opt_dry_run || $RM $output
+	# Link the executable and exit
+	func_show_eval "$link_command" 'exit $?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$hardcode_action" = relink; then
+	# Fast installation is not supported
+	link_command="$compile_var$compile_command$compile_rpath"
+	relink_command="$finalize_var$finalize_command$finalize_rpath"
+
+	func_warning "this platform does not like uninstalled shared libraries"
+	func_warning "\`$output' will be relinked during installation"
+      else
+	if test "$fast_install" != no; then
+	  link_command="$finalize_var$compile_command$finalize_rpath"
+	  if test "$fast_install" = yes; then
+	    relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+	  else
+	    # fast_install is set to needless
+	    relink_command=
+	  fi
+	else
+	  link_command="$compile_var$compile_command$compile_rpath"
+	  relink_command="$finalize_var$finalize_command$finalize_rpath"
+	fi
+      fi
+
+      # Replace the output file specification.
+      link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+      # Delete the old output files.
+      $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+      func_show_eval "$link_command" 'exit $?'
+
+      if test -n "$postlink_cmds"; then
+	func_to_tool_file "$output_objdir/$outputname"
+	postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	func_execute_cmds "$postlink_cmds" 'exit $?'
+      fi
+
+      # Now create the wrapper script.
+      func_verbose "creating $output"
+
+      # Quote the relink command for shipping.
+      if test -n "$relink_command"; then
+	# Preserve any variables that may affect compiler behavior
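+	# Each saved variable is replayed in the shipped relink command: unset
+	# variables are unset again, empty ones are exported empty, and set
+	# ones are re-exported with their value quoted for eval.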
+	for var in $variables_saved_for_relink; do
+	  if eval test -z \"\${$var+set}\"; then
+	    relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	  elif eval var_value=\$$var; test -z "$var_value"; then
+	    relink_command="$var=; export $var; $relink_command"
+	  else
+	    func_quote_for_eval "$var_value"
+	    relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	  fi
+	done
+	relink_command="(cd `pwd`; $relink_command)"
+	relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      fi
+
+      # Only actually do things if not in dry run mode.
+      $opt_dry_run || {
+	# win32 will think the script is a binary if it has
+	# a .exe suffix, so we strip it off here.
+	case $output in
+	  *.exe) func_stripname '' '.exe' "$output"
+	         output=$func_stripname_result ;;
+	esac
+	# test for cygwin because mv fails w/o .exe extensions
+	case $host in
+	  *cygwin*)
+	    exeext=.exe
+	    func_stripname '' '.exe' "$outputname"
+	    outputname=$func_stripname_result ;;
+	  *) exeext= ;;
+	esac
+	case $host in
+	  *cygwin* | *mingw* )
+	    func_dirname_and_basename "$output" "" "."
+	    output_name=$func_basename_result
+	    output_path=$func_dirname_result
+	    cwrappersource="$output_path/$objdir/lt-$output_name.c"
+	    cwrapper="$output_path/$output_name.exe"
+	    $RM $cwrappersource $cwrapper
+	    trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_cwrapperexe_src > $cwrappersource
+
+	    # The wrapper executable is built using the $host compiler,
+	    # because it contains $host paths and files. If cross-
+	    # compiling, it, like the target executable, must be
+	    # executed on the $host or under an emulation environment.
+	    $opt_dry_run || {
+	      $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+	      $STRIP $cwrapper
+	    }
+
+	    # Now, create the wrapper script for func_source use:
+	    func_ltwrapper_scriptname $cwrapper
+	    $RM $func_ltwrapper_scriptname_result
+	    trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+	    $opt_dry_run || {
+	      # note: this script will not be executed, so do not chmod.
+	      if test "x$build" = "x$host" ; then
+		$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+	      else
+		func_emit_wrapper no > $func_ltwrapper_scriptname_result
+	      fi
+	    }
+	  ;;
+	  * )
+	    $RM $output
+	    trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_wrapper no > $output
+	    chmod +x $output
+	  ;;
+	esac
+      }
+      exit $EXIT_SUCCESS
+      ;;
+    esac
+
+    # See if we need to build an old-fashioned archive.
+    for oldlib in $oldlibs; do
+
+      if test "$build_libtool_libs" = convenience; then
+	oldobjs="$libobjs_save $symfileobj"
+	addlibs="$convenience"
+	build_libtool_libs=no
+      else
+	if test "$build_libtool_libs" = module; then
+	  oldobjs="$libobjs_save"
+	  build_libtool_libs=no
+	else
+	  oldobjs="$old_deplibs $non_pic_objects"
+	  if test "$preload" = yes && test -f "$symfileobj"; then
+	    oldobjs+=" $symfileobj"
+	  fi
+	fi
+	addlibs="$old_convenience"
+      fi
+
+      if test -n "$addlibs"; then
+	gentop="$output_objdir/${outputname}x"
+	generated+=" $gentop"
+
+	func_extract_archives $gentop $addlibs
+	oldobjs+=" $func_extract_archives_result"
+      fi
+
+      # Do each command in the archive commands.
+      if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
+	cmds=$old_archive_from_new_cmds
+      else
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  oldobjs+=" $func_extract_archives_result"
+	fi
+
+	# POSIX demands that no paths be encoded in archives.  We have
+	# to avoid creating archives with duplicate basenames if we
+	# might have to extract them afterwards, e.g., when creating a
+	# static archive out of a convenience library, or when linking
+	# the entirety of a libtool archive into another (currently
+	# not supported by libtool).
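+	# The check below pipes the basenames of $oldobjs through
+	# `sort | sort -uc`; if that fails, at least one basename repeats and
+	# the offending objects are linked (or copied) into $gentop under
+	# unique lt<N>-prefixed names before archiving.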
+	if (for obj in $oldobjs
+	    do
+	      func_basename "$obj"
+	      $ECHO "$func_basename_result"
+	    done | sort | sort -uc >/dev/null 2>&1); then
+	  :
+	else
+	  echo "copying selected object files to avoid basename conflicts..."
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+	  func_mkdir_p "$gentop"
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  counter=1
+	  for obj in $save_oldobjs
+	  do
+	    func_basename "$obj"
+	    objbase="$func_basename_result"
+	    case " $oldobjs " in
+	    " ") oldobjs=$obj ;;
+	    *[\ /]"$objbase "*)
+	      while :; do
+		# Make sure we don't pick an alternate name that also
+		# overlaps.
+		newobj=lt$counter-$objbase
+		func_arith $counter + 1
+		counter=$func_arith_result
+		case " $oldobjs " in
+		*[\ /]"$newobj "*) ;;
+		*) if test ! -f "$gentop/$newobj"; then break; fi ;;
+		esac
+	      done
+	      func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+	      oldobjs+=" $gentop/$newobj"
+	      ;;
+	    *) oldobjs+=" $obj" ;;
+	    esac
+	  done
+	fi
+	func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+	tool_oldlib=$func_to_tool_file_result
+	eval cmds=\"$old_archive_cmds\"
+
+	func_len " $cmds"
+	len=$func_len_result
+	if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  cmds=$old_archive_cmds
+	elif test -n "$archiver_list_spec"; then
+	  func_verbose "using command file archive linking..."
+	  for obj in $oldobjs
+	  do
+	    func_to_tool_file "$obj"
+	    $ECHO "$func_to_tool_file_result"
+	  done > $output_objdir/$libname.libcmd
+	  func_to_tool_file "$output_objdir/$libname.libcmd"
+	  oldobjs=" $archiver_list_spec$func_to_tool_file_result"
+	  cmds=$old_archive_cmds
+	else
+	  # the command line is too long to link in one step, link in parts
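+	  # Objects are batched into $objlist until the estimated command
+	  # length reaches $max_cmd_len; each full batch appends another
+	  # archive command to $concat_cmds (joined with '~'), and RANLIB is
+	  # held at ':' so only the final command runs ranlib.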
+	  func_verbose "using piecewise archive linking..."
+	  save_RANLIB=$RANLIB
+	  RANLIB=:
+	  objlist=
+	  concat_cmds=
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  # Is there a better way of finding the last object in the list?
+	  for obj in $save_oldobjs
+	  do
+	    last_oldobj=$obj
+	  done
+	  eval test_cmds=\"$old_archive_cmds\"
+	  func_len " $test_cmds"
+	  len0=$func_len_result
+	  len=$len0
+	  for obj in $save_oldobjs
+	  do
+	    func_len " $obj"
+	    func_arith $len + $func_len_result
+	    len=$func_arith_result
+	    objlist+=" $obj"
+	    if test "$len" -lt "$max_cmd_len"; then
+	      :
+	    else
+	      # the accumulated command is getting too long; archive this batch now
+	      oldobjs=$objlist
+	      if test "$obj" = "$last_oldobj" ; then
+		RANLIB=$save_RANLIB
+	      fi
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+	      objlist=
+	      len=$len0
+	    fi
+	  done
+	  RANLIB=$save_RANLIB
+	  oldobjs=$objlist
+	  if test "X$oldobjs" = "X" ; then
+	    eval cmds=\"\$concat_cmds\"
+	  else
+	    eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+	  fi
+	fi
+      fi
+      func_execute_cmds "$cmds" 'exit $?'
+    done
+
+    test -n "$generated" && \
+      func_show_eval "${RM}r$generated"
+
+    # Now create the libtool archive.
+    case $output in
+    *.la)
+      old_library=
+      test "$build_old_libs" = yes && old_library="$libname.$libext"
+      func_verbose "creating $output"
+
+      # Preserve any variables that may affect compiler behavior
+      for var in $variables_saved_for_relink; do
+	if eval test -z \"\${$var+set}\"; then
+	  relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	elif eval var_value=\$$var; test -z "$var_value"; then
+	  relink_command="$var=; export $var; $relink_command"
+	else
+	  func_quote_for_eval "$var_value"
+	  relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	fi
+      done
+      # Quote the link command for shipping.
+      relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+      relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      if test "$hardcode_automatic" = yes ; then
+	relink_command=
+      fi
+
+      # Only create the output if not a dry run.
+      $opt_dry_run || {
+	for installed in no yes; do
+	  if test "$installed" = yes; then
+	    if test -z "$install_libdir"; then
+	      break
+	    fi
+	    output="$output_objdir/$outputname"i
+	    # Replace all uninstalled libtool libraries with the installed ones
+	    newdependency_libs=
+	    for deplib in $dependency_libs; do
+	      case $deplib in
+	      *.la)
+		func_basename "$deplib"
+		name="$func_basename_result"
+		func_resolve_sysroot "$deplib"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$deplib' is not a valid libtool archive"
+		newdependency_libs+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      -L*)
+		func_stripname -L '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		newdependency_libs+=" -L$func_replace_sysroot_result"
+		;;
+	      -R*)
+		func_stripname -R '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		newdependency_libs+=" -R$func_replace_sysroot_result"
+		;;
+	      *) newdependency_libs+=" $deplib" ;;
+	      esac
+	    done
+	    dependency_libs="$newdependency_libs"
+	    newdlfiles=
+
+	    for lib in $dlfiles; do
+	      case $lib in
+	      *.la)
+	        func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlfiles+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      *) newdlfiles+=" $lib" ;;
+	      esac
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+	      *.la)
+		# Only pass preopened files to the pseudo-archive (for
+		# eventual linking with the app. that links it) if we
+		# didn't already link the preopened objects directly into
+		# the library:
+		func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlprefiles+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      esac
+	    done
+	    dlprefiles="$newdlprefiles"
+	  else
+	    newdlfiles=
+	    for lib in $dlfiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlfiles+=" $abs"
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlprefiles+=" $abs"
+	    done
+	    dlprefiles="$newdlprefiles"
+	  fi
+	  $RM $output
+	  # place dlname in correct position for cygwin
+	  # In fact, it would be nice if we could use this code for all target
+	  # systems that can't hard-code library paths into their executables
+	  # and that have no shared library path variable independent of PATH,
+	  # but it turns out we can't easily determine that from inspecting
+	  # libtool variables, so we have to hard-code the OSs to which it
+	  # applies here; at the moment, that means platforms that use the PE
+	  # object format with DLL files.  See the long comment at the top of
+	  # tests/bindir.at for full details.
+	  tdlname=$dlname
+	  case $host,$output,$installed,$module,$dlname in
+	    *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+	      # If a -bindir argument was supplied, place the dll there.
+	      if test "x$bindir" != x ;
+	      then
+		func_relative_path "$install_libdir" "$bindir"
+		tdlname=$func_relative_path_result$dlname
+	      else
+		# Otherwise fall back on heuristic.
+		tdlname=../bin/$dlname
+	      fi
+	      ;;
+	  esac
+	  $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that cannot go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+	  if test "$installed" = no && test "$need_relink" = yes; then
+	    $ECHO >> $output "\
+relink_command=\"$relink_command\""
+	  fi
+	done
+      }
+
+      # Do a symbolic link so that the libtool archive can be found in
+      # LD_LIBRARY_PATH before the program is installed.
+      func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+      ;;
+    esac
+    exit $EXIT_SUCCESS
+}
+
+{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
+    func_mode_link ${1+"$@"}
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+    $opt_debug
+    RM="$nonopt"
+    files=
+    rmforce=
+    exit_status=0
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    for arg
+    do
+      case $arg in
+      -f) RM+=" $arg"; rmforce=yes ;;
+      -*) RM+=" $arg" ;;
+      *) files+=" $arg" ;;
+      esac
+    done
+
+    test -z "$RM" && \
+      func_fatal_help "you must specify an RM program"
+
+    rmdirs=
+
+    for file in $files; do
+      func_dirname "$file" "" "."
+      dir="$func_dirname_result"
+      if test "X$dir" = X.; then
+	odir="$objdir"
+      else
+	odir="$dir/$objdir"
+      fi
+      func_basename "$file"
+      name="$func_basename_result"
+      test "$opt_mode" = uninstall && odir="$dir"
+
+      # Remember odir for removal later, being careful to avoid duplicates
+      if test "$opt_mode" = clean; then
+	case " $rmdirs " in
+	  *" $odir "*) ;;
+	  *) rmdirs+=" $odir" ;;
+	esac
+      fi
+
+      # Don't error if the file doesn't exist and rm -f was used.
+      if { test -L "$file"; } >/dev/null 2>&1 ||
+	 { test -h "$file"; } >/dev/null 2>&1 ||
+	 test -f "$file"; then
+	:
+      elif test -d "$file"; then
+	exit_status=1
+	continue
+      elif test "$rmforce" = yes; then
+	continue
+      fi
+
+      rmfiles="$file"
+
+      case $name in
+      *.la)
+	# Possibly a libtool archive, so verify it.
+	if func_lalib_p "$file"; then
+	  func_source $dir/$name
+
+	  # Delete the libtool libraries and symlinks.
+	  for n in $library_names; do
+	    rmfiles+=" $odir/$n"
+	  done
+	  test -n "$old_library" && rmfiles+=" $odir/$old_library"
+
+	  case "$opt_mode" in
+	  clean)
+	    case " $library_names " in
+	    *" $dlname "*) ;;
+	    *) test -n "$dlname" && rmfiles+=" $odir/$dlname" ;;
+	    esac
+	    test -n "$libdir" && rmfiles+=" $odir/$name $odir/${name}i"
+	    ;;
+	  uninstall)
+	    if test -n "$library_names"; then
+	      # Do each command in the postuninstall commands.
+	      func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+
+	    if test -n "$old_library"; then
+	      # Do each command in the old_postuninstall commands.
+	      func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+	    # FIXME: should reinstall the best remaining shared library.
+	    ;;
+	  esac
+	fi
+	;;
+
+      *.lo)
+	# Possibly a libtool object, so verify it.
+	if func_lalib_p "$file"; then
+
+	  # Read the .lo file
+	  func_source $dir/$name
+
+	  # Add PIC object to the list of files to remove.
+	  if test -n "$pic_object" &&
+	     test "$pic_object" != none; then
+	    rmfiles+=" $dir/$pic_object"
+	  fi
+
+	  # Add non-PIC object to the list of files to remove.
+	  if test -n "$non_pic_object" &&
+	     test "$non_pic_object" != none; then
+	    rmfiles+=" $dir/$non_pic_object"
+	  fi
+	fi
+	;;
+
+      *)
+	if test "$opt_mode" = clean ; then
+	  noexename=$name
+	  case $file in
+	  *.exe)
+	    func_stripname '' '.exe' "$file"
+	    file=$func_stripname_result
+	    func_stripname '' '.exe' "$name"
+	    noexename=$func_stripname_result
+	    # $file with .exe has already been added to rmfiles;
+	    # add $file without .exe as well
+	    rmfiles+=" $file"
+	    ;;
+	  esac
+	  # Do a test to see if this is a libtool program.
+	  if func_ltwrapper_p "$file"; then
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      relink_command=
+	      func_source $func_ltwrapper_scriptname_result
+	      rmfiles+=" $func_ltwrapper_scriptname_result"
+	    else
+	      relink_command=
+	      func_source $dir/$noexename
+	    fi
+
+	    # note $name still contains .exe if it was in $file originally
+	    # as does the version of $file that was added into $rmfiles
+	    rmfiles+=" $odir/$name $odir/${name}S.${objext}"
+	    if test "$fast_install" = yes && test -n "$relink_command"; then
+	      rmfiles+=" $odir/lt-$name"
+	    fi
+	    if test "X$noexename" != "X$name" ; then
+	      rmfiles+=" $odir/lt-${noexename}.c"
+	    fi
+	  fi
+	fi
+	;;
+      esac
+      func_show_eval "$RM $rmfiles" 'exit_status=1'
+    done
+
+    # Try to remove the ${objdir}s in the directories where we deleted files
+    for dir in $rmdirs; do
+      if test -d "$dir"; then
+	func_show_eval "rmdir $dir >/dev/null 2>&1"
+      fi
+    done
+
+    exit $exit_status
+}
+
+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
+    func_mode_uninstall ${1+"$@"}
+
+test -z "$opt_mode" && {
+  help="$generic_help"
+  func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+  func_fatal_help "invalid operation mode \`$opt_mode'"
+
+if test -n "$exec_cmd"; then
+  eval exec "$exec_cmd"
+  exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# in which we disable both kinds of libraries.  Given conflicting
+# choices, we go for a static library, which is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them.  This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fall back to a static-only
+# configuration.  But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+# vi:sw=2
+
+
+# ### BEGIN LIBTOOL TAG CONFIG: CXX
+
+# The linker used to build libraries.
+LD="/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LTCC \$LTCFLAGS -nostdlib \${wl}-r -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC="g++"
+
+# Is the compiler the GNU compiler?
+with_gcc=yes
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=" -fno-builtin"
+
+# Additional compiler flags for building library objects.
+pic_flag=" -fno-common -DPIC"
+
+# How to pass a linker flag through the compiler.
+wl="-Wl,"
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec="\`for conv in \$convenience\\\"\\\"; do test  -n \\\"\$conv\\\" && new_convenience=\\\"\$new_convenience \${wl}-force_load,\$conv\\\"; done; func_echo_all \\\"\$new_convenience\\\"\`"
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object="no"
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring \$single_module"
+archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring \$single_module \${wl}-exported_symbols_list,\$output_objdir/\${libname}-symbols.expsym"
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags"
+module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags \${wl}-exported_symbols_list,\$output_objdir/\${libname}-symbols.expsym"
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld="no"
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag="\${wl}-undefined \${wl}dynamic_lookup"
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e., impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=unsupported
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=yes
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=yes
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=immediate
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: CXX
+
+# ### BEGIN LIBTOOL TAG CONFIG: F77
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LTCC \$LTCFLAGS -nostdlib \${wl}-r -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e., impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: F77
+
+# ### BEGIN LIBTOOL TAG CONFIG: FC
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LTCC \$LTCFLAGS -nostdlib \${wl}-r -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e., impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: FC
+
+# ### BEGIN LIBTOOL TAG CONFIG: GO
+
+# The linker used to build libraries.
+LD="/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LTCC \$LTCFLAGS -nostdlib \${wl}-r -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e., impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: GO
+
+# ### BEGIN LIBTOOL TAG CONFIG: GCJ
+
+# The linker used to build libraries.
+LD="/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LTCC \$LTCFLAGS -nostdlib \${wl}-r -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e., impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: GCJ
+
+# ### BEGIN LIBTOOL TAG CONFIG: RC
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=""
+reload_cmds=""
+
+# Commands used to build an old-style archive.
+old_archive_cmds=""
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute", i.e., impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: RC
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/libtoolize b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/libtoolize
new file mode 100755
index 0000000..b842088
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/libtoolize
@@ -0,0 +1,2555 @@
+#! /bin/sh
+# Generated from libtoolize.m4sh by GNU Autoconf 2.68.
+
+# libtoolize (GNU libtool) 2.4.2
+# Written by Gary V. Vaughan <gary@gnu.org>, 2003
+
+# Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# Libtoolize is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Libtoolize is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with libtoolize; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]...
+#
+# Prepare a package to use libtool.
+#
+#   -c, --copy          copy files rather than symlinking them
+#       --debug         enable verbose shell tracing
+#   -n, --dry-run       print commands rather than running them
+#   -f, --force         replace existing files
+#   -i, --install       copy missing auxiliary files
+#       --ltdl[=DIR]    install libltdl sources [default: libltdl]
+#       --no-warn       don't display warning messages
+#       --nonrecursive  prepare ltdl for non-recursive make
+#   -q, --quiet         work silently
+#       --recursive     prepare ltdl for recursive make
+#       --subproject    prepare ltdl to configure and build independently
+#   -v, --verbose       verbosely report processing
+#       --version       print version information and exit
+#   -h, --help          print short or long help message
+#
+# The following space- or comma-delimited options can be passed to $progname
+# via the environment variable LIBTOOLIZE_OPTIONS; unknown environment
+# options are ignored:
+#
+#   --debug             enable verbose shell tracing
+#   --no-warn           don't display warning messages
+#   --quiet             work silently
+#   --verbose           verbosely report processing
+#
+# You must `cd' to the top directory of your package before you run
+# `$progname'.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#       host-triplet:	x86_64-apple-darwin13.4.0
+#       $progname:	(GNU libtool) 2.4.2
+#       automake:		$automake_version
+#       autoconf:		$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
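+#
+# For instance, a typical invocation from the top of an Autoconf-based
+# package (illustrative only; any of the options documented above may be
+# combined in the same way) is:
+#
+#   LIBTOOLIZE_OPTIONS=--quiet libtoolize --copy --install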
+
+: ${TAR=tar}
+
+PROGRAM=libtoolize
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves; most probably we were run as `sh COMMAND',
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${EGREP="/usr/bin/grep -E"}
+: ${FGREP="/usr/bin/grep -F"}
+: ${GREP="/usr/bin/grep"}
+: ${LN_S="ln -s"}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SED="/usr/bin/sed"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+} # func_dirname may be replaced by extended shell implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+} # func_basename may be replaced by extended shell implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute the filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    # Extract subdirectory from the argument.
+    func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+    func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+} # func_dirname_and_basename may be replaced by extended shell implementation
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, or percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    case ${2} in
+      .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+      *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+    esac
+} # func_stripname may be replaced by extended shell implementation
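+# For instance, "func_stripname 'lib' '.la' 'libfoo.la'" should leave
+# `foo' in $func_stripname_result (illustrative example).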
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
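+# For instance, `func_normal_abspath /a/./b//c/..' should leave `/a/b'
+# in $func_normal_abspath_result (illustrative example).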
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
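+# For instance, `func_relative_path /usr/lib /usr/bin' should leave
+# `../bin/' in $func_relative_path_result (illustrative example).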
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same.  If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$'; backslash-escape that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon delimited
+	# list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
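+# For instance, `func_tr_sh 1foo-bar' should leave `_1foo_bar' in
+# $func_tr_sh_result (illustrative example).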
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    my_sed_short_opt='1s/^\(..\).*$/\1/;q'
+    my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
+
+    func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
+    func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
+} # func_split_short_opt may be replaced by extended shell implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
+    my_sed_long_arg='1s/^--[^=]*=//'
+
+    func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
+    func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
+} # func_split_long_opt may be replaced by extended shell implementation
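+# For instance, `func_split_long_opt --ltdl=libltdl' should set
+# func_split_long_opt_name to `--ltdl' and func_split_long_opt_arg
+# to `libltdl' (illustrative example).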
+
+exit_cmd=:
+
+
+
+
+
+# ltdl can be installed to be self-contained (subproject, the default);
+# or to be configured by a parent project, either with a recursive or
+# nonrecursive automake driven make:
+ltdl_mode=
+
+# Locations for important files:
+ltdldir=
+
+# Parse environment options
+{
+  my_sed_env_opt='1s/^\([^,:; ]*\).*$/\1/;q'
+  my_sed_env_rest='1s/^[^,:; ]*[,:; ]*\(.*\)$/\1/;q'
+
+  while test -n "$LIBTOOLIZE_OPTIONS"; do
+    opt=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_opt"`
+    LIBTOOLIZE_OPTIONS=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_rest"`
+
+    case $opt in
+      --debug|--no-warn|--quiet|--verbose)
+		envopts="${envopts+$envopts }$opt"			  ;;
+      --*)	env_warning="${env_warning+$env_warning
+}unrecognized environment option \`$opt'" 				  ;;
+      *)	func_fatal_help "garbled LIBTOOLIZE_OPTIONS near \`$opt'" ;;
+    esac
+  done
+
+  test -n "$envopts" && {
+    func_quote_for_eval "$envopts"
+    eval set dummy "$func_quote_for_eval_result" ${1+"$@"}
+    shift
+  }
+}
+
+
+
+# Option defaults:
+opt_debug=:
+opt_copy=false
+opt_force=false
+opt_install=false
+opt_dry_run=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+opt_nonrecursive=false
+opt_recursive=false
+opt_standalone=false
+opt_ltdl="false"
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --copy|-c)
+			opt_copy=:
+			;;
+      --force|-f)
+			opt_force=:
+			;;
+      --install|-i)
+			opt_install=:
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+CP="func_echo_all $CP"
+test -n "$LN_S" && LN_S="func_echo_all $LN_S"
+MKDIR="func_echo_all $MKDIR"
+RM="func_echo_all $RM"
+TAR="func_echo_all $TAR"
+			;;
+      --quiet|--automake|-q)
+			opt_quiet=:
+			;;
+      --verbose|-v)
+			opt_verbose=:
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+			;;
+      --nonrecursive|--non-recursive)
+			opt_nonrecursive=:
+			;;
+      --recursive)
+			opt_recursive=:
+			;;
+      --standalone)
+			opt_standalone=:
+			;;
+      --ltdl)
+			optarg="$1"
+			if test $# -gt 0; then
+			    case $optarg in # ((
+			        -*) ;;
+			        *) opt_ltdl="$optarg"; shift ;;
+			    esac
+			fi
+# This is tricky, since we're overloading $opt_ltdl to be the
+# optarg for --ltdl during option processing, but then stashing
+# the (optional) optarg in $ltdldir and reusing $opt_ltdl to
+# indicate that --ltdl was seen during option processing.  Also,
+# be careful that --ltdl=foo --ltdl=bar results in ltdldir=bar:
+case $opt_ltdl in
+          false|:) ;;  # a bare '--ltdl' followed by another option
+  *)       ltdldir=`$ECHO "$optarg" | $SED 's,/*$,,'` ;;
+esac
+opt_ltdl=:
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-c*|-f*|-i*|-n*|-q*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # show any warnings saved by LIBTOOLIZE_OPTIONS parsing
+  test -n "$env_warning" &&
+    echo "$env_warning" |while read line; do func_warning "$line"; done
+
+  # validate $opt_nonrecursive, $opt_recursive and $opt_standalone
+  if $opt_nonrecursive; then
+    if $opt_recursive || $opt_standalone; then
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    fi
+    ltdl_mode=nonrecursive
+  elif $opt_recursive; then
+    $opt_standalone &&
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    ltdl_mode=recursive
+  elif $opt_standalone; then
+    ltdl_mode=standalone
+  fi
+
+  # any remaining arguments are an error
+  test $# -gt 0 &&
+    func_fatal_help "unknown additional arguments: \`${1+}'"
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+# func_echo_once msg_var
+# Calls func_echo with the value of MSG_VAR, and then sets MSG_VAR="" so
+# that subsequent calls have no effect.
+func_echo_once ()
+{
+    $opt_debug
+    if test -n "$1"; then
+      eval my_msg=\$$1
+
+      if test -n "$my_msg"; then
+        func_echo "$my_msg"
+        eval $1=""
+      fi
+    fi
+}
+
+
+# func_copy srcfile destfile [msg_var]
+# A wrapper for func_copy_cb that accepts arguments in the same order
+# as the cp(1) shell command.
+func_copy ()
+{
+    $opt_debug
+
+    test -f "$1" || \
+      { func_error "\`$1' not copied:  not a regular file"; return 1; }
+
+    func_dirname_and_basename "$1"
+    my_f1=$func_basename_result
+
+    if test -d "$2"; then
+
+      func_copy_cb "$my_f1" \
+	`$ECHO "$1" | $SED "$dirname"` "$2" "$3"
+
+    else
+
+      # Supporting this would mean changing the timestamp:
+      func_dirname_and_basename "$2"
+      my_tname=$func_basename_result
+      test "X$my_f1" = "X$my_tname" \
+        || func_fatal_error "func_copy() cannot change filename on copy"
+
+      func_copy_cb "$my_f1" \
+        `$ECHO "$1" | $SED "$dirname"` \
+        `$ECHO "$2" | $SED "$dirname"` \
+	"$3"
+
+    fi
+
+    return $copy_return_status # set in func_copy_cb
+}
+
+
+# func_copy_cb filename srcdir destdir [msg_var]
+# If option `--copy' was specified, or soft-linking SRCFILE to DESTFILE fails,
+# then try to copy SRCFILE to DESTFILE (without changing the timestamp if
+# possible).
+func_copy_cb ()
+{
+    $opt_debug
+    my_file="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    copy_return_status=1
+
+    # Libtool is probably misinstalled if this happens:
+    test -f "$my_srcdir/$my_file" ||
+        func_fatal_error "\`$my_file' not found in \`$my_srcdir'"
+
+    case $opt_verbose in
+      false) my_copy_msg="file \`$my_destdir/$my_file'"     ;;
+      *)     my_copy_msg="file from \`$my_srcdir/$my_file'" ;;
+    esac
+    func_mkdir_p `$ECHO "$my_destdir/$my_file" | $SED "$dirname"`
+
+    $RM "$my_destdir/$my_file"
+    if $opt_copy; then
+      if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+           | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1; } \
+	 && touch "$my_destdir/$my_file"; then
+	$opt_quiet || func_echo_once "$my_msg_var"
+	$opt_quiet || func_echo "copying $my_copy_msg"
+	copy_return_status=0
+      fi
+    else
+      if test "$my_file" = "aclocal.m4"; then
+	if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+	     | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1 ; }
+	then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "copying $my_copy_msg"
+	  copy_return_status=0
+	fi
+      else
+	if $LN_S "$my_srcdir/$my_file" "$my_destdir/$my_file"; then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "linking $my_copy_msg"
+	  copy_return_status=0
+	fi
+      fi
+    fi
+    if test "$copy_return_status" != 0; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      func_error "can not copy \`$my_srcdir/$my_file' to \`$my_destdir/'"
+      exit_status=$EXIT_FAILURE
+    fi
+}
+
+
+# func_copy_some_files srcfile_spec srcdir destdir [msg_var] [cb=func_copy_cb]
+# Call COPY_CB for each regular file in SRCDIR named by the ':' delimited
+# names in SRCFILE_SPEC.  The odd calling convention is needed to allow
+# spaces in file and directory names.
+func_copy_some_files ()
+{
+    $opt_debug
+    my_srcfile_spec="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_copy_cb="${5-func_copy_cb}"
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for my_filename in $my_srcfile_spec; do
+      IFS="$my_save_IFS"
+      if test -f "$my_srcdir/$my_filename"; then
+        if test "X$my_copy_cb" = Xfunc_copy_cb; then
+	  $opt_force || if test -f "$my_destdir/$my_filename"; then
+	    $opt_quiet || func_echo_once "$my_msg_var"
+	    $opt_quiet \
+	      || func_error "\`$my_destdir/$my_filename' exists: use \`--force' to overwrite"
+	    continue
+	  fi
+        fi
+      else
+	func_echo_once "$my_msg_var"
+	func_fatal_error "\`$my_filename' not found in \`$my_srcdir'"
+      fi
+
+      $my_copy_cb "$my_filename" "$my_srcdir" "$my_destdir" "$my_msg_var"
+    done
+    IFS="$my_save_IFS"
+}
+
+
+# func_fixup_Makefile srcfile srcdir destdir
+func_fixup_Makefile ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_fixup_non_subpackage_script="\
+      s,(LIBOBJS),(ltdl_LIBOBJS),g
+      s,(LTLIBOBJS),(ltdl_LTLIBOBJS),g
+      s,libltdl/configure.ac,,
+      s,libltdl/configure,,
+      s,libltdl/aclocal.m4,,
+      s,libltdl/config-h.in,,
+      s,libltdl/Makefile.am,,
+      s,libltdl/Makefile.in,,
+      /^[	 ]*\\\\\$/d"
+    case $my_filename in
+      Makefile.am)
+	my_fixup_non_subpackage_script=`echo "$my_fixup_non_subpackage_script" | \
+		sed 's,libltdl/,,'`
+	my_fixup_inc_paths_script= ;;
+      Makefile.inc)
+	repl=$ltdldir
+	repl_uscore=`$ECHO "$repl" | $SED 's,[/.+-],_,g'`
+	my_fixup_inc_paths_script="\
+	  s,libltdl_,@repl_uscore@_,
+	  s,libltdl/,@repl@/,
+	  s,: libltdl/,: @repl@/,
+	  s, -Ilibltdl , -I@repl@ ,
+	  s,\\\$(libltdl_,\$(@repl_uscore@_,
+	  s,)/libltdl ,)/@repl@ ,
+	  s,@repl_uscore@,${repl_uscore},g
+	  s,@repl@,${repl},g"
+	;;
+    esac
+
+    $RM "$my_destdir/$my_filename" 2>/dev/null
+    $opt_quiet || func_echo "creating file \`$my_destdir/$my_filename'"
+    if $opt_dry_run; then :;
+    else
+      $SED "$my_fixup_non_subpackage_script
+	    $my_fixup_inc_paths_script" \
+	< "$my_srcdir/$my_filename" > "$my_destdir/$my_filename" ||
+	func_fatal_error "cannot create $my_destdir/$my_filename"
+    fi
+}
+
+# func_scan_files
+# Scan configure.(ac|in) and aclocal.m4 (if present) for use of libltdl
+# and libtool, possibly running some of these tools if necessary.
+# Libtoolize affects the contents of aclocal.m4, and should be run before
+# aclocal, so we can't use configure --trace which relies on a consistent
+# configure.(ac|in) and aclocal.m4.
+func_scan_files ()
+{
+    $opt_debug
+    # Prefer configure.ac to configure.in
+    test -f configure.ac && configure_ac=configure.ac
+    test -f "$configure_ac" || configure_ac=
+
+    # Set local variables to reflect contents of configure.ac
+    my_sed_scan_configure_ac='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,;
+	/AC_INIT/ {
+	    s,^.*$,seen_autoconf=:,
+	    p
+	}
+	d'
+    test -z "$configure_ac" \
+        || eval `$SED "$my_sed_scan_configure_ac" "$configure_ac"`
+
+    $seen_autoconf || {
+	my_configure_ac=
+	test -n "$configure_ac" && my_configure_ac="$configure_ac: "
+        func_verbose "${my_configure_ac}not using Autoconf"
+
+	# Make sure ltdldir and ltdl_mode have sensible defaults
+        # since we return early here:
+	test -n "$ltdldir" || ltdldir=libltdl
+	test -n "$ltdl_mode" || ltdl_mode=subproject
+
+	return
+    }
+
+    # ---------------------------------------------------- #
+    # Probe macro usage in configure.ac and/or aclocal.m4. #
+    # ---------------------------------------------------- #
+
+    my_sed_traces='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,
+        s,^.*AC_REQUIRE(.*$,,; s,^.*m4_require(.*$,,;
+	s,^.*m4_define(.*$,,
+	s,^.*A[CU]_DEFUN(.*$,,; s,^.*m4_defun(.*$,,
+	/AC_CONFIG_AUX_DIR(/ {
+	    s,^.*AC_CONFIG_AUX_DIR([[	 ]*\([^])]*\).*$,ac_auxdir=\1,
+	    p
+        }
+	/AC_CONFIG_MACRO_DIR(/ {
+	    s,^.*AC_CONFIG_MACRO_DIR([[	 ]*\([^])]*\).*$,ac_macrodir=\1,
+	    p
+        }
+	/_LT_CONFIG_LTDL_DIR(/d
+	/LT_CONFIG_LTDL_DIR(/ {
+	    s,^.*LT_CONFIG_LTDL_DIR([[	 ]*\([^])]*\).*$,ac_ltdldir=\1,
+	    p
+	}
+	/\[A[CM]_PROG_LIBTOOL/d
+	/A[CM]_PROG_LIBTOOL/ {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/the.*option into.*LT_INIT.*parameter/d
+	/\[LT_INIT/d
+	/LT_INIT/		 {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/\[LTDL_INIT/d
+	/LTDL_INIT/          {
+	    s,^.*LTDL_INIT([[	 ]*\([^])]*\).*$,ltdl_options="\1",
+	    s,^.*LTDL_INIT[	 ]*$,seen_ltdl=:,
+	    p
+	}
+	/LT_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_LIB_LTDL/        {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	d'
+    eval `cat aclocal.m4 "$configure_ac" 2>/dev/null | $SED "$my_sed_traces"`
+
+
+    # ----------------- #
+    # Validate ltdldir. #
+    # ----------------- #
+
+    ac_ltdldir=`$ECHO "$ac_ltdldir" | $SED 's,/*$,,'`
+
+    # If $configure_ac contains LT_CONFIG_LTDL_DIR, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ac_ltdldir" in
+      *\$*)
+        func_fatal_error "can not handle variables in LT_CONFIG_LTDL_DIR"
+        ;;
+    esac
+
+    # If neither --ltdl nor LT_CONFIG_LTDL_DIR is specified, default to
+    # `libltdl'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given! (If LT_CONFIG_LTDL_DIR is not specified
+    # we suggest adding it later in this code.)
+    case x$ac_ltdldir,x$ltdldir in
+      x,x)	ltdldir=libltdl		;;
+      x*,x)	ltdldir=$ac_ltdldir	;;
+      x,x*)	ltdldir=$ltdldir	;;
+      *)
+        test x"$ac_ltdldir" = x"$ltdldir" || \
+	    func_fatal_error "--ltdl='$ltdldir' does not match LT_CONFIG_LTDL_DIR($ac_ltdldir)"
+	;;
+    esac
+
+
+    # ------------------- #
+    # Validate ltdl_mode. #
+    # ------------------- #
+
+    test -n "$ltdl_options" && seen_ltdl=:
+
+    # If $configure_ac contains LTDL_INIT, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ltdl_options" in
+      *\$*)
+        func_fatal_error "can not handle variables in LTDL_INIT"
+        ;;
+    esac
+
+    # Extract mode name from ltdl_options
+    # FIXME: Diagnose multiple conflicting modes in ltdl_options
+    ac_ltdl_mode=
+    case " $ltdl_options " in
+      *" nonrecursive "*)  ac_ltdl_mode=nonrecursive	;;
+      *" recursive "*)     ac_ltdl_mode=recursive	;;
+      *" subproject "*)    ac_ltdl_mode=subproject	;;
+    esac
+
+    # If neither --ltdl nor an LTDL_INIT mode is specified, default to
+    # `subproject'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given!
+    case x$ac_ltdl_mode,x$ltdl_mode in
+      x,x)	ltdl_mode=subproject	;;
+      x*,x)	ltdl_mode=$ac_ltdl_mode	;;
+      x,x*)	ltdl_mode=$ltdl_mode	;;
+      *)
+        test x"$ac_ltdl_mode" = x"$ltdl_mode" || \
+	    func_fatal_error "--$ltdl_mode does not match LTDL_INIT($ac_ltdl_mode)"
+	;;
+    esac
+
+    # ---------------- #
+    # Validate auxdir. #
+    # ---------------- #
+
+    if test -n "$ac_auxdir"; then
+      # If $configure_ac contains AC_CONFIG_AUX_DIR, check that it was
+      # not given in terms of a shell variable!
+      case "$ac_auxdir" in
+      *\$*)
+        func_fatal_error "can not handle variables in AC_CONFIG_AUX_DIR"
+        ;;
+      *)
+	auxdir=$ac_auxdir
+	;;
+      esac
+    else
+      # Try to discover auxdir the same way it is discovered by configure.
+      # Note that we default to the current directory.
+      for dir in . .. ../..; do
+        if test -f "$dir/install-sh"; then
+          auxdir=$dir
+          break
+        elif test -f "$dir/install.sh"; then
+          auxdir="$dir"
+          break
+        fi
+      done
+    fi
+
+    # Just use the current directory if all else fails.
+    test -n "$auxdir" || auxdir=.
+
+
+    # ------------------------------ #
+    # Find local m4 macro directory. #
+    # ------------------------------ #
+
+    # Hunt for ACLOCAL_AMFLAGS in `Makefile.am' for a `-I' argument.
+
+    my_sed_aclocal_flags='
+        /^[	 ]*ACLOCAL_[A-Z_]*FLAGS[	 ]*=[	 ]*/ {
+	    s,,,
+	    q
+	}
+	d'
+    if test -f Makefile.am; then
+      my_macrodir_is_next=false
+      for arg in `$SED "$my_sed_aclocal_flags" Makefile.am`; do
+        if $my_macrodir_is_next; then
+          am_macrodir="$arg"
+          break
+        else
+	  case $arg in
+	    -I) my_macrodir_is_next=: ;;
+	    -I*)
+	      am_macrodir=`$ECHO "$arg" | sed 's,^-I,,'`
+	      break
+	      ;;
+	    *) my_macrodir_is_next=false ;;
+	  esac
+        fi
+      done
+    fi
+
+    macrodir="$ac_macrodir"
+    test -z "$macrodir" && macrodir="$am_macrodir"
+
+    if test -n "$am_macrodir" && test -n "$ac_macrodir"; then
+      test "$am_macrodir" = "$ac_macrodir" \
+        || func_fatal_error "AC_CONFIG_MACRO_DIR([$ac_macrodir]) conflicts with ACLOCAL_AMFLAGS=-I $am_macrodir."
+    fi
+}
+
+# func_included_files searchfile
+# Output SEARCHFILE and, recursively, every file that it m4_includes.
+func_included_files ()
+{
+    $opt_debug
+    my_searchfile="$1"
+
+    my_include_regex=
+    my_sed_include='
+        /^m4_include(\[.*\])$/ {
+	    s,^m4_include(\[\(.*\)\])$,\1,
+	    p
+	}
+        d'
+
+    if test -f "$my_searchfile"; then
+      $ECHO "$my_searchfile"
+
+      # Only recurse when we don't care if all the variables we use get
+      # trashed, since they are in global scope.
+      for my_filename in `$SED "$my_sed_include" "$my_searchfile"`; do
+	func_included_files $my_filename
+      done
+    fi
+}
+
+
+# func_serial filename [macro_regex]
+# Output the value of the serial number comment in FILENAME, where the
+# comment line must also match MACRO_REGEX, if given.
+func_serial ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_macro_regex="$2"
+    my_sed_serial='
+	/^# serial [1-9][0-9.]*[	 ]*'"$my_macro_regex"'[	 ]*$/ {
+	    s,^# serial \([1-9][0-9.]*\).*$,\1,
+	    q
+	}
+	d'
+
+    # Search FILENAME and all the files it m4_includes for a serial number
+    # in the file that AC_DEFUNs MACRO_REGEX.
+    my_serial=
+    func_dirname_and_basename "$my_filename"
+    my_filebase=$func_basename_result
+    for my_file in `func_included_files "$my_filename"`; do
+      if test -z "$my_macro_regex" ||
+         test "$my_filename" = aclocal.m4 ||
+         test "X$my_macro_regex" = "X$my_filebase" ||
+         func_grep '^AC_DEFUN(\['"$my_macro_regex" "$my_file"
+      then
+        my_serial=`$SED -e "$my_sed_serial" "$my_file"`
+	break
+      fi
+    done
+
+    # If the file has no serial number, we assume it's ancient.
+    test -n "$my_serial" || my_serial=0
+
+    $ECHO "$my_serial"
+}
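+# For instance, with no MACRO_REGEX given, a file whose serial comment
+# reads `# serial 6' should make func_serial output `6' (illustrative
+# example).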
+
+
+# func_serial_max serial1 serial2
+# Compare (possibly multi-part, '.' delimited) serial numbers, and
+# return the largest in $func_serial_max_result.  If they are the
+# same, func_serial_max_result will be empty.
+func_serial_max ()
+{
+    $opt_debug
+    my_serial1="$1"
+    my_serial2="$2"
+
+    my_sed_dot='s/\..*$//g'
+    my_sed_rest='s/^[0-9][1-9]*\.*//'
+    my_sed_digits='s/[^0-9.]//g'
+
+    # In case they turn out to be the same, we'll set it to empty
+    func_serial_max_result=
+
+    test "X$1$2" = X`$ECHO "$1$2" | $SED "$my_sed_digits"` || {
+      func_error "serial numbers \`$1' or \`$2' contain non-digit chars"
+      return
+    }
+
+    while test -n "$my_serial1$my_serial2"; do
+      my_serial1_part=`$ECHO "$my_serial1" | $SED "$my_sed_dot"`
+      my_serial2_part=`$ECHO "$my_serial2" | $SED "$my_sed_dot"`
+
+      test -z "$my_serial1_part$my_serial2_part" \
+        && break
+
+      test -z "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      test -z "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial1_part" -gt "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial2_part" -gt "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      my_serial1=`$ECHO "$my_serial1" | $SED "$my_sed_rest"`
+      my_serial2=`$ECHO "$my_serial2" | $SED "$my_sed_rest"`
+    done
+}
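+# For instance, `func_serial_max 2.4 2.10' should leave `2.10' in
+# $func_serial_max_result, since the dot-separated parts are compared
+# numerically (illustrative example).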
+
+
+# func_serial_update_check srcfile src_serial destfile dest_serial
+# Unless SRC_SERIAL is newer than DEST_SERIAL, set
+# $func_serial_update_check_result to 'false'.
+func_serial_update_check ()
+{
+    $opt_debug
+    my_srcfile="$1"
+    my_src_serial="$2"
+    my_destfile="$3"
+    my_dest_serial="$4"
+    my_update_p=:
+
+    if test -f "$my_destfile"; then
+      test "X$my_src_serial" = "X0" && {
+        func_warning "no serial number on \`$my_srcfile', not copying."
+	return
+      }
+
+      # Determine whether the destination has an older serial.
+      func_serial_max "$my_src_serial" "$my_dest_serial"
+      test "X$my_src_serial" = "X$func_serial_max_result" || my_update_p=false
+
+      test "X$my_src_serial" = "X$func_serial_max_result" \
+        && func_verbose "\`$my_srcfile' is serial $my_src_serial, greater than $my_dest_serial in \`$my_destfile'"
+
+      if test "X$my_dest_serial" = "X$func_serial_max_result"; then
+        func_verbose "\`$my_srcfile' is serial $my_src_serial, less than $my_dest_serial in \`$my_destfile'"
+	$opt_force || if test -n "$ac_macrodir$ac_ltdldir"; then
+           func_error "\`$my_destfile' is newer: use \`--force' to overwrite"
+        fi
+      fi
+    fi
+
+    func_serial_update_check_result="$my_update_p"
+}
+
+
+# func_aclocal_update_check filename
+# Unless the serial number of FILENAME is newer than the matching serial
+# number in aclocal.m4, set $func_aclocal_update_check_result to 'false'.
+func_aclocal_update_check ()
+{
+    $opt_debug
+    my_srcfile="$aclocaldir/$1"
+    my_destfile="aclocal.m4"
+
+    case $need in
+      libtool.m4)
+	my_src_serial=`func_serial "$my_srcfile" LT_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LT_INIT`
+
+	# Strictly, this libtoolize ought not to have to deal with ancient
+	# serial formats, but we accept them here to be complete:
+	test "X$my_src_serial" = "X0" &&
+	  my_src_serial=`func_serial "$my_srcfile" 'A[CM]_PROG_LIBTOOL'`
+	test "X$my_dest_serial" = "X0" &&
+	  my_dest_serial=`func_serial "$my_destfile" 'A[CM]_PROG_LIBTOOL'`
+	;;
+      ltdl.m4)
+	my_src_serial=`func_serial "$my_srcfile" LTDL_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LTDL_INIT`
+	;;
+      *)
+	my_src_serial=`func_serial "$my_srcfile" "$need"`
+	my_dest_serial=`func_serial "$my_destfile" "$need"`
+	;;
+    esac
+
+    func_serial_update_check \
+      "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+
+    func_aclocal_update_check_result="$func_serial_update_check_result"
+}
+
+
+# func_serial_update filename srcdir destdir [msg_var] [macro_re] [old_macro_re]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer serial number, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.  If given, MACRO_REGEX or
+# OLD_MACRO_REGEX must match any text after "# serial N" in both files.
+func_serial_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_macro_regex="$5"
+    my_old_macro_regex="$6"
+
+    my_serial_update_p=:
+    my_return_status=1
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`func_serial "$my_srcfile" "$my_macro_regex"`
+      my_dest_serial=`func_serial "$my_destfile" "$my_macro_regex"`
+
+      # Strictly, this libtoolize ought not to have to deal with ancient
+      # serial formats, but we accept them here to be complete:
+      test "X$my_src_serial" = "X0" &&
+        my_src_serial=`func_serial "$my_srcfile" "$my_old_macro_regex"`
+
+      test "X$my_dest_serial" = "X0" &&
+        my_dest_serial=`func_serial "$my_destfile" "$my_old_macro_regex"`
+
+      func_serial_update_check \
+        "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_serial_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_serial_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+      my_return_status=$?
+    elif $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      $opt_quiet \
+        || func_echo "\`$my_destfile' is already up to date."
+    fi
+
+    # Do this after the copy for a hand-maintained `aclocal.m4', in case
+    # it has `m4_include([DESTFILE])', so the copy effectively already
+    # updated `aclocal.m4'.
+    my_included_files=`func_included_files aclocal.m4`
+    case `echo " $my_included_files " | $NL2SP` in
+
+      # Skip included files:
+      *" $my_destfile "*) ;;
+
+      # Otherwise compare to aclocal.m4 serial number (func_serial
+      # returns 0 for older macro serial numbers before we provided
+      # serial tags, so the update message will be correctly given
+      # if aclocal.m4 contains an untagged (i.e. older) macro file):
+      *)
+        if test -f aclocal.m4; then
+          func_serial_max \
+              "$my_src_serial" `func_serial aclocal.m4 "$my_macro_regex"`
+          if test "X$my_src_serial" = "X$func_serial_max_result"; then
+              func_echo_once "$my_msg_var"
+	      func_echo "You should add the contents of \`$my_destfile' to \`aclocal.m4'."
+          fi
+        fi
+        ;;
+    esac
+    return $my_return_status
+}
+
+
+# func_keyword_update filename srcdir destdir sed_script [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision according to the serial number extracted by
+# SED_SCRIPT, or DESTFILE does not yet exist, or the user specified
+# `--force' at the command line.
+func_keyword_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_sed_script="$4"
+    my_msg_var="$5"
+
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    my_keyword_update_p=:
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`$SED -e "$my_sed_script" "$my_srcfile"`
+      test -z "$my_src_serial" && {
+        func_warning "no serial number in \`$my_srcfile', not copying."
+	return
+      }
+
+      my_dest_serial=`$SED -e "$my_sed_script" "$my_destfile"`
+      test -n "$my_dest_serial" || my_dest_serial=0
+
+      func_serial_update_check \
+         "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_keyword_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_keyword_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+    elif $opt_verbose || $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      func_echo_once "$my_msg_var"
+      func_echo "\`$my_destfile' is already up to date."
+    fi
+}
+
+
+# func_ltmain_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_ltmain_update ()
+{
+    $opt_debug
+    my_sed_ltmain='
+	/^package_revision='\''*[0-9][1-9.]*'\''*/ {
+	    s,^package_revision='\''*\([0-9.]*\)'\''*[	 ]*$,\1,
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_ltmain" "$4"
+
+    return $my_return_status
+}
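+
+# For illustration (revision number invented): a line such as
+#   package_revision='1.3337'
+# in ltmain.sh is reduced by $my_sed_ltmain to `1.3337' for the comparison.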
+
+
+# func_config_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_config_update ()
+{
+    $opt_debug
+    my_sed_config='
+	/^timestamp='\''*[0-9][1-9-]*'\''*/ {
+	    s,^timestamp='\''*\([0-9-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_config" "$4"
+
+    return $my_return_status
+}
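+
+# For illustration (date invented): a line such as
+#   timestamp='2012-02-10'
+# in config.guess or config.sub becomes `2012.02.10', so dashes compare the
+# same way as the dots used by ordinary serial numbers.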
+
+
+# func_install_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_install_update ()
+{
+    $opt_debug
+    my_sed_install='
+	/^scriptversion='\''*[0-9][1-9.-]*'\''*/ {
+	    s,[#;].*,,
+	    s,^scriptversion='\''*\([0-9.-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_install" "$4"
+
+    return $my_return_status
+}
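+
+# For illustration (version invented): a line such as
+#   scriptversion=2011-11-20.07; # UTC
+# in install-sh has the `; # UTC' tail stripped and is rewritten as
+# `2011.11.20.07' before the serial comparison.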
+
+
+# func_massage_aclocal_DATA [glob_exclude]
+# @aclocal_DATA\@ is substituted as per its value in Makefile.am;
+# this function massages it into a suitable format for func_copy_some_files.
+func_massage_aclocal_DATA ()
+{
+    $opt_debug
+    pkgmacro_files=     # GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgmacro_files from the value used in Makefile.am.
+    for my_filename in m4/argz.m4 m4/libtool.m4 m4/ltdl.m4 m4/ltoptions.m4 m4/ltsugar.m4 m4/ltversion.m4 m4/lt~obsolete.m4; do
+      func_dirname_and_basename "$my_filename"
+      my_filename=$func_basename_result
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      pkgmacro_files="$pkgmacro_files:$my_filename"
+    done
+
+    # strip spurious leading `:'
+    pkgmacro_files=`$ECHO "$pkgmacro_files" | $SED 's,^:*,,'`
+}
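+
+# E.g. with the exclusion pattern `argz.m4|libtool.m4|ltdl.m4' used by
+# func_install_pkgmacro_files, the result is
+#   ltoptions.m4:ltsugar.m4:ltversion.m4:lt~obsolete.m4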
+
+
+# func_install_pkgmacro_subproject
+# Unless --quiet was passed, display a message. Then copy pkgmacro_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgmacro_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$subproject_macrodir/$file" && func_verbose "rm -f '$subproject_macrodir/$file'"
+      rm -f "$subproject_macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test "x$macrodir" != "x$subproject_macrodir"; then
+      pkgmacro_header="putting macros in \`$subproject_macrodir'."
+    elif test -n "$subproject_macrodir"; then
+      pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$subproject_macrodir'."
+    fi
+
+    func_copy_some_files "argz.m4:libtool.m4:ltdl.m4:$pkgmacro_files" \
+      "$aclocaldir" "$subproject_macrodir" pkgmacro_header
+}
+
+
+# func_install_pkgmacro_parent
+# Unless --quiet was passed, or AC_CONFIG_MACRO_DIR was not seen, display
+# a message.  Then update appropriate macros if newer ones are available
+# from the libtool installation tree.
+func_install_pkgmacro_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$macrodir/$file" && func_verbose "rm -f '$macrodir/$file'"
+      rm -f "$macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test -n "$ac_macrodir"; then
+      my_pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$ac_macrodir'."
+    elif test -n "$macrodir"; then
+      my_pkgmacro_header="putting macros in \`$macrodir'."
+    fi
+
+    if $opt_ltdl; then
+      func_serial_update argz.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header argz.m4
+    else
+      func_verbose "Not copying \`$macrodir/argz.m4', libltdl not used."
+    fi
+
+    func_serial_update  libtool.m4 "$aclocaldir" "$macrodir" \
+      my_pkgmacro_header LT_INIT 'A[CM]_PROG_LIBTOOL'
+
+    if $opt_ltdl; then
+      func_serial_update ltdl.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header 'LTDL_INIT'
+    else
+      func_verbose "Not copying \`$macrodir/ltdl.m4', libltdl not used."
+    fi
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for file in $pkgmacro_files; do
+      IFS="$my_save_IFS"
+      func_serial_update "$file" "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header "$file"
+    done
+    IFS="$my_save_IFS"
+}
+
+
+# func_install_pkgmacro_files
+# Install copies of the libtool and libltdl m4 macros into this package.
+func_install_pkgmacro_files ()
+{
+    $opt_debug
+
+    # argz.m4, libtool.m4 and ltdl.m4 are handled specially:
+    func_massage_aclocal_DATA 'argz.m4|libtool.m4|ltdl.m4'
+
+  # 1. Parent has separate macrodir to subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test -n "$macrodir" && test "x$macrodir" != "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_parent
+      func_install_pkgmacro_subproject
+
+  # 2. Parent shares macrodir with subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$macrodir" = "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_subproject
+
+  # 3. Not a subproject, but macrodir was specified in parent:
+    elif test -n "$macrodir"; then
+      func_install_pkgmacro_parent
+
+  # 4. AC_CONFIG_MACRO_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_MACRO_DIR not defined, not copying libtool macros."
+    fi
+}
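+
+# Example (directory names illustrative): with `--ltdl', subproject mode,
+# AC_CONFIG_MACRO_DIR([m4]) in the parent and `libltdl/m4' as the subproject
+# macrodir, case 1 applies and both trees are updated; with no macrodir at
+# all, case 4 merely notes (in verbose mode) that no macros are copied.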
+
+
+# func_massage_pkgltdl_files [glob_exclude]
+# @pkgltdl_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgltdl_files ()
+{
+    $opt_debug
+    pkgltdl_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgltdl_files from the value used in Makefile.am
+    for my_filename in libltdl/COPYING.LIB libltdl/README libltdl/Makefile.inc libltdl/Makefile.am libltdl/configure.ac libltdl/aclocal.m4 libltdl/Makefile.in libltdl/config-h.in libltdl/configure libltdl/argz_.h libltdl/argz.c libltdl/loaders/dld_link.c libltdl/loaders/dlopen.c libltdl/loaders/dyld.c libltdl/loaders/load_add_on.c libltdl/loaders/loadlibrary.c libltdl/loaders/shl_load.c libltdl/lt__dirent.c libltdl/lt__strl.c libltdl/libltdl/lt__alloc.h libltdl/libltdl/lt__dirent.h libltdl/libltdl/lt__glibc.h libltdl/libltdl/lt__private.h libltdl/libltdl/lt__strl.h libltdl/libltdl/lt_dlloader.h libltdl/libltdl/lt_error.h libltdl/libltdl/lt_system.h libltdl/libltdl/slist.h libltdl/loaders/preopen.c libltdl/lt__alloc.c libltdl/lt_dlloader.c libltdl/lt_error.c libltdl/ltdl.c libltdl/ltdl.h libltdl/slist.c; do
+
+      # Strip surplus leading 'libltdl/':
+      my_filename=`expr "X$my_filename" : 'Xlibltdl/\(.*\)'`
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgltdl_files: in
+        *:$my_filename:*) ;;
+	*) pkgltdl_files="$pkgltdl_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgltdl_files=`$ECHO "$pkgltdl_files" | $SED 's,^:*,,'`
+}
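+
+# E.g. `libltdl/loaders/dlopen.c' becomes `loaders/dlopen.c' and
+# `libltdl/ltdl.h' becomes `ltdl.h'; names matching the exclusion glob
+# (such as `Makefile.am' outside subproject mode) are dropped before the
+# list is joined with `:'.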
+
+
+# func_install_pkgltdl_files
+# Install copies of the libltdl files into this package.  Any auxiliary
+# or m4 macro files needed in the libltdl tree will also be copied by
+# func_install_pkgconfig_files and func_install_pkgmacro_files resp.
+func_install_pkgltdl_files ()
+{
+    $opt_debug
+    $opt_ltdl || return
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgltdl_files; do
+      test -f "$ltdldir/$file" && func_verbose "rm -f '$ltdldir/$file'"
+      rm -f "$ltdldir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified `--ltdl'.
+    $opt_quiet || if test -n "$ac_ltdldir"; then
+      pkgltdl_header="putting libltdl files in LT_CONFIG_LTDL_DIR, \`$ac_ltdldir'."
+    elif test -n "$ltdldir"; then
+      pkgltdl_header="putting libltdl files in \`$ltdldir'."
+    fi
+
+    # These files are handled specially, depending on ltdl_mode:
+    if test "x$ltdl_mode" = "xsubproject"; then
+      func_massage_pkgltdl_files 'Makefile.inc'
+    else
+      func_massage_pkgltdl_files 'Makefile.am|Makefile.in*|aclocal.m4|config*'
+    fi
+
+    func_copy_some_files "$pkgltdl_files" \
+      "$pkgltdldir/libltdl" "$ltdldir" pkgltdl_header
+
+    # For recursive ltdl modes, copy a suitable Makefile.{am,inc}:
+    case $ltdl_mode in
+      recursive)
+        func_fixup_Makefile "Makefile.am" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+      nonrecursive)
+        func_fixup_Makefile "Makefile.inc" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+    esac
+}
+
+
+# func_massage_pkgconfig_files [glob_exclude]
+# @pkgconfig_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgconfig_files ()
+{
+    $opt_debug
+    pkgconfig_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgconfig_files from the value used in Makefile.am
+    for my_filename in config/compile config/config.guess config/config.sub config/depcomp config/install-sh config/missing config/ltmain.sh; do
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgconfig_files: in
+        *:$my_filename:*) ;;
+	*) pkgconfig_files="$pkgconfig_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgconfig_files=`$ECHO "$pkgconfig_files" | $SED 's,^:*,,'`
+}
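+
+# Unlike func_massage_pkgltdl_files, nothing is stripped here: the result is
+# the seven `config/...' names above joined with `:', e.g. it begins with
+# `config/compile:config/config.guess:config/config.sub'.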
+
+
+# func_install_pkgconfig_subproject
+# Unless --quiet was passed, display a message. Then copy pkgconfig_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgconfig_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$subproject_auxdir/$file" && func_verbose "rm -f '$subproject_auxdir/$file'"
+      rm -f "$subproject_auxdir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified an auxdir.
+    $opt_quiet || if test "x$ac_auxdir" = "x$subproject_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$subproject_auxdir'."
+    elif test -n "$auxdir"; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    func_copy_some_files "$pkgconfig_files" \
+      "$pkgdatadir" "$ltdldir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_parent
+# Unless --quiet was passed, or AC_CONFIG_AUX_DIR was not seen, display a
+# message.  Then update appropriate auxiliary files if newer ones are
+# available from the libtool installation tree.
+func_install_pkgconfig_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$auxdir/$file" && func_verbose "rm -f '$auxdir/$file'"
+      rm -f "$auxdir/$file"
+    done
+
+    if test -n "$ac_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$ac_auxdir'."
+    elif test -n "$auxdir" || test "x$ltdldir" = "x."; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    if $opt_install; then
+      func_config_update config.guess \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_config_update config.sub \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_install_update install-sh \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+    fi
+    func_ltmain_update ltmain.sh \
+      "$pkgdatadir/config" "$auxdir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_files
+# Install copies of the auxiliary files into this package according to
+# whether libltdl is included as a subproject, and whether the parent
+# shares the AC_CONFIG_AUX_DIR setting.
+func_install_pkgconfig_files ()
+{
+    $opt_debug
+    func_massage_pkgconfig_files
+
+  # 1. Parent shares auxdir with subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test "x$ac_auxdir" = "x$subproject_auxdir"
+    then
+      func_install_pkgconfig_subproject
+
+  # 2. Parent has separate auxdir to subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$auxdir" != "x$subproject_auxdir" is implied
+    then
+      if $seen_autoconf; then
+	func_install_pkgconfig_parent
+      fi
+      func_install_pkgconfig_subproject
+
+  # 3. Not subproject, but AC_CONFIG_AUX_DIR was used in parent:
+    elif test -n "$ac_auxdir" || test "x$auxdir" = "x."; then
+      func_install_pkgconfig_parent
+
+  # 4. AC_CONFIG_AUX_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_AUX_DIR not defined, not copying libtool auxiliary files."
+    fi
+}
+
+
+# func_nonemptydir_p dirvar
+# DIRVAR is the name of a variable to evaluate.  Unless DIRVAR names
+# a directory that exists and is non-empty, abort with a diagnostic.
+func_nonemptydir_p ()
+{
+    $opt_debug
+    my_dirvar="$1"
+    my_dir=`eval echo "\\\$$my_dirvar"`
+
+    # Is it a directory at all?
+    test -d "$my_dir" \
+      || func_fatal_error "\$$my_dirvar is not a directory: \`$my_dir'"
+
+    # check that the directory's contents can be ls'ed
+    test -n "`{ cd $my_dir && ls; } 2>/dev/null`" \
+        || func_fatal_error "can not list files: \`$my_dir'"
+}
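+
+# Typical use (see the main body below): `func_nonemptydir_p pkgdatadir'
+# aborts unless the directory named by $pkgdatadir exists and its contents
+# can be listed.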
+
+
+# func_check_macros
+# Sanity check macros from aclocal.m4 against installed versions.
+func_check_macros ()
+{
+    $opt_debug
+    $opt_quiet && return
+    $seen_autoconf || return
+
+    ac_config_macro_dir_advised=false
+
+    if test -n "$ac_macrodir$ltdldir" && test -z "$macrodir"; then
+      my_ac_config_macro_srcdir="$aclocaldir"
+      if $opt_ltdl && test "$macrodir" != "$subproject_macrodir"; then
+	my_ac_config_macro_srcdir="$subproject_macrodir"
+      fi
+
+      my_needed="libtool.m4 ltoptions.m4 ltversion.m4 ltsugar.m4 lt~obsolete.m4"
+      $opt_ltdl && my_needed="$my_needed argz.m4 ltdl.m4"
+
+      if test -f "aclocal.m4"; then
+	for need in $my_needed; do
+	  func_aclocal_update_check $need
+	  $func_aclocal_update_check_result && my_missing="$my_missing $need"
+	done
+      else
+        my_missing="$my_needed"
+      fi
+
+      if test -n "$my_missing"; then
+        func_echo "You should add the contents of the following files to \`aclocal.m4':"
+        for need in $my_missing; do
+	  func_echo "  \`$my_ac_config_macro_srcdir/$need'"
+        done
+
+        if test "$my_ac_config_macro_srcdir" != "$aclocaldir"; then
+          func_echo "or else add \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' to $configure_ac."
+	  ac_config_macro_dir_advised=:
+        fi
+      fi
+    fi
+
+    ## ---------------------------------------------------------- ##
+    ## Since we return early here when --no-warn was given:       ##
+    ## DO NOT PUT ANYTHING BUT UPGRADE ADVICE MESSAGES BELOW HERE ##
+    ## ---------------------------------------------------------- ##
+
+    $opt_warning || return
+
+    $seen_libtool ||
+      func_echo "Remember to add \`LT_INIT' to $configure_ac."
+
+    # Suggest using LTDL_INIT if appropriate:
+    $opt_ltdl && if test x$seen_ltdl != x:; then
+      case $ltdl_mode in
+	subproject) ltdl_init_args=""               ;;
+	*)          ltdl_init_args="([$ltdl_mode])" ;;
+      esac
+      func_echo "Remember to add \`LTDL_INIT$ltdl_init_args' to $configure_ac."
+    fi
+
+    if $opt_ltdl; then
+      # Remind the user to call LT_CONFIG_LTDL_DIR:
+      test -n "$ac_ltdldir" ||
+        func_echo "Remember to add \`LT_CONFIG_LTDL_DIR([$ltdldir])' to \`$configure_ac'."
+
+      # For subproject mode, offer some suggestions for avoiding duplicate
+      # files in a project that uses libltdl:
+      if test "x$ltdl_mode" = "xsubproject"; then
+        test "$subproject_auxdir" = "$auxdir" ||
+          func_echo "Consider using \`AC_CONFIG_AUX_DIR([$subproject_auxdir])' in $configure_ac."
+        $ac_config_macro_dir_advised || test "$subproject_macrodir" = "$macrodir" ||
+          func_echo "Consider using \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' in $configure_ac."
+	ac_config_macro_dir_advised=:
+      fi
+    fi
+
+    # Suggest modern idioms for storing autoconf macros:
+    $ac_config_macro_dir_advised || if test -z "$ac_macrodir" || test x"$macrodir" = x.; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([m4])' to $configure_ac and"
+      func_echo "rerunning $progname, to keep the correct libtool macros in-tree."
+      ac_config_macro_dir_advised=:
+
+    elif test -z "$ac_macrodir$ltdldir"; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([$macrodir])' to $configure_ac,"
+      func_echo "and rerunning $progname and aclocal."
+      ac_config_macro_dir_advised=:
+    fi
+
+    if test -z "$am_macrodir$macrodir"; then
+      func_echo "Consider adding \`-I m4' to ACLOCAL_AMFLAGS in Makefile.am."
+
+    elif test -z "$am_macrodir"; then
+      if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" && test "$subproject_macrodir" != "$macrodir"; then
+	func_echo "Consider adding \`-I $subproject_macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      else
+        func_echo "Consider adding \`-I $macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      fi
+    fi
+
+    # Don't trace for this; we're just checking the user didn't invoke it
+    # directly from configure.ac.
+    $SED 's,dnl .*$,,; s,# .*$,,' "$configure_ac" | grep AC_PROG_RANLIB >/dev/null &&
+      func_echo "\`AC_PROG_RANLIB' is rendered obsolete by \`LT_INIT'"
+
+    # FIXME: Ensure ltmain.sh, libtool.m4 and ltdl.m4 are from the same release
+}
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+{
+  # Lists of all files libtoolize has ever installed.  These are removed
+  # before installing the latest files when --force was passed to help
+  # ensure a clean upgrade.
+  # Do not remove config.guess nor config.sub, we don't install them
+  # without --install, and the project may not be using Automake.
+  all_pkgconfig_files="ltmain.sh"
+  all_pkgmacro_files="argz.m4 libtool.m4 ltdl.m4 ltoptions.m4 ltsugar.m4 ltversion.in ltversion.m4 lt~obsolete.m4"
+  all_pkgltdl_files="COPYING.LIB Makefile Makefile.in Makefile.inc Makefile.am README acinclude.m4 aclocal.m4 argz_.h argz.c config.h.in config-h.in configure configure.ac configure.in libltdl/lt__alloc.h libltdl/lt__dirent.h libltdl/lt__glibc.h libltdl/lt__private.h libltdl/lt__strl.h libltdl/lt_dlloader.h libltdl/lt_error.h libltdl/lt_system.h libltdl/slist.h loaders/dld_link.c loaders/dlopen.c loaders/dyld.c loaders/load_add_on.c loaders/loadlibrary.c loaders/preopen.c loaders/shl_load.c lt__alloc.c lt__dirent.c lt__strl.c lt_dlloader.c lt_error.c ltdl.c ltdl.h slist.c"
+
+  # Locations for important files:
+  prefix=/
+  datadir=//share
+  pkgdatadir=//share/libtool
+  pkgltdldir=//share/libtool
+  aclocaldir=//share/aclocal
+  auxdir=
+  macrodir=
+  configure_ac=configure.in
+
+  seen_autoconf=false
+  seen_libtool=false
+  seen_ltdl=false
+
+  # test EBCDIC or ASCII
+  case `echo X|tr X '\101'` in
+   A) # ASCII based system
+      # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+    SP2NL='tr \040 \012'
+    NL2SP='tr \015\012 \040\040'
+    ;;
+   *) # EBCDIC based system
+    SP2NL='tr \100 \n'
+    NL2SP='tr \r\n \100\100'
+    ;;
+  esac
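+
+  # E.g. on an ASCII host $NL2SP turns CR and LF bytes into spaces; this is
+  # how func_serial_update flattens the output of func_included_files into a
+  # single line before matching it against "$my_destfile" above.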
+
+  # Allow the user to override the master libtoolize repository:
+  if test -n "$_lt_pkgdatadir"; then
+    pkgltdldir="$_lt_pkgdatadir"
+    pkgdatadir="$_lt_pkgdatadir/libltdl"
+    aclocaldir="$_lt_pkgdatadir/libltdl/m4"
+  fi
+  func_nonemptydir_p pkgltdldir
+  func_nonemptydir_p pkgdatadir
+  func_nonemptydir_p aclocaldir
+
+  func_scan_files
+
+  case $ltdldir in
+  .) ltdlprefix= ;;
+  *) ltdlprefix=$ltdldir/ ;;
+  esac
+  subproject_auxdir=${ltdlprefix}config
+  subproject_macrodir=${ltdlprefix}m4
+
+  # :::BE CAREFUL HERE:::
+  # func_check_macros needs to check whether --ltdl was specified when
+  # LTDL_INIT was not seen, so we can't just use one variable for both
+  # conditions, or that check will be impossible.   No need to clutter the
+  # rest of the code with '$opt_ltdl || $seen_ltdl' though, because we CAN
+  # safely set opt_ltdl to true if LTDL_INIT was seen:
+  $seen_ltdl && opt_ltdl=:
+
+  func_install_pkgconfig_files
+  func_install_pkgmacro_files
+  func_install_pkgltdl_files
+
+  func_check_macros
+}
+
+exit $exit_status
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/m4
new file mode 100755
index 0000000..90b6914
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/m4
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/make b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/make
new file mode 100755
index 0000000..8bbe10a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/bin/make
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.7.dylib b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.7.dylib
new file mode 100755
index 0000000..1f5d134
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.7.dylib
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.a b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.a
new file mode 100644
index 0000000..b2c4dac
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.a
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.dylib b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.dylib
new file mode 120000
index 0000000..7836ab4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.dylib
@@ -0,0 +1 @@
+libltdl.7.dylib
\ No newline at end of file
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.la b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.la
new file mode 100755
index 0000000..8071902
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-apple-darwin/lib/libltdl.la
@@ -0,0 +1,41 @@
+# libltdl.la - a libtool library file
+# Generated by libtool (GNU libtool) 2.4.2
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='libltdl.7.dylib'
+
+# Names of this library.
+library_names='libltdl.7.dylib libltdl.dylib'
+
+# The name of the static archive.
+old_library='libltdl.a'
+
+# Linker flags that can not go in dependency_libs.
+inherited_linker_flags=' '
+
+# Libraries that this one depends upon.
+dependency_libs=''
+
+# Names of additional weak libraries provided by this library
+weak_library_names=''
+
+# Version information for libltdl.
+current=10
+age=3
+revision=0
+
+# Is this an already installed library?
+installed=yes
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=no
+
+# Files to dlopen/dlpreopen
+dlopen=''
+dlpreopen=''
+
+# Directory that this library needs to be installed in:
+libdir='/x86_64-apple-darwin/lib'
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/aclocal b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/aclocal
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/aclocal
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 file in $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order into which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in first files we have scanned should override those from
+# later files.  So they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted, we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
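+
+# E.g. a macro file line '# serial 3337 ltversion.m4' (number illustrative)
+# matches $serial_line_rx with $1 = '3337', and '3337' in turn satisfies
+# $serial_number_rx, which accepts only digits and dots.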
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directory containing extra m4 files for macros definition,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # reraise default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # the destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However working around this is not
+	      # worth the trouble since nobody runs aclocal on a
+	      # read-only tree anyway.
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
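+
+# E.g. list_compare of (1, 12) and (1, 4, 1) returns 1: the first elements
+# tie, and 12 > 4 decides the result before the length difference is reached.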
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  This makes the 'm4'
+      # directory to occur twice here and fail on the second call to
+      # scan_m4_dirs([m4]) when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there be rather a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on white lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
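+
+# Note: when --install is in effect, non-Automake macro files sharing a
+# basename are ranked by their '#serial' line; a newer serial discards the
+# definitions recorded from the older copy (see the %serial and
+# $serial_older handling above).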
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # File included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
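+
+# E.g. if %FILES contains two macro files A and B and A m4_includes B, then B
+# is removed from %FILES, because including A already brings in B.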
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code .= "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contain harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add looks like it comes from outside the project,
+      # copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+  # creates aclocal.m4t and then renames it to aclocal.m4, but the
+  # rebuild rules generated by Automake create aclocal.m4 directly --
+  # this would give two ways to get the same file, with a different
+  # name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in a loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings (default)
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, add any directory listed in the 'dirlist' file.
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
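+# Editor's illustrative sketch (not part of the generated script; the
+# directory names below are hypothetical): a 'dirlist' file in the system
+# acdir lists extra directories, one glob per line, e.g.
+#   /usr/local/share/aclocal    # site-wide macros
+#   /opt/*/share/aclocal
+# Each existing match is appended to @system_includes by the loop above.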
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
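+# Editor's illustrative sketch (hypothetical paths): with
+# ACLOCAL_PATH=/opt/m4:/site/m4 and both directories present, the reversed
+# split plus unshift above turn the default
+#   @system_includes = ('//share/aclocal')
+# into
+#   @system_includes = ('/opt/m4', '/site/m4', '//share/aclocal');
+# earlier ACLOCAL_PATH entries therefore keep the higher precedence.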
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some files have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directory specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options nor AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/aclocal-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/aclocal-1.14
new file mode 100755
index 0000000..cf7f3ed
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/aclocal-1.14
@@ -0,0 +1,1235 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/aclocal.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# aclocal - create aclocal.m4 by scanning configure.ac
+
+# Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+BEGIN
+{
+  @Aclocal::perl_libdirs = ('//share/automake-1.14')
+    unless @Aclocal::perl_libdirs;
+  unshift @INC, @Aclocal::perl_libdirs;
+}
+
+use strict;
+
+use Automake::Config;
+use Automake::General;
+use Automake::Configure_ac;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::XFile;
+use Automake::FileUtils;
+use File::Basename;
+use File::Path ();
+
+# Some globals.
+
+# Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+# FIXME: To be removed in Automake 2.0, once we can assume autoconf
+#        2.70 or later.
+# FIXME: keep in sync with 'internal/ac-config-macro-dirs.m4'.
+my $ac_config_macro_dirs_fallback =
+  'm4_ifndef([AC_CONFIG_MACRO_DIRS], [' .
+    'm4_defun([_AM_CONFIG_MACRO_DIRS], [])' .
+    'm4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])' .
+  '])';
+
+# We do not operate in threaded mode.
+$perl_threads = 0;
+
+# Include paths for searching macros.  We search macros in this order:
+# user-supplied directories first, then the directory containing the
+# automake macros, and finally the system-wide directories for
+# third-party macros.
+# @user_includes can be augmented with -I or AC_CONFIG_MACRO_DIRS.
+# @automake_includes can be reset with the '--automake-acdir' option.
+# @system_includes can be augmented with the 'dirlist' file or the
+# ACLOCAL_PATH environment variable, and reset with the '--system-acdir'
+# option.
+my @user_includes = ();
+my @automake_includes = ("//share/aclocal-$APIVERSION");
+my @system_includes = ('//share/aclocal');
+
+# Whether we should copy M4 files into $user_includes[0].
+my $install = 0;
+
+# --diff
+my @diff_command;
+
+# --dry-run
+my $dry_run = 0;
+
+# configure.ac or configure.in.
+my $configure_ac;
+
+# Output file name.
+my $output_file = 'aclocal.m4';
+
+# Option --force.
+my $force_output = 0;
+
+# Modification time of the youngest dependency.
+my $greatest_mtime = 0;
+
+# Which macros have been seen.
+my %macro_seen = ();
+
+# Remember the order in which we scanned the files.
+# It's important to output the contents of aclocal.m4 in the opposite order.
+# (Definitions in the first files we scanned should override those from
+# later files.  So they must appear last in the output.)
+my @file_order = ();
+
+# Map macro names to file names.
+my %map = ();
+
+# Ditto, but records the last definition of each macro as returned by --trace.
+my %map_traced_defs = ();
+
+# Map basenames to macro names.
+my %invmap = ();
+
+# Map file names to file contents.
+my %file_contents = ();
+
+# Map file names to file types.
+my %file_type = ();
+use constant FT_USER => 1;
+use constant FT_AUTOMAKE => 2;
+use constant FT_SYSTEM => 3;
+
+# Map file names to included files (transitively closed).
+my %file_includes = ();
+
+# Files which have already been added.
+my %file_added = ();
+
+# Files that have already been scanned.
+my %scanned_configure_dep = ();
+
+# Serial numbers, for files that have one.
+# The key is the basename of the file,
+# the value is the serial number represented as a list.
+my %serial = ();
+
+# Matches a macro definition.
+#   AC_DEFUN([macroname], ...)
+# or
+#   AC_DEFUN(macroname, ...)
+# When macroname is '['-quoted, we accept any character in the name,
+# except ']'.  Otherwise macroname stops on the first ']', ',', ')',
+# or '\n' encountered.
+my $ac_defun_rx =
+  "(?:AU_ALIAS|A[CU]_DEFUN|AC_DEFUN_ONCE)\\((?:\\[([^]]+)\\]|([^],)\n]+))";
+
+# Matches an AC_REQUIRE line.
+my $ac_require_rx = "AC_REQUIRE\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Matches an m4_include line.
+my $m4_include_rx = "(m4_|m4_s|s)include\\((?:\\[([^]]+)\\]|([^],)\n]+))\\)";
+
+# Match a serial number.
+my $serial_line_rx = '^#\s*serial\s+(\S*)';
+my $serial_number_rx = '^\d+(?:\.\d+)*$';
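+# Editor's illustrative sketch: a macro file header such as
+#   # serial 7
+# matches $serial_line_rx with $1 = '7', and '7' satisfies
+# $serial_number_rx; a value like '7a' would instead be diagnosed as an
+# ill-formed serial number by scan_file below.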
+
+# Autoconf version.  This variable is set by 'trace_used_macros'.
+my $ac_version;
+
+# User directory containing extra m4 files for macro definitions,
+# as extracted from calls to the macro AC_CONFIG_MACRO_DIRS.
+# This variable is updated by 'trace_used_macros'.
+my @ac_config_macro_dirs;
+
+# If set, names a temporary file that must be erased on abnormal exit.
+my $erase_me;
+
+# Constants for the $ERR_LEVEL parameter of the 'scan_m4_dirs' function.
+use constant SCAN_M4_DIRS_SILENT => 0;
+use constant SCAN_M4_DIRS_WARN => 1;
+use constant SCAN_M4_DIRS_ERROR => 2;
+
+################################################################
+
+# Prototypes for all subroutines.
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub add_file ($);
+sub add_macro ($);
+sub check_acinclude ();
+sub install_file ($$);
+sub list_compare (\@\@);
+sub parse_ACLOCAL_PATH ();
+sub parse_arguments ();
+sub reset_maps ();
+sub scan_configure ();
+sub scan_configure_dep ($);
+sub scan_file ($$$);
+sub scan_m4_dirs ($$@);
+sub scan_m4_files ();
+sub strip_redundant_includes (%);
+sub trace_used_macros ();
+sub unlink_tmp (;$);
+sub usage ($);
+sub version ();
+sub write_aclocal ($@);
+sub xmkdir_p ($);
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+################################################################
+
+# Erase temporary file ERASE_ME.  Handle signals.
+sub unlink_tmp (;$)
+{
+  my ($sig) = @_;
+
+  if ($sig)
+    {
+      verb "caught SIG$sig, bailing out";
+    }
+  if (defined $erase_me && -e $erase_me && !unlink ($erase_me))
+    {
+      fatal "could not remove '$erase_me': $!";
+    }
+  undef $erase_me;
+
+  # reraise default handler.
+  if ($sig)
+    {
+      $SIG{$sig} = 'DEFAULT';
+      kill $sig => $$;
+    }
+}
+
+$SIG{'INT'} = $SIG{'TERM'} = $SIG{'QUIT'} = $SIG{'HUP'} = 'unlink_tmp';
+END { unlink_tmp }
+
+sub xmkdir_p ($)
+{
+  my $dir = shift;
+  local $@ = undef;
+  return
+    if -d $dir or eval { File::Path::mkpath $dir };
+  chomp $@;
+  $@ =~ s/\s+at\s.*\bline\s\d+.*$//;
+  fatal "could not create directory '$dir': $@";
+}
+
+# Check macros in acinclude.m4.  If one is not used, warn.
+sub check_acinclude ()
+{
+  foreach my $key (keys %map)
+    {
+      # FIXME: should print line number of acinclude.m4.
+      msg ('syntax', "macro '$key' defined in acinclude.m4 but never used")
+	if $map{$key} eq 'acinclude.m4' && ! exists $macro_seen{$key};
+    }
+}
+
+sub reset_maps ()
+{
+  $greatest_mtime = 0;
+  %macro_seen = ();
+  @file_order = ();
+  %map = ();
+  %map_traced_defs = ();
+  %file_contents = ();
+  %file_type = ();
+  %file_includes = ();
+  %file_added = ();
+  %scanned_configure_dep = ();
+  %invmap = ();
+  %serial = ();
+  undef &search;
+}
+
+# install_file ($SRC, $DESTDIR)
+sub install_file ($$)
+{
+  my ($src, $destdir) = @_;
+  my $dest = $destdir . "/" . basename ($src);
+  my $diff_dest;
+
+  verb "installing $src to $dest";
+
+  if ($force_output
+      || !exists $file_contents{$dest}
+      || $file_contents{$src} ne $file_contents{$dest})
+    {
+      if (-e $dest)
+	{
+	  msg 'note', "overwriting '$dest' with '$src'";
+	  $diff_dest = $dest;
+	}
+      else
+	{
+	  msg 'note', "installing '$dest' from '$src'";
+	}
+
+      if (@diff_command)
+	{
+	  if (! defined $diff_dest)
+	    {
+	      # $dest does not exist.  We create an empty one just to
+	      # run diff, and we erase it afterward.  Using the real
+	      # destination file (rather than a temporary file) is
+	      # good when diff is run with options that display the
+	      # file name.
+	      #
+	      # If creating $dest fails, fall back to /dev/null.  At
+	      # least one diff implementation (Tru64's) cannot deal
+	      # with /dev/null.  However, working around this is not
+	      # worth the trouble since nobody runs aclocal on a
+	      # read-only tree anyway.
+	      $erase_me = $dest;
+	      my $f = new IO::File "> $dest";
+	      if (! defined $f)
+		{
+		  undef $erase_me;
+		  $diff_dest = '/dev/null';
+		}
+	      else
+		{
+		  $diff_dest = $dest;
+		  $f->close;
+		}
+	    }
+	  my @cmd = (@diff_command, $diff_dest, $src);
+	  $! = 0;
+	  verb "running: @cmd";
+	  my $res = system (@cmd);
+	  Automake::FileUtils::handle_exec_errors "@cmd", 1
+	    if $res;
+	  unlink_tmp;
+	}
+      elsif (!$dry_run)
+	{
+          xmkdir_p ($destdir);
+	  xsystem ('cp', $src, $dest);
+	}
+    }
+}
+
+# Compare two lists of numbers.
+sub list_compare (\@\@)
+{
+  my @l = @{$_[0]};
+  my @r = @{$_[1]};
+  while (1)
+    {
+      if (0 == @l)
+	{
+	  return (0 == @r) ? 0 : -1;
+	}
+      elsif (0 == @r)
+	{
+	  return 1;
+	}
+      elsif ($l[0] < $r[0])
+	{
+	  return -1;
+	}
+      elsif ($l[0] > $r[0])
+	{
+	  return 1;
+	}
+      shift @l;
+      shift @r;
+    }
+}
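+# Editor's illustrative sketch: serial numbers are split on '.' into numeric
+# lists before being compared (see scan_file below), so the comparison is
+# field-by-field rather than lexical:
+#   my @a = split (/\./, '2.63');   # (2, 63)
+#   my @b = split (/\./, '2.9');    # (2, 9)
+#   list_compare (@a, @b);          # returns 1, i.e. serial 2.63 is newer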
+
+################################################################
+
+# scan_m4_dirs($TYPE, $ERR_LEVEL, @DIRS)
+# -----------------------------------------------
+# Scan all M4 files installed in @DIRS for new macro definitions.
+# Register each file as of type $TYPE (one of the FT_* constants).
+# If a directory in @DIRS cannot be read:
+#  - fail hard                if $ERR_LEVEL == SCAN_M4_DIRS_ERROR
+#  - just print a warning     if $ERR_LEVEL == SCAN_M4_DIRS_WARN
+#  - continue silently        if $ERR_LEVEL == SCAN_M4_DIRS_SILENT
+sub scan_m4_dirs ($$@)
+{
+  my ($type, $err_level, @dirlist) = @_;
+
+  foreach my $m4dir (@dirlist)
+    {
+      if (! opendir (DIR, $m4dir))
+	{
+	  # TODO: maybe avoid complaining only if errno == ENOENT?
+          my $message = "couldn't open directory '$m4dir': $!";
+
+          if ($err_level == SCAN_M4_DIRS_ERROR)
+            {
+              fatal $message;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_WARN)
+            {
+              msg ('unsupported', $message);
+              next;
+            }
+          elsif ($err_level == SCAN_M4_DIRS_SILENT)
+            {
+              next; # Silently ignore.
+            }
+          else
+            {
+               prog_error "invalid \$err_level value '$err_level'";
+            }
+	}
+
+      # We reverse the directory contents so that foo2.m4 gets
+      # used in preference to foo1.m4.
+      foreach my $file (reverse sort grep (! /^\./, readdir (DIR)))
+	{
+	  # Only examine .m4 files.
+	  next unless $file =~ /\.m4$/;
+
+	  # Skip some files when running out of srcdir.
+	  next if $file eq 'aclocal.m4';
+
+	  my $fullfile = File::Spec->canonpath ("$m4dir/$file");
+	  scan_file ($type, $fullfile, 'aclocal');
+	}
+      closedir (DIR);
+    }
+}
+
+# Scan all the installed m4 files and construct a map.
+sub scan_m4_files ()
+{
+  # First, scan configure.ac.  It may contain macro definitions,
+  # or may include other files that define macros.
+  scan_file (FT_USER, $configure_ac, 'aclocal');
+
+  # Then, scan acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      scan_file (FT_USER, 'acinclude.m4', 'aclocal');
+    }
+
+  # Finally, scan all files in our search paths.
+
+  if (@user_includes)
+    {
+      # Don't explore the same directory multiple times.  This is here not
+      # only for speedup purposes.  We need this when the user has e.g.
+      # specified 'ACLOCAL_AMFLAGS = -I m4' and has also set
+      # AC_CONFIG_MACRO_DIR[S]([m4]) in configure.ac.  That would make the
+      # 'm4' directory occur twice here and fail on the second call to
+      # scan_m4_dirs([m4]) when the 'm4' directory doesn't exist.
+      # TODO: Shouldn't there be rather a check in scan_m4_dirs for
+      #       @user_includes[0]?
+      @user_includes = uniq @user_includes;
+
+      # Don't complain if the first user directory doesn't exist, in case
+      # we need to create it later (can happen if '--install' was given).
+      scan_m4_dirs (FT_USER,
+                    $install ? SCAN_M4_DIRS_SILENT : SCAN_M4_DIRS_WARN,
+                    $user_includes[0]);
+      scan_m4_dirs (FT_USER,
+                    SCAN_M4_DIRS_ERROR,
+		    @user_includes[1..$#user_includes]);
+    }
+  scan_m4_dirs (FT_AUTOMAKE, SCAN_M4_DIRS_ERROR, @automake_includes);
+  scan_m4_dirs (FT_SYSTEM, SCAN_M4_DIRS_ERROR, @system_includes);
+
+  # Construct a new function that does the searching.  We use a
+  # function (instead of just evaluating $search in the loop) so that
+  # "die" is correctly and easily propagated if run.
+  my $search = "sub search {\nmy \$found = 0;\n";
+  foreach my $key (reverse sort keys %map)
+    {
+      $search .= ('if (/\b\Q' . $key . '\E(?!\w)/) { add_macro ("' . $key
+		  . '"); $found = 1; }' . "\n");
+    }
+  $search .= "return \$found;\n};\n";
+  eval $search;
+  prog_error "$@\n search is $search" if $@;
+}
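+# Editor's illustrative sketch: for a hypothetical macro MY_FOO recorded in
+# %map, the generated body of &search contains a line equivalent to
+#   if (/\bMY_FOO(?!\w)/) { add_macro ("MY_FOO"); $found = 1; }
+# so one pass of &search over an input line flags every known macro it uses.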
+
+################################################################
+
+# Add a macro to the output.
+sub add_macro ($)
+{
+  my ($macro) = @_;
+
+  # Ignore unknown required macros.  Either they are not really
+  # needed (e.g., a conditional AC_REQUIRE), in which case aclocal
+  # should be quiet, or they are needed and Autoconf itself will
+  # complain when we trace for macro usage later.
+  return unless defined $map{$macro};
+
+  verb "saw macro $macro";
+  $macro_seen{$macro} = 1;
+  add_file ($map{$macro});
+}
+
+# scan_configure_dep ($file)
+# --------------------------
+# Scan a configure dependency (configure.ac, or separate m4 files)
+# for uses of known macros and AC_REQUIREs of possibly unknown macros.
+# Recursively scan m4_included files.
+sub scan_configure_dep ($)
+{
+  my ($file) = @_;
+  # Do not scan a file twice.
+  return ()
+    if exists $scanned_configure_dep{$file};
+  $scanned_configure_dep{$file} = 1;
+
+  my $mtime = mtime $file;
+  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+
+  my $contents = exists $file_contents{$file} ?
+    $file_contents{$file} : contents $file;
+
+  my $line = 0;
+  my @rlist = ();
+  my @ilist = ();
+  foreach (split ("\n", $contents))
+    {
+      ++$line;
+      # Remove comments from current line.
+      s/\bdnl\b.*$//;
+      s/\#.*$//;
+      # Avoid running all the following regexes on blank lines.
+      next if /^\s*$/;
+
+      while (/$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push @ilist, $ifile;
+	}
+
+      while (/$ac_require_rx/go)
+	{
+	  push (@rlist, $1 || $2);
+	}
+
+      # The search function is constructed dynamically by
+      # scan_m4_files.  The last parenthetical match makes sure we
+      # don't match things that look like macro assignments or
+      # AC_SUBSTs.
+      if (! &search && /(^|\s+)(AM_[A-Z0-9_]+)($|[^\]\)=A-Z0-9_])/)
+	{
+	  # Macro not found, but AM_ prefix found.
+	  # Make this just a warning, because we do not know whether
+	  # the macro is actually used (it could be called conditionally).
+	  msg ('unsupported', "$file:$line",
+	       "macro '$2' not found in library");
+	}
+    }
+
+  add_macro ($_) foreach (@rlist);
+  scan_configure_dep ($_) foreach @ilist;
+}
+
+# add_file ($FILE)
+# ----------------
+# Add $FILE to output.
+sub add_file ($)
+{
+  my ($file) = @_;
+
+  # Only add a file once.
+  return if ($file_added{$file});
+  $file_added{$file} = 1;
+
+  scan_configure_dep $file;
+}
+
+# Point to the documentation for underquoted AC_DEFUN only once.
+my $underquoted_manual_once = 0;
+
+# scan_file ($TYPE, $FILE, $WHERE)
+# --------------------------------
+# Scan a single M4 file ($FILE), and all files it includes.
+# Return the list of included files.
+# $TYPE is one of FT_USER, FT_AUTOMAKE, or FT_SYSTEM, depending
+# on where the file comes from.
+# $WHERE is the location to use in the diagnostic if the file
+# does not exist.
+sub scan_file ($$$)
+{
+  my ($type, $file, $where) = @_;
+  my $basename = basename $file;
+
+  # Do not scan the same file twice.
+  return @{$file_includes{$file}} if exists $file_includes{$file};
+  # Prevent potential infinite recursion (if two files include each other).
+  return () if exists $file_contents{$file};
+
+  unshift @file_order, $file;
+
+  $file_type{$file} = $type;
+
+  fatal "$where: file '$file' does not exist" if ! -e $file;
+
+  my $fh = new Automake::XFile $file;
+  my $contents = '';
+  my @inc_files = ();
+  my %inc_lines = ();
+
+  my $defun_seen = 0;
+  my $serial_seen = 0;
+  my $serial_older = 0;
+
+  while ($_ = $fh->getline)
+    {
+      # Ignore '##' lines.
+      next if /^##/;
+
+      $contents .= $_;
+      my $line = $_;
+
+      if ($line =~ /$serial_line_rx/go)
+	{
+	  my $number = $1;
+	  if ($number !~ /$serial_number_rx/go)
+	    {
+	      msg ('syntax', "$file:$.",
+		   "ill-formed serial number '$number', "
+		   . "expecting a version string with only digits and dots");
+	    }
+	  elsif ($defun_seen)
+	    {
+	      # aclocal removes all definitions from an M4 file with the
+	      # same basename if a greater serial number is found.
+	      # Encountering a serial after some macros will undefine
+	      # these macros...
+	      msg ('syntax', "$file:$.",
+		   'the serial number must appear before any macro definition');
+	    }
+	  # We really care about serials only for non-automake macros
+	  # and when --install is used.  But the above diagnostics are
+	  # made regardless of this, because not using --install is
+	  # not a reason not to fix macro files.
+	  elsif ($install && $type != FT_AUTOMAKE)
+	    {
+	      $serial_seen = 1;
+	      my @new = split (/\./, $number);
+
+	      verb "$file:$.: serial $number";
+
+	      if (!exists $serial{$basename}
+		  || list_compare (@new, @{$serial{$basename}}) > 0)
+		{
+		  # Delete any definition we knew from the old macro.
+		  foreach my $def (@{$invmap{$basename}})
+		    {
+		      verb "$file:$.: ignoring previous definition of $def";
+		      delete $map{$def};
+		    }
+		  $invmap{$basename} = [];
+		  $serial{$basename} = \@new;
+		}
+	      else
+		{
+		  $serial_older = 1;
+		}
+	    }
+	}
+
+      # Remove comments from current line.
+      # Do not do it earlier, because the serial line is a comment.
+      $line =~ s/\bdnl\b.*$//;
+      $line =~ s/\#.*$//;
+
+      while ($line =~ /$ac_defun_rx/go)
+	{
+	  $defun_seen = 1;
+	  if (! defined $1)
+	    {
+	      msg ('syntax', "$file:$.", "underquoted definition of $2"
+		   . "\n  run info Automake 'Extending aclocal'\n"
+		   . "  or see http://www.gnu.org/software/automake/manual/"
+		   . "automake.html#Extending-aclocal")
+		unless $underquoted_manual_once;
+	      $underquoted_manual_once = 1;
+	    }
+
+	  # If this macro does not have a serial and we have already
+	  # seen a macro with the same basename earlier, we should
+	  # ignore the macro (don't exit immediately so we can still
+	  # diagnose later #serial numbers and underquoted macros).
+	  $serial_older ||= ($type != FT_AUTOMAKE
+			     && !$serial_seen && exists $serial{$basename});
+
+	  my $macro = $1 || $2;
+	  if (!$serial_older && !defined $map{$macro})
+	    {
+	      verb "found macro $macro in $file: $.";
+	      $map{$macro} = $file;
+	      push @{$invmap{$basename}}, $macro;
+	    }
+	  else
+	    {
+	      # Note: we used to give an error here if we saw a
+	      # duplicated macro.  However, this turns out to be
+	      # extremely unpopular.  It causes actual problems which
+	      # are hard to work around, especially when you must
+	      # mix-and-match tool versions.
+	      verb "ignoring macro $macro in $file: $.";
+	    }
+	}
+
+      while ($line =~ /$m4_include_rx/go)
+	{
+	  my $ifile = $2 || $3;
+	  # Skip missing 'sinclude'd files.
+	  next if $1 ne 'm4_' && ! -f $ifile;
+	  push (@inc_files, $ifile);
+	  $inc_lines{$ifile} = $.;
+	}
+    }
+
+  # Ignore any file that has an old serial (or no serial if we know
+  # another one with a serial).
+  return ()
+    if ($serial_older ||
+	($type != FT_AUTOMAKE && !$serial_seen && exists $serial{$basename}));
+
+  $file_contents{$file} = $contents;
+
+  # For some reason I don't understand, it does not work
+  # to do "map { scan_file ($_, ...) } @inc_files" below.
+  # With Perl 5.8.2 it undefines @inc_files.
+  my @copy = @inc_files;
+  my @all_inc_files = (@inc_files,
+		       map { scan_file ($type, $_,
+					"$file:$inc_lines{$_}") } @copy);
+  $file_includes{$file} = \@all_inc_files;
+  return @all_inc_files;
+}
+
+# strip_redundant_includes (%FILES)
+# ---------------------------------
+# Each key in %FILES is a file that must be present in the output.
+# However some of these files might already include other files in %FILES,
+# so there is no point in including them another time.
+# This removes items of %FILES which are already included by another file.
+sub strip_redundant_includes (%)
+{
+  my %files = @_;
+
+  # Always include acinclude.m4, even if it does not appear to be used.
+  $files{'acinclude.m4'} = 1 if -f 'acinclude.m4';
+  # Files included by $configure_ac are redundant.
+  $files{$configure_ac} = 1;
+
+  # Files at the end of @file_order should override those at the beginning,
+  # so it is important to preserve these trailing files.  We can remove
+  # a file A if it is going to be output before a file B that includes
+  # file A, not the converse.
+  foreach my $file (reverse @file_order)
+    {
+      next unless exists $files{$file};
+      foreach my $ifile (@{$file_includes{$file}})
+	{
+	  next unless exists $files{$ifile};
+	  delete $files{$ifile};
+	  verb "$ifile is already included by $file";
+	}
+    }
+
+  # configure.ac is implicitly included.
+  delete $files{$configure_ac};
+
+  return %files;
+}
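+# Editor's illustrative sketch (hypothetical file names): if both m4/foo.m4
+# and m4/bar.m4 are wanted but %file_includes records that m4/foo.m4 already
+# m4_includes m4/bar.m4, the loop above deletes m4/bar.m4 from %files, so
+# aclocal.m4 ends up with a single "m4_include([m4/foo.m4])".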
+
+sub trace_used_macros ()
+{
+  my %files = map { $map{$_} => 1 } keys %macro_seen;
+  %files = strip_redundant_includes %files;
+
+  # When AC_CONFIG_MACRO_DIRS is used, avoid possible spurious warnings
+  # from autom4te about macros being "m4_require'd but not m4_defun'd";
+  # for more background, see:
+  # http://lists.gnu.org/archive/html/autoconf-patches/2012-11/msg00004.html
+  # as well as autoconf commit 'v2.69-44-g1ed0548', "warn: allow aclocal
+  # to silence m4_require warnings".
+  my $early_m4_code .= "m4_define([m4_require_silent_probe], [-])";
+
+  my $traces = ($ENV{AUTOM4TE} || 'autom4te');
+  $traces .= " --language Autoconf-without-aclocal-m4 ";
+  $traces = "echo '$early_m4_code' | $traces - ";
+
+  # Support AC_CONFIG_MACRO_DIRS also with older autoconf.
+  # Note that we can't use '$ac_config_macro_dirs_fallback' here, because
+  # a bug in option parsing code of autom4te 2.68 and earlier will cause
+  # it to read standard input last, even if the "-" argument is specified
+  # early.
+  # FIXME: To be removed in Automake 2.0, once we can assume autoconf
+  #        2.70 or later.
+  $traces .= "$automake_includes[0]/internal/ac-config-macro-dirs.m4 ";
+
+  # All candidate files.
+  $traces .= join (' ',
+		   (map { "'$_'" }
+		    (grep { exists $files{$_} } @file_order))) . " ";
+
+  # All candidate macros.
+  $traces .= join (' ',
+		   (map { "--trace='$_:\$f::\$n::\${::}%'" }
+		    ('AC_DEFUN',
+		     'AC_DEFUN_ONCE',
+		     'AU_DEFUN',
+		     '_AM_AUTOCONF_VERSION',
+		     'AC_CONFIG_MACRO_DIR_TRACE',
+                     # FIXME: Tracing the next two macros is a hack for
+                     # compatibility with older autoconf.  Remove this in
+                     # Automake 2.0, when we can assume Autoconf 2.70 or
+                     # later.
+		     'AC_CONFIG_MACRO_DIR',
+		     '_AM_CONFIG_MACRO_DIRS')),
+		   # Do not trace $1 for all other macros as we do
+		   # not need it and it might contain harmful
+		   # characters (like newlines).
+		   (map { "--trace='$_:\$f::\$n'" } (keys %macro_seen)));
+
+  verb "running $traces $configure_ac";
+
+  my $tracefh = new Automake::XFile ("$traces $configure_ac |");
+
+  @ac_config_macro_dirs = ();
+
+  my %traced = ();
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($file, $macro, $arg1) = split (/::/);
+
+      $traced{$macro} = 1 if exists $macro_seen{$macro};
+
+      if ($macro eq 'AC_DEFUN' || $macro eq 'AC_DEFUN_ONCE'
+            || $macro eq 'AU_DEFUN')
+        {
+          $map_traced_defs{$arg1} = $file;
+        }
+      elsif ($macro eq '_AM_AUTOCONF_VERSION')
+        {
+          $ac_version = $arg1;
+        }
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR_TRACE')
+        {
+          push @ac_config_macro_dirs, $arg1;
+        }
+      # FIXME: We still need to trace AC_CONFIG_MACRO_DIR
+      # for compatibility with older autoconf.  Remove this
+      # once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq 'AC_CONFIG_MACRO_DIR')
+        {
+          @ac_config_macro_dirs = ($arg1);
+        }
+      # FIXME: This is a hack for compatibility with older autoconf.
+      # Remove this once we can assume Autoconf 2.70 or later.
+      elsif ($macro eq '_AM_CONFIG_MACRO_DIRS')
+        {
+           # Empty leading/trailing fields might be produced by split,
+           # hence the grep is really needed.
+           push @ac_config_macro_dirs, grep (/./, (split /\s+/, $arg1));
+        }
+    }
+
+  # FIXME: in Autoconf >= 2.70, AC_CONFIG_MACRO_DIR calls
+  # AC_CONFIG_MACRO_DIR_TRACE behind the scenes, which could
+  # leave unwanted duplicates in @ac_config_macro_dirs.
+  # Remove this in Automake 2.0, when we'll stop tracing
+  # AC_CONFIG_MACRO_DIR explicitly.
+  @ac_config_macro_dirs = uniq @ac_config_macro_dirs;
+
+  $tracefh->close;
+
+  return %traced;
+}
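+# Editor's illustrative sketch (hypothetical names): with the trace format
+# '$f::$n::${::}%' used above, each reported call arrives as a line like
+#   m4/foo.m4::AC_DEFUN::MY_FOO::...
+# (any further macro arguments follow the last '::'), and the split (/::/)
+# above yields ($file, $macro, $arg1) = ('m4/foo.m4', 'AC_DEFUN', 'MY_FOO').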
+
+sub scan_configure ()
+{
+  # Make sure we include acinclude.m4 if it exists.
+  if (-f 'acinclude.m4')
+    {
+      add_file ('acinclude.m4');
+    }
+  scan_configure_dep ($configure_ac);
+}
+
+################################################################
+
+# Write output.
+# Return 0 iff some files were installed locally.
+sub write_aclocal ($@)
+{
+  my ($output_file, @macros) = @_;
+  my $output = '';
+
+  my %files = ();
+  # Get the list of files containing definitions for the macros used.
+  # (Filter out unused macro definitions with $map_traced_defs.  This
+  # can happen when an Autoconf macro is conditionally defined:
+  # aclocal sees the potential definition, but this definition is
+  # actually never processed and the Autoconf implementation is used
+  # instead.)
+  for my $m (@macros)
+    {
+      $files{$map{$m}} = 1
+	if (exists $map_traced_defs{$m}
+	    && $map{$m} eq $map_traced_defs{$m});
+    }
+  # Do not explicitly include a file that is already indirectly included.
+  %files = strip_redundant_includes %files;
+
+  my $installed = 0;
+
+  for my $file (grep { exists $files{$_} } @file_order)
+    {
+      # Check the time stamp of this file, and of all files it includes.
+      for my $ifile ($file, @{$file_includes{$file}})
+	{
+	  my $mtime = mtime $ifile;
+	  $greatest_mtime = $mtime if $greatest_mtime < $mtime;
+	}
+
+      # If the file to add looks like it is outside the project, copy it
+      # to the output.  The regex catches filenames starting with
+      # things like '/', '\', or 'c:\'.
+      if ($file_type{$file} != FT_USER
+	  || $file =~ m,^(?:\w:)?[\\/],)
+	{
+	  if (!$install || $file_type{$file} != FT_SYSTEM)
+	    {
+	      # Copy the file into aclocal.m4.
+	      $output .= $file_contents{$file} . "\n";
+	    }
+	  else
+	    {
+	      # Install the file (and any file it includes).
+	      my $dest;
+	      for my $ifile (@{$file_includes{$file}}, $file)
+		{
+		  install_file ($ifile, $user_includes[0]);
+		}
+	      $installed = 1;
+	    }
+	}
+      else
+	{
+	  # Otherwise, simply include the file.
+	  $output .= "m4_include([$file])\n";
+	}
+    }
+
+  if ($installed)
+    {
+      verb "running aclocal anew, because some files were installed locally";
+      return 0;
+    }
+
+  # Nothing to output?!
+  # FIXME: Shouldn't we diagnose this?
+  return 1 if ! length ($output);
+
+  if ($ac_version)
+    {
+      # Do not use "$output_file" here for the same reason we do not
+      # use it in the header below.  autom4te will output the name of
+      # the file in the diagnostic anyway.
+      $output = "m4_ifndef([AC_AUTOCONF_VERSION],
+  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+m4_if(m4_defn([AC_AUTOCONF_VERSION]), [$ac_version],,
+[m4_warning([this file was generated for autoconf $ac_version.
+You have another version of autoconf.  It may work, but is not guaranteed to.
+If you have problems, you may need to regenerate the build system entirely.
+To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+$output";
+    }
+
+  # We used to print "# $output_file generated automatically etc."  But
+  # this creates spurious differences when using autoreconf.  Autoreconf
+  # creates aclocal.m4t and then renames it to aclocal.m4, but the
+  # rebuild rules generated by Automake create aclocal.m4 directly --
+  # this would give two ways to get the same file, with a different
+  # name in the header.
+  $output = "# generated automatically by aclocal $VERSION -*- Autoconf -*-
+
+# Copyright (C) 1996-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+$ac_config_macro_dirs_fallback
+$output";
+
+  # We try not to update $output_file unless necessary, because
+  # doing so invalidates Autom4te's cache and therefore slows down
+  # tools called after aclocal.
+  #
+  # We need to overwrite $output_file in the following situations.
+  #   * The --force option is in use.
+  #   * One of the dependencies is younger.
+  #     (Not updating $output_file in this situation would cause
+  #     make to call aclocal in a loop.)
+  #   * The contents of the current file are different from what
+  #     we have computed.
+  if (!$force_output
+      && $greatest_mtime < mtime ($output_file)
+      && $output eq contents ($output_file))
+    {
+      verb "$output_file unchanged";
+      return 1;
+    }
+
+  verb "writing $output_file";
+
+  if (!$dry_run)
+    {
+      if (-e $output_file && !unlink $output_file)
+        {
+	  fatal "could not remove '$output_file': $!";
+	}
+      my $out = new Automake::XFile "> $output_file";
+      print $out $output;
+    }
+  return 1;
+}
+
+################################################################
+
+# Print usage and exit.
+sub usage ($)
+{
+  my ($status) = @_;
+
+  print <<'EOF';
+Usage: aclocal [OPTION]...
+
+Generate 'aclocal.m4' by scanning 'configure.ac' or 'configure.in'
+
+Options:
+      --automake-acdir=DIR  directory holding automake-provided m4 files
+      --system-acdir=DIR    directory holding third-party system-wide files
+      --diff[=COMMAND]      run COMMAND [diff -u] on M4 files that would be
+                            changed (implies --install and --dry-run)
+      --dry-run             pretend to, but do not actually update any file
+      --force               always update output file
+      --help                print this help, then exit
+  -I DIR                    add directory to search list for .m4 files
+      --install             copy third-party files to the first -I directory
+      --output=FILE         put output in FILE (default aclocal.m4)
+      --print-ac-dir        print name of directory holding system-wide
+                              third-party m4 files, then exit
+      --verbose             don't be silent
+      --version             print version number, then exit
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY
+
+Warning categories include:
+  syntax        dubious syntactic constructs (default)
+  unsupported   unknown macros (default)
+  all           all the warnings (default)
+  no-CATEGORY   turn off warnings in CATEGORY
+  none          turn off all the warnings
+  error         treat warnings as errors
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+EOF
+  exit $status;
+}
+
+# Print version and exit.
+sub version ()
+{
+  print <<EOF;
+aclocal (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  exit 0;
+}
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $print_and_exit = 0;
+  my $diff_command;
+
+  my %cli_options =
+    (
+     'help'		=> sub { usage(0); },
+     'version'		=> \&version,
+     'system-acdir=s'	=> sub { shift; @system_includes = @_; },
+     'automake-acdir=s'	=> sub { shift; @automake_includes = @_; },
+     'diff:s'		=> \$diff_command,
+     'dry-run'		=> \$dry_run,
+     'force'		=> \$force_output,
+     'I=s'		=> \@user_includes,
+     'install'          => \$install,
+     'output=s'		=> \$output_file,
+     'print-ac-dir'     => \$print_and_exit,
+     'verbose'		=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \&parse_warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  if (@ARGV > 0)
+    {
+      fatal ("non-option arguments are not accepted: '$ARGV[0]'.\n"
+             . "Try '$0 --help' for more information.");
+    }
+
+  if ($print_and_exit)
+    {
+      print "@system_includes\n";
+      exit 0;
+    }
+
+  if (defined $diff_command)
+    {
+      $diff_command = 'diff -u' if $diff_command eq '';
+      @diff_command = split (' ', $diff_command);
+      $install = 1;
+      $dry_run = 1;
+    }
+
+  # Finally, add any directory listed in the 'dirlist' file.
+  if (open (DIRLIST, "$system_includes[0]/dirlist"))
+    {
+      while (<DIRLIST>)
+        {
+          # Ignore '#' lines.
+          next if /^#/;
+          # strip off newlines and end-of-line comments
+          s/\s*\#.*$//;
+          chomp;
+          foreach my $dir (glob)
+            {
+              push (@system_includes, $dir) if -d $dir;
+            }
+        }
+      close (DIRLIST);
+    }
+}
+
+# Add any directory listed in the 'ACLOCAL_PATH' environment variable
+# to the list of system include directories.
+sub parse_ACLOCAL_PATH ()
+{
+  return if not defined $ENV{"ACLOCAL_PATH"};
+  # Directories in ACLOCAL_PATH should take precedence over system
+  # directories, so we use unshift.  However, directories that
+  # come first in ACLOCAL_PATH take precedence over directories
+  # coming later, which is why the result of split is reversed.
+  foreach my $dir (reverse split /:/, $ENV{"ACLOCAL_PATH"})
+    {
+      unshift (@system_includes, $dir) if $dir ne '' && -d $dir;
+    }
+}
+
+################################################################
+
+parse_WARNINGS;		    # Parse the WARNINGS environment variable.
+parse_arguments;
+parse_ACLOCAL_PATH;
+$configure_ac = require_configure_ac;
+
+# We may have to rerun aclocal if some files have been installed, but
+# it should not happen more than once.  The reason we must run again
+# is that once the file has been moved from /usr/share/aclocal/ to the
+# local m4/ directory it appears at a new place in the search path,
+# hence it should be output at a different position in aclocal.m4.  If
+# we did not rerun aclocal, the next run of aclocal would produce a
+# different aclocal.m4.
+my $loop = 0;
+my $rerun_due_to_macrodir = 0;
+while (1)
+  {
+    ++$loop;
+    prog_error "too many loops" if $loop > 2 + $rerun_due_to_macrodir;
+
+    reset_maps;
+    scan_m4_files;
+    scan_configure;
+    last if $exit_code;
+    my %macro_traced = trace_used_macros;
+
+    if (!$rerun_due_to_macrodir && @ac_config_macro_dirs)
+      {
+        # The directory specified in calls to the AC_CONFIG_MACRO_DIRS
+        # m4 macro (if any) must go after the user includes specified
+        # explicitly with the '-I' option.
+        push @user_includes, @ac_config_macro_dirs;
+        # We might have to scan some new directory of .m4 files.
+        $rerun_due_to_macrodir++;
+        next;
+      }
+
+    if ($install && !@user_includes)
+      {
+        fatal "installation of third-party macros impossible without " .
+              "-I options nor AC_CONFIG_MACRO_DIR{,S} m4 macro(s)";
+      }
+
+    last if write_aclocal ($output_file, keys %macro_traced);
+    last if $dry_run;
+  }
+check_acinclude;
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoconf
new file mode 100755
index 0000000..ab9f4ea
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoconf
@@ -0,0 +1,500 @@
+#! /bin/sh
+# Generated from autoconf.in; do not edit by hand.
+# autoconf -- create `configure' using m4 macros
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1999, 2000, 2001, 2002, 2003,
+# 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+# Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+
+# as_fn_error STATUS ERROR
+# ------------------------
+# Output "`basename $0`: error: ERROR" to stderr. Then exit the script with
+# STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+usage="\
+Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Generate a configuration script from a TEMPLATE-FILE if given, or
+\`configure.ac' if present, or else \`configure.in'.  Output is sent
+to the standard output if TEMPLATE-FILE is given, else into
+\`configure'.
+
+Operation modes:
+  -h, --help                print this help, then exit
+  -V, --version             print version number, then exit
+  -v, --verbose             verbosely report processing
+  -d, --debug               don't remove temporary files
+  -f, --force               consider all files obsolete
+  -o, --output=FILE         save output in FILE (stdout is the default)
+  -W, --warnings=CATEGORY   report the warnings falling in CATEGORY [syntax]
+
+Warning categories include:
+  \`cross'         cross compilation issues
+  \`obsolete'      obsolete constructs
+  \`syntax'        dubious syntactic constructs
+  \`all'           all the warnings
+  \`no-CATEGORY'   turn off the warnings on CATEGORY
+  \`none'          turn off all the warnings
+  \`error'         warnings are error
+
+The environment variables \`M4' and \`WARNINGS' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the list of calls to MACRO
+  -i, --initialization        also trace Autoconf's initialization process
+
+In tracing mode, no configuration script is created.  FORMAT defaults
+to \`\$f:\$l:\$n:\$%'; see \`autom4te --help' for information about FORMAT.
+
+Report bugs to <bug-autoconf@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>."
+
+version="\
+autoconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille."
+
+help="\
+Try \`$as_me --help' for more information."
+
+exit_missing_arg='
+  as_fn_error $? "option \`$1'\'' requires an argument$as_nl$help"'
+# restore font-lock: '
+
+# Variables.
+: ${AUTOM4TE='/x86_64-unknown-linux-gnu/bin/autom4te'}
+autom4te_options=
+outfile=
+verbose=false
+
+# Parse command line.
+while test $# -gt 0 ; do
+  option=`expr "x$1" : 'x\(--[^=]*\)' \| \
+	       "x$1" : 'x\(-.\)'`
+  optarg=`expr "x$1" : 'x--[^=]*=\(.*\)' \| \
+	       "x$1" : 'x-.\(.*\)'`
+  case $1 in
+    --version | -V )
+       echo "$version" ; exit ;;
+    --help | -h )
+       $as_echo "$usage"; exit ;;
+
+    --verbose | -v )
+       verbose=:
+       autom4te_options="$autom4te_options $1"; shift ;;
+
+    # Arguments passed as is to autom4te.
+    --debug      | -d   | \
+    --force      | -f   | \
+    --include=*  | -I?* | \
+    --prepend-include=* | -B?* | \
+    --warnings=* | -W?* )
+       case $1 in
+	 *\'*) arg=`$as_echo "$1" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$1 ;;
+       esac
+       autom4te_options="$autom4te_options '$arg'"; shift ;;
+    # Options with separated arg passed as is to autom4te.
+    --include  | -I | \
+    --prepend-include  | -B | \
+    --warnings | -W )
+       test $# = 1 && eval "$exit_missing_arg"
+       case $2 in
+	 *\'*) arg=`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"` ;; #'
+	 *) arg=$2 ;;
+       esac
+       autom4te_options="$autom4te_options $option '$arg'"
+       shift; shift ;;
+
+    --trace=* | -t?* )
+       traces="$traces --trace='"`$as_echo "$optarg" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift ;;
+    --trace | -t )
+       test $# = 1 && eval "$exit_missing_arg"
+       traces="$traces --trace='"`$as_echo "$2" | sed "s/'/'\\\\\\\\''/g"`"'"
+       shift; shift ;;
+    --initialization | -i )
+       autom4te_options="$autom4te_options --melt"
+       shift;;
+
+    --output=* | -o?* )
+       outfile=$optarg
+       shift ;;
+    --output | -o )
+       test $# = 1 && eval "$exit_missing_arg"
+       outfile=$2
+       shift; shift ;;
+
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       exec >&2
+       as_fn_error $? "invalid option \`$1'$as_nl$help" ;; #`
+    * )
+       break ;;
+  esac
+done
+
+# Find the input file.
+case $# in
+  0)
+    if test -f configure.ac; then
+      if test -f configure.in; then
+	$as_echo "$as_me: warning: both \`configure.ac' and \`configure.in' are present." >&2
+	$as_echo "$as_me: warning: proceeding with \`configure.ac'." >&2
+      fi
+      infile=configure.ac
+    elif test -f configure.in; then
+      infile=configure.in
+    else
+      as_fn_error $? "no input file"
+    fi
+    test -z "$traces" && test -z "$outfile" && outfile=configure;;
+  1)
+    infile=$1 ;;
+  *) exec >&2
+     as_fn_error $? "invalid number of arguments$as_nl$help" ;;
+esac
+
+# Unless specified, the output is stdout.
+test -z "$outfile" && outfile=-
+
+# Run autom4te with expansion.
+eval set x "$autom4te_options" \
+  --language=autoconf --output=\"\$outfile\" "$traces" \"\$infile\"
+shift
+$verbose && $as_echo "$as_me: running $AUTOM4TE $*" >&2
+exec "$AUTOM4TE" "$@"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoheader b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoheader
new file mode 100755
index 0000000..892ffbf
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoheader
@@ -0,0 +1,304 @@
+#! /usr/bin/perl
+# -*- Perl -*-
+# Generated from autoheader.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoheader -- create `config.h.in' from `configure.ac'
+
+# Copyright (C) 1992, 1993, 1994, 1996, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by Roland McGrath.
+# Rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, "$pkgdatadir";
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use strict;
+
+# Using `do FILE', we need `local' vars.
+use vars qw ($config_h %verbatim %symbol);
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/x86_64-unknown-linux-gnu/bin/autom4te';
+local $config_h;
+my $config_h_in;
+my @prepend_include;
+my @include;
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]
+
+Create a template file of C \`\#define\' statements for \`configure\' to
+use.  To this end, scan TEMPLATE-FILE, or \`configure.ac\' if present,
+or else \`configure.in\'.
+
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -f, --force              consider all files obsolete
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+" . Autom4te::ChannelDefs::usage () . "
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "autoheader (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Roland McGrath and Akim Demaille.
+";
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  parse_WARNINGS;
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'W|warnings=s'        => \&parse_warnings);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('ah');
+switch_warning 'obsolete';
+parse_args;
+
+# Preach.
+my $config_h_top = find_file ("config.h.top?",
+			      reverse (@prepend_include), @include);
+my $config_h_bot = find_file ("config.h.bot?",
+			      reverse (@prepend_include), @include);
+my $acconfig_h = find_file ("acconfig.h?",
+			    reverse (@prepend_include), @include);
+if ($config_h_top || $config_h_bot || $acconfig_h)
+  {
+    my $msg = << "END";
+    Using auxiliary files such as \`acconfig.h\', \`config.h.bot\'
+    and \`config.h.top\', to define templates for \`config.h.in\'
+    is deprecated and discouraged.
+
+    Using the third argument of \`AC_DEFINE\' and
+    \`AC_DEFINE_UNQUOTED\' allows one to define a template without
+    \`acconfig.h\':
+
+      AC_DEFINE([NEED_FUNC_MAIN], 1,
+		[Define if a function \`main\' is needed.])
+
+    More sophisticated templates can also be produced, see the
+    documentation.
+END
+    $msg =~ s/^    /WARNING: /gm;
+    msg 'obsolete', $msg;
+  }
+
+# Set up autoconf.
+my $autoconf = "'$autom4te' --language=autoconf ";
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+$autoconf .= ' --debug' if $debug;
+$autoconf .= ' --force' if $force;
+$autoconf .= ' --verbose' if $verbose;
+
+# ----------------------- #
+# Real work starts here.  #
+# ----------------------- #
+
+# Source what the traces are trying to tell us.
+verb "$me: running $autoconf to trace from $ARGV[0]";
+my $quoted_tmp = shell_quote ($tmp);
+xsystem ("$autoconf"
+	 # If you change this list, update the
+	 # `Autoheader-preselections' section of autom4te.in.
+	 . ' --trace AC_CONFIG_HEADERS:\'$$config_h ||= \'"\'"\'$1\'"\'"\';\''
+	 . ' --trace AH_OUTPUT:\'$$verbatim{\'"\'"\'$1\'"\'"\'} = \'"\'"\'$2\'"\'"\';\''
+	 . ' --trace AC_DEFINE_TRACE_LITERAL:\'$$symbol{\'"\'"\'$1\'"\'"\'} = 1;\''
+	 . " " . shell_quote ($ARGV[0]) . " >$quoted_tmp/traces.pl");
+
+local (%verbatim, %symbol);
+debug "$me: \`do'ing $tmp/traces.pl:\n" . `sed 's/^/| /' $quoted_tmp/traces.pl`;
+do "$tmp/traces.pl";
+warn "couldn't parse $tmp/traces.pl: $@" if $@;
+unless ($config_h)
+  {
+    error "error: AC_CONFIG_HEADERS not found in $ARGV[0]";
+    exit 1;
+  }
+
+# We template only the first CONFIG_HEADER.
+$config_h =~ s/ .*//;
+# Support "outfile[:infile]", defaulting infile="outfile.in".
+($config_h, $config_h_in) = split (':', $config_h, 2);
+$config_h_in ||= "$config_h.in";
+
+# %SYMBOL might contain things like `F77_FUNC(name,NAME)', but we keep
+# only the name of the macro.
+%symbol = map { s/\(.*//; $_ => 1 } keys %symbol;
+
+my $out = new Autom4te::XFile ("> " . open_quote ("$tmp/config.hin"));
+
+# Don't write "do not edit" -- it will get copied into the
+# config.h, which it's ok to edit.
+print $out "/* $config_h_in.  Generated from $ARGV[0] by autoheader.  */\n";
+
+# Dump the top.
+if ($config_h_top)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_top));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+# Dump `acconfig.h', except for its bottom portion.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    while ($_ = $in->getline)
+      {
+	last if /\@BOTTOM\@/;
+	next if /\@TOP\@/;
+	print $out $_;
+      }
+  }
+
+# Dump the templates from `configure.ac'.
+foreach (sort keys %verbatim)
+  {
+    print $out "\n$verbatim{$_}\n";
+  }
+
+# Dump bottom portion of `acconfig.h'.
+if ($acconfig_h)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($acconfig_h));
+    my $dump = 0;
+    while ($_ = $in->getline)
+      {
+	print $out $_ if $dump;
+	$dump = 1  if /\@BOTTOM\@/;
+      }
+  }
+
+# Dump the bottom.
+if ($config_h_bot)
+  {
+    my $in = new Autom4te::XFile ("< " . open_quote ($config_h_bot));
+    while ($_ = $in->getline)
+      {
+	print $out $_;
+      }
+  }
+
+$out->close;
+
+# Check that all the symbols have a template.
+{
+  my $in = new Autom4te::XFile ("< " . open_quote ("$tmp/config.hin"));
+  my $suggest_ac_define = 1;
+  while ($_ = $in->getline)
+    {
+      my ($symbol) = /^\#\s*\w+\s+(\w+)/
+	or next;
+      delete $symbol{$symbol};
+    }
+  foreach (sort keys %symbol)
+    {
+      msg 'syntax', "warning: missing template: $_";
+      if ($suggest_ac_define)
+	{
+	  msg 'syntax',  "Use AC_DEFINE([$_], [], [Description])";
+	  $suggest_ac_define = 0;
+	}
+
+    }
+  exit 1
+    if keys %symbol;
+}
+
+update_file ("$tmp/config.hin", "$config_h_in", $force);
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autom4te b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autom4te
new file mode 100755
index 0000000..d29ca29
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autom4te
@@ -0,0 +1,1075 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autom4te.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autom4te - Wrapper around M4 libraries.
+# Copyright (C) 2001, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::C4che;
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Data directory.
+my $pkgdatadir = $ENV{'AC_MACRODIR'} || '//share/autoconf';
+
+# $LANGUAGE{LANGUAGE} -- Automatic options for LANGUAGE.
+my %language;
+
+my $output = '-';
+
+# Mode of the output file except for traces.
+my $mode = "0666";
+
+# If melt, don't use frozen files.
+my $melt = 0;
+
+# Names of the cache directory, cache directory index, trace cache
+# prefix, and output cache prefix.  And the IO object for the index.
+my $cache;
+my $icache;
+my $tcache;
+my $ocache;
+my $icache_file;
+
+my $flock_implemented = 'yes';
+
+# The macros to trace mapped to their format, as specified by the
+# user.
+my %trace;
+
+# The macros the user will want to trace in the future.
+# We need `include' to get the included file, `m4_pattern_forbid' and
+# `m4_pattern_allow' to check the output.
+#
+# FIXME: What about `sinclude'?
+my @preselect = ('include',
+		 'm4_pattern_allow', 'm4_pattern_forbid',
+		 '_m4_warn');
+
+# M4 include path.
+my @include;
+
+# Do we freeze?
+my $freeze = 0;
+
+# $M4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+# Some non-GNU m4's don't reject the --help option, so give them /dev/null.
+fatal "need GNU m4 1.4 or later: $m4"
+  if system "$m4 --help </dev/null 2>&1 | grep reload-state >/dev/null";
+
+# Set some high recursion limit as the default limit, 250, has already
+# been hit with AC_OUTPUT.  Don't override the user's choice.
+$m4 .= ' --nesting-limit=1024'
+  if " $m4 " !~ / (--nesting-limit(=[0-9]+)?|-L[0-9]*) /;
+
+
+# @M4_BUILTIN -- M4 builtins and a useful comment.
+my @m4_builtin = `echo dumpdef | $m4 2>&1 >/dev/null`;
+map { s/:.*//;s/\W// } @m4_builtin;
+
+
+# %M4_BUILTIN_ALTERNATE_NAME
+# --------------------------
+# The builtins are renamed, e.g., `define' is renamed `m4_define'.
+# So map `define' to `m4_define' and conversely.
+# Some macros don't follow this scheme: be sure to properly map to their
+# alternate name too.
+#
+# FIXME: Trace status of renamed builtins was fixed in M4 1.4.5, which
+# we now depend on; do we still need to do this mapping?
+#
+# So we will merge them, i.e., tracing `BUILTIN' or tracing
+# `m4_BUILTIN' will be the same: tracing both, but honoring the
+# *last* trace specification.
+#
+# FIXME: This is not enough: in the output `$0' will be `BUILTIN'
+# sometimes and `m4_BUILTIN' at others.  We should return a unique name,
+# the one specified by the user.
+#
+# FIXME: To be absolutely rigorous, I would say that given that we
+# _redefine_ divert (instead of _copying_ it), divert and the like
+# should not be part of this list.
+my %m4_builtin_alternate_name;
+@m4_builtin_alternate_name{"$_", "m4_$_"} = ("m4_$_", "$_")
+  foreach (grep { !/m4wrap|m4exit|dnl|ifelse|__.*__/ } @m4_builtin);
+@m4_builtin_alternate_name{"ifelse", "m4_if"}   = ("m4_if", "ifelse");
+@m4_builtin_alternate_name{"m4exit", "m4_exit"} = ("m4_exit", "m4exit");
+@m4_builtin_alternate_name{"m4wrap", "m4_wrap"} = ("m4_wrap", "m4wrap");
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILES]
+
+Run GNU M4 on the FILES, avoiding useless runs.  Output the traces if tracing,
+the frozen file if freezing, otherwise the expansion of the FILES.
+
+If some of the FILES are named \`FILE.m4f\' they are considered to be M4
+frozen files of all the previous files (which are therefore not loaded).
+If \`FILE.m4f\' is not found, then \`FILE.m4\' will be used, together with
+all the previous files.
+
+Some files may be optional, i.e., will only be processed if found in the
+include path, but then must end in \`.m4?\';  the question mark is not part of
+the actual file name.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don\'t remove temporary files
+  -o, --output=FILE        save output in FILE (defaults to \`-\', stdout)
+  -f, --force              don\'t rely on cached values
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+  -l, --language=LANG      specify the set of M4 macros to use
+  -C, --cache=DIRECTORY    preserve results for future runs in DIRECTORY
+      --no-cache           disable the cache
+  -m, --mode=OCTAL         change the non trace output file mode (0666)
+  -M, --melt               don\'t use M4 frozen files
+
+Languages include:
+  \`Autoconf\'   create Autoconf configure scripts
+  \`Autotest\'   create Autotest test suites
+  \`M4sh\'       create M4sh shell scripts
+  \`M4sugar\'    create M4sugar output
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variables \`M4\' and \`WARNINGS\' are honored.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Tracing:
+  -t, --trace=MACRO[:FORMAT]  report the MACRO invocations
+  -p, --preselect=MACRO       prepare to trace MACRO in a future run
+
+Freezing:
+  -F, --freeze   produce an M4 frozen state file for FILES
+
+FORMAT defaults to \`\$f:\$l:\$n:\$%\', and can use the following escapes:
+  \$\$     literal \$
+  \$f     file where macro was called
+  \$l     line where macro was called
+  \$d     nesting depth of macro call
+  \$n     name of the macro
+  \$NUM   argument NUM, unquoted and with newlines
+  \$SEP\@  all arguments, with newlines, quoted, and separated by SEP
+  \$SEP*  all arguments, with newlines, unquoted, and separated by SEP
+  \$SEP%  all arguments, without newlines, unquoted, and separated by SEP
+SEP can be empty for the default (comma for \@ and *, colon for %),
+a single character for that character, or {STRING} to use a string.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
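+# Illustrative example of the FORMAT escapes documented in $help above (the
+# macro, file name and line number are hypothetical): with
+#   --trace='AC_DEFINE:$n called at $f:$l with $%'
+# a call AC_DEFINE([FOO], [1]) at configure.ac line 3 would be reported as
+#   AC_DEFINE called at configure.ac:3 with FOO:1
+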
+# $VERSION
+# --------
+$version =  <<"EOF";
+autom4te (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Akim Demaille.
+EOF
+
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# $OPTION
+# files_to_options (@FILE)
+# ------------------------
+# Transform Autom4te conventions (e.g., using foo.m4f to designate a frozen
+# file) into a suitable command line for M4 (e.g., using --reload-state).
+# parse_args guarantees that we will see at most one frozen file, and that
+# if a frozen file is present, it is the first argument.
+sub files_to_options (@)
+{
+  my (@file) = @_;
+  my @res;
+  foreach my $file (@file)
+    {
+      my $arg = shell_quote ($file);
+      if ($file =~ /\.m4f$/)
+	{
+	  $arg = "--reload-state=$arg";
+	  # If the user downgraded M4 from 1.6 to 1.4.x after freezing
+	  # the file, then we ensure the frozen __m4_version__ will
+	  # not cause m4_init to make the wrong decision about the
+	  # current M4 version.
+	  $arg .= " --undefine=__m4_version__"
+	    unless grep {/__m4_version__/} @m4_builtin;
+	}
+      push @res, $arg;
+    }
+  return join ' ', @res;
+}
+
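+# Illustrative example of the transformation performed by files_to_options
+# (the file names are hypothetical):
+#
+#   files_to_options ('autoconf.m4f', 'configure.ac')
+#
+# would return something like
+#
+#   --reload-state=autoconf.m4f configure.ac
+#
+# with --undefine=__m4_version__ appended to the frozen-file argument when
+# that builtin is not provided by the installed M4, as explained above.
+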
+
+# load_configuration ($FILE)
+# --------------------------
+# Load the configuration $FILE.
+sub load_configuration ($)
+{
+  my ($file) = @_;
+  use Text::ParseWords;
+
+  my $cfg = new Autom4te::XFile ("< " . open_quote ($file));
+  my $lang;
+  while ($_ = $cfg->getline)
+    {
+      chomp;
+      # Comments.
+      next
+	if /^\s*(\#.*)?$/;
+
+      my @words = shellwords ($_);
+      my $type = shift @words;
+      if ($type eq 'begin-language:')
+	{
+	  fatal "$file:$.: end-language missing for: $lang"
+	    if defined $lang;
+	  $lang = lc $words[0];
+	}
+      elsif ($type eq 'end-language:')
+	{
+	  error "$file:$.: end-language mismatch: $lang"
+	    if $lang ne lc $words[0];
+	  $lang = undef;
+	}
+      elsif ($type eq 'args:')
+	{
+	  fatal "$file:$.: no current language"
+	    unless defined $lang;
+	  push @{$language{$lang}}, @words;
+	}
+      else
+	{
+	  error "$file:$.: unknown directive: $type";
+	}
+    }
+}
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  # We want to look for the early options, which should not be found
+  # in the configuration file.  Prepend to the user arguments.
+  # Perform this repeatedly so that we can use --language in language
+  # definitions.  Beware that there can be several --language
+  # invocations.
+  my @language;
+  do {
+    @language = ();
+    use Getopt::Long;
+    Getopt::Long::Configure ("pass_through", "permute");
+    GetOptions ("l|language=s" => \@language);
+
+    foreach (@language)
+      {
+	error "unknown language: $_"
+	  unless exists $language{lc $_};
+	unshift @ARGV, @{$language{lc $_}};
+      }
+  } while @language;
+
+  # --debug is useless: it is parsed below.
+  if (exists $ENV{'AUTOM4TE_DEBUG'})
+    {
+      print STDERR "$me: concrete arguments:\n";
+      foreach my $arg (@ARGV)
+	{
+	  print STDERR "| $arg\n";
+	}
+    }
+
+  # Process the arguments for real this time.
+  my @trace;
+  my @prepend_include;
+  parse_WARNINGS;
+  getopt
+    (
+     # Operation modes:
+     "o|output=s"   => \$output,
+     "W|warnings=s" => \&parse_warnings,
+     "m|mode=s"     => \$mode,
+     "M|melt"       => \$melt,
+
+     # Library directories:
+     "B|prepend-include=s" => \@prepend_include,
+     "I|include=s"         => \@include,
+
+     # Tracing:
+     # Using a hash for traces is tempting.  Unfortunately, upon `-t FOO',
+     # instead of mapping `FOO' to undef, Getopt maps it to `1', preventing
+     # us from distinguishing `-t FOO' from `-t FOO=1'.  So let's do it
+     # by hand.
+     "t|trace=s"     => \@trace,
+     "p|preselect=s" => \@preselect,
+
+     # Freezing.
+     "F|freeze" => \$freeze,
+
+     # Caching.
+     "C|cache=s" => \$cache,
+     "no-cache"  => sub { $cache = undef; },
+    );
+
+  fatal "too few arguments
+Try `$me --help' for more information."
+    unless @ARGV;
+
+  # Freezing:
+  # We cannot trace at the same time (well, we can, but it sounds insane).
+  # And it implies melting: there is a risk of not updating properly using
+  # old frozen files, and worse yet: we could load a frozen file and
+  # refreeze it!  A sort of caching :)
+  fatal "cannot freeze and trace"
+    if $freeze && @trace;
+  $melt = 1
+    if $freeze;
+
+  # Names of the cache directory, cache directory index, trace cache
+  # prefix, and output cache prefix.  If the cache is not to be
+  # preserved, default to a temporary directory (automatically removed
+  # on exit).
+  $cache = $tmp
+    unless $cache;
+  $icache = "$cache/requests";
+  $tcache = "$cache/traces.";
+  $ocache = "$cache/output.";
+
+  # Normalize the includes: the first occurrence is enough, several is
+  # a pain since it introduces a useless difference in the path which
+  # invalidates the cache.  And strip `.' which is implicit and always
+  # first.
+  @include = grep { !/^\.$/ } uniq (reverse(@prepend_include), @include);
+
+  # Convert @trace to %trace, and work around the M4 builtins tracing
+  # problem.
+  # The default format is `$f:$l:$n:$%'.
+  foreach (@trace)
+    {
+      /^([^:]+)(?::(.*))?$/ms;
+      $trace{$1} = defined $2 ? $2 : '$f:$l:$n:$%';
+      $trace{$m4_builtin_alternate_name{$1}} = $trace{$1}
+	if exists $m4_builtin_alternate_name{$1};
+    }
+
+  # Work around the M4 builtins tracing problem for @PRESELECT.
+  # FIXME: Is this still needed, now that we rely on M4 1.4.5?
+  push (@preselect,
+	map { $m4_builtin_alternate_name{$_} }
+	grep { exists $m4_builtin_alternate_name{$_} } @preselect);
+
+  # If we find a frozen file, then all the files before it are
+  # discarded: the frozen file is supposed to include them all.
+  #
+  # We don't want to depend upon m4's --include to find the top level
+  # files, so we use `find_file' here.  Try to get a canonical name,
+  # as it's part of the key for caching.  And some files are optional
+  # (also handled by `find_file').
+  my @argv;
+  foreach (@ARGV)
+    {
+      if ($_ eq '-')
+	{
+	  push @argv, $_;
+	}
+      elsif (/\.m4f$/)
+	{
+	  # Frozen files are optional => pass a `?' to `find_file'.
+	  my $file = find_file ("$_?", @include);
+	  if (!$melt && $file)
+	    {
+	      @argv = ($file);
+	    }
+	  else
+	    {
+	      s/\.m4f$/.m4/;
+	      push @argv, find_file ($_, @include);
+	    }
+	}
+      else
+	{
+	  my $file = find_file ($_, @include);
+	  push @argv, $file
+	    if $file;
+	}
+    }
+  @ARGV = @argv;
+}
+
+
+# handle_m4 ($REQ, @MACRO)
+# ------------------------
+# Run m4 on the input files, and save the traces on the @MACRO.
+sub handle_m4 ($@)
+{
+  my ($req, @macro) = @_;
+
+  # GNU m4 appends when using --debugfile/--error-output.
+  unlink ($tcache . $req->id . "t");
+
+  # Run m4.
+  #
+  # We don't output directly to the cache files, to avoid problems
+  # when we are interrupted (that leaves corrupted files).
+  xsystem ("$m4 --gnu"
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . ' --debug=aflq'
+	   . (!exists $ENV{'AUTOM4TE_NO_FATAL'} ? ' --fatal-warning' : '')
+	   . " --debugfile=" . shell_quote ("$tcache" . $req->id . "t")
+	   . join (' --trace=', '', map { shell_quote ($_) } sort @macro)
+	   . " " . files_to_options (@ARGV)
+	   . " > " . shell_quote ("$ocache" . $req->id . "t"));
+
+  # Everything went ok: preserve the outputs.
+  foreach my $file (map { $_ . $req->id } ($tcache, $ocache))
+    {
+      use File::Copy;
+      move ("${file}t", "$file")
+	or fatal "cannot rename ${file}t as $file: $!";
+    }
+}
+
+
+# warn_forbidden ($WHERE, $WORD, %FORBIDDEN)
+# ------------------------------------------
+# $WORD is forbidden.  Warn with a dedicated error message if in
+# %FORBIDDEN, otherwise a simple `error: possibly undefined macro'
+# will do.
+my $first_warn_forbidden = 1;
+sub warn_forbidden ($$%)
+{
+  my ($where, $word, %forbidden) = @_;
+  my $message;
+
+  for my $re (sort keys %forbidden)
+    {
+      if ($word =~ $re)
+	{
+	  $message = $forbidden{$re};
+	  last;
+	}
+    }
+  $message ||= "possibly undefined macro: $word";
+  warn "$where: error: $message\n";
+  if ($first_warn_forbidden)
+    {
+      warn <<EOF;
+      If this token and others are legitimate, please use m4_pattern_allow.
+      See the Autoconf documentation.
+EOF
+      $first_warn_forbidden = 0;
+    }
+}
+
+
+# handle_output ($REQ, $OUTPUT)
+# -----------------------------
+# Run m4 on the input files, perform quadrigraph substitution, check for
+# forbidden tokens, and save into $OUTPUT.
+sub handle_output ($$)
+{
+  my ($req, $output) = @_;
+
+  verb "creating $output";
+
+  # Load the forbidden/allowed patterns.
+  handle_traces ($req, "$tmp/patterns",
+		 ('m4_pattern_forbid' => 'forbid:$1:$2',
+		  'm4_pattern_allow'  => 'allow:$1'));
+  my @patterns = new Autom4te::XFile ("< " . open_quote ("$tmp/patterns"))->getlines;
+  chomp @patterns;
+  my %forbidden =
+    map { /^forbid:([^:]+):.+$/ => /^forbid:[^:]+:(.+)$/ } @patterns;
+  my $forbidden = join ('|', map { /^forbid:([^:]+)/ } @patterns) || "^\$";
+  my $allowed   = join ('|', map { /^allow:([^:]+)/  } @patterns) || "^\$";
+
+  verb "forbidden tokens: $forbidden";
+  verb "forbidden token : $_ => $forbidden{$_}"
+    foreach (sort keys %forbidden);
+  verb "allowed   tokens: $allowed";
+
+  # Read the (cached) raw M4 output, produce the actual result.  We
+  # have to use the 2nd arg to have Autom4te::XFile honor the third, but then
+  # stdout is to be handled by hand :(.  Don't use fdopen as it means
+  # we will close STDOUT, which we already do in END.
+  my $out = new Autom4te::XFile;
+  if ($output eq '-')
+    {
+      $out->open (">$output");
+    }
+  else
+    {
+      $out->open($output, O_CREAT | O_WRONLY | O_TRUNC, oct ($mode));
+    }
+  fatal "cannot create $output: $!"
+    unless $out;
+  my $in = new Autom4te::XFile ("< " . open_quote ($ocache . $req->id));
+
+  my %prohibited;
+  my $res;
+  while ($_ = $in->getline)
+    {
+      s/\s+$//;
+      s/__oline__/$./g;
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+
+      $res = $_;
+
+      # Don't complain in comments.  Well, until we have something
+      # better, don't consider `#include' etc. to be comments.
+      s/\#.*//
+	unless /^\#\s*(if|include|endif|ifdef|ifndef|define)\b/;
+      foreach (split (/\W+/))
+	{
+	  $prohibited{$_} = $.
+	    if !/^$/ && /$forbidden/o && !/$allowed/o && ! exists $prohibited{$_};
+	}
+
+      # Performed *last*: the empty quadrigraph.
+      $res =~ s/\@&t\@//g;
+
+      print $out "$res\n";
+    }
+
+  $out->close();
+
+  # If no forbidden words, we're done.
+  return
+    if ! %prohibited;
+
+  # Locate the forbidden words in the last input file.
+  # This is unsatisfying but...
+  $exit_code = 1;
+  if ($ARGV[$#ARGV] ne '-')
+    {
+      my $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+      my $file = new Autom4te::XFile ("< " . open_quote ($ARGV[$#ARGV]));
+
+      while ($_ = $file->getline)
+	{
+	  # Don't complain in comments.  Well, until we have something
+	  # better, don't consider `#include' etc. to be comments.
+	  s/\#.*//
+	    unless /^\#(if|include|endif|ifdef|ifndef|define)\b/;
+
+	  # Complain once per word, but possibly several times per line.
+	  while (/$prohibited/)
+	    {
+	      my $word = $1;
+	      warn_forbidden ("$ARGV[$#ARGV]:$.", $word, %forbidden);
+	      delete $prohibited{$word};
+	      # If we're done, exit.
+	      return
+		if ! %prohibited;
+	      $prohibited = '\b(' . join ('|', keys %prohibited) . ')\b';
+	    }
+	}
+    }
+  warn_forbidden ("$output:$prohibited{$_}", $_, %forbidden)
+    foreach (sort { $prohibited{$a} <=> $prohibited{$b} } keys %prohibited);
+}
+
+
+## --------------------- ##
+## Handling the traces.  ##
+## --------------------- ##
+
+
+# $M4_MACRO
+# trace_format_to_m4 ($FORMAT)
+# ----------------------------
+# Convert a trace $FORMAT into a M4 trace processing macro's body.
+sub trace_format_to_m4 ($)
+{
+  my ($format) = @_;
+  my $underscore = $_;
+  my %escape = (# File name.
+		'f' => '$1',
+		# Line number.
+		'l' => '$2',
+		# Depth.
+		'd' => '$3',
+		# Name (also available as $0).
+		'n' => '$4',
+		# Escaped dollar.
+		'$' => '$');
+
+  my $res = '';
+  $_ = $format;
+  while ($_)
+    {
+      # $n -> $(n + 4)
+      if (s/^\$(\d+)//)
+	{
+	  $res .= "\$" . ($1 + 4);
+	}
+      # $x, no separator given.
+      elsif (s/^\$([fldn\$])//)
+	{
+	  $res .= $escape{$1};
+	}
+      # $.x or ${sep}x.
+      elsif (s/^\$\{([^}]*)\}([@*%])//
+	    || s/^\$(.?)([@*%])//)
+	{
+	  # $@, list of quoted effective arguments.
+	  if ($2 eq '@')
+	    {
+	      $res .= ']at_at([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $*, list of unquoted effective arguments.
+	  elsif ($2 eq '*')
+	    {
+	      $res .= ']at_star([' . ($1 ? $1 : ',') . '], $@)[';
+	    }
+	  # $%, list of flattened unquoted effective arguments.
+	  elsif ($2 eq '%')
+	    {
+	      $res .= ']at_percent([' . ($1 ? $1 : ':') . '], $@)[';
+	    }
+	}
+      elsif (/^(\$.)/)
+	{
+	  error "invalid escape: $1";
+	}
+      else
+	{
+	  s/^([^\$]+)//;
+	  $res .= $1;
+	}
+    }
+
+  $_ = $underscore;
+  return '[[' . $res . ']]';
+}
+
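+# Illustrative example (not from the upstream sources): for the default
+# format `$f:$l:$n:$%', trace_format_to_m4 produces roughly
+#
+#   [[$1:$2:$4:]at_percent([:], $@)[]]
+#
+# i.e. file, line and macro name expanded positionally, with the flattened
+# argument list delegated to the at_percent helper defined in handle_traces.
+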
+
+# handle_traces($REQ, $OUTPUT, %TRACE)
+# ------------------------------------
+# We use M4 itself to process the traces.  But to avoid name clashes when
+# processing the traces, the builtins are disabled, and moved into `at_'.
+# Actually, all the low level processing macros are in `at_' (and `_at_').
+# To avoid clashes between user macros and `at_' macros, the macros which
+# implement tracing are in `AT_'.
+#
+# Having $REQ is needed to neutralize the macros which have been traced,
+# but are not wanted now.
+sub handle_traces ($$%)
+{
+  my ($req, $output, %trace) = @_;
+
+  verb "formatting traces for `$output': " . join (', ', sort keys %trace);
+
+  # Processing the traces.
+  my $trace_m4 = new Autom4te::XFile ("> " . open_quote ("$tmp/traces.m4"));
+
+  $_ = <<'EOF';
+  divert(-1)
+  changequote([, ])
+  # _at_MODE(SEPARATOR, ELT1, ELT2...)
+  # ----------------------------------
+  # List the elements, separating them with SEPARATOR.
+  # MODE can be:
+  #  `at'       -- the elements are enclosed in brackets.
+  #  `star'     -- the elements are listed as are.
+  #  `percent'  -- the elements are `flattened': spaces are singled out,
+  #                and no new line remains.
+  define([_at_at],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[[$2]]],
+	     [[[$2]][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_percent],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [at_flatten([$2])],
+	     [at_flatten([$2])[$1]$0([$1], at_shift(at_shift($@)))])])
+
+  define([_at_star],
+  [at_ifelse([$#], [1], [],
+	     [$#], [2], [[$2]],
+	     [[$2][$1]$0([$1], at_shift(at_shift($@)))])])
+
+  # FLATTEN quotes its result.
+  # Note that the second pattern is `newline, tab or space'.  Don't lose
+  # the tab!
+  define([at_flatten],
+  [at_patsubst(at_patsubst([[[$1]]], [\\\n]), [[\n\t ]+], [ ])])
+
+  define([at_args],    [at_shift(at_shift(at_shift(at_shift(at_shift($@)))))])
+  define([at_at],      [_$0([$1], at_args($@))])
+  define([at_percent], [_$0([$1], at_args($@))])
+  define([at_star],    [_$0([$1], at_args($@))])
+
+EOF
+  s/^  //mg;s/\\t/\t/mg;s/\\n/\n/mg;
+  print $trace_m4 $_;
+
+  # If you trace `define', then on `define([m4_exit], defn([m4exit]))' you
+  # will produce
+  #
+  #    AT_define([m4sugar.m4], [115], [1], [define], [m4_exit], <m4exit>)
+  #
+  # Since `<m4exit>' is not quoted, the outer m4, when processing
+  # `trace.m4' will exit prematurely.  Hence, move all the builtins to
+  # the `at_' name space.
+
+  print $trace_m4 "# Copy the builtins.\n";
+  map { print $trace_m4 "define([at_$_], defn([$_]))\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+  print $trace_m4 "# Disable them.\n";
+  map { print $trace_m4 "at_undefine([$_])\n" } @m4_builtin;
+  print $trace_m4 "\n";
+
+
+  # Neutralize traces: we don't want traces of cached requests (%REQUEST).
+  print $trace_m4
+   "## -------------------------------------- ##\n",
+   "## By default neutralize all the traces.  ##\n",
+   "## -------------------------------------- ##\n",
+   "\n";
+  print $trace_m4 "at_define([AT_$_], [at_dnl])\n"
+    foreach (sort keys %{$req->macro});
+  print $trace_m4 "\n";
+
+  # Implement traces for current requests (%TRACE).
+  print $trace_m4
+    "## ------------------------- ##\n",
+    "## Trace processing macros.  ##\n",
+    "## ------------------------- ##\n",
+    "\n";
+  foreach (sort keys %trace)
+    {
+      # Trace requests can embed \n.
+      (my $comment = "Trace $_:$trace{$_}") =~ s/^/\# /;
+      print $trace_m4 "$comment\n";
+      print $trace_m4 "at_define([AT_$_],\n";
+      print $trace_m4 trace_format_to_m4 ($trace{$_}) . ")\n\n";
+    }
+  print $trace_m4 "\n";
+
+  # Reenable output.
+  print $trace_m4 "at_divert(0)at_dnl\n";
+
+  # Transform the traces from m4 into an m4 input file.
+  # Typically, transform:
+  #
+  # | m4trace:configure.ac:3: -1- AC_SUBST([exec_prefix], [NONE])
+  #
+  # into
+  #
+  # | AT_AC_SUBST([configure.ac], [3], [1], [AC_SUBST], [exec_prefix], [NONE])
+  #
+  # Pay attention that the file name might include colons, if under DOS
+  # for instance, so we don't use `[^:]+'.
+  my $traces = new Autom4te::XFile ("< " . open_quote ($tcache . $req->id));
+  while ($_ = $traces->getline)
+    {
+      # Trace with arguments, as in the example above.  We don't try
+      # to match the trailing parenthesis as it might be on a
+      # separate line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^(]+)\((.*)$}
+       {AT_$4([$1], [$2], [$3], [$4], $5};
+      # Traces without arguments, always on a single line.
+      s{^m4trace:(.+):(\d+): -(\d+)- ([^)]*)\n$}
+       {AT_$4([$1], [$2], [$3], [$4])\n};
+      print $trace_m4 "$_";
+    }
+  $trace_m4->close;
+
+  my $in = new Autom4te::XFile ("$m4 " . shell_quote ("$tmp/traces.m4") . " |");
+  my $out = new Autom4te::XFile ("> " . open_quote ($output));
+
+  # This is dubious: should we really transform the quadrigraphs in
+  # traces?  It might break balanced [ ] etc. in the output.  The
+  # consensus seems to be that traces are more useful this way.
+  while ($_ = $in->getline)
+    {
+      # It makes no sense to try to transform __oline__.
+      s/\@<:\@/[/g;
+      s/\@:>\@/]/g;
+      s/\@\{:\@/(/g;
+      s/\@:\}\@/)/g;
+      s/\@S\|\@/\$/g;
+      s/\@%:\@/#/g;
+      s/\@&t\@//g;
+      print $out $_;
+    }
+}
+
+
+# $BOOL
+# up_to_date ($REQ)
+# -----------------
+# Are the cache files of $REQ up to date?
+# $REQ is `valid' if it corresponds to the request and exists, which
+# does not mean it is up to date.  It is up to date if, in addition,
+# its files are younger than its dependencies.
+sub up_to_date ($)
+{
+  my ($req) = @_;
+
+  return 0
+    if ! $req->valid;
+
+  my $tfile = $tcache . $req->id;
+  my $ofile = $ocache . $req->id;
+
+  # We can't answer properly if the traces are not computed since we
+  # need to know what other files were included.  Actually, if any of
+  # the cache files is missing, we are not up to date.
+  return 0
+    if ! -f $tfile || ! -f $ofile;
+
+  # The youngest of the cache files must be older than the oldest of
+  # the dependencies.
+  my $tmtime = mtime ($tfile);
+  my $omtime = mtime ($ofile);
+  my ($file, $mtime) = ($tmtime < $omtime
+			? ($ofile, $omtime) : ($tfile, $tmtime));
+
+  # We depend at least upon the arguments.
+  my @dep = @ARGV;
+
+  # stdin is always out of date.
+  if (grep { $_ eq '-' } @dep)
+    { return 0 }
+
+  # Files may include others.  We can use traces since we just checked
+  # if they are available.
+  handle_traces ($req, "$tmp/dependencies",
+		 ('include'    => '$1',
+		  'm4_include' => '$1'));
+  my $deps = new Autom4te::XFile ("< " . open_quote ("$tmp/dependencies"));
+  while ($_ = $deps->getline)
+    {
+      chomp;
+      my $file = find_file ("$_?", @include);
+      # If a file which used to be included is no longer there, then
+      # don't say it's missing (it might no longer be included).  But
+      # of course, that causes the output to be outdated (as if the
+      # time stamp of that missing file was newer).
+      return 0
+	if ! $file;
+      push @dep, $file;
+    }
+
+  # If $FILE is younger than one of its dependencies, it is outdated.
+  return up_to_date_p ($file, @dep);
+}
+
+
+## ---------- ##
+## Freezing.  ##
+## ---------- ##
+
+# freeze ($OUTPUT)
+# ----------------
+sub freeze ($)
+{
+  my ($output) = @_;
+
+  # When processing the file with diversion disabled, there must be no
+  # output but comments and empty lines.
+  my $result = xqx ("$m4"
+		    . ' --fatal-warning'
+		    . join (' --include=', '', map { shell_quote ($_) } @include)
+		    . ' --define=divert'
+		    . " " . files_to_options (@ARGV)
+		    . ' </dev/null');
+  $result =~ s/#.*\n//g;
+  $result =~ s/^\n//mg;
+
+  fatal "freezing produced output:\n$result"
+    if $result;
+
+  # If freezing produces output, something went wrong: a bad `divert',
+  # or an improper paren etc.
+  xsystem ("$m4"
+	   . ' --fatal-warning'
+	   . join (' --include=', '', map { shell_quote ($_) } @include)
+	   . " --freeze-state=" . shell_quote ($output)
+	   . " " . files_to_options (@ARGV)
+	   . ' </dev/null');
+}
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+mktmpdir ('am4t');
+load_configuration ($ENV{'AUTOM4TE_CFG'} || "$pkgdatadir/autom4te.cfg");
+load_configuration ("$ENV{'HOME'}/.autom4te.cfg")
+  if exists $ENV{'HOME'} && -f "$ENV{'HOME'}/.autom4te.cfg";
+load_configuration (".autom4te.cfg")
+  if -f ".autom4te.cfg";
+parse_args;
+
+# Freezing does not involve the cache.
+if ($freeze)
+  {
+    freeze ($output);
+    exit $exit_code;
+  }
+
+# We need our cache directory.  Don't fail with parallel creation.
+if (! -d "$cache")
+  {
+    mkdir "$cache", 0755
+      or -d "$cache"
+      or fatal "cannot create $cache: $!";
+  }
+
+# Open the index for update, and lock it.  autom4te handles several
+# files, but the index is the first and last file to be updated, so
+# locking it is sufficient.
+$icache_file = new Autom4te::XFile $icache, O_RDWR|O_CREAT;
+$icache_file->lock (LOCK_EX)
+  if ($flock_implemented eq "yes");
+
+# Read the cache index if available and older than autom4te itself.
+# If autom4te is younger, then some structures such as C4che might
+# have changed, which would corrupt its processing.
+Autom4te::C4che->load ($icache_file)
+  if -f $icache && mtime ($icache) > mtime ($0);
+
+# Add the new trace requests.
+my $req = Autom4te::C4che->request ('input' => \@ARGV,
+				    'path'  => \@include,
+				    'macro' => [keys %trace, @preselect]);
+
+# If $REQ's cache files are not up to date, or simply if the user
+# discarded them (-f), declare it invalid.
+$req->valid (0)
+  if $force || ! up_to_date ($req);
+
+# We now know whether we can trust the Request object.  Say it.
+verb "the trace request object is:\n" . $req->marshall;
+
+# We need to run M4 if (i) the user wants it (--force), (ii) $REQ is
+# invalid.
+handle_m4 ($req, keys %{$req->macro})
+  if $force || ! $req->valid;
+
+# Issue the warnings each time autom4te was run.
+my $separator = "\n" . ('-' x 25) . " END OF WARNING " . ('-' x 25) . "\n\n";
+handle_traces ($req, "$tmp/warnings",
+	       ('_m4_warn' => "\$1::\$f:\$l::\$2::\$3$separator"));
+# Swallow excessive newlines.
+for (split (/\n*$separator\n*/o, contents ("$tmp/warnings")))
+{
+  # The message looks like:
+  # | syntax::input.as:5::ouch
+  # | ::input.as:4: baz is expanded from...
+  # | input.as:2: bar is expanded from...
+  # | input.as:3: foo is expanded from...
+  # | input.as:5: the top level
+  # In particular, m4_warn guarantees that either $stackdump is empty, or
+  # it consists of lines where only the last line ends in "top level".
+  my ($cat, $loc, $msg, $stacktrace) = split ('::', $_, 4);
+  msg $cat, $loc, "warning: $msg",
+    partial => ($stacktrace =~ /top level$/) + 0;
+  for (split /\n/, $stacktrace)
+    {
+      my ($loc, $trace) = split (': ', $_, 2);
+      msg $cat, $loc, $trace, partial => ($trace !~ /top level$/) + 0;
+    }
+}
+
+# Now output...
+if (%trace)
+  {
+    # Always produce traces, since even if the output is young enough,
+    # there is no guarantee that the traces use the same *format*
+    # (e.g., `-t FOO:foo' and `-t FOO:bar' are both using the same M4
+    # traces, hence the M4 traces cache is usable, but its formatting
+    # will yield different results).
+    handle_traces ($req, $output, %trace);
+  }
+else
+  {
+    # Actual M4 expansion, if the user wants it, or if $output is old
+    # (STDOUT is pretty old).
+    handle_output ($req, $output)
+      if $force || mtime ($output) < mtime ($ocache . $req->id);
+  }
+
+# If we ran up to here, the cache is valid.
+$req->valid (1);
+Autom4te::C4che->save ($icache_file);
+
+exit $exit_code;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/automake b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/automake
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/automake
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns up other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing single-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sort of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN='[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
+
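+# Illustrative Makefile.am lines matched by $INCLUDE_PATTERN above (the file
+# names are hypothetical):
+#
+#   include $(top_srcdir)/lib/rules.mk
+#   include $(srcdir)/extra.am
+#   include fragment.am
+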
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location to a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINKS/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps language names onto language objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# Maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# on $KNOWN_EXTENSIONS_PATTERN, which is used when parsing
+# the input am file.
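+# For example, a 'SUFFIXES = .idl .tpl' line in a Makefile.am passes
+# '.idl' and '.tpl' to accept_extensions() through this hook.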
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries= ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
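+# When interpolated into a 'compile' command below, @cpplike_flags expands
+# to the usual preprocessor flag set, e.g. for C:
+#   $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS)
+#     $(AM_CFLAGS) $(CFLAGS)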
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
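+# (The 'tr/y/c/' in output_extensions maps, e.g., '.y' to '.c', '.yy' to
+# '.cc' and '.ypp' to '.cpp'.)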
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
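+# For example, subst ('CC') returns the literal string '@CC@'.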
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with non strictly increasing paths, i.e., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
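+# For example, verbose_var ('CC') yields 'AM_V_CC', while
+# verbose_private_var ('CC') yields 'am__v_CC'.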
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For silent rules, set up VAR and its dispatcher, to expand to
+# VAL-IF-SILENT if silent, to VAL-IF-VERBOSE (defaulting to
+# empty) if not.
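+# For illustration, define_verbose_var ('GEN', '@echo "  GEN     " $@;')
+# results (roughly) in the following Makefile.in definitions:
+#   AM_V_GEN = $(am__v_GEN_@AM_V@)
+#   am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+#   am__v_GEN_0 = @echo "  GEN     " $@;
+#   am__v_GEN_1 =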
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of %SILENT%: variable to expand to '@' when silent.
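+# (That is '$(AM_V_at)'; handle_silent() below defines it to '@' when
+# silent and to nothing when verbose.)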
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
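+# For example, define_verbose_tagvar ('CXX') makes silent builds print
+# progress lines of the form '  CXX      foo.o'.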
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
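+    # A recipe can then use it as a predicate, e.g.
+    # '$(AM_V_P) && echo being-verbose || :', since AM_V_P expands to
+    # 'false' when silent and to ':' when verbose.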
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
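+# For example, an 'AUTOMAKE_OPTIONS = foreign subdir-objects' line in a
+# Makefile.am is picked up and processed here; GNITS strictness additionally
+# implies the 'readme-alpha', 'std-options' and 'check-news' options.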
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
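+# For example, a conditionally defined 'foo_SOURCES' yields
+# '$(am__foo_SOURCES_DIST)'.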
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Make sure no variable VAR in @LIST is overridden in the Makefile.am;
+# suggest using AM_VAR instead if it is.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
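+# For example, 'append_exeext { 1 } "bin_PROGRAMS";' rewrites an entry 'foo'
+# to 'foo$(EXEEXT)' while leaving 'bar$(EXEEXT)' and '@BAZ@' untouched
+# (per the rules in the transform sub below).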
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE the source file to transform
+#   %TRANSFORM contains extra arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # The next major version of Automake (2.0) will make
+                  # the behaviour that is currently only activated by the
+                  # 'subdir-objects' option mandatory, so it's better to
+                  # start warning users who do not use that option.
+                  # As suggested by Peter Johansson, we strive to avoid
+                  # the warning when it would be irrelevant, i.e., if
+                  # all source files sit in the "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least a source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+of the corresponding sources.
+You are advised to start using 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inference rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of four values followed by additional transform flags for
+		# file_contents.  The four values are the derived flag prefix
+		# (e.g. for 'foo_CFLAGS', it is 'foo'), the name of the
+		# source file, the base name of the output file, and
+		# the extension for the object file.
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the source they are
+	    # derived from is not.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  Not only is this more
+                # efficient than listing the object files to be removed
+                # individually (which would cause an 'rm' invocation for
+                # each of them -- very inefficient, see bug#10697), it
+                # also avoids leaving stale object files in the subdirectory
+                # whenever a source file there is removed or renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
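+        # For example, object 'sub/foo.$(OBJEXT)' is mapped to dep file
+        # 'sub/$(DEPDIR)/foo.Po', and 'sub/foo.lo' to 'sub/$(DEPDIR)/foo.Plo'.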
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# ---------------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated and returned).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# Result is $LINKER, a boolean-like value that is true if a linker is
+# needed to deal with the objects.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.o' or '$o')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
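+
+# Illustration only (not part of the original sources; 'frob' is a made-up
+# name).  Given a hypothetical Makefile.am fragment
+#
+#    bin_PROGRAMS = frob
+#    frob_SOURCES = main.c util.c
+#
+# handle_source_transform finds 'frob_SOURCES', defines
+# 'am_frob_OBJECTS = main.$(OBJEXT) util.$(OBJEXT)' and sets
+# 'frob_OBJECTS = $(am_frob_OBJECTS)'.  Had no '*_SOURCES' variable been
+# given at all, the default-source fallback above would define
+# 'frob_SOURCES = frob.c' (or 'frob$(AM_DEFAULT_SOURCE_EXT)' when that
+# variable is set).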
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlpreopen
+	       # are pure nonsense.  Diagnosing this doesn't seem very
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
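+
+# Rough sketch of the filtering above (assumed names, not from the original
+# sources).  For a hypothetical
+#
+#    frob_LDADD = libaux.a @LIBOBJS@ -lm -static
+#
+# handle_lib_objects copies 'libaux.a' into 'frob_DEPENDENCIES', expands
+# '@LIBOBJS@' through handle_LIBOBJS (and returns 1 to report that LIBOBJS
+# was seen), silently drops '-lm' (plain -l/-L flags are allowed but never
+# become dependencies), and diagnoses '-static' with "linker flags such as
+# '-static' belong in 'frob_LDFLAGS'".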
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clear LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
+
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
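+
+# Illustration only (assumed layout): with AC_CONFIG_LIBOBJ_DIR([lib]) in
+# configure.ac, a Makefile.am in 'src/' that uses @LIBOBJS@ is rejected
+# unless the 'subdir-objects' option is active.  With the option, LIBOBJDIR
+# is defined to a builddir-relative path such as '../lib/', '$(LIBOBJS)' is
+# scheduled for mostlyclean, and a replacement source such as 'strdup.c'
+# contributes a dependency file entry like '../lib/$(DEPDIR)/strdup.Po'
+# ('.Plo' for the libtool variant).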
+
+# Canonicalize the input parameter.
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
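+
+# For example (illustrative values): canonicalize ('libfoo.la') yields
+# 'libfoo_la', since every character outside [A-Za-z0-9_@] is mapped to an
+# underscore.  check_canonical_spelling ('libfoo.la', '_SOURCES') therefore
+# returns 'libfoo_la' and, if a stray 'libfoo.la_SOURCES' variable is
+# defined, rejects it with
+# "use 'libfoo_la_SOURCES', not 'libfoo.la_SOURCES'".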
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+   return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use `-I.@am__isrc@`
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
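+
+# Rough illustration (made-up program name): for
+#
+#    bin_PROGRAMS = sub/frob
+#
+# the canonical name is 'sub_frob', so the per-target variables are spelled
+# 'sub_frob_SOURCES', 'sub_frob_LDADD', and so on.  When no 'sub_frob_LDADD'
+# is given it defaults to '$(LDADD)', a dirstamp dependency ensures 'sub/'
+# exists before the program is linked, and the link rule itself comes from
+# the 'program' fragment ('lib/am/program.am' in the Automake distribution).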
+
+
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we at least look at this.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
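+
+# Illustration (hypothetical names): 'lib_LIBRARIES = foo.a' does not match
+# the expected 'lib*.a' pattern, so the check above emits "'foo.a' is not a
+# standard library name / did you mean 'libfoo.a'?".  A conforming
+# 'lib_LIBRARIES = libfoo.a' gets 'libfoo_a_AR' defaulted to
+# '$(AR) $(ARFLAGS)' and an empty 'libfoo_a_LIBADD' unless the Makefile.am
+# provides its own.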
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true there is only one condition and
+	      # there is no point defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to cleanup .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
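+
+# Illustration (assumed names): for 'lib_LTLIBRARIES = libfrob.la' the
+# generated link command receives '-rpath $(libdir)', while for
+# 'noinst_LTLIBRARIES = libfrob.la' (likewise check_ or EXTRA_) the rpath is
+# left empty because the final installation directory is unknown.  A library
+# listed under two different directories in overlapping conditions is
+# rejected by the traversal above, since only one '-rpath' can be passed.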
+
+# See if any _SOURCES variables were misspelled.
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
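+
+# Example of the check above (made-up name): if a Makefile.am defines
+#
+#    frobber_SOURCES = frobber.c
+#
+# but no program or library has 'frobber' as its canonical name, the
+# definition is never marked as seen, and automake warns "variable
+# 'frobber_SOURCES' is defined but no program or library has 'frobber' as
+# canonical name (possible typo)".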
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
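+
+# Illustration (hypothetical input): a 'doc/frob.texi' containing
+#
+#    @setfilename frob.info
+#    @include version.texi
+#
+# makes scan_texinfo_file return ('frob.info', 'version.texi').  Only the
+# first @setfilename is honored, a suffix-less output name draws an
+# 'obsolete' warning, and any extension other than '.info' is an error.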
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice); while the latter need to be output for
+  # each Texinfo source.
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuse Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files is important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #	  |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files are always built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date source-tree copy of the .info file existed,
+      #       make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) is triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
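+
+# Illustration (assumed page names): with
+#
+#    man_MANS = frob.1 frob.3
+#    notrans_man8_MANS = frob-helper.8
+#
+# sections 1, 3 and 8 are detected, install and uninstall rules are emitted
+# once per section from the 'mans' fragment, and the notrans_ pages are
+# installed under their literal names, without the '$(transform)' renaming
+# applied to ordinary pages.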
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents for no reason.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check all items in variable $VARIABLE as directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
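+# For example (with hypothetical names), if 'bindir' is an AC_SUBST
+# variable but 'FOO' is not, then
+#   substitute_ac_subst_variables ('${bindir}/x ${FOO}/y')
+# would return '@bindir@/x ${FOO}/y'.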
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
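+# For example (hypothetical names), with $relative_dir set to 'sub',
+#   prepend_srcdir ('sub/foo.in', 'lib/bar.in')
+# would return ('$(srcdir)/foo.in', '$(top_srcdir)/lib/bar.in').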
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUT[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by other AC_CONFIG_FOOs.
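+# As a rough illustration (hypothetical files): in a subdirectory 'sub',
+# an input 'sub/foo.in' that is not itself a config output becomes
+# '$(srcdir)/foo.in' and is distributed, while an input 'version.h' that
+# is generated by config.status at the top level becomes
+# '$(top_builddir)/version.h'.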
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %MAKE_LIST.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
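+  # As an illustration (hypothetical output): at the top level, a
+  # non-Makefile output 'sub/foo.conf' with input 'sub/foo.conf.in'
+  # makes the loop above emit roughly
+  #   sub/foo.conf: $(top_builddir)/config.status $(top_srcdir)/sub/foo.conf.in
+  #           cd $(top_builddir) && $(SHELL) ./config.status $@
+  # (the recipe line being tab-indented).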
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
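+    # For instance (hypothetical suffixes), with a user-set 'SUFFIXES = .x'
+    # and '.c' and '.o' known internally, the header would contain:
+    #   .SUFFIXES:
+    #   .SUFFIXES: .x .c .o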
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by changing the order of dependencies to the "all" because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
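+  # E.g., when both BUILT_SOURCES and SUBDIRS are set, the above emits:
+  #   check: $(BUILT_SOURCES)
+  #           $(MAKE) $(AM_MAKEFLAGS) check-recursive
+  # (the recipe line being tab-indented).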
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If the file is a variable, make sure we don't call 'rm -f' without args.
+      $rm ="test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
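+  # For a variable entry such as '$(CLEANFILES)' (when defined), the line
+  # pushed above is e.g.:
+  #   -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)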
+
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
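+# For example, sorting ('install-am', '.PHONY', 'all-am') with target_cmp
+# gives ('all-am', 'install-am', '.PHONY').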
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing about this guy, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
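+  # E.g., for a (hypothetical) $test_suffix of '.sh', $pfx is 'SH_', so
+  # the variables handled below are SH_LOG_DRIVER, SH_LOG_COMPILER,
+  # AM_SH_LOG_FLAGS and SH_LOG_FLAGS.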
+  # The "test driver" program, deputed to handle the test protocol used by
+  # test scripts.  By default, it's assumed that no protocol is used, so
+  # we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
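+# For example, '.sh' and '.test' are accepted, while an extension such as
+# '.foo-bar' is rejected (unless it happens to equal $(EXEEXT)).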
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
+	  my $nhelper=1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
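+	  # With, say, TEST_EXTENSIONS = '.sh .test' and no $(EXEEXT)
+	  # (hypothetical setup), the loop above defines roughly:
+	  #   am__test_logs1 = $(TESTS:=.log)
+	  #   am__test_logs2 = $(am__test_logs1:.sh.log=.log)
+	  #   TEST_LOGS = $(am__test_logs2:.test.log=.log)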
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
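+# For example (hypothetical specs), 'Makefile:mk.top:mk.bot' yields
+# ('Makefile', 'mk.top', 'mk.bot'), and a bare 'sub/Makefile' yields
+# ('sub/Makefile', 'sub/Makefile.in').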
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
+
+  return ($output, @inputs);
+}
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as Makefile:top.in:mid.in:bot.in.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
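+# For example (hypothetical files), given ('sub/Makefile.in', 'common.in')
+# it returns 'sub/Makefile.in' when 'sub/Makefile.am' exists, and undef
+# when no listed *.in has a matching *.am.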
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
+  @cond_stack = ();
+  my $where;
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-arguments forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 1.6.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate version mismatch to missing.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: Return value is LANG_SUBDIR if the resulting object
+# file should be in a subdir if the source file is, LANG_PROCESS if
+# the file is to be dealt with, LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs,
+	      # so distribute the generated files.
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards);
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code; do not run the C compiler.
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
+
+# Add output rules to invoke valac and create a stamp file as a witness
+# to handle multiple outputs. This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
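+# For example, if %LINKERS contains both CXXLINK and F77LINK, CXXLINK is
+# returned because it appears earlier in the list below.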
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
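+# In practice this repeatedly follows registered suffix rules: e.g. with
+# a (hypothetical) user rule '.zoo.cc:', a '.zoo' source extension is
+# walked along the suffix chain until it reaches '.cc', which we know
+# how to handle, or until it reaches $OBJ itself.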
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
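+# Map the conditional name used in a Makefile.am 'if' onto the
+# corresponding configure condition: 'FOO' becomes 'FOO_TRUE', and when
+# $NEGATE is set the result is negated via
+# Automake::Condition::conditional_negate; the literal TRUE and FALSE
+# conditions are left unsuffixed.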
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
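+# Push the condition corresponding to an 'if' (or 'if !') seen in the
+# input onto @cond_stack, diagnosing conditionals that never appeared in
+# an AM_CONDITIONAL, and return the Automake::Condition covering the
+# whole stack.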
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; this should be the empty
+# string to define the variable unconditionally.  The third argument
+# is a list holding the values to use for the variable.  The value is
+# pretty printed in the output file.
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR, whose content is the list of file names composed of
+# the @BASENAME entries and the $EXTENSION.
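+# For example, with @BASENAME containing ('foo', 'bar') and an
+# $EXTENSION of 'c', $VAR is defined to "foo.c bar.c".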
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution by the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of 'AM_$flag' in the link
+  # command into '${target}_$flag' if it exists.
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
+  # If the computed command is the same as the generic command, use
+  # the generic linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a backslash.
+# Might modify $LINE.
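+# For example, a line ending in a backslash followed by blanks triggers
+# a 'syntax' warning and is rewritten to end in a bare backslash.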
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
+
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format they were input.  However, POSIX compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover nobody
+	    # would really write such long lines by hand since it is
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tools, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
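+# Join backslash-continued lines and squeeze whitespace; e.g. "foo \"
+# continued by "   bar" flattens to "foo bar".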
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated to $KEY in %PAIRS, as used on $TOKEN
+# (which should be ?KEY? or any of the special %% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($TOKEN, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
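+# For instance, with %PAIRS = (FOO => 'bar', COND => 1):
+#   '%FOO%'   is replaced by 'bar',
+#   '%?COND%' is replaced by 'TRUE',
+#   '?COND?'  expands to '' (the guarded text is kept), and
+#   '?!COND?' expands to '##%' (such lines are stripped afterwards).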
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., recognition of
+# rule declarations or of make variable definitions).
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides we don't need more than two consecutive new-lines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If we are a rule, eat lines as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If we are a comment, eat as many comment lines as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known. $IS_AM is true iff the caller is
+# reading an Automake file (as opposed to the user's Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate relationship from optional actions: the first
+	  # "new-line tab" not preceded by backslash (continuation
+	  # line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several time (e.g. for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A directory that is not explicitly valid is allowed if Xdir is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns list of [$location, $value] pairs, where
+# $value's are the values in all where_HOW variable, and $location
+# there associated location (the place here their parent variables were
+# defined).
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, with install-exec? (or install-data?).
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be configure time determined.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure. (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory as the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Check that the file exists in $DIRECTORY, or install it.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
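+
+# For example, with 'depcomp' missing from the auxiliary directory (the
+# file name is only an illustration), the behavior sketched above is
+# roughly:
+#
+#   % automake                        => error: required file not found
+#   % automake --add-missing          => symlink it from $libdir
+#   % automake --add-missing --copy   => copy it instead of symlinking
+#   % automake --force-missing -a     => reinstall it even if present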
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Verify that each of @FILES exists in $DIRECTORY, or install it.
+# $MYSTRICT is the strictness level at which these files become required.
+# Worker threads may queue up the action to be serialized by the master
+# if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
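+
+# For illustration, a queued request for two aux files (the names below
+# are only examples) travels through a message queue as the flat record
+#
+#   QUEUE_CONF_FILE, $dir, QUEUE_STRING, "<where>", $mystrict,
+#   2, 'depcomp', 'missing'
+#
+# with the leading key consumed by the master's dispatch loop and the
+# remaining fields dequeued above in the same order they were enqueued.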
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
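+
+# For instance, require_build_directory_maybe ('sub/foo.o') returns
+# 'sub/$(am__dirstamp)' and emits the rule that creates 'sub/', whereas
+# a file with no subdirectory component, such as 'foo.o', yields ''.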
+
+################################################################
+
+# Push a list of files onto '@dist_common'.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
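+
+# E.g., push_dist_common ('COPYING', 'INSTALL') appends those two names to
+# the DIST_COMMON variable of the Makefile.in currently being generated.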
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warning issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations.
+  #  * $force_generation is set.
+  #  * any of the output dependencies is younger than the output
+  #  * the contents of the output is different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # Output's dependencies are split in two sets:
+  #  * dependencies which are also configure dependencies
+  #    These do not change from one Makefile.am to the next.
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
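+
+# As a worked example of the index arithmetic above: 10 names in 4 columns
+# give 3 rows with a remainder of 2, so the first two columns list 3 names
+# each and the last two list 2 each, keeping the output roughly
+# column-major like a multi-column 'ls'.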
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
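+
+# For example, 'automake sub/Makefile' behaves like
+# 'automake sub/Makefile:sub/Makefile.in'; further ':'-separated entries
+# name additional config.status inputs for that output, mirroring the
+# AC_CONFIG_FILES([output:input...]) syntax.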
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning setting now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
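+
+# For example, a tree with many Makefile.am's can be processed
+# concurrently (given a Perl built with thread support) with something
+# like:
+#
+#   % AUTOMAKE_JOBS=4 automake
+#
+# Any value that is not a plain non-negative integer falls back to 0,
+# i.e., serial processing.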
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles, the message queues
+  # collect all serializations needed for respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/automake-1.14 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/automake-1.14
new file mode 100755
index 0000000..9c74dd3
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/automake-1.14
@@ -0,0 +1,8298 @@
+#!/usr/bin/perl -w
+# -*- perl -*-
+# Generated from bin/automake.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# automake - create Makefile.in from Makefile.am
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David Mackenzie <djm@gnu.ai.mit.edu>.
+# Perl reimplementation by Tom Tromey <tromey@redhat.com>, and
+# Alexandre Duret-Lutz <adl@gnu.org>.
+
+package Automake;
+
+use strict;
+
+BEGIN
+{
+  @Automake::perl_libdirs = ('//share/automake-1.14')
+    unless @Automake::perl_libdirs;
+  unshift @INC, @Automake::perl_libdirs;
+
+  # Override SHELL.  This is required on DJGPP so that system() uses
+  # bash, not COMMAND.COM which doesn't quote arguments properly.
+  # Other systems aren't expected to use $SHELL when Automake
+  # runs, but it should be safe to drop the "if DJGPP" guard if
+  # it turns out other systems need the same thing.  After all,
+  # if SHELL is used, ./configure's SHELL is always better than
+  # the user's SHELL (which may be something like tcsh).
+  $ENV{'SHELL'} = '/bin/sh' if exists $ENV{'DJDIR'};
+}
+
+use Automake::Config;
+BEGIN
+{
+  if ($perl_threads)
+    {
+      require threads;
+      import threads;
+      require Thread::Queue;
+      import Thread::Queue;
+    }
+}
+use Automake::General;
+use Automake::XFile;
+use Automake::Channels;
+use Automake::ChannelDefs;
+use Automake::Configure_ac;
+use Automake::FileUtils;
+use Automake::Location;
+use Automake::Condition qw/TRUE FALSE/;
+use Automake::DisjConditions;
+use Automake::Options;
+use Automake::Variable;
+use Automake::VarDef;
+use Automake::Rule;
+use Automake::RuleDef;
+use Automake::Wrap 'makefile_wrap';
+use Automake::Language;
+use File::Basename;
+use File::Spec;
+use Carp;
+
+## ----------------------- ##
+## Subroutine prototypes.  ##
+## ----------------------- ##
+
+# BEGIN AUTOMATICALLY GENERATED PROTOTYPES
+sub append_exeext (&$);
+sub check_gnits_standards ();
+sub check_gnu_standards ();
+sub check_trailing_slash ($\$);
+sub check_typos ();
+sub define_files_variable ($\@$$);
+sub define_standard_variables ();
+sub define_verbose_libtool ();
+sub define_verbose_texinfo ();
+sub do_check_merge_target ();
+sub get_number_of_threads ();
+sub handle_compile ();
+sub handle_data ();
+sub handle_dist ();
+sub handle_emacs_lisp ();
+sub handle_factored_dependencies ();
+sub handle_footer ();
+sub handle_gettext ();
+sub handle_headers ();
+sub handle_install ();
+sub handle_java ();
+sub handle_languages ();
+sub handle_libraries ();
+sub handle_libtool ();
+sub handle_ltlibraries ();
+sub handle_makefiles_serial ();
+sub handle_man_pages ();
+sub handle_minor_options ();
+sub handle_options ();
+sub handle_programs ();
+sub handle_python ();
+sub handle_scripts ();
+sub handle_silent ();
+sub handle_subdirs ();
+sub handle_tags ();
+sub handle_tests ();
+sub handle_tests_dejagnu ();
+sub handle_texinfo ();
+sub handle_user_recursion ();
+sub initialize_per_input ();
+sub lang_lex_finish ();
+sub lang_sub_obj ();
+sub lang_vala_finish ();
+sub lang_yacc_finish ();
+sub locate_aux_dir ();
+sub parse_arguments ();
+sub scan_aclocal_m4 ();
+sub scan_autoconf_files ();
+sub silent_flag ();
+sub transform ($\%);
+sub transform_token ($\%$);
+sub usage ();
+sub version ();
+sub yacc_lex_finish_helper ();
+# END AUTOMATICALLY GENERATED PROTOTYPES
+
+
+## ----------- ##
+## Constants.  ##
+## ----------- ##
+
+# Some regular expressions.  One reason to put them here is that it
+# makes indentation work better in Emacs.
+
+# Writing singled-quoted-$-terminated regexes is a pain because
+# perl-mode thinks of $' as the ${'} variable (instead of a $ followed
+# by a closing quote).  Letting perl-mode think the quote is not closed
+# leads to all sort of misindentations.  On the other hand, defining
+# regexes as double-quoted strings is far less readable.  So usually
+# we will write:
+#
+#  $REGEX = '^regex_value' . "\$";
+
+my $IGNORE_PATTERN = '^\s*##([^#\n].*)?\n';
+my $WHITE_PATTERN = '^\s*' . "\$";
+my $COMMENT_PATTERN = '^#';
+my $TARGET_PATTERN='[$a-zA-Z0-9_.@%][-.a-zA-Z0-9_(){}/$+@%]*';
+# A rule has three parts: a list of targets, a list of dependencies,
+# and optionally actions.
+my $RULE_PATTERN =
+  "^($TARGET_PATTERN(?:(?:\\\\\n|\\s)+$TARGET_PATTERN)*) *:([^=].*|)\$";
+
+# Only recognize leading spaces, not leading tabs.  If we recognize
+# leading tabs here then we need to make the reader smarter, because
+# otherwise it will think rules like 'foo=bar; \' are errors.
+my $ASSIGNMENT_PATTERN = '^ *([^ \t=:+]*)\s*([:+]?)=\s*(.*)' . "\$";
+# This pattern recognizes a Gnits version id and sets $1 if the
+# release is an alpha release.  We also allow a suffix which can be
+# used to extend the version number with a "fork" identifier.
+my $GNITS_VERSION_PATTERN = '\d+\.\d+([a-z]|\.\d+)?(-[A-Za-z0-9]+)?';
+
+my $IF_PATTERN = '^if\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*)\s*(?:#.*)?' . "\$";
+my $ELSE_PATTERN =
+  '^else(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $ENDIF_PATTERN =
+  '^endif(?:\s+(!?)\s*([A-Za-z][A-Za-z0-9_]*))?\s*(?:#.*)?' . "\$";
+my $PATH_PATTERN = '(\w|[+/.-])+';
+# This will pass through anything not of the prescribed form.
+my $INCLUDE_PATTERN = ('^include\s+'
+		       . '((\$\(top_srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|(\$\(srcdir\)/' . $PATH_PATTERN . ')'
+		       . '|([^/\$]' . $PATH_PATTERN . '))\s*(#.*)?' . "\$");
+
+# Directories installed during 'install-exec' phase.
+my $EXEC_DIR_PATTERN =
+  '^(?:bin|sbin|libexec|sysconf|localstate|lib|pkglib|.*exec.*)' . "\$";
+
+# Values for AC_CANONICAL_*
+use constant AC_CANONICAL_BUILD  => 1;
+use constant AC_CANONICAL_HOST   => 2;
+use constant AC_CANONICAL_TARGET => 3;
+
+# Values indicating when something should be cleaned.
+use constant MOSTLY_CLEAN     => 0;
+use constant CLEAN            => 1;
+use constant DIST_CLEAN       => 2;
+use constant MAINTAINER_CLEAN => 3;
+
+# Libtool files.
+my @libtool_files = qw(ltmain.sh config.guess config.sub);
+# ltconfig appears here for compatibility with old versions of libtool.
+my @libtool_sometimes = qw(ltconfig ltcf-c.sh ltcf-cxx.sh ltcf-gcj.sh);
+
+# Commonly found files we look for and automatically include in
+# DISTFILES.
+my @common_files =
+    (qw(ABOUT-GNU ABOUT-NLS AUTHORS BACKLOG COPYING COPYING.DOC COPYING.LIB
+	COPYING.LESSER ChangeLog INSTALL NEWS README THANKS TODO
+	ar-lib compile config.guess config.rpath
+	config.sub depcomp install-sh libversion.in mdate-sh
+	missing mkinstalldirs py-compile texinfo.tex ylwrap),
+     @libtool_files, @libtool_sometimes);
+
+# Commonly used files we auto-include, but only sometimes.  This list
+# is used for the --help output only.
+my @common_sometimes =
+  qw(aclocal.m4 acconfig.h config.h.top config.h.bot configure
+     configure.ac configure.in stamp-vti);
+
+# Standard directories from the GNU Coding Standards, and additional
+# pkg* directories from Automake.  Stored in a hash for fast member check.
+my %standard_prefix =
+    map { $_ => 1 } (qw(bin data dataroot doc dvi exec html include info
+			lib libexec lisp locale localstate man man1 man2
+			man3 man4 man5 man6 man7 man8 man9 oldinclude pdf
+			pkgdata pkginclude pkglib pkglibexec ps sbin
+			sharedstate sysconf));
+
+# Copyright on generated Makefile.ins.
+my $gen_copyright = "\
+# Copyright (C) 1994-$RELEASE_YEAR Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+";
+
+# These constants are returned by the lang_*_rewrite functions.
+# LANG_SUBDIR means that the resulting object file should be in a
+# subdir if the source file is.  In this case the file name cannot
+# have '..' components.
+use constant LANG_IGNORE  => 0;
+use constant LANG_PROCESS => 1;
+use constant LANG_SUBDIR  => 2;
+
+# These are used when keeping track of whether an object can be built
+# by two different paths.
+use constant COMPILE_LIBTOOL  => 1;
+use constant COMPILE_ORDINARY => 2;
+
+# We can't always associate a location to a variable or a rule,
+# when it's defined by Automake.  We use INTERNAL in this case.
+use constant INTERNAL => new Automake::Location;
+
+# Serialization keys for message queues.
+use constant QUEUE_MESSAGE   => "msg";
+use constant QUEUE_CONF_FILE => "conf file";
+use constant QUEUE_LOCATION  => "location";
+use constant QUEUE_STRING    => "string";
+
+## ---------------------------------- ##
+## Variables related to the options.  ##
+## ---------------------------------- ##
+
+# TRUE if we should always generate Makefile.in.
+my $force_generation = 1;
+
+# From the Perl manual.
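+# (The eval probes whether symlink() is implemented on this platform:
+# $@ stays empty when the call merely fails, but is set when Perl dies
+# because the symlink function is unimplemented.)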
+my $symlink_exists = (eval 'symlink ("", "");', $@ eq '');
+
+# TRUE if missing standard files should be installed.
+my $add_missing = 0;
+
+# TRUE if we should copy missing files; otherwise symlink if possible.
+my $copy_missing = 0;
+
+# TRUE if we should always update files that we know about.
+my $force_missing = 0;
+
+
+## ---------------------------------------- ##
+## Variables filled during files scanning.  ##
+## ---------------------------------------- ##
+
+# Name of the configure.ac file.
+my $configure_ac;
+
+# Files found by scanning configure.ac for LIBOBJS.
+my %libsources = ();
+
+# Names used in AC_CONFIG_HEADERS call.
+my @config_headers = ();
+
+# Names used in AC_CONFIG_LINKS call.
+my @config_links = ();
+
+# List of Makefile.am's to process, and their corresponding outputs.
+my @input_files = ();
+my %output_files = ();
+
+# Complete list of Makefile.am's that exist.
+my @configure_input_files = ();
+
+# List of files in AC_CONFIG_FILES/AC_OUTPUT without Makefile.am's,
+# and their outputs.
+my @other_input_files = ();
+# Where each AC_CONFIG_FILES/AC_OUTPUT/AC_CONFIG_LINK/AC_CONFIG_HEADERS
+# appears.  The keys are the files created by these macros.
+my %ac_config_files_location = ();
+# The condition under which AC_CONFIG_FOOS appears.
+my %ac_config_files_condition = ();
+
+# Directory to search for configure-required files.  This
+# will be computed by locate_aux_dir() and can be set using
+# AC_CONFIG_AUX_DIR in configure.ac.
+# $CONFIG_AUX_DIR is the 'raw' directory, valid only in the source-tree.
+my $config_aux_dir = '';
+my $config_aux_dir_set_in_configure_ac = 0;
+# $AM_CONFIG_AUX_DIR is prefixed with $(top_srcdir), so it can be used
+# in Makefiles.
+my $am_config_aux_dir = '';
+
+# Directory to search for AC_LIBSOURCE files, as set by AC_CONFIG_LIBOBJ_DIR
+# in configure.ac.
+my $config_libobj_dir = '';
+
+# Whether AM_GNU_GETTEXT has been seen in configure.ac.
+my $seen_gettext = 0;
+# Whether AM_GNU_GETTEXT([external]) is used.
+my $seen_gettext_external = 0;
+# Where AM_GNU_GETTEXT appears.
+my $ac_gettext_location;
+# Whether AM_GNU_GETTEXT_INTL_SUBDIR has been seen.
+my $seen_gettext_intl = 0;
+
+# The arguments of the AM_EXTRA_RECURSIVE_TARGETS call (if any).
+my @extra_recursive_targets = ();
+
+# Lists of tags supported by Libtool.
+my %libtool_tags = ();
+# 1 if Libtool uses LT_SUPPORTED_TAG.  If it does, then it also
+# uses AC_REQUIRE_AUX_FILE.
+my $libtool_new_api = 0;
+
+# Most important AC_CANONICAL_* macro seen so far.
+my $seen_canonical = 0;
+
+# Where AM_MAINTAINER_MODE appears.
+my $seen_maint_mode;
+
+# Actual version we've seen.
+my $package_version = '';
+
+# Where version is defined.
+my $package_version_location;
+
+# TRUE if we've seen AM_PROG_AR
+my $seen_ar = 0;
+
+# Location of AC_REQUIRE_AUX_FILE calls, indexed by their argument.
+my %required_aux_file = ();
+
+# Where AM_INIT_AUTOMAKE is called.
+my $seen_init_automake = 0;
+
+# TRUE if we've seen AM_AUTOMAKE_VERSION.
+my $seen_automake_version = 0;
+
+# Hash table of discovered configure substitutions.  Keys are names,
+# values are 'FILE:LINE' strings which are used by error message
+# generation.
+my %configure_vars = ();
+
+# Ignored configure substitutions (i.e., variables not to be output in
+# Makefile.in)
+my %ignored_configure_vars = ();
+
+# Files included by $configure_ac.
+my @configure_deps = ();
+
+# Greatest timestamp of configure's dependencies.
+my $configure_deps_greatest_timestamp = 0;
+
+# Hash table of AM_CONDITIONAL variables seen in configure.
+my %configure_cond = ();
+
+# This maps extensions onto language names.
+my %extension_map = ();
+
+# List of the DIST_COMMON files we discovered while reading
+# configure.ac.
+my $configure_dist_common = '';
+
+# This maps languages names onto objects.
+my %languages = ();
+# Maps each linker variable onto a language object.
+my %link_languages = ();
+
+# Maps extensions to needed source flags.
+my %sourceflags = ();
+
+# List of targets we must always output.
+# FIXME: Complete, and remove falsely required targets.
+my %required_targets =
+  (
+   'all'          => 1,
+   'dvi'	  => 1,
+   'pdf'	  => 1,
+   'ps'		  => 1,
+   'info'	  => 1,
+   'install-info' => 1,
+   'install'      => 1,
+   'install-data' => 1,
+   'install-exec' => 1,
+   'uninstall'    => 1,
+
+   # FIXME: Not required, temporary hacks.
+   # Well, actually they are sort of required: the -recursive
+   # targets will run them anyway...
+   'html-am'         => 1,
+   'dvi-am'          => 1,
+   'pdf-am'          => 1,
+   'ps-am'           => 1,
+   'info-am'         => 1,
+   'install-data-am' => 1,
+   'install-exec-am' => 1,
+   'install-html-am' => 1,
+   'install-dvi-am'  => 1,
+   'install-pdf-am'  => 1,
+   'install-ps-am'   => 1,
+   'install-info-am' => 1,
+   'installcheck-am' => 1,
+   'uninstall-am'    => 1,
+   'tags-am'         => 1,
+   'ctags-am'        => 1,
+   'cscopelist-am'   => 1,
+   'install-man'     => 1,
+  );
+
+# Queue to push require_conf_file requirements to.
+my $required_conf_file_queue;
+
+# The name of the Makefile currently being processed.
+my $am_file = 'BUG';
+
+################################################################
+
+## ------------------------------------------ ##
+## Variables reset by &initialize_per_input.  ##
+## ------------------------------------------ ##
+
+# Relative dir of the output makefile.
+my $relative_dir;
+
+# Greatest timestamp of the output's dependencies (excluding
+# configure's dependencies).
+my $output_deps_greatest_timestamp;
+
+# These variables are used when generating each Makefile.in.
+# They hold the Makefile.in until it is ready to be printed.
+my $output_vars;
+my $output_all;
+my $output_header;
+my $output_rules;
+my $output_trailer;
+
+# This is the conditional stack, updated on if/else/endif, and
+# used to build Condition objects.
+my @cond_stack;
+
+# This holds the set of included files.
+my @include_stack;
+
+# List of dependencies for the obvious targets.
+my @all;
+my @check;
+my @check_tests;
+
+# Keys in this hash table are files to delete.  The associated
+# value tells when this should happen (MOSTLY_CLEAN, DIST_CLEAN, etc.)
+my %clean_files;
+
+# Keys in this hash table are object files or other files in
+# subdirectories which need to be removed.  This only holds files
+# which are created by compilations.  The value in the hash indicates
+# when the file should be removed.
+my %compile_clean_files;
+
+# Keys in this hash table are directories where we expect to build a
+# libtool object.  We use this information to decide what directories
+# to delete.
+my %libtool_clean_directories;
+
+# Value of $(SOURCES), used by tags.am.
+my @sources;
+# Sources which go in the distribution.
+my @dist_sources;
+
+# This hash maps object file names onto their corresponding source
+# file names.  This is used to ensure that each object is created
+# by a single source file.
+my %object_map;
+
+# This hash maps object file names onto an integer value representing
+# whether this object has been built via ordinary compilation or
+# libtool compilation (the COMPILE_* constants).
+my %object_compilation_map;
+
+
+# This keeps track of the directories for which we've already
+# created dirstamp code.  Keys are directories, values are stamp files.
+# Several keys can share the same stamp files if they are equivalent
+# (as are './/foo' and 'foo').
+my %directory_map;
+
+# All .P files.
+my %dep_files;
+
+# This is a list of all targets to run during "make dist".
+my @dist_targets;
+
+# Keep track of all programs declared in this Makefile, without
+# $(EXEEXT).  @substitutions@ are not listed.
+my %known_programs;
+my %known_libraries;
+
+# This keeps track of which extensions we've seen (that we care
+# about).
+my %extension_seen;
+
+# This is random scratch space for the language finish functions.
+# Don't randomly overwrite it; examine other uses of keys first.
+my %language_scratch;
+
+# We keep track of which objects need special (per-executable)
+# handling on a per-language basis.
+my %lang_specific_files;
+
+# This is set when 'handle_dist' has finished.  Once this happens,
+# we should no longer push on dist_common.
+my $handle_dist_run;
+
+# Used to store a set of linkers needed to generate the sources currently
+# under consideration.
+my %linkers_used;
+
+# True if we need 'LINK' defined.  This is a hack.
+my $need_link;
+
+# Does the generated Makefile have to build some compiled object
+# (for binary programs, or plain or libtool libraries)?
+my $must_handle_compiled_objects;
+
+# Record each file processed by make_paragraphs.
+my %transformed_files;
+
+################################################################
+
+## ---------------------------------------------- ##
+## Variables not reset by &initialize_per_input.  ##
+## ---------------------------------------------- ##
+
+# Cache each file processed by make_paragraphs.
+# (This is different from %transformed_files because
+# %transformed_files is reset for each file while %am_file_cache
+# is global to the run.)
+my %am_file_cache;
+
+################################################################
+
+# var_SUFFIXES_trigger ($TYPE, $VALUE)
+# ------------------------------------
+# This is called by Automake::Variable::define() when SUFFIXES
+# is defined ($TYPE eq '') or appended ($TYPE eq '+').
+# The work here needs to be performed as a side-effect of the
+# macro_define() call because SUFFIXES definitions impact
+# $KNOWN_EXTENSIONS_PATTERN, which is used when parsing
+# the input am file.
+sub var_SUFFIXES_trigger
+{
+    my ($type, $value) = @_;
+    accept_extensions (split (' ', $value));
+}
+Automake::Variable::hook ('SUFFIXES', \&var_SUFFIXES_trigger);
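+
+# For example, a Makefile.am line "SUFFIXES = .idl .x" (extensions chosen
+# for illustration) triggers accept_extensions ('.idl', '.x') so that
+# later rules mentioning those suffixes are recognized while parsing.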
+
+################################################################
+
+
+# initialize_per_input ()
+# -----------------------
+# (Re)-Initialize per-Makefile.am variables.
+sub initialize_per_input ()
+{
+    reset_local_duplicates ();
+
+    $relative_dir = undef;
+
+    $output_deps_greatest_timestamp = 0;
+
+    $output_vars = '';
+    $output_all = '';
+    $output_header = '';
+    $output_rules = '';
+    $output_trailer = '';
+
+    Automake::Options::reset;
+    Automake::Variable::reset;
+    Automake::Rule::reset;
+
+    @cond_stack = ();
+
+    @include_stack = ();
+
+    @all = ();
+    @check = ();
+    @check_tests = ();
+
+    %clean_files = ();
+    %compile_clean_files = ();
+
+    # We always include '.'.  This isn't strictly correct.
+    %libtool_clean_directories = ('.' => 1);
+
+    @sources = ();
+    @dist_sources = ();
+
+    %object_map = ();
+    %object_compilation_map = ();
+
+    %directory_map = ();
+
+    %dep_files = ();
+
+    @dist_targets = ();
+
+    %known_programs = ();
+    %known_libraries= ();
+
+    %extension_seen = ();
+
+    %language_scratch = ();
+
+    %lang_specific_files = ();
+
+    $handle_dist_run = 0;
+
+    $need_link = 0;
+
+    $must_handle_compiled_objects = 0;
+
+    %transformed_files = ();
+}
+
+
+################################################################
+
+# Initialize our list of languages that are internally supported.
+
+my @cpplike_flags =
+  qw{
+    $(DEFS)
+    $(DEFAULT_INCLUDES)
+    $(INCLUDES)
+    $(AM_CPPFLAGS)
+    $(CPPFLAGS)
+  };
+
+# C.
+register_language ('name' => 'c',
+		   'Name' => 'C',
+		   'config_vars' => ['CC'],
+		   'autodep' => '',
+		   'flags' => ['CFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'CC',
+		   'compiler' => 'COMPILE',
+		   'compile' => "\$(CC) @cpplike_flags \$(AM_CFLAGS) \$(CFLAGS)",
+		   'lder' => 'CCLD',
+		   'ld' => '$(CC)',
+		   'linker' => 'LINK',
+		   'link' => '$(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CC',
+		   'extensions' => ['.c']);
+
+# C++.
+register_language ('name' => 'cxx',
+		   'Name' => 'C++',
+		   'config_vars' => ['CXX'],
+		   'linker' => 'CXXLINK',
+		   'link' => '$(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'CXX',
+		   'flags' => ['CXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CXX) @cpplike_flags \$(AM_CXXFLAGS) \$(CXXFLAGS)",
+		   'ccer' => 'CXX',
+		   'compiler' => 'CXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'CXX',
+		   'lder' => 'CXXLD',
+		   'ld' => '$(CXX)',
+		   'pure' => 1,
+		   'extensions' => ['.c++', '.cc', '.cpp', '.cxx', '.C']);
+
+# Objective C.
+register_language ('name' => 'objc',
+		   'Name' => 'Objective C',
+		   'config_vars' => ['OBJC'],
+		   'linker' => 'OBJCLINK',
+		   'link' => '$(OBJCLD) $(AM_OBJCFLAGS) $(OBJCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJC',
+		   'flags' => ['OBJCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJC) @cpplike_flags \$(AM_OBJCFLAGS) \$(OBJCFLAGS)",
+		   'ccer' => 'OBJC',
+		   'compiler' => 'OBJCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCLD',
+		   'ld' => '$(OBJC)',
+		   'pure' => 1,
+		   'extensions' => ['.m']);
+
+# Objective C++.
+register_language ('name' => 'objcxx',
+		   'Name' => 'Objective C++',
+		   'config_vars' => ['OBJCXX'],
+		   'linker' => 'OBJCXXLINK',
+		   'link' => '$(OBJCXXLD) $(AM_OBJCXXFLAGS) $(OBJCXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'OBJCXX',
+		   'flags' => ['OBJCXXFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(OBJCXX) @cpplike_flags \$(AM_OBJCXXFLAGS) \$(OBJCXXFLAGS)",
+		   'ccer' => 'OBJCXX',
+		   'compiler' => 'OBJCXXCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'OBJCXXLD',
+		   'ld' => '$(OBJCXX)',
+		   'pure' => 1,
+		   'extensions' => ['.mm']);
+
+# Unified Parallel C.
+register_language ('name' => 'upc',
+		   'Name' => 'Unified Parallel C',
+		   'config_vars' => ['UPC'],
+		   'linker' => 'UPCLINK',
+		   'link' => '$(UPCLD) $(AM_UPCFLAGS) $(UPCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'UPC',
+		   'flags' => ['UPCFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(UPC) @cpplike_flags \$(AM_UPCFLAGS) \$(UPCFLAGS)",
+		   'ccer' => 'UPC',
+		   'compiler' => 'UPCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'lder' => 'UPCLD',
+		   'ld' => '$(UPC)',
+		   'pure' => 1,
+		   'extensions' => ['.upc']);
+
+# Headers.
+register_language ('name' => 'header',
+		   'Name' => 'Header',
+		   'extensions' => ['.h', '.H', '.hxx', '.h++', '.hh',
+				    '.hpp', '.inc'],
+		   # No output.
+		   'output_extensions' => sub { return () },
+		   # Nothing to do.
+		   '_finish' => sub { });
+
+# Vala
+register_language ('name' => 'vala',
+		   'Name' => 'Vala',
+		   'config_vars' => ['VALAC'],
+		   'flags' => [],
+		   'compile' => '$(VALAC) $(AM_VALAFLAGS) $(VALAFLAGS)',
+		   'ccer' => 'VALAC',
+		   'compiler' => 'VALACOMPILE',
+		   'extensions' => ['.vala'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ s/vala$/c/;
+						return ($ext,) },
+		   'rule_file' => 'vala',
+		   '_finish' => \&lang_vala_finish,
+		   '_target_hook' => \&lang_vala_target_hook,
+		   'nodist_specific' => 1);
+
+# Yacc (C & C++).
+register_language ('name' => 'yacc',
+		   'Name' => 'Yacc',
+		   'config_vars' => ['YACC'],
+		   'flags' => ['YFLAGS'],
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'extensions' => ['.y'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   'rule_file' => 'yacc',
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'yaccxx',
+		   'Name' => 'Yacc (C++)',
+		   'config_vars' => ['YACC'],
+		   'rule_file' => 'yacc',
+		   'flags' => ['YFLAGS'],
+		   'ccer' => 'YACC',
+		   'compiler' => 'YACCCOMPILE',
+		   'compile' => '$(YACC) $(AM_YFLAGS) $(YFLAGS)',
+		   'extensions' => ['.y++', '.yy', '.yxx', '.ypp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/y/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_yacc_finish,
+		   '_target_hook' => \&lang_yacc_target_hook,
+		   'nodist_specific' => 1);
+
+# Lex (C & C++).
+register_language ('name' => 'lex',
+		   'Name' => 'Lex',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+register_language ('name' => 'lexxx',
+		   'Name' => 'Lex (C++)',
+		   'config_vars' => ['LEX'],
+		   'rule_file' => 'lex',
+		   'flags' => ['LFLAGS'],
+		   'compile' => '$(LEX) $(AM_LFLAGS) $(LFLAGS)',
+		   'ccer' => 'LEX',
+		   'compiler' => 'LEXCOMPILE',
+		   'extensions' => ['.l++', '.ll', '.lxx', '.lpp'],
+		   'output_extensions' => sub { (my $ext = $_[0]) =~ tr/l/c/;
+						return ($ext,) },
+		   '_finish' => \&lang_lex_finish,
+		   '_target_hook' => \&lang_lex_target_hook,
+		   'nodist_specific' => 1);
+
+# Assembler.
+register_language ('name' => 'asm',
+		   'Name' => 'Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'flags' => ['CCASFLAGS'],
+		   # Users can set AM_CCASFLAGS to include DEFS, INCLUDES,
+		   # or anything else required.  They can also set CCAS.
+		   # Or simply use Preprocessed Assembler.
+		   'compile' => '$(CCAS) $(AM_CCASFLAGS) $(CCASFLAGS)',
+		   'ccer' => 'CCAS',
+		   'compiler' => 'CCASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.s']);
+
+# Preprocessed Assembler.
+register_language ('name' => 'cppasm',
+		   'Name' => 'Preprocessed Assembler',
+		   'config_vars' => ['CCAS', 'CCASFLAGS'],
+
+		   'autodep' => 'CCAS',
+		   'flags' => ['CCASFLAGS', 'CPPFLAGS'],
+		   'compile' => "\$(CCAS) @cpplike_flags \$(AM_CCASFLAGS) \$(CCASFLAGS)",
+		   'ccer' => 'CPPAS',
+		   'compiler' => 'CPPASCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'extensions' => ['.S', '.sx']);
+
+# Fortran 77
+register_language ('name' => 'f77',
+		   'Name' => 'Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FFLAGS'],
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'F77COMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'pure' => 1,
+		   'extensions' => ['.f', '.for']);
+
+# Fortran
+register_language ('name' => 'fc',
+		   'Name' => 'Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'flags' => ['FCFLAGS'],
+		   'compile' => '$(FC) $(AM_FCFLAGS) $(FCFLAGS)',
+		   'ccer' => 'FC',
+		   'compiler' => 'FCCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'pure' => 1,
+		   'extensions' => ['.f90', '.f95', '.f03', '.f08']);
+
+# Preprocessed Fortran
+register_language ('name' => 'ppfc',
+		   'Name' => 'Preprocessed Fortran',
+		   'config_vars' => ['FC'],
+		   'linker' => 'FCLINK',
+		   'link' => '$(FCLD) $(AM_FCFLAGS) $(FCFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'FCLD',
+		   'ld' => '$(FC)',
+		   'flags' => ['FCFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPFC',
+		   'compiler' => 'PPFCCOMPILE',
+		   'compile' => "\$(FC) @cpplike_flags \$(AM_FCFLAGS) \$(FCFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'FC',
+		   'pure' => 1,
+		   'extensions' => ['.F90','.F95', '.F03', '.F08']);
+
+# Preprocessed Fortran 77
+#
+# The current support for preprocessing Fortran 77 just involves
+# passing "$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS)
+# $(CPPFLAGS)" as additional flags to the Fortran 77 compiler, since
+# this is how GNU Make does it; see the "GNU Make Manual, Edition 0.51
+# for 'make' Version 3.76 Beta" (specifically, from info file
+# '(make)Catalogue of Rules').
+#
+# A better approach would be to write an Autoconf test
+# (i.e. AC_PROG_FPP) for a Fortran 77 preprocessor, because not all
+# Fortran 77 compilers know how to do preprocessing.  The Autoconf
+# macro AC_PROG_FPP should test the Fortran 77 compiler first for
+# preprocessing capabilities, and then fall back on cpp (if cpp were
+# available).
+register_language ('name' => 'ppf77',
+		   'Name' => 'Preprocessed Fortran 77',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['FFLAGS', 'CPPFLAGS'],
+		   'ccer' => 'PPF77',
+		   'compiler' => 'PPF77COMPILE',
+		   'compile' => "\$(F77) @cpplike_flags \$(AM_FFLAGS) \$(FFLAGS)",
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.F']);
+
+# Ratfor.
+register_language ('name' => 'ratfor',
+		   'Name' => 'Ratfor',
+		   'config_vars' => ['F77'],
+		   'linker' => 'F77LINK',
+		   'link' => '$(F77LD) $(AM_FFLAGS) $(FFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'lder' => 'F77LD',
+		   'ld' => '$(F77)',
+		   'flags' => ['RFLAGS', 'FFLAGS'],
+		   # FIXME also FFLAGS.
+		   'compile' => '$(F77) $(AM_FFLAGS) $(FFLAGS) $(AM_RFLAGS) $(RFLAGS)',
+		   'ccer' => 'F77',
+		   'compiler' => 'RCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'F77',
+		   'pure' => 1,
+		   'extensions' => ['.r']);
+
+# Java via gcj.
+register_language ('name' => 'java',
+		   'Name' => 'Java',
+		   'config_vars' => ['GCJ'],
+		   'linker' => 'GCJLINK',
+		   'link' => '$(GCJLD) $(AM_GCJFLAGS) $(GCJFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@',
+		   'autodep' => 'GCJ',
+		   'flags' => ['GCJFLAGS'],
+		   'compile' => '$(GCJ) $(AM_GCJFLAGS) $(GCJFLAGS)',
+		   'ccer' => 'GCJ',
+		   'compiler' => 'GCJCOMPILE',
+		   'compile_flag' => '-c',
+		   'output_flag' => '-o',
+		   'libtool_tag' => 'GCJ',
+		   'lder' => 'GCJLD',
+		   'ld' => '$(GCJ)',
+		   'pure' => 1,
+		   'extensions' => ['.java', '.class', '.zip', '.jar']);
+
+################################################################
+
+# Error reporting functions.
+
+# err_am ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about the current Makefile.am.
+sub err_am
+{
+  msg_am ('error', @_);
+}
+
+# err_ac ($MESSAGE, [%OPTIONS])
+# -----------------------------
+# Uncategorized errors about configure.ac.
+sub err_ac
+{
+  msg_ac ('error', @_);
+}
+
+# msg_am ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about the current Makefile.am.
+sub msg_am
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, "${am_file}.am", $msg, %opts;
+}
+
+# msg_ac ($CHANNEL, $MESSAGE, [%OPTIONS])
+# ---------------------------------------
+# Messages about configure.ac.
+sub msg_ac
+{
+  my ($channel, $msg, %opts) = @_;
+  msg $channel, $configure_ac, $msg, %opts;
+}
+
+################################################################
+
+# subst ($TEXT)
+# -------------
+# Return a configure-style substitution using the indicated text.
+# We do this to avoid having the substitutions directly in automake.in;
+# when we do that they are sometimes removed and this causes confusion
+# and bugs.
+sub subst
+{
+    my ($text) = @_;
+    return '@' . $text . '@';
+}
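+
+# For example (purely illustrative), the calls below return the literal
+# string with '@' delimiters; it is config.status, not automake, that
+# later performs the actual substitution:
+#
+#   subst ('am__include')   # => '@am__include@'
+#   subst ('AMDEP_TRUE')    # => '@AMDEP_TRUE@'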
+
+################################################################
+
+
+# $BACKPATH
+# backname ($RELDIR)
+# -------------------
+# If I "cd $RELDIR", then to come back, I should "cd $BACKPATH".
+# For instance 'src/foo' => '../..'.
+# Works with paths that are not strictly increasing, e.g., 'src/../lib' => '..'.
+sub backname
+{
+    my ($file) = @_;
+    my @res;
+    foreach (split (/\//, $file))
+    {
+	next if $_ eq '.' || $_ eq '';
+	if ($_ eq '..')
+	{
+	    pop @res
+	      or prog_error ("trying to reverse path '$file' pointing outside tree");
+	}
+	else
+	{
+	    push (@res, '..');
+	}
+    }
+    return join ('/', @res) || '.';
+}
+
+################################################################
+
+# Silent rules handling functions.
+
+# verbose_var (NAME)
+# ------------------
+# The public variable stem used to implement silent rules.
+sub verbose_var
+{
+    my ($name) = @_;
+    return 'AM_V_' . $name;
+}
+
+# verbose_private_var (NAME)
+# --------------------------
+# The naming policy for the private variables for silent rules.
+sub verbose_private_var
+{
+    my ($name) = @_;
+    return 'am__v_' . $name;
+}
+
+# define_verbose_var (NAME, VAL-IF-SILENT, [VAL-IF-VERBOSE])
+# ----------------------------------------------------------
+# For silent rules, set up the variable for NAME and its dispatcher, expanding to
+# VAL-IF-SILENT if silent, to VAL-IF-VERBOSE (defaulting to
+# empty) if not.
+sub define_verbose_var
+{
+    my ($name, $silent_val, $verbose_val) = @_;
+    $verbose_val = '' unless defined $verbose_val;
+    my $var = verbose_var ($name);
+    my $pvar = verbose_private_var ($name);
+    my $silent_var = $pvar . '_0';
+    my $verbose_var = $pvar . '_1';
+    # For typical 'make's, 'configure' replaces AM_V (inside @@) with $(V)
+    # and AM_DEFAULT_V (inside @@) with $(AM_DEFAULT_VERBOSITY).
+    # For strict POSIX 2008 'make's, it replaces them with 0 or 1 instead.
+    # See AM_SILENT_RULES in m4/silent.m4.
+    define_variable ($var, '$(' . $pvar . '_@'.'AM_V'.'@)', INTERNAL);
+    define_variable ($pvar . '_', '$(' . $pvar . '_@'.'AM_DEFAULT_V'.'@)',
+                     INTERNAL);
+    Automake::Variable::define ($silent_var, VAR_AUTOMAKE, '', TRUE,
+                                $silent_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($silent_var, TRUE));
+    Automake::Variable::define ($verbose_var, VAR_AUTOMAKE, '', TRUE,
+                                $verbose_val, '', INTERNAL, VAR_ASIS)
+      if (! vardef ($verbose_var, TRUE));
+}
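+
+# Illustrative sketch of the variables this ends up defining: for a call
+# such as define_verbose_var ('GEN', '@echo "  GEN     " $@;'), the
+# generated Makefile.in contains, roughly,
+#
+#   AM_V_GEN    = $(am__v_GEN_@AM_V@)
+#   am__v_GEN_  = $(am__v_GEN_@AM_DEFAULT_V@)
+#   am__v_GEN_0 = @echo "  GEN     " $@;
+#   am__v_GEN_1 =
+#
+# with '@AM_V@' and '@AM_DEFAULT_V@' later substituted by configure as
+# described in the comment above.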
+
+# verbose_flag (NAME)
+# -------------------
+# Contents of '%VERBOSE%' variable to expand before rule command.
+sub verbose_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . ')';
+}
+
+sub verbose_nodep_flag
+{
+    my ($name) = @_;
+    return '$(' . verbose_var ($name) . subst ('am__nodep') . ')';
+}
+
+# silent_flag
+# -----------
+# Contents of %SILENT%: variable to expand to '@' when silent.
+sub silent_flag ()
+{
+    return verbose_flag ('at');
+}
+
+# define_verbose_tagvar (NAME)
+# ----------------------------
+# Engage the needed silent rules machinery for tag NAME.
+sub define_verbose_tagvar
+{
+    my ($name) = @_;
+    define_verbose_var ($name, '@echo "  '. $name . ' ' x (8 - length ($name)) . '" $@;');
+}
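+
+# For instance (illustrative), define_verbose_tagvar ('CC') makes silent
+# builds print short progress lines such as "  CC      foo.o" instead of
+# echoing the full compilation command.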
+
+# Engage the needed silent rules machinery for assorted texinfo commands.
+sub define_verbose_texinfo ()
+{
+  my @tagvars = ('DVIPS', 'MAKEINFO', 'INFOHTML', 'TEXI2DVI', 'TEXI2PDF');
+  foreach my $tag (@tagvars)
+    {
+      define_verbose_tagvar($tag);
+    }
+  define_verbose_var('texinfo', '-q');
+  define_verbose_var('texidevnull', '> /dev/null');
+}
+
+# Engage the needed silent rules machinery for 'libtool --silent'.
+sub define_verbose_libtool ()
+{
+    define_verbose_var ('lt', '--silent');
+    return verbose_flag ('lt');
+}
+
+sub handle_silent ()
+{
+    # Define "$(AM_V_P)", expanding to a shell conditional that can be
+    # used in make recipes to determine whether we are being run in
+    # silent mode or not.  The choice of the name derives from the LISP
+    # convention of appending the letter 'P' to denote a predicate (see
+    # also "the '-P' convention" in the Jargon File); we do so for lack
+    # of a better convention.
+    define_verbose_var ('P', 'false', ':');
+    # *Always* provide the user with '$(AM_V_GEN)', unconditionally.
+    define_verbose_tagvar ('GEN');
+    define_verbose_var ('at', '@');
+}
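+
+# Illustrative use of '$(AM_V_P)' (an assumed example recipe, not output
+# generated here): since AM_V_P expands to ':' when verbose and 'false'
+# when silent, a Makefile.am rule can branch on verbosity with
+#
+#   mytarget:
+#           $(AM_V_P) && echo "verbose details..." || echo "  GEN      mytarget"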
+
+
+################################################################
+
+
+# Handle AUTOMAKE_OPTIONS variable.  Return 0 on error, 1 otherwise.
+sub handle_options ()
+{
+  my $var = var ('AUTOMAKE_OPTIONS');
+  if ($var)
+    {
+      if ($var->has_conditional_contents)
+	{
+	  msg_var ('unsupported', $var,
+		   "'AUTOMAKE_OPTIONS' cannot have conditional contents");
+	}
+      my @options = map { { option => $_->[1], where => $_->[0] } }
+			$var->value_as_list_recursive (cond_filter => TRUE,
+						       location => 1);
+      return 0 unless process_option_list (@options);
+    }
+
+  if ($strictness == GNITS)
+    {
+      set_option ('readme-alpha', INTERNAL);
+      set_option ('std-options', INTERNAL);
+      set_option ('check-news', INTERNAL);
+    }
+
+  return 1;
+}
+
+# shadow_unconditionally ($varname, $where)
+# -----------------------------------------
+# Return a $(variable) that contains all possible values
+# $varname can take.
+# If the VAR wasn't defined conditionally, return $(VAR).
+# Otherwise we create an am__VAR_DIST variable which contains
+# all possible values, and return $(am__VAR_DIST).
+sub shadow_unconditionally
+{
+  my ($varname, $where) = @_;
+  my $var = var $varname;
+  if ($var->has_conditional_contents)
+    {
+      $varname = "am__${varname}_DIST";
+      my @files = uniq ($var->value_as_list_recursive);
+      define_pretty_variable ($varname, TRUE, $where, @files);
+    }
+  return "\$($varname)"
+}
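+
+# For example (hypothetical variable name): if Makefile.am contains
+#
+#   if WANT_FOO
+#   helpers = foo.sh
+#   else
+#   helpers = bar.sh
+#   endif
+#
+# then shadow_unconditionally ('helpers', $where) defines
+# am__helpers_DIST = foo.sh bar.sh and returns '$(am__helpers_DIST)',
+# whereas an unconditionally defined variable yields plain '$(helpers)'.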
+
+# check_user_variables (@LIST)
+# ----------------------------
+# Make sure each variable VAR in @LIST does not exist, suggest using AM_VAR
+# otherwise.
+sub check_user_variables
+{
+  my @dont_override = @_;
+  foreach my $flag (@dont_override)
+    {
+      my $var = var $flag;
+      if ($var)
+	{
+	  for my $cond ($var->conditions->conds)
+	    {
+	      if ($var->rdef ($cond)->owner == VAR_MAKEFILE)
+		{
+		  msg_cond_var ('gnu', $cond, $flag,
+				"'$flag' is a user variable, "
+				. "you should not override it;\n"
+				. "use 'AM_$flag' instead");
+		}
+	    }
+	}
+    }
+}
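+
+# For instance, a Makefile.am that sets 'CFLAGS = -O2' directly would be
+# flagged by check_user_variables ('CFLAGS'): CFLAGS is a user variable,
+# and 'AM_CFLAGS' is suggested instead.  (The flag name here is chosen
+# only for illustration.)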
+
+# Call finish function for each language that was used.
+sub handle_languages ()
+{
+    if (! option 'no-dependencies')
+    {
+	# Include auto-dep code.  Don't include it if DEP_FILES would
+	# be empty.
+	if (keys %extension_seen && keys %dep_files)
+	{
+	    # Set location of depcomp.
+	    define_variable ('depcomp',
+			     "\$(SHELL) $am_config_aux_dir/depcomp",
+			     INTERNAL);
+	    define_variable ('am__depfiles_maybe', 'depfiles', INTERNAL);
+
+	    require_conf_file ("$am_file.am", FOREIGN, 'depcomp');
+
+	    my @deplist = sort keys %dep_files;
+	    # Generate each 'include' individually.  Irix 6 make will
+	    # not properly include several files resulting from a
+	    # variable expansion; generating many separate includes
+	    # seems safest.
+	    $output_rules .= "\n";
+	    foreach my $iter (@deplist)
+	    {
+		$output_rules .= (subst ('AMDEP_TRUE')
+				  . subst ('am__include')
+				  . ' '
+				  . subst ('am__quote')
+				  . $iter
+				  . subst ('am__quote')
+				  . "\n");
+	    }
+
+	    # Compute the set of directories to remove in distclean-depend.
+	    my @depdirs = uniq (map { dirname ($_) } @deplist);
+	    $output_rules .= file_contents ('depend',
+					    new Automake::Location,
+					    DEPDIRS => "@depdirs");
+	}
+    }
+    else
+    {
+	define_variable ('depcomp', '', INTERNAL);
+	define_variable ('am__depfiles_maybe', '', INTERNAL);
+    }
+
+    my %done;
+
+    # Is the C linker needed?
+    my $needs_c = 0;
+    foreach my $ext (sort keys %extension_seen)
+    {
+	next unless $extension_map{$ext};
+
+	my $lang = $languages{$extension_map{$ext}};
+
+	my $rule_file = $lang->rule_file || 'depend2';
+
+	# Get information on $LANG.
+	my $pfx = $lang->autodep;
+	my $fpfx = ($pfx eq '') ? 'CC' : $pfx;
+
+	my ($AMDEP, $FASTDEP) =
+	  (option 'no-dependencies' || $lang->autodep eq 'no')
+	  ? ('FALSE', 'FALSE') : ('AMDEP', "am__fastdep$fpfx");
+
+	my $verbose = verbose_flag ($lang->ccer || 'GEN');
+	my $verbose_nodep = ($AMDEP eq 'FALSE')
+	  ? $verbose : verbose_nodep_flag ($lang->ccer || 'GEN');
+	my $silent = silent_flag ();
+
+	my %transform = ('EXT'     => $ext,
+			 'PFX'     => $pfx,
+			 'FPFX'    => $fpfx,
+			 'AMDEP'   => $AMDEP,
+			 'FASTDEP' => $FASTDEP,
+			 '-c'      => $lang->compile_flag || '',
+			 # These are not used, but they need to be defined
+			 # so transform() does not complain.
+			 SUBDIROBJ     => 0,
+			 'DERIVED-EXT' => 'BUG',
+			 DIST_SOURCE   => 1,
+			 VERBOSE   => $verbose,
+			 'VERBOSE-NODEP' => $verbose_nodep,
+			 SILENT    => $silent,
+			);
+
+	# Generate the appropriate rules for this extension.
+	if (((! option 'no-dependencies') && $lang->autodep ne 'no')
+	    || defined $lang->compile)
+	{
+	    # Compute a possible derived extension.
+	    # This is not used by depend2.am.
+	    my $der_ext = ($lang->output_extensions->($ext))[0];
+
+	    # When we output an inference rule like '.c.o:' we
+	    # have two cases to consider: either subdir-objects
+	    # is used, or it is not.
+	    #
+	    # In the latter case the rule is used to build objects
+	    # in the current directory, and dependencies always
+	    # go into './$(DEPDIR)/'.  We can hard-code this value.
+	    #
+	    # In the former case the rule can be used to build
+	    # objects in sub-directories too.  Dependencies should
+	    # go into the appropriate sub-directories, e.g.,
+	    # 'sub/$(DEPDIR)/'.  The value of this directory
+	    # needs to be computed on-the-fly.
+	    #
+	    # DEPBASE holds the name of this directory, plus the
+	    # basename part of the object file (extensions Po, TPo,
+	    # Plo, TPlo will be added later as appropriate).  It is
+	    # either hardcoded, or a shell variable ('$depbase') that
+	    # will be computed by the rule.
+	    my $depbase =
+	      option ('subdir-objects') ? '$$depbase' : '$(DEPDIR)/$*';
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 1,
+
+			     'DERIVED-EXT' => $der_ext,
+
+			     DEPBASE   => $depbase,
+			     BASE      => '$*',
+			     SOURCE    => '$<',
+			     SOURCEFLAG => $sourceflags{$ext} || '',
+			     OBJ       => '$@',
+			     OBJOBJ    => '$@',
+			     LTOBJ     => '$@',
+
+			     COMPILE   => '$(' . $lang->compiler . ')',
+			     LTCOMPILE => '$(LT' . $lang->compiler . ')',
+			     -o        => $lang->output_flag,
+			     SUBDIROBJ => !! option 'subdir-objects');
+	}
+
+	# Now include code for each specially handled object with this
+	# language.
+	my %seen_files = ();
+	foreach my $file (@{$lang_specific_files{$lang->name}})
+	{
+	    my ($derived, $source, $obj, $myext, $srcext, %file_transform) = @$file;
+
+	    # We might see a given object twice, for instance if it is
+	    # used under different conditions.
+	    next if defined $seen_files{$obj};
+	    $seen_files{$obj} = 1;
+
+	    prog_error ("found " . $lang->name .
+			" in handle_languages, but compiler not defined")
+	      unless defined $lang->compile;
+
+	    my $obj_compile = $lang->compile;
+
+	    # Rewrite each occurrence of 'AM_$flag' in the compile
+	    # rule into '${derived}_$flag' if it exists.
+	    for my $flag (@{$lang->flags})
+	      {
+		my $val = "${derived}_$flag";
+		$obj_compile =~ s/\(AM_$flag\)/\($val\)/
+		  if set_seen ($val);
+	      }
+
+	    my $libtool_tag = '';
+	    if ($lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag})
+	      {
+		$libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	      }
+
+	    my $ptltflags = "${derived}_LIBTOOLFLAGS";
+	    $ptltflags = 'AM_LIBTOOLFLAGS' unless set_seen $ptltflags;
+
+	    my $ltverbose = define_verbose_libtool ();
+	    my $obj_ltcompile =
+	      "\$(LIBTOOL) $ltverbose $libtool_tag\$($ptltflags) \$(LIBTOOLFLAGS) "
+	      . "--mode=compile $obj_compile";
+
+	    # We _need_ '-o' for per object rules.
+	    my $output_flag = $lang->output_flag || '-o';
+
+	    my $depbase = dirname ($obj);
+	    $depbase = ''
+		if $depbase eq '.';
+	    $depbase .= '/'
+		unless $depbase eq '';
+	    $depbase .= '$(DEPDIR)/' . basename ($obj);
+
+	    $output_rules .=
+	      file_contents ($rule_file,
+			     new Automake::Location,
+			     %transform,
+			     GENERIC   => 0,
+
+			     DEPBASE   => $depbase,
+			     BASE      => $obj,
+			     SOURCE    => $source,
+			     SOURCEFLAG => $sourceflags{$srcext} || '',
+			     # Use $myext and not '.o' here, in case
+			     # we are actually building a new source
+			     # file -- e.g. via yacc.
+			     OBJ       => "$obj$myext",
+			     OBJOBJ    => "$obj.obj",
+			     LTOBJ     => "$obj.lo",
+
+			     VERBOSE   => $verbose,
+			     'VERBOSE-NODEP'  => $verbose_nodep,
+			     SILENT    => $silent,
+			     COMPILE   => $obj_compile,
+			     LTCOMPILE => $obj_ltcompile,
+			     -o        => $output_flag,
+			     %file_transform);
+	}
+
+	# The rest of the loop is done once per language.
+	next if defined $done{$lang};
+	$done{$lang} = 1;
+
+	# Load the language dependent Makefile chunks.
+	my %lang = map { uc ($_) => 0 } keys %languages;
+	$lang{uc ($lang->name)} = 1;
+	$output_rules .= file_contents ('lang-compile',
+					new Automake::Location,
+					%transform, %lang);
+
+	# If the source to a program consists entirely of code from a
+	# 'pure' language, for instance C++ or Fortran 77, then we
+	# don't need the C compiler code.  However if we run into
+	# something unusual then we do generate the C code.  There are
+	# probably corner cases here that do not work properly.
+	# People linking Java code to Fortran code deserve pain.
+	$needs_c ||= ! $lang->pure;
+
+	define_compiler_variable ($lang)
+	  if ($lang->compile);
+
+	define_linker_variable ($lang)
+	  if ($lang->link);
+
+	require_variables ("$am_file.am", $lang->Name . " source seen",
+			   TRUE, @{$lang->config_vars});
+
+	# Call the finisher.
+	$lang->finish;
+
+	# Flags listed in '->flags' are user variables (per GNU Standards),
+	# they should not be overridden in the Makefile...
+	my @dont_override = @{$lang->flags};
+	# ... and so is LDFLAGS.
+	push @dont_override, 'LDFLAGS' if $lang->link;
+
+	check_user_variables @dont_override;
+    }
+
+    # If the project is entirely C++ or entirely Fortran 77 (i.e., 1
+    # suffix rule was learned), don't bother with the C stuff.  But if
+    # anything else creeps in, then use it.
+    my @languages_seen = map { $languages{$extension_map{$_}}->name }
+                             (keys %extension_seen);
+    @languages_seen = uniq (@languages_seen);
+    $needs_c = 1 if @languages_seen > 1;
+    if ($need_link || $needs_c)
+      {
+	define_compiler_variable ($languages{'c'})
+	  unless defined $done{$languages{'c'}};
+	define_linker_variable ($languages{'c'});
+      }
+}
+
+
+# append_exeext { PREDICATE } $MACRO
+# ----------------------------------
+# Append $(EXEEXT) to each filename in $F appearing in the Makefile
+# variable $MACRO if &PREDICATE($F) is true.  @substitutions@ are
+# ignored.
+#
+# This is typically used on all filenames of *_PROGRAMS, and filenames
+# of TESTS that are programs.
+sub append_exeext (&$)
+{
+  my ($pred, $macro) = @_;
+
+  transform_variable_recursively
+    ($macro, $macro, 'am__EXEEXT', 0, INTERNAL,
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       # Append $(EXEEXT) unless the user did it already, or it's a
+       # @substitution@.
+       $val .= '$(EXEEXT)'
+	 if $val !~ /(?:\$\(EXEEXT\)$|^[@]\w+[@]$)/ && &$pred ($val);
+       return $val;
+     });
+}
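+
+# A sketch of typical usage (predicate and variable name are
+# illustrative):
+#
+#   append_exeext { 1 } 'bin_PROGRAMS';
+#
+# appends $(EXEEXT) to every filename listed in bin_PROGRAMS, leaving
+# entries that already end in $(EXEEXT), or that are @substitutions@,
+# untouched.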
+
+
+# Check to make sure a source defined in LIBOBJS is not explicitly
+# mentioned.  This is a separate function (as opposed to being inlined
+# in handle_source_transform) because it isn't always appropriate to
+# do this check.
+sub check_libobjs_sources
+{
+  my ($one_file, $unxformed) = @_;
+
+  foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+		      'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+      my @files;
+      my $varname = $prefix . $one_file . '_SOURCES';
+      my $var = var ($varname);
+      if ($var)
+	{
+	  @files = $var->value_as_list_recursive;
+	}
+      elsif ($prefix eq '')
+	{
+	  @files = ($unxformed . '.c');
+	}
+      else
+	{
+	  next;
+	}
+
+      foreach my $file (@files)
+	{
+	  err_var ($prefix . $one_file . '_SOURCES',
+		   "automatically discovered file '$file' should not" .
+		   " be explicitly mentioned")
+	    if defined $libsources{$file};
+	}
+    }
+}
+
+
+# @OBJECTS
+# handle_single_transform ($VAR, $TOPPARENT, $DERIVED, $OBJ, $FILE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Does much of the actual work for handle_source_transform.
+# Arguments are:
+#   $VAR is the name of the variable that the source filenames come from
+#   $TOPPARENT is the name of the _SOURCES variable which is being processed
+#   $DERIVED is the name of resulting executable or library
+#   $OBJ is the object extension (e.g., '.lo')
+#   $FILE the source file to transform
+#   %TRANSFORM contains extra arguments to pass to file_contents
+#     when producing explicit rules
+# Result is a list of the names of objects
+# %linkers_used will be updated with any linkers needed
+sub handle_single_transform
+{
+    my ($var, $topparent, $derived, $obj, $_file, %transform) = @_;
+    my @files = ($_file);
+    my @result = ();
+
+    # Turn sources into objects.  We use a while loop like this
+    # because we might add to @files in the loop.
+    while (scalar @files > 0)
+    {
+	$_ = shift @files;
+
+	# Configure substitutions in _SOURCES variables are errors.
+	if (/^\@.*\@$/)
+	{
+	  my $parent_msg = '';
+	  $parent_msg = "\nand is referred to from '$topparent'"
+	    if $topparent ne $var->name;
+	  err_var ($var,
+		   "'" . $var->name . "' includes configure substitution '$_'"
+		   . $parent_msg . ";\nconfigure " .
+		   "substitutions are not allowed in _SOURCES variables");
+	  next;
+	}
+
+	# If the source file is in a subdirectory then the '.o' is put
+	# into the current directory, unless the subdir-objects option
+	# is in effect.
+
+	# Split file name into base and extension.
+	next if ! /^(?:(.*)\/)?([^\/]*)($KNOWN_EXTENSIONS_PATTERN)$/;
+	my $full = $_;
+	my $directory = $1 || '';
+	my $base = $2;
+	my $extension = $3;
+
+	# We must generate a rule for the object if it requires its own flags.
+	my $renamed = 0;
+	my ($linker, $object);
+
+	# This records whether we've seen a derived source file (e.g.
+	# yacc output).
+	my $derived_source = 0;
+
+	# This holds the 'aggregate context' of the file we are
+	# currently examining.  If the file is compiled with
+	# per-object flags, then it will be the name of the object.
+	# Otherwise it will be 'AM'.  This is used by the target hook
+	# language function.
+	my $aggregate = 'AM';
+
+	$extension = derive_suffix ($extension, $obj);
+	my $lang;
+	if ($extension_map{$extension} &&
+	    ($lang = $languages{$extension_map{$extension}}))
+	{
+	    # Found the language, so see what it says.
+	    saw_extension ($extension);
+
+	    # Do we have per-executable flags for this executable?
+	    my $have_per_exec_flags = 0;
+	    my @peflags = @{$lang->flags};
+	    push @peflags, 'LIBTOOLFLAGS' if $obj eq '.lo';
+	    foreach my $flag (@peflags)
+	      {
+		if (set_seen ("${derived}_$flag"))
+		  {
+		    $have_per_exec_flags = 1;
+		    last;
+		  }
+	      }
+
+	    # Note: computed subr call.  The language rewrite function
+	    # should return one of the LANG_* constants.  It could
+	    # also return a list whose first value is such a constant
+	    # and whose second value is a new source extension which
+	    # should be applied.  This means this particular language
+	    # generates another source file which we must then process
+	    # further.
+	    my $subr = \&{'lang_' . $lang->name . '_rewrite'};
+	    defined &$subr or $subr = \&lang_sub_obj;
+	    my ($r, $source_extension)
+		= &$subr ($directory, $base, $extension,
+			  $obj, $have_per_exec_flags, $var);
+	    # Skip this entry if we were asked not to process it.
+	    next if $r == LANG_IGNORE;
+
+	    # Now extract linker and other info.
+	    $linker = $lang->linker;
+
+	    my $this_obj_ext;
+	    if (defined $source_extension)
+	    {
+		$this_obj_ext = $source_extension;
+		$derived_source = 1;
+	    }
+	    else
+	    {
+		$this_obj_ext = $obj;
+	    }
+	    $object = $base . $this_obj_ext;
+
+	    if ($have_per_exec_flags)
+	    {
+		# We have a per-executable flag in effect for this
+		# object.  In this case we rewrite the object's
+		# name to ensure it is unique.
+
+		# We choose the name 'DERIVED_OBJECT' to ensure
+		# (1) uniqueness, and (2) continuity between
+		# invocations.  However, this will result in a
+		# name that is too long for losing systems, in
+		# some situations.  So we provide _SHORTNAME to
+		# override.
+
+		my $dname = $derived;
+		my $var = var ($derived . '_SHORTNAME');
+		if ($var)
+		{
+		    # FIXME: should use the same Condition as
+		    # the _SOURCES variable.  But this is really
+		    # silly overkill -- nobody should have
+		    # conditional shortnames.
+		    $dname = $var->variable_value;
+		}
+		$object = $dname . '-' . $object;
+
+		prog_error ($lang->name . " flags defined without compiler")
+		  if ! defined $lang->compile;
+
+		$renamed = 1;
+	    }
+
+	    # If rewrite said it was ok, put the object into a
+	    # subdir.
+	    if ($directory ne '')
+	    {
+              if ($r == LANG_SUBDIR)
+                {
+                  $object = $directory . '/' . $object;
+                }
+              else
+                {
+                  # Since the next major version of automake (2.0) will
+                  # make the behaviour so far only activated with the
+                  # 'subdir-objects' option mandatory, it's better if we
+                  # start warning users not using that option.
+                  # As suggested by Peter Johansson, we strive to avoid
+                  # the warning when it would be irrelevant, i.e., if
+                  # all source files sit in the "current" directory.
+                  msg_var 'unsupported', $var,
+                          "source file '$full' is in a subdirectory,"
+                          . "\nbut option 'subdir-objects' is disabled";
+                  msg 'unsupported', INTERNAL, <<'EOF', uniq_scope => US_GLOBAL;
+possible forward-incompatibility.
+At least a source file is in a subdirectory, but the 'subdir-objects'
+automake option hasn't been enabled.  For now, the corresponding output
+object file(s) will be placed in the top-level directory.  However,
+this behaviour will change in future Automake versions: they will
+unconditionally cause object files to be placed in the same subdirectory
+of the corresponding sources.
+You are advised to start using 'subdir-objects' option throughout your
+project, to avoid future incompatibilities.
+EOF
+                }
+	    }
+
+	    # If the object file has been renamed (because per-target
+	    # flags are used) we cannot compile the file with an
+	    # inference rule: we need an explicit rule.
+	    #
+	    # If the source is in a subdirectory and the object is in
+	    # the current directory, we also need an explicit rule.
+	    #
+	    # If both source and object files are in a subdirectory
+	    # (this happens when the subdir-objects option is used),
+	    # then the inference will work.
+	    #
+	    # The latter case deserves a historical note.  When the
+	    # subdir-objects option was added on 1999-04-11 it was
+	    # thought that inference rules would work for
+	    # subdirectory objects too.  Later, on 1999-11-22,
+	    # automake was changed to output explicit rules even for
+	    # subdir-objects.  Nobody remembers why, but this occurred
+	    # soon after the merge of the user-dep-gen-branch so it
+	    # might be related.  In late 2003 people complained about
+	    # the size of the generated Makefile.ins (libgcj, with
+	    # 2200+ subdir objects was reported to have a 9MB
+	    # Makefile), so we now rely on inference rules again.
+	    # Maybe we'll run across the same issue as in the past,
+	    # but at least this time we can document it.  However since
+	    # dependency tracking has evolved it is possible that
+	    # our old problem no longer exists.
+	    # Using inference rules for subdir-objects has been tested
+	    # with GNU make, Solaris make, Ultrix make, BSD make,
+	    # HP-UX make, and OSF1 make successfully.
+	    if ($renamed
+		|| ($directory ne '' && ! option 'subdir-objects')
+		# We must also use specific rules for a nodist_ source
+		# if its language requests it.
+		|| ($lang->nodist_specific && ! $transform{'DIST_SOURCE'}))
+	    {
+		my $obj_sans_ext = substr ($object, 0,
+					   - length ($this_obj_ext));
+		my $full_ansi;
+		if ($directory ne '')
+	          {
+			$full_ansi = $directory . '/' . $base . $extension;
+	          }
+		else
+	          {
+			$full_ansi = $base . $extension;
+	          }
+
+		my @specifics = ($full_ansi, $obj_sans_ext,
+				 # Only use $this_obj_ext in the derived
+				 # source case because in the other case we
+				 # *don't* want $(OBJEXT) to appear here.
+				 ($derived_source ? $this_obj_ext : '.o'),
+				 $extension);
+
+		# If we renamed the object then we want to use the
+		# per-executable flag name.  But if this is simply a
+		# subdir build then we still want to use the AM_ flag
+		# name.
+		if ($renamed)
+		  {
+		    unshift @specifics, $derived;
+		    $aggregate = $derived;
+		  }
+		else
+		  {
+		    unshift @specifics, 'AM';
+		  }
+
+		# Each item on this list is a reference to a list consisting
+		# of four values followed by additional transform flags for
+		# file_contents.  The four values are the derived flag prefix
+		# (e.g. for 'foo_CFLAGS', it is 'foo'), the name of the
+		# source file, the base name of the output file, and
+		# the extension for the object file.
+		push (@{$lang_specific_files{$lang->name}},
+		      [@specifics, %transform]);
+	    }
+	}
+	elsif ($extension eq $obj)
+	{
+	    # This is probably the result of a direct suffix rule.
+	    # In this case we just accept the rewrite.
+	    $object = "$base$extension";
+	    $object = "$directory/$object" if $directory ne '';
+	    $linker = '';
+	}
+	else
+	{
+	    # No error message here.  Used to have one, but it was
+	    # very unpopular.
+	    # FIXME: we could potentially do more processing here,
+	    # perhaps treating the new extension as though it were a
+	    # new source extension (as above).  This would require
+	    # more restructuring than is appropriate right now.
+	    next;
+	}
+
+	err_am "object '$object' created by '$full' and '$object_map{$object}'"
+	  if (defined $object_map{$object}
+	      && $object_map{$object} ne $full);
+
+	my $comp_val = (($object =~ /\.lo$/)
+			? COMPILE_LIBTOOL : COMPILE_ORDINARY);
+	(my $comp_obj = $object) =~ s/\.lo$/.\$(OBJEXT)/;
+	if (defined $object_compilation_map{$comp_obj}
+	    && $object_compilation_map{$comp_obj} != 0
+	    # Only see the error once.
+	    && ($object_compilation_map{$comp_obj}
+		!= (COMPILE_LIBTOOL | COMPILE_ORDINARY))
+	    && $object_compilation_map{$comp_obj} != $comp_val)
+	  {
+	    err_am "object '$comp_obj' created both with libtool and without";
+	  }
+	$object_compilation_map{$comp_obj} |= $comp_val;
+
+	if (defined $lang)
+	{
+	    # Let the language do some special magic if required.
+	    $lang->target_hook ($aggregate, $object, $full, %transform);
+	}
+
+	if ($derived_source)
+	  {
+	    prog_error ($lang->name . " has automatic dependency tracking")
+	      if $lang->autodep ne 'no';
+	    # Make sure this new source file is handled next.  That will
+	    # make it appear to be at the right place in the list.
+	    unshift (@files, $object);
+	    # Distribute derived sources unless the source they are
+	    # derived from is not.
+	    push_dist_common ($object)
+	      unless ($topparent =~ /^(?:nobase_)?nodist_/);
+	    next;
+	  }
+
+	$linkers_used{$linker} = 1;
+
+	push (@result, $object);
+
+	if (! defined $object_map{$object})
+	{
+	    my @dep_list = ();
+	    $object_map{$object} = $full;
+
+	    # If resulting object is in subdir, we need to make
+	    # sure the subdir exists at build time.
+	    if ($object =~ /\//)
+	    {
+		# FIXME: check that $DIRECTORY is somewhere in the
+		# project
+
+		# For Java, the way we're handling it right now, a
+		# '..' component doesn't make sense.
+		if ($lang && $lang->name eq 'java' && $object =~ /(\/|^)\.\.\//)
+		  {
+		    err_am "'$full' should not contain a '..' component";
+		  }
+
+                # Make sure *all* object files in the subdirectory are
+                # removed by "make mostlyclean".  This is more efficient
+                # than listing the object files to be removed individually
+                # (which would cause an 'rm' invocation for each of them --
+                # very inefficient, see bug#10697), and removing them one
+                # by one would also leave stale object files in the
+                # subdirectory whenever a source file there is removed or
+                # renamed.
+                $compile_clean_files{"$directory/*.\$(OBJEXT)"} = MOSTLY_CLEAN;
+                if ($object =~ /\.lo$/)
+                  {
+                    # If we have a libtool object, then we also must remove
+                    # any '.lo' objects in its same subdirectory.
+                    $compile_clean_files{"$directory/*.lo"} = MOSTLY_CLEAN;
+                    # Remember to cleanup .libs/ in this directory.
+                    $libtool_clean_directories{$directory} = 1;
+                  }
+
+		push (@dep_list, require_build_directory ($directory));
+
+		# If we're generating dependencies, we also want
+		# to make sure that the appropriate subdir of the
+		# .deps directory is created.
+		push (@dep_list,
+		      require_build_directory ($directory . '/$(DEPDIR)'))
+		  unless option 'no-dependencies';
+	    }
+
+	    pretty_print_rule ($object . ':', "\t", @dep_list)
+		if scalar @dep_list > 0;
+	}
+
+	# Transform .o or $o file into .P file (for automatic
+	# dependency code).
+        # Properly flatten multiple adjacent slashes, as Solaris 10 make
+        # might fail over them in an include statement.
+        # Leading double slashes may be special, as per Posix, so deal
+        # with them carefully.
+        if ($lang && $lang->autodep ne 'no')
+        {
+            my $depfile = $object;
+            $depfile =~ s/\.([^.]*)$/.P$1/;
+            $depfile =~ s/\$\(OBJEXT\)$/o/;
+            my $maybe_extra_leading_slash = '';
+            $maybe_extra_leading_slash = '/' if $depfile =~ m,^//[^/],;
+            $depfile =~ s,/+,/,g;
+            my $basename = basename ($depfile);
+            # This might make $dirname empty, but we account for that below.
+            (my $dirname = dirname ($depfile)) =~ s/\/*$//;
+            $dirname = $maybe_extra_leading_slash . $dirname;
+            $dep_files{$dirname . '/$(DEPDIR)/' . $basename} = 1;
+        }
+    }
+
+    return @result;
+}
+
+
+# $LINKER
+# define_objects_from_sources ($VAR, $OBJVAR, $NODEFINE, $ONE_FILE,
+#                              $OBJ, $TOPPARENT, $WHERE, %TRANSFORM)
+# ---------------------------------------------------------------------------
+# Define an _OBJECTS variable for a _SOURCES variable (or subvariable)
+#
+# Arguments are:
+#   $VAR is the name of the _SOURCES variable
+#   $OBJVAR is the name of the _OBJECTS variable if known (otherwise
+#     it will be generated and returned).
+#   $NODEFINE is a boolean: if true, $OBJVAR will not be defined (but
+#     work done to determine the linker will be).
+#   $ONE_FILE is the canonical (transformed) name of object to build
+#   $OBJ is the object extension (i.e. either '.o' or '.lo').
+#   $TOPPARENT is the _SOURCES variable being processed.
+#   $WHERE context into which this definition is done
+#   %TRANSFORM extra arguments to pass to file_contents when producing
+#     rules
+#
+# Result is $LINKER: a boolean, true if a linker is needed to deal with
+# the objects.
+sub define_objects_from_sources
+{
+  my ($var, $objvar, $nodefine, $one_file,
+      $obj, $topparent, $where, %transform) = @_;
+
+  my $needlinker = "";
+
+  transform_variable_recursively
+    ($var, $objvar, 'am__objects', $nodefine, $where,
+     # The transform code to run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+       my @trans = handle_single_transform ($subvar, $topparent,
+					    $one_file, $obj, $val,
+					    %transform);
+       $needlinker = "true" if @trans;
+       return @trans;
+     });
+
+  return $needlinker;
+}
+
+
+# handle_source_transform ($CANON_TARGET, $TARGET, $OBJEXT, $WHERE, %TRANSFORM)
+# -----------------------------------------------------------------------------
+# Handle SOURCE->OBJECT transform for one program or library.
+# Arguments are:
+#   canonical (transformed) name of target to build
+#   actual target of object to build
+#   object extension (i.e., either '.o' or '$o')
+#   location of the source variable
+#   extra arguments to pass to file_contents when producing rules
+# Return the name of the linker variable that must be used.
+# Empty return means just use 'LINK'.
+sub handle_source_transform
+{
+    # one_file is canonical name.  unxformed is given name.  obj is
+    # object extension.
+    my ($one_file, $unxformed, $obj, $where, %transform) = @_;
+
+    my $linker = '';
+
+    # No point in continuing if _OBJECTS is defined.
+    return if reject_var ($one_file . '_OBJECTS',
+			  $one_file . '_OBJECTS should not be defined');
+
+    my %used_pfx = ();
+    my $needlinker;
+    %linkers_used = ();
+    foreach my $prefix ('', 'EXTRA_', 'dist_', 'nodist_',
+			'dist_EXTRA_', 'nodist_EXTRA_')
+    {
+	my $varname = $prefix . $one_file . "_SOURCES";
+	my $var = var $varname;
+	next unless $var;
+
+	# We are going to define _OBJECTS variables using the prefix.
+	# Then we glom them all together.  So we can't use the null
+	# prefix here as we need it later.
+	my $xpfx = ($prefix eq '') ? 'am_' : $prefix;
+
+	# Keep track of which prefixes we saw.
+	$used_pfx{$xpfx} = 1
+	  unless $prefix =~ /EXTRA_/;
+
+	push @sources, "\$($varname)";
+	push @dist_sources, shadow_unconditionally ($varname, $where)
+	  unless (option ('no-dist') || $prefix =~ /^nodist_/);
+
+	$needlinker |=
+	    define_objects_from_sources ($varname,
+					 $xpfx . $one_file . '_OBJECTS',
+					 !!($prefix =~ /EXTRA_/),
+					 $one_file, $obj, $varname, $where,
+					 DIST_SOURCE => ($prefix !~ /^nodist_/),
+					 %transform);
+    }
+    if ($needlinker)
+    {
+	$linker ||= resolve_linker (%linkers_used);
+    }
+
+    my @keys = sort keys %used_pfx;
+    if (scalar @keys == 0)
+    {
+	# The default source for libfoo.la is libfoo.c, but for
+	# backward compatibility we first look at libfoo_la.c,
+	# if no default source suffix is given.
+	my $old_default_source = "$one_file.c";
+	my $ext_var = var ('AM_DEFAULT_SOURCE_EXT');
+	my $default_source_ext = $ext_var ? variable_value ($ext_var) : '.c';
+	msg_var ('unsupported', $ext_var, $ext_var->name . " can assume at most one value")
+	  if $default_source_ext =~ /[\t ]/;
+	(my $default_source = $unxformed) =~ s,(\.[^./\\]*)?$,$default_source_ext,;
+	# TODO: Remove this backward-compatibility hack in Automake 2.0.
+	if ($old_default_source ne $default_source
+	    && !$ext_var
+	    && (rule $old_default_source
+		|| rule '$(srcdir)/' . $old_default_source
+		|| rule '${srcdir}/' . $old_default_source
+		|| -f $old_default_source))
+	  {
+	    my $loc = $where->clone;
+	    $loc->pop_context;
+	    msg ('obsolete', $loc,
+		 "the default source for '$unxformed' has been changed "
+		 . "to '$default_source'.\n(Using '$old_default_source' for "
+		 . "backward compatibility.)");
+	    $default_source = $old_default_source;
+	  }
+	# If a rule exists to build this source with a $(srcdir)
+	# prefix, use that prefix in our variables too.  This is for
+	# the sake of BSD Make.
+	if (rule '$(srcdir)/' . $default_source
+	    || rule '${srcdir}/' . $default_source)
+	  {
+	    $default_source = '$(srcdir)/' . $default_source;
+	  }
+
+	define_variable ($one_file . "_SOURCES", $default_source, $where);
+	push (@sources, $default_source);
+	push (@dist_sources, $default_source);
+
+	%linkers_used = ();
+	my (@result) =
+	  handle_single_transform ($one_file . '_SOURCES',
+				   $one_file . '_SOURCES',
+				   $one_file, $obj,
+				   $default_source, %transform);
+	$linker ||= resolve_linker (%linkers_used);
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @result);
+    }
+    else
+    {
+	@keys = map { '$(' . $_ . $one_file . '_OBJECTS)' } @keys;
+	define_pretty_variable ($one_file . '_OBJECTS', TRUE, $where, @keys);
+    }
+
+    # If we want to use 'LINK' we must make sure it is defined.
+    if ($linker eq '')
+    {
+	$need_link = 1;
+    }
+
+    return $linker;
+}
+
+
+# handle_lib_objects ($XNAME, $VAR)
+# ---------------------------------
+# Special-case ALLOCA and LIBOBJS substitutions in _LDADD or _LIBADD variables.
+# Also, generate _DEPENDENCIES variable if appropriate.
+# Arguments are:
+#   transformed name of object being built, or empty string if no object
+#   name of _LDADD/_LIBADD-type variable to examine
+# Returns 1 if LIBOBJS seen, 0 otherwise.
+sub handle_lib_objects
+{
+  my ($xname, $varname) = @_;
+
+  my $var = var ($varname);
+  prog_error "'$varname' undefined"
+    unless $var;
+  prog_error "unexpected variable name '$varname'"
+    unless $varname =~ /^(.*)(?:LIB|LD)ADD$/;
+  my $prefix = $1 || 'AM_';
+
+  my $seen_libobjs = 0;
+  my $flagvar = 0;
+
+  transform_variable_recursively
+    ($varname, $xname . '_DEPENDENCIES', 'am__DEPENDENCIES',
+     ! $xname, INTERNAL,
+     # Transformation function, run on each filename.
+     sub {
+       my ($subvar, $val, $cond, $full_cond) = @_;
+
+       if ($val =~ /^-/)
+	 {
+	   # Skip -lfoo and -Ldir silently; these are explicitly allowed.
+	   if ($val !~ /^-[lL]/ &&
+	       # Skip -dlopen and -dlpreopen; these are explicitly allowed
+	       # for Libtool libraries or programs.  (Actually we are a bit
+	       # lax here since this code also applies to non-libtool
+	       # libraries or programs, for which -dlopen and -dlopreopen
+	       # are pure nonsense.  Diagnosing this doesn't seem very
+	       # important: the developer will quickly get complaints from
+	       # the linker.)
+	       $val !~ /^-dl(?:pre)?open$/ &&
+	       # Only get this error once.
+	       ! $flagvar)
+	     {
+	       $flagvar = 1;
+	       # FIXME: should display a stack of nested variables
+	       # as context when $var != $subvar.
+	       err_var ($var, "linker flags such as '$val' belong in "
+			. "'${prefix}LDFLAGS'");
+	     }
+	   return ();
+	 }
+       elsif ($val !~ /^\@.*\@$/)
+	 {
+	   # Assume we have a file of some sort, and output it into the
+	   # dependency variable.  Autoconf substitutions are not output;
+	   # rarely is a new dependency substituted into e.g. foo_LDADD
+	   # -- but bad things (e.g. -lX11) are routinely substituted.
+	   # Note that LIBOBJS and ALLOCA are exceptions to this rule,
+	   # and handled specially below.
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?LIBOBJS\@$/)
+	 {
+	   handle_LIBOBJS ($subvar, $cond, $1);
+	   $seen_libobjs = 1;
+	   return $val;
+	 }
+       elsif ($val =~ /^\@(LT)?ALLOCA\@$/)
+	 {
+	   handle_ALLOCA ($subvar, $cond, $1);
+	   return $val;
+	 }
+       else
+	 {
+	   return ();
+	 }
+     });
+
+  return $seen_libobjs;
+}
+
+# handle_LIBOBJS_or_ALLOCA ($VAR)
+# -------------------------------
+# Definitions common to LIBOBJS and ALLOCA.
+# VAR should be one of LIBOBJS, LTLIBOBJS, ALLOCA, or LTALLOCA.
+sub handle_LIBOBJS_or_ALLOCA
+{
+  my ($var) = @_;
+
+  my $dir = '';
+
+  # If LIBOBJS files must be built in another directory we have
+  # to define LIBOBJDIR and ensure the files get cleaned.
+  # Otherwise LIBOBJDIR can be left undefined, and the cleaning
+  # is achieved by 'rm -f *.$(OBJEXT)' in compile.am.
+  if ($config_libobj_dir
+      && $relative_dir ne $config_libobj_dir)
+    {
+      if (option 'subdir-objects')
+	{
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  $dir = "$config_libobj_dir/"
+	    if $config_libobj_dir ne '.';
+	  $dir = backname ($relative_dir) . "/$dir"
+	    if $relative_dir ne '.';
+	  define_variable ('LIBOBJDIR', "$dir", INTERNAL);
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN;
+	  # If LTLIBOBJS is used, we must also clear LIBOBJS (which might
+	  # be created by libtool as a side-effect of creating LTLIBOBJS).
+	  $clean_files{"\$($var)"} = MOSTLY_CLEAN if $var =~ s/^LT//;
+	}
+      else
+	{
+	  error ("'\$($var)' cannot be used outside '$config_libobj_dir' if"
+		 . " 'subdir-objects' is not set");
+	}
+    }
+
+  return $dir;
+}
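+
+# Illustrative example (hypothetical layout): with the libobj directory
+# set to 'lib' in configure.ac and a Makefile.am in 'src/' that uses
+# 'subdir-objects', the code above defines LIBOBJDIR = ../lib/ (computed
+# via backname) and marks the $(LIBOBJS)-style variable for removal by
+# "make mostlyclean".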
+
+sub handle_LIBOBJS
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+
+  $var->requires_variables ("\@${lt}LIBOBJS\@ used", $lt . 'LIBOBJS')
+    if ! keys %libsources;
+
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}LIBOBJS";
+
+  foreach my $iter (keys %libsources)
+    {
+      if ($iter =~ /\.[cly]$/)
+	{
+	  saw_extension ($&);
+	  saw_extension ('.c');
+	}
+
+      if ($iter =~ /\.h$/)
+	{
+	  require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	}
+      elsif ($iter ne 'alloca.c')
+	{
+	  my $rewrite = $iter;
+	  $rewrite =~ s/\.c$/.P$myobjext/;
+	  $dep_files{$dir . '$(DEPDIR)/' . $rewrite} = 1;
+	  $rewrite = "^" . quotemeta ($iter) . "\$";
+	  # Only require the file if it is not a built source.
+	  my $bs = var ('BUILT_SOURCES');
+	  if (! $bs || ! grep (/$rewrite/, $bs->value_as_list_recursive))
+	    {
+	      require_libsource_with_macro ($cond, $var, FOREIGN, $iter);
+	    }
+	}
+    }
+}
+
+sub handle_ALLOCA
+{
+  my ($var, $cond, $lt) = @_;
+  my $myobjext = $lt ? 'lo' : 'o';
+  $lt ||= '';
+  my $dir = handle_LIBOBJS_or_ALLOCA "${lt}ALLOCA";
+
+  $var->requires_variables ("\@${lt}ALLOCA\@ used", $lt . 'ALLOCA');
+  $dep_files{$dir . '$(DEPDIR)/alloca.P' . $myobjext} = 1;
+  require_libsource_with_macro ($cond, $var, FOREIGN, 'alloca.c');
+  saw_extension ('.c');
+}
+
+# Canonicalize the input parameter.
+sub canonicalize
+{
+    my ($string) = @_;
+    $string =~ tr/A-Za-z0-9_\@/_/c;
+    return $string;
+}
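+
+# Examples (illustrative): every character outside [A-Za-z0-9_@] becomes
+# an underscore, so
+#
+#   canonicalize ('libfoo-bar.la')   # => 'libfoo_bar_la'
+#   canonicalize ('sub/dir/prog')    # => 'sub_dir_prog'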
+
+# Canonicalize a name, and check to make sure the non-canonical name
+# is never used.  Returns canonical name.  Arguments are name and a
+# list of suffixes to check for.
+sub check_canonical_spelling
+{
+  my ($name, @suffixes) = @_;
+
+  my $xname = canonicalize ($name);
+  if ($xname ne $name)
+    {
+      foreach my $xt (@suffixes)
+	{
+	  reject_var ("$name$xt", "use '$xname$xt', not '$name$xt'");
+	}
+    }
+
+  return $xname;
+}
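+
+# For example (illustrative), check_canonical_spelling ('libfoo.la',
+# '_LIBADD', '_SOURCES') returns 'libfoo_la' and rejects any use of
+# 'libfoo.la_LIBADD' or 'libfoo.la_SOURCES', telling the user to write
+# 'libfoo_la_LIBADD' and 'libfoo_la_SOURCES' instead.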
+
+# Set up the compile suite.
+sub handle_compile ()
+{
+   return if ! $must_handle_compiled_objects;
+
+    # Boilerplate.
+    my $default_includes = '';
+    if (! option 'nostdinc')
+      {
+	my @incs = ('-I.', subst ('am__isrc'));
+
+	my $var = var 'CONFIG_HEADER';
+	if ($var)
+	  {
+	    foreach my $hdr (split (' ', $var->variable_value))
+	      {
+		push @incs, '-I' . dirname ($hdr);
+	      }
+	  }
+	# We want '-I. -I$(srcdir)', but the latter -I is redundant
+	# and unaesthetic in non-VPATH builds.  We use '-I.@am__isrc@'
+	# instead.  It will be replaced by '-I.' or '-I. -I$(srcdir)'.
+	# Items in CONFIG_HEADER are never in $(srcdir) so it is safe
+	# to just put @am__isrc@ right after '-I.', without a space.
+	($default_includes = ' ' . uniq (@incs)) =~ s/ @/@/;
+      }
+
+    my (@mostly_rms, @dist_rms);
+    foreach my $item (sort keys %compile_clean_files)
+    {
+	if ($compile_clean_files{$item} == MOSTLY_CLEAN)
+	{
+	    push (@mostly_rms, "\t-rm -f $item");
+	}
+	elsif ($compile_clean_files{$item} == DIST_CLEAN)
+	{
+	    push (@dist_rms, "\t-rm -f $item");
+	}
+	else
+	{
+	  prog_error 'invalid entry in %compile_clean_files';
+	}
+    }
+
+    my ($coms, $vars, $rules) =
+      file_contents_internal (1, "$libdir/am/compile.am",
+			      new Automake::Location,
+			      'DEFAULT_INCLUDES' => $default_includes,
+			      'MOSTLYRMS' => join ("\n", @mostly_rms),
+			      'DISTRMS' => join ("\n", @dist_rms));
+    $output_vars .= $vars;
+    $output_rules .= "$coms$rules";
+}
+
+# Handle libtool rules.
+sub handle_libtool ()
+{
+  return unless var ('LIBTOOL');
+
+  # Libtool requires some files, but only at top level.
+  # (Starting with Libtool 2.0 we do not have to bother.  These
+  # requirements are done with AC_REQUIRE_AUX_FILE.)
+  require_conf_file_with_macro (TRUE, 'LIBTOOL', FOREIGN, @libtool_files)
+    if $relative_dir eq '.' && ! $libtool_new_api;
+
+  my @libtool_rms;
+  foreach my $item (sort keys %libtool_clean_directories)
+    {
+      my $dir = ($item eq '.') ? '' : "$item/";
+      # .libs is for Unix, _libs for DOS.
+      push (@libtool_rms, "\t-rm -rf ${dir}.libs ${dir}_libs");
+    }
+
+  check_user_variables 'LIBTOOLFLAGS';
+
+  # Output the libtool compilation rules.
+  $output_rules .= file_contents ('libtool',
+				  new Automake::Location,
+				   LTRMS => join ("\n", @libtool_rms));
+}
+
+
+sub handle_programs ()
+{
+  my @proglist = am_install_var ('progs', 'PROGRAMS',
+				 'bin', 'sbin', 'libexec', 'pkglibexec',
+				 'noinst', 'check');
+  return if ! @proglist;
+  $must_handle_compiled_objects = 1;
+
+  my $seen_global_libobjs =
+    var ('LDADD') && handle_lib_objects ('', 'LDADD');
+
+  foreach my $pair (@proglist)
+    {
+      my ($where, $one_file) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.$(OBJEXT)';
+
+      $known_programs{$one_file} = $where;
+
+      # Canonicalize names and check for misspellings.
+      my $xname = check_canonical_spelling ($one_file, '_LDADD', '_LDFLAGS',
+                                            '_SOURCES', '_OBJECTS',
+                                            '_DEPENDENCIES');
+
+      $where->push_context ("while processing program '$one_file'");
+      $where->set (INTERNAL->get);
+
+      my $linker = handle_source_transform ($xname, $one_file, $obj, $where,
+                                            NONLIBTOOL => 1, LIBTOOL => 0);
+
+      if (var ($xname . "_LDADD"))
+	{
+	  $seen_libobjs = handle_lib_objects ($xname, $xname . '_LDADD');
+	}
+      else
+	{
+	  # User didn't define prog_LDADD override.  So do it.
+	  define_variable ($xname . '_LDADD', '$(LDADD)', $where);
+
+	  # This does a bit too much work.  But we need it to
+	  # generate _DEPENDENCIES when appropriate.
+	  if (var ('LDADD'))
+	    {
+	      $seen_libobjs = handle_lib_objects ($xname, 'LDADD');
+	    }
+	}
+
+      reject_var ($xname . '_LIBADD',
+		  "use '${xname}_LDADD', not '${xname}_LIBADD'");
+
+      set_seen ($xname . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xname . '_DEPENDENCIES');
+      set_seen ($xname . '_LDFLAGS');
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xname);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      # If the resulting program lies in a subdirectory,
+      # ensure that the directory exists before we need it.
+      my $dirstamp = require_build_directory_maybe ($one_file);
+
+      $libtool_clean_directories{dirname ($one_file)} = 1;
+
+      $output_rules .= file_contents ('program',
+                                      $where,
+                                      PROGRAM  => $one_file,
+                                      XPROGRAM => $xname,
+                                      XLINK    => $xlink,
+                                      VERBOSE  => $vlink,
+                                      DIRSTAMP => $dirstamp,
+                                      EXEEXT   => '$(EXEEXT)');
+
+      if ($seen_libobjs || $seen_global_libobjs)
+	{
+	  if (var ($xname . '_LDADD'))
+	    {
+	      check_libobjs_sources ($xname, $xname . '_LDADD');
+	    }
+	  elsif (var ('LDADD'))
+	    {
+	      check_libobjs_sources ($xname, 'LDADD');
+	    }
+	}
+    }
+}
+
+
+sub handle_libraries ()
+{
+  my @liblist = am_install_var ('libs', 'LIBRARIES',
+                                'lib', 'pkglib', 'noinst', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LIBRARIES');
+      $var->requires_variables ('library used', 'RANLIB');
+    }
+
+  define_variable ('AR', 'ar', INTERNAL);
+  define_variable ('ARFLAGS', 'cru', INTERNAL);
+  define_verbose_tagvar ('AR');
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      # Check that the library fits the standard naming convention.
+      my $bn = basename ($onelib);
+      if ($bn !~ /^lib.*\.a$/)
+	{
+	  $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.a/;
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard library name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.a$//;
+
+      $where->push_context ("while processing library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      my $obj = '.$(OBJEXT)';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_SOURCES',
+                                           '_OBJECTS', '_DEPENDENCIES',
+                                           '_AR');
+
+      if (! var ($xlib . '_AR'))
+	{
+	  define_variable ($xlib . '_AR', '$(AR) $(ARFLAGS)', $where);
+	}
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ($xlib . '_LDADD',
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+      # Make sure we look at this.
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      handle_source_transform ($xlib, $onelib, $obj, $where,
+                               NONLIBTOOL => 1, LIBTOOL => 0);
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+      my $verbose = verbose_flag ('AR');
+      my $silent = silent_flag ();
+
+      $output_rules .= file_contents ('library',
+                                       $where,
+                                       VERBOSE  => $verbose,
+                                       SILENT   => $silent,
+                                       LIBRARY  => $onelib,
+                                       XLIBRARY => $xlib,
+                                       DIRSTAMP => $dirstamp);
+
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+
+sub handle_ltlibraries ()
+{
+  my @liblist = am_install_var ('ltlib', 'LTLIBRARIES',
+                                'noinst', 'lib', 'pkglib', 'check');
+  return if ! @liblist;
+  $must_handle_compiled_objects = 1;
+
+  my @prefix = am_primary_prefixes ('LTLIBRARIES', 0, 'lib', 'pkglib',
+				    'noinst', 'check');
+
+  if (@prefix)
+    {
+      my $var = rvar ($prefix[0] . '_LTLIBRARIES');
+      $var->requires_variables ('Libtool library used', 'LIBTOOL');
+    }
+
+  my %instdirs = ();
+  my %instsubdirs = ();
+  my %instconds = ();
+  my %liblocations = ();	# Location (in Makefile.am) of each library.
+
+  foreach my $key (@prefix)
+    {
+      # Get the installation directory of each library.
+      my $dir = $key;
+      my $strip_subdir = 1;
+      if ($dir =~ /^nobase_/)
+        {
+	  $dir =~ s/^nobase_//;
+	  $strip_subdir = 0;
+	}
+      my $var = rvar ($key . '_LTLIBRARIES');
+
+      # We reject libraries which are installed in several places
+      # in the same condition, because we can only specify one
+      # '-rpath' option.
+      $var->traverse_recursively
+	(sub
+	 {
+	   my ($var, $val, $cond, $full_cond) = @_;
+	   my $hcond = $full_cond->human;
+	   my $where = $var->rdef ($cond)->location;
+	   my $ldir = '';
+	   $ldir = '/' . dirname ($val)
+	     if (!$strip_subdir);
+	   # A library cannot be installed in different directories
+	   # in overlapping conditions.
+	   if (exists $instconds{$val})
+	     {
+	       my ($msg, $acond) =
+		 $instconds{$val}->ambiguous_p ($val, $full_cond);
+
+	       if ($msg)
+		 {
+		   error ($where, $msg, partial => 1);
+		   my $dirtxt = "installed " . ($strip_subdir ? "in" : "below") . " '$dir'";
+		   $dirtxt = "built for '$dir'"
+		     if $dir eq 'EXTRA' || $dir eq 'noinst' || $dir eq 'check';
+		   my $dircond =
+		     $full_cond->true ? "" : " in condition $hcond";
+
+		   error ($where, "'$val' should be $dirtxt$dircond ...",
+			  partial => 1);
+
+		   my $hacond = $acond->human;
+		   my $adir = $instdirs{$val}{$acond};
+		   my $adirtxt = "installed in '$adir'";
+		   $adirtxt = "built for '$adir'"
+		     if ($adir eq 'EXTRA' || $adir eq 'noinst'
+			 || $adir eq 'check');
+		   my $adircond = $acond->true ? "" : " in condition $hacond";
+
+		   my $onlyone = ($dir ne $adir) ?
+		     ("\nLibtool libraries can be built for only one "
+		      . "destination") : "";
+
+		   error ($liblocations{$val}{$acond},
+			  "... and should also be $adirtxt$adircond.$onlyone");
+		   return;
+		 }
+	     }
+	   else
+	     {
+	       $instconds{$val} = new Automake::DisjConditions;
+	     }
+	   $instdirs{$val}{$full_cond} = $dir;
+	   $instsubdirs{$val}{$full_cond} = $ldir;
+	   $liblocations{$val}{$full_cond} = $where;
+	   $instconds{$val} = $instconds{$val}->merge ($full_cond);
+	 },
+	 sub
+	 {
+	   return ();
+	 },
+	 skip_ac_subst => 1);
+    }
+
+  foreach my $pair (@liblist)
+    {
+      my ($where, $onelib) = @$pair;
+
+      my $seen_libobjs = 0;
+      my $obj = '.lo';
+
+      # Canonicalize names and check for misspellings.
+      my $xlib = check_canonical_spelling ($onelib, '_LIBADD', '_LDFLAGS',
+                                           '_SOURCES', '_OBJECTS',
+                                           '_DEPENDENCIES');
+
+      # Check that the library fits the standard naming convention.
+      my $libname_rx = '^lib.*\.la';
+      my $ldvar = var ("${xlib}_LDFLAGS") || var ('AM_LDFLAGS');
+      my $ldvar2 = var ('LDFLAGS');
+      if (($ldvar && grep (/-module/, $ldvar->value_as_list_recursive))
+	  || ($ldvar2 && grep (/-module/, $ldvar2->value_as_list_recursive)))
+	{
+	  # Relax name checking for libtool modules.
+	  $libname_rx = '\.la';
+	}
+
+      my $bn = basename ($onelib);
+      if ($bn !~ /$libname_rx$/)
+	{
+	  my $type = 'library';
+	  if ($libname_rx eq '\.la')
+	    {
+	      $bn =~ s/^(lib|)(.*?)(?:\.[^.]*)?$/$1$2.la/;
+	      $type = 'module';
+	    }
+	  else
+	    {
+	      $bn =~ s/^(?:lib)?(.*?)(?:\.[^.]*)?$/lib$1.la/;
+	    }
+	  my $suggestion = dirname ($onelib) . "/$bn";
+	  $suggestion =~ s|^\./||g;
+	  msg ('error-gnu/warn', $where,
+	       "'$onelib' is not a standard libtool $type name\n"
+	       . "did you mean '$suggestion'?")
+	}
+
+      ($known_libraries{$onelib} = $bn) =~ s/\.la$//;
+
+      $where->push_context ("while processing Libtool library '$onelib'");
+      $where->set (INTERNAL->get);
+
+      # Make sure we look at these.
+      set_seen ($xlib . '_LDFLAGS');
+      set_seen ($xlib . '_DEPENDENCIES');
+      set_seen ('EXTRA_' . $xlib . '_DEPENDENCIES');
+
+      # Generate support for conditional object inclusion in
+      # libraries.
+      if (var ($xlib . '_LIBADD'))
+	{
+	  if (handle_lib_objects ($xlib, $xlib . '_LIBADD'))
+	    {
+	      $seen_libobjs = 1;
+	    }
+	}
+      else
+	{
+	  define_variable ($xlib . "_LIBADD", '', $where);
+	}
+
+      reject_var ("${xlib}_LDADD",
+		  "use '${xlib}_LIBADD', not '${xlib}_LDADD'");
+
+
+      my $linker = handle_source_transform ($xlib, $onelib, $obj, $where,
+                                            NONLIBTOOL => 0, LIBTOOL => 1);
+
+      # Determine program to use for link.
+      my($xlink, $vlink) = define_per_target_linker_variable ($linker, $xlib);
+      $vlink = verbose_flag ($vlink || 'GEN');
+
+      my $rpathvar = "am_${xlib}_rpath";
+      my $rpath = "\$($rpathvar)";
+      foreach my $rcond ($instconds{$onelib}->conds)
+	{
+	  my $val;
+	  if ($instdirs{$onelib}{$rcond} eq 'EXTRA'
+	      || $instdirs{$onelib}{$rcond} eq 'noinst'
+	      || $instdirs{$onelib}{$rcond} eq 'check')
+	    {
+	      # It's an EXTRA_ library, so we can't specify -rpath,
+	      # because we don't know where the library will end up.
+	      # The user probably knows, but generally speaking automake
+	      # doesn't -- and in fact configure could decide
+	      # dynamically between two different locations.
+	      $val = '';
+	    }
+	  else
+	    {
+	      $val = ('-rpath $(' . $instdirs{$onelib}{$rcond} . 'dir)');
+	      $val .= $instsubdirs{$onelib}{$rcond}
+	        if defined $instsubdirs{$onelib}{$rcond};
+	    }
+	  if ($rcond->true)
+	    {
+	      # If $rcond is true there is only one condition and
+	      # there is no point defining a helper variable.
+	      $rpath = $val;
+	    }
+	  else
+	    {
+	      define_pretty_variable ($rpathvar, $rcond, INTERNAL, $val);
+	    }
+	}
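+      # For example (illustrative): 'lib_LTLIBRARIES = libfoo.la' yields
+      # '-rpath $(libdir)' here, while 'noinst_LTLIBRARIES = libbar.la'
+      # yields an empty value, since a convenience library is never
+      # installed and therefore needs no run-time search path.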
+
+      # If the resulting library lies in a subdirectory,
+      # make sure this directory will exist.
+      my $dirstamp = require_build_directory_maybe ($onelib);
+
+      # Remember to clean up .libs/ in this directory.
+      my $dirname = dirname $onelib;
+      $libtool_clean_directories{$dirname} = 1;
+
+      $output_rules .= file_contents ('ltlibrary',
+                                      $where,
+                                      LTLIBRARY  => $onelib,
+                                      XLTLIBRARY => $xlib,
+                                      RPATH      => $rpath,
+                                      XLINK      => $xlink,
+                                      VERBOSE    => $vlink,
+                                      DIRSTAMP   => $dirstamp);
+      if ($seen_libobjs)
+	{
+	  if (var ($xlib . '_LIBADD'))
+	    {
+	      check_libobjs_sources ($xlib, $xlib . '_LIBADD');
+	    }
+	}
+
+      if (! $seen_ar)
+	{
+	  msg ('extra-portability', $where,
+	       "'$onelib': linking libtool libraries using a non-POSIX\n"
+	       . "archiver requires 'AM_PROG_AR' in '$configure_ac'")
+	}
+    }
+}
+
+# See if any _SOURCES variables were misspelled.
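+#
+# For instance (illustrative), a Makefile.am containing only
+#
+#    bin_PROGRAMS = foo
+#    fooo_SOURCES = foo.c
+#
+# defines 'fooo_SOURCES' although no program or library has 'fooo' as its
+# canonical name, so the 'syntax' warning below is issued for it.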
+sub check_typos ()
+{
+  # It is ok if the user sets this particular variable.
+  set_seen 'AM_LDFLAGS';
+
+  foreach my $primary ('SOURCES', 'LIBADD', 'LDADD', 'LDFLAGS', 'DEPENDENCIES')
+    {
+      foreach my $var (variables $primary)
+	{
+	  my $varname = $var->name;
+	  # A configure variable is always legitimate.
+	  next if exists $configure_vars{$varname};
+
+	  for my $cond ($var->conditions->conds)
+	    {
+	      $varname =~ /^(?:EXTRA_)?(?:nobase_)?(?:dist_|nodist_)?(.*)_[[:alnum:]]+$/;
+	      msg_var ('syntax', $var, "variable '$varname' is defined but no"
+		       . " program or\nlibrary has '$1' as canonical name"
+		       . " (possible typo)")
+		unless $var->rdef ($cond)->seen;
+	    }
+	}
+    }
+}
+
+
+sub handle_scripts ()
+{
+    # NOTE we no longer automatically clean SCRIPTS, because it is
+    # useful to sometimes distribute scripts verbatim.  This happens
+    # e.g. in Automake itself.
+    am_install_var ('-candist', 'scripts', 'SCRIPTS',
+                    'bin', 'sbin', 'libexec', 'pkglibexec', 'pkgdata',
+                    'noinst', 'check');
+}
+
+
+## ------------------------ ##
+## Handling Texinfo files.  ##
+## ------------------------ ##
+
+# ($OUTFILE, $VFILE)
+# scan_texinfo_file ($FILENAME)
+# -----------------------------
+# $OUTFILE     - name of the info file produced by $FILENAME.
+# $VFILE       - name of the version.texi file used (undef if none).
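+#
+# A minimal input illustrating both values (hypothetical file 'foo.texi'):
+#
+#    \input texinfo
+#    @setfilename foo.info
+#    @include version.texi
+#
+# would yield ('foo.info', 'version.texi').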
+sub scan_texinfo_file
+{
+  my ($filename) = @_;
+
+  my $texi = new Automake::XFile "< $filename";
+  verb "reading $filename";
+
+  my ($outfile, $vfile);
+  while ($_ = $texi->getline)
+    {
+      if (/^\@setfilename +(\S+)/)
+	{
+	  # Honor only the first @setfilename.  (It's possible to have
+	  # more occurrences later if the manual shows examples of how
+	  # to use @setfilename...)
+	  next if $outfile;
+
+	  $outfile = $1;
+	  if (index ($outfile, '.') < 0)
+	    {
+	      msg 'obsolete', "$filename:$.",
+	          "use of suffix-less info files is discouraged"
+	    }
+	  elsif ($outfile !~ /\.info$/)
+	    {
+	      error ("$filename:$.",
+		     "output '$outfile' has unrecognized extension");
+	      return;
+	    }
+	}
+      # A "version.texi" file is actually any file whose name matches
+      # "vers*.texi".
+      elsif (/^\@include\s+(vers[^.]*\.texi)\s*$/)
+	{
+	  $vfile = $1;
+	}
+    }
+
+  if (! $outfile)
+    {
+      err_am "'$filename' missing \@setfilename";
+      return;
+    }
+
+  return ($outfile, $vfile);
+}
+
+
+# ($DIRSTAMP, @CLEAN_FILES)
+# output_texinfo_build_rules ($SOURCE, $DEST, $INSRC, @DEPENDENCIES)
+# ------------------------------------------------------------------
+# SOURCE - the source Texinfo file
+# DEST - the destination Info file
+# INSRC - whether DEST should be built in the source tree
+# DEPENDENCIES - known dependencies
+sub output_texinfo_build_rules
+{
+  my ($source, $dest, $insrc, @deps) = @_;
+
+  # Split 'a.texi' into 'a' and '.texi'.
+  my ($spfx, $ssfx) = ($source =~ /^(.*?)(\.[^.]*)?$/);
+  my ($dpfx, $dsfx) = ($dest =~ /^(.*?)(\.[^.]*)?$/);
+
+  $ssfx ||= "";
+  $dsfx ||= "";
+
+  # We can output two kinds of rules: the "generic" rules use Make
+  # suffix rules and are appropriate when $source and $dest do not lie
+  # in a sub-directory; the "specific" rules are needed in the other
+  # case.
+  #
+  # The former are output only once (this is not really apparent here,
+  # but just remember that some logic deeper in Automake will not
+  # output the same rule twice); while the latter need to be output for
+  # each Texinfo source.
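+  #
+  # E.g. (illustrative): 'doc.texi' built into 'doc.info' in the current
+  # directory can rely on the generic '.texi.info:' suffix rule, while
+  # 'sub/manual.texi' needs an explicit 'sub/manual.info: sub/manual.texi'
+  # rule with its own -I flags.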
+  my $generic;
+  my $makeinfoflags;
+  my $sdir = dirname $source;
+  if ($sdir eq '.' && dirname ($dest) eq '.')
+    {
+      $generic = 1;
+      $makeinfoflags = '-I $(srcdir)';
+    }
+  else
+    {
+      $generic = 0;
+      $makeinfoflags = "-I $sdir -I \$(srcdir)/$sdir";
+    }
+
+  # A directory can contain two kinds of info files: some built in the
+  # source tree, and some built in the build tree.  The rules are
+  # different in each case.  However we cannot output two different
+  # sets of generic rules.  Because in-source builds are more usual, we
+  # use generic rules in this case and fall back to "specific" rules
+  # for build-dir builds.  (It should not be a problem to invert this
+  # if needed.)
+  $generic = 0 unless $insrc;
+
+  # We cannot use a suffix rule to build info files with an empty
+  # extension.  Otherwise we would output a single suffix inference
+  # rule, with separate dependencies, as in
+  #
+  #    .texi:
+  #             $(MAKEINFO) ...
+  #    foo.info: foo.texi
+  #
+  # which confuses Solaris make.  (See the Autoconf manual for
+  # details.)  Therefore we use a specific rule in this case.  This
+  # applies to info files only (dvi and pdf files always have an
+  # extension).
+  my $generic_info = ($generic && $dsfx) ? 1 : 0;
+
+  # If the resulting file lies in a subdirectory,
+  # make sure this directory will exist.
+  my $dirstamp = require_build_directory_maybe ($dest);
+
+  my $dipfx = ($insrc ? '$(srcdir)/' : '') . $dpfx;
+
+  $output_rules .= file_contents ('texibuild',
+				  new Automake::Location,
+                                  AM_V_MAKEINFO    => verbose_flag('MAKEINFO'),
+                                  AM_V_TEXI2DVI    => verbose_flag('TEXI2DVI'),
+                                  AM_V_TEXI2PDF    => verbose_flag('TEXI2PDF'),
+				  DEPS             => "@deps",
+				  DEST_PREFIX      => $dpfx,
+				  DEST_INFO_PREFIX => $dipfx,
+				  DEST_SUFFIX      => $dsfx,
+				  DIRSTAMP         => $dirstamp,
+				  GENERIC          => $generic,
+				  GENERIC_INFO     => $generic_info,
+				  INSRC		   => $insrc,
+				  MAKEINFOFLAGS    => $makeinfoflags,
+                                  SILENT           => silent_flag(),
+				  SOURCE           => ($generic
+						       ? '$<' : $source),
+				  SOURCE_INFO      => ($generic_info
+						       ? '$<' : $source),
+				  SOURCE_REAL      => $source,
+				  SOURCE_SUFFIX    => $ssfx,
+                                  TEXIQUIET        => verbose_flag('texinfo'),
+                                  TEXIDEVNULL      => verbose_flag('texidevnull'),
+				  );
+  return ($dirstamp, "$dpfx.dvi", "$dpfx.pdf", "$dpfx.ps", "$dpfx.html");
+}
+
+
+# ($MOSTLYCLEAN, $TEXICLEAN, $MAINTCLEAN)
+# handle_texinfo_helper ($info_texinfos)
+# --------------------------------------
+# Handle all Texinfo source; helper for 'handle_texinfo'.
+sub handle_texinfo_helper
+{
+  my ($info_texinfos) = @_;
+  my (@infobase, @info_deps_list, @texi_deps);
+  my %versions;
+  my $done = 0;
+  my (@mostly_cleans, @texi_cleans, @maint_cleans) = ('', '', '');
+
+  # Build a regex matching user-cleaned files.
+  my $d = var 'DISTCLEANFILES';
+  my $c = var 'CLEANFILES';
+  my @f = ();
+  push @f, $d->value_as_list_recursive (inner_expand => 1) if $d;
+  push @f, $c->value_as_list_recursive (inner_expand => 1) if $c;
+  @f = map { s|[^A-Za-z_0-9*\[\]\-]|\\$&|g; s|\*|[^/]*|g; $_; } @f;
+  my $user_cleaned_files = '^(?:' . join ('|', @f) . ')$';
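+  # For illustration: 'CLEANFILES = *.info' contributes '[^/]*\.info' to
+  # this pattern, so a matching .info output is treated as user-cleaned
+  # (and hence built in the builddir) further below.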
+
+  foreach my $texi
+      ($info_texinfos->value_as_list_recursive (inner_expand => 1))
+    {
+      my $infobase = $texi;
+      if ($infobase =~ s/\.texi$//)
+        {
+          1; # Nothing more to do.
+        }
+      elsif ($infobase =~ s/\.(txi|texinfo)$//)
+        {
+	  msg_var 'obsolete', $info_texinfos,
+	          "suffix '.$1' for Texinfo files is discouraged;" .
+                  " use '.texi' instead";
+        }
+      else
+	{
+	  # FIXME: report line number.
+	  err_am "texinfo file '$texi' has unrecognized extension";
+	  next;
+	}
+
+      push @infobase, $infobase;
+
+      # If 'version.texi' is referenced by the input file, then include
+      # automatic versioning capability.
+      my ($out_file, $vtexi) =
+	scan_texinfo_file ("$relative_dir/$texi")
+	or next;
+      # Directory of auxiliary files and build by-products used by texi2dvi
+      # and texi2pdf.
+      push @mostly_cleans, "$infobase.t2d";
+      push @mostly_cleans, "$infobase.t2p";
+
+      # If the Texinfo source is in a subdirectory, create the
+      # resulting info in this subdirectory.  If it is in the current
+      # directory, try hard to not prefix "./" because it breaks the
+      # generic rules.
+      my $outdir = dirname ($texi) . '/';
+      $outdir = "" if $outdir eq './';
+      $out_file =  $outdir . $out_file;
+
+      # Until Automake 1.6.3, .info files were built in the
+      # source tree.  This was an obstacle to the support of
+      # non-distributed .info files, and non-distributed .texi
+      # files.
+      #
+      # * Non-distributed .texi files are important in some packages
+      #   where .texi files are built at make time, probably using
+      #   other binaries built in the package itself, maybe using
+      #   tools or information found on the build host.  Because
+      #   these files are not distributed they are always rebuilt
+      #   at make time; they should therefore not lie in the source
+      #   directory.  One plan was to support this using
+      #   nodist_info_TEXINFOS or something similar.  (Doing this
+      #   requires some sanity checks.  For instance Automake should
+      #   not allow:
+      #      dist_info_TEXINFOS = foo.texi
+      #      nodist_foo_TEXINFOS = included.texi
+      #   because a distributed file should never depend on a
+      #   non-distributed file.)
+      #
+      # * If .texi files are not distributed, then .info files should
+      #   not be distributed either.  There are also cases where one
+      #   wants to distribute .texi files, but does not want to
+      #   distribute the .info files.  For instance the Texinfo package
+      #   distributes the tool used to build these files; it would
+      #   be a waste of space to distribute them.  It's not clear
+      #   which syntax we should use to indicate that .info files should
+      #   not be distributed.  Akim Demaille suggested that eventually
+      #   we switch to a new syntax:
+      #   |  Maybe we should take some inspiration from what's already
+      #   |  done in the rest of Automake.  Maybe there is too much
+      #   |  syntactic sugar here, and you want
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  with a bit of magic to have bar.info represent the whole
+      #   |  bar*info set.  That's a lot more verbose than the current
+      #   |  situation, but it is not new, hence the user has less
+      #   |  to learn.
+      #	  |
+      #   |  But there is still too much room for meaningless specs:
+      #   |     nodist_INFO = bar.info
+      #   |     dist_bar_info_SOURCES = bar.texi
+      #   |     dist_PS = bar.ps something-written-by-hand.ps
+      #   |     nodist_bar_ps_SOURCES = bar.texi
+      #   |     bar_texi_DEPENDENCIES = foo.texi
+      #   |  here bar.texi is dist_ in line 2, and nodist_ in 4.
+      #
+      # Back to the point, it should be clear that in order to support
+      # non-distributed .info files, we need to build them in the
+      # build tree, not in the source tree (non-distributed .texi
+      # files are less of a problem, because we do not output build
+      # rules for them).  In Automake 1.7 .info build rules have been
+      # largely cleaned up so that .info files are always built in the
+      # build tree, even when distributed.  The idea was that
+      #   (1) if during a VPATH build the .info file was found to be
+      #       absent or out-of-date (in the source tree or in the
+      #       build tree), Make would rebuild it in the build tree.
+      #       If an up-to-date source-tree copy of the .info file existed,
+      #       make would not rebuild it in the build tree.
+      #   (2) having two copies of .info files, one in the source tree
+      #       and one (newer) in the build tree is not a problem
+      #       because 'make dist' always picks files in the build tree
+      #       first.
+      # However it turned out to be a bad idea for several reasons:
+      #   * Tru64, OpenBSD, and FreeBSD (not NetBSD) Make do not behave
+      #     like GNU Make on point (1) above.  These implementations
+      #     of Make would always rebuild .info files in the build
+      #     tree, even if such files were up to date in the source
+      #     tree.  Consequently, it was impossible to perform a VPATH
+      #     build of a package containing Texinfo files using these
+      #     Make implementations.
+      #     (Refer to the Autoconf Manual, section "Limitation of
+      #     Make", paragraph "VPATH", item "target lookup", for
+      #     an account of the differences between these
+      #     implementations.)
+      #   * The GNU Coding Standards require these files to be built
+      #     in the source-tree (when they are distributed, that is).
+      #   * Keeping a fresher copy of distributed files in the
+      #     build tree can be annoying during development because
+      #     - if the file is kept under CVS, you really want it
+      #       to be updated in the source tree
+      #     - it is confusing that 'make distclean' does not erase
+      #       all files in the build tree.
+      #
+      # Consequently, starting with Automake 1.8, .info files are
+      # built in the source tree again.  Because we still plan to
+      # support non-distributed .info files at some point, we
+      # have a single variable ($INSRC) that controls whether
+      # the current .info file must be built in the source tree
+      # or in the build tree.  Actually this variable is switched
+      # off in two cases:
+      #  (1) For '.info' files that appear to be cleaned; this is for
+      #      backward compatibility with packages such as Texinfo,
+      #      which do things like
+      #        info_TEXINFOS = texinfo.txi info-stnd.texi info.texi
+      #        DISTCLEANFILES = texinfo texinfo-* info*.info*
+      #        # Do not create info files for distribution.
+      #        dist-info:
+      #      in order not to distribute .info files.
+      #  (2) When the undocumented option 'info-in-builddir' is given.
+      #      This is done to allow the developers of GCC, GDB, GNU
+      #      binutils and the GNU bfd library to force the '.info' files
+      #      to be generated in the builddir rather than the srcdir, as
+      #      was once done when the (now removed) 'cygnus' option was
+      #      given.  See automake bug#11034 for more discussion.
+      my $insrc = 1;
+      my $soutdir = '$(srcdir)/' . $outdir;
+
+      if (option 'info-in-builddir')
+        {
+          $insrc = 0;
+        }
+      elsif ($out_file =~ $user_cleaned_files)
+        {
+          $insrc = 0;
+          msg 'obsolete', "$am_file.am", <<EOF;
+Oops!
+    It appears this file (or files included by it) are triggering
+    an undocumented, soon-to-be-removed automake hack.
+    Future automake versions will no longer place in the builddir
+    (rather than in the srcdir) the generated '.info' files that
+    appear to be cleaned, by e.g. being listed in CLEANFILES or
+    DISTCLEANFILES.
+    If you want your '.info' files to be placed in the builddir
+    rather than in the srcdir, you have to use the shiny new
+    'info-in-builddir' automake option.
+EOF
+        }
+
+      $outdir = $soutdir if $insrc;
+
+      # If user specified file_TEXINFOS, then use that as explicit
+      # dependency list.
+      @texi_deps = ();
+      push (@texi_deps, "${soutdir}${vtexi}") if $vtexi;
+
+      my $canonical = canonicalize ($infobase);
+      if (var ($canonical . "_TEXINFOS"))
+	{
+	  push (@texi_deps, '$(' . $canonical . '_TEXINFOS)');
+	  push_dist_common ('$(' . $canonical . '_TEXINFOS)');
+	}
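+      # Illustrative Makefile.am usage (hypothetical names):
+      #
+      #    info_TEXINFOS   = manual.texi
+      #    manual_TEXINFOS = chapters/intro.texi chapters/api.texi
+      #
+      # Here $canonical is 'manual', and the listed includes become both
+      # dependencies of the generated manual and distributed files.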
+
+      my ($dirstamp, @cfiles) =
+	output_texinfo_build_rules ($texi, $out_file, $insrc, @texi_deps);
+      push (@texi_cleans, @cfiles);
+
+      push (@info_deps_list, $out_file);
+
+      # If a vers*.texi file is needed, emit the rule.
+      if ($vtexi)
+	{
+	  err_am ("'$vtexi', included in '$texi', "
+		  . "also included in '$versions{$vtexi}'")
+	    if defined $versions{$vtexi};
+	  $versions{$vtexi} = $texi;
+
+	  # We number the stamp-vti files.  This is doable since the
+	  # actual names don't matter much.  We only number starting
+	  # with the second one, so that the common case looks nice.
+	  my $vti = ($done ? $done : 'vti');
+	  ++$done;
+
+	  # This is ugly, but it is our historical practice.
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					    'mdate-sh');
+	    }
+	  else
+	    {
+	      require_file_with_macro (TRUE, 'info_TEXINFOS',
+				       FOREIGN, 'mdate-sh');
+	    }
+
+	  my $conf_dir;
+	  if ($config_aux_dir_set_in_configure_ac)
+	    {
+	      $conf_dir = "$am_config_aux_dir/";
+	    }
+	  else
+	    {
+	      $conf_dir = '$(srcdir)/';
+	    }
+	  $output_rules .= file_contents ('texi-vers',
+					  new Automake::Location,
+					  TEXI     => $texi,
+					  VTI      => $vti,
+					  STAMPVTI => "${soutdir}stamp-$vti",
+					  VTEXI    => "$soutdir$vtexi",
+					  MDDIR    => $conf_dir,
+					  DIRSTAMP => $dirstamp);
+	}
+    }
+
+  # Handle location of texinfo.tex.
+  my $need_texi_file = 0;
+  my $texinfodir;
+  if (var ('TEXINFO_TEX'))
+    {
+      # The user defined TEXINFO_TEX so assume he knows what he is
+      # doing.
+      $texinfodir = ('$(srcdir)/'
+		     . dirname (variable_value ('TEXINFO_TEX')));
+    }
+  elsif ($config_aux_dir_set_in_configure_ac)
+    {
+      $texinfodir = $am_config_aux_dir;
+      define_variable ('TEXINFO_TEX', "$texinfodir/texinfo.tex", INTERNAL);
+      $need_texi_file = 2; # so that we require_conf_file later
+    }
+  else
+    {
+      $texinfodir = '$(srcdir)';
+      $need_texi_file = 1;
+    }
+  define_variable ('am__TEXINFO_TEX_DIR', $texinfodir, INTERNAL);
+
+  push (@dist_targets, 'dist-info');
+
+  if (! option 'no-installinfo')
+    {
+      # Make sure documentation is made and installed first.  Use
+      # $(INFO_DEPS), not 'info', because otherwise recursive makes
+      # get run twice during "make all".
+      unshift (@all, '$(INFO_DEPS)');
+    }
+
+  define_files_variable ("DVIS", @infobase, 'dvi', INTERNAL);
+  define_files_variable ("PDFS", @infobase, 'pdf', INTERNAL);
+  define_files_variable ("PSS", @infobase, 'ps', INTERNAL);
+  define_files_variable ("HTMLS", @infobase, 'html', INTERNAL);
+
+  # This next isn't strictly needed now -- the places that look here
+  # could easily be changed to look in info_TEXINFOS.  But this is
+  # probably better, in case noinst_TEXINFOS is ever supported.
+  define_variable ("TEXINFOS", variable_value ('info_TEXINFOS'), INTERNAL);
+
+  # Do some error checking.  Note that this file is not required
+  # when in Cygnus mode; instead we defined TEXINFO_TEX explicitly
+  # up above.
+  if ($need_texi_file && ! option 'no-texinfo.tex')
+    {
+      if ($need_texi_file > 1)
+	{
+	  require_conf_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+					'texinfo.tex');
+	}
+      else
+	{
+	  require_file_with_macro (TRUE, 'info_TEXINFOS', FOREIGN,
+				   'texinfo.tex');
+	}
+    }
+
+  return (makefile_wrap ("", "\t  ", @mostly_cleans),
+	  makefile_wrap ("", "\t  ", @texi_cleans),
+	  makefile_wrap ("", "\t  ", @maint_cleans));
+}
+
+
+sub handle_texinfo ()
+{
+  reject_var 'TEXINFOS', "'TEXINFOS' is an anachronism; use 'info_TEXINFOS'";
+  # FIXME: I think this is an obsolete future feature name.
+  reject_var 'html_TEXINFOS', "HTML generation not yet supported";
+
+  my $info_texinfos = var ('info_TEXINFOS');
+  my ($mostlyclean, $clean, $maintclean) = ('', '', '');
+  if ($info_texinfos)
+    {
+      define_verbose_texinfo;
+      ($mostlyclean, $clean, $maintclean) = handle_texinfo_helper ($info_texinfos);
+      chomp $mostlyclean;
+      chomp $clean;
+      chomp $maintclean;
+    }
+
+  $output_rules .=  file_contents ('texinfos',
+				   new Automake::Location,
+                                   AM_V_DVIPS    => verbose_flag('DVIPS'),
+				   MOSTLYCLEAN   => $mostlyclean,
+				   TEXICLEAN     => $clean,
+				   MAINTCLEAN    => $maintclean,
+				   'LOCAL-TEXIS' => !!$info_texinfos,
+                                   TEXIQUIET     => verbose_flag('texinfo'));
+}
+
+
+sub handle_man_pages ()
+{
+  reject_var 'MANS', "'MANS' is an anachronism; use 'man_MANS'";
+
+  # Find all the sections in use.  We do this by first looking for
+  # "standard" sections, and then looking for any additional
+  # sections used in man_MANS.
+  my (%sections, %notrans_sections, %trans_sections,
+      %notrans_vars, %trans_vars, %notrans_sect_vars, %trans_sect_vars);
+  # We handle nodist_ for uniformity.  man pages aren't distributed
+  # by default so it isn't actually very important.
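+  #
+  # Illustrative Makefile.am fragment (hypothetical names):
+  #
+  #    man_MANS          = foo.1 baz.8
+  #    notrans_man3_MANS = lib-internal.3
+  #    dist_man1_MANS    = extra.1
+  #
+  # Sections 1, 3 and 8 get recorded below; the notrans_ page is exempt
+  # from $(transform) renaming at install time, and the dist_ page is
+  # added to the distribution.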
+  foreach my $npfx ('', 'notrans_')
+    {
+      foreach my $pfx ('', 'dist_', 'nodist_')
+	{
+	  # Add more sections as needed.
+	  foreach my $section ('0'..'9', 'n', 'l')
+	    {
+	      my $varname = $npfx . $pfx . 'man' . $section . '_MANS';
+	      if (var ($varname))
+		{
+		  $sections{$section} = 1;
+		  $varname = '$(' . $varname . ')';
+		  if ($npfx eq 'notrans_')
+		    {
+		      $notrans_sections{$section} = 1;
+		      $notrans_sect_vars{$varname} = 1;
+		    }
+		  else
+		    {
+		      $trans_sections{$section} = 1;
+		      $trans_sect_vars{$varname} = 1;
+		    }
+
+		  push_dist_common ($varname)
+		    if $pfx eq 'dist_';
+		}
+	    }
+
+	  my $varname = $npfx . $pfx . 'man_MANS';
+	  my $var = var ($varname);
+	  if ($var)
+	    {
+	      foreach ($var->value_as_list_recursive)
+		{
+		  # A page like 'foo.1c' goes into man1dir.
+		  if (/\.([0-9a-z])([a-z]*)$/)
+		    {
+		      $sections{$1} = 1;
+		      if ($npfx eq 'notrans_')
+			{
+			  $notrans_sections{$1} = 1;
+			}
+		      else
+			{
+			  $trans_sections{$1} = 1;
+			}
+		    }
+		}
+
+	      $varname = '$(' . $varname . ')';
+	      if ($npfx eq 'notrans_')
+		{
+		  $notrans_vars{$varname} = 1;
+		}
+	      else
+		{
+		  $trans_vars{$varname} = 1;
+		}
+	      push_dist_common ($varname)
+		if $pfx eq 'dist_';
+	    }
+	}
+    }
+
+  return unless %sections;
+
+  my @unsorted_deps;
+
+  # Build section independent variables.
+  my $have_notrans = %notrans_vars;
+  my @notrans_list = sort keys %notrans_vars;
+  my $have_trans = %trans_vars;
+  my @trans_list = sort keys %trans_vars;
+
+  # Now for each section, generate an install and uninstall rule.
+  # Sort sections so output is deterministic.
+  foreach my $section (sort keys %sections)
+    {
+      # Build section dependent variables.
+      my $notrans_mans = $have_notrans || exists $notrans_sections{$section};
+      my $trans_mans = $have_trans || exists $trans_sections{$section};
+      my (%notrans_this_sect, %trans_this_sect);
+      my $expr = 'man' . $section . '_MANS';
+      foreach my $varname (keys %notrans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $notrans_this_sect{$varname} = 1;
+	    }
+	}
+      foreach my $varname (keys %trans_sect_vars)
+	{
+	  if ($varname =~ /$expr/)
+	    {
+	      $trans_this_sect{$varname} = 1;
+	    }
+	}
+      my @notrans_sect_list = sort keys %notrans_this_sect;
+      my @trans_sect_list = sort keys %trans_this_sect;
+      @unsorted_deps = (keys %notrans_vars, keys %trans_vars,
+                        keys %notrans_this_sect, keys %trans_this_sect);
+      my @deps = sort @unsorted_deps;
+      $output_rules .= file_contents ('mans',
+                                      new Automake::Location,
+                                      SECTION           => $section,
+                                      DEPS              => "@deps",
+                                      NOTRANS_MANS      => $notrans_mans,
+                                      NOTRANS_SECT_LIST => "@notrans_sect_list",
+                                      HAVE_NOTRANS      => $have_notrans,
+                                      NOTRANS_LIST      => "@notrans_list",
+                                      TRANS_MANS        => $trans_mans,
+                                      TRANS_SECT_LIST   => "@trans_sect_list",
+                                      HAVE_TRANS        => $have_trans,
+                                      TRANS_LIST        => "@trans_list");
+    }
+
+  @unsorted_deps  = (keys %notrans_vars, keys %trans_vars,
+                     keys %notrans_sect_vars, keys %trans_sect_vars);
+  my @mans = sort @unsorted_deps;
+  $output_vars .= file_contents ('mans-vars',
+				 new Automake::Location,
+				 MANS => "@mans");
+
+  push (@all, '$(MANS)')
+    unless option 'no-installman';
+}
+
+
+sub handle_data ()
+{
+    am_install_var ('-noextra', '-candist', 'data', 'DATA',
+                    'data', 'dataroot', 'doc', 'dvi', 'html', 'pdf',
+                    'ps', 'sysconf', 'sharedstate', 'localstate',
+                    'pkgdata', 'lisp', 'noinst', 'check');
+}
+
+
+sub handle_tags ()
+{
+    my @config;
+    foreach my $spec (@config_headers)
+      {
+        my ($out, @ins) = split_config_file_spec ($spec);
+	foreach my $in (@ins)
+	  {
+            # If the config header source is in this directory,
+	    # require it.
+	    push @config, basename ($in)
+              if $relative_dir eq dirname ($in);
+	   }
+      }
+
+    define_variable ('am__tagged_files',
+                     '$(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)'
+                     . "@config", INTERNAL);
+
+    if (rvar('am__tagged_files')->value_as_list_recursive
+          || var ('ETAGS_ARGS') || var ('SUBDIRS'))
+      {
+	$output_rules .= file_contents ('tags', new Automake::Location);
+	set_seen 'TAGS_DEPENDENCIES';
+      }
+    else
+      {
+        reject_var ('TAGS_DEPENDENCIES',
+		    "it doesn't make sense to define 'TAGS_DEPENDENCIES'"
+		    . " without\nsources or 'ETAGS_ARGS'");
+	# Every Makefile must define some sort of TAGS rule.
+	# Otherwise, it would be possible for a top-level "make TAGS"
+	# to fail because some subdirectory failed.  Ditto ctags and
+        # cscope.
+        $output_rules .=
+          "tags TAGS:\n\n" .
+          "ctags CTAGS:\n\n" .
+          "cscope cscopelist:\n\n";
+      }
+}
+
+
+# user_phony_rule ($NAME)
+# -----------------------
+# Return false if rule $NAME does not exist.  Otherwise,
+# declare it as phony, complete its definition (in case it is
+# conditional), and return its Automake::Rule instance.
+sub user_phony_rule
+{
+  my ($name) = @_;
+  my $rule = rule $name;
+  if ($rule)
+    {
+      depend ('.PHONY', $name);
+      # Define $NAME in all conditions where it is not already defined,
+      # so that it is always OK to depend on $NAME.
+      for my $c ($rule->not_always_defined_in_cond (TRUE)->conds)
+	{
+	  Automake::Rule::define ($name, 'internal', RULE_AUTOMAKE,
+				  $c, INTERNAL);
+	  $output_rules .= $c->subst_string . "$name:\n";
+	}
+    }
+  return $rule;
+}
+
+
+# Handle 'dist' target.
+sub handle_dist ()
+{
+  # Substitutions for distdir.am
+  my %transform;
+
+  # Define DIST_SUBDIRS.  This must always be done, regardless of the
+  # no-dist setting: targets like 'distclean' or 'maintainer-clean' use it.
+  my $subdirs = var ('SUBDIRS');
+  if ($subdirs)
+    {
+      # If SUBDIRS is conditionally defined, then set DIST_SUBDIRS
+      # to all possible directories, and use it.  If DIST_SUBDIRS is
+      # defined, just use it.
+
+      # Note that we check DIST_SUBDIRS first on purpose, so that
+      # we don't call has_conditional_contents for no reason.
+      # (In the past one project used so many conditional subdirectories
+      # that calling has_conditional_contents on SUBDIRS caused
+      # automake to grow to 150Mb -- this should not happen with
+      # the current implementation of has_conditional_contents,
+      # but it's more efficient to avoid the call anyway.)
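+      #
+      # Illustrative Makefile.am fragment (hypothetical names):
+      #
+      #    if WANT_GTK
+      #    MAYBE_GTK = gtk
+      #    endif
+      #    SUBDIRS = lib $(MAYBE_GTK)
+      #
+      # With no explicit DIST_SUBDIRS, the conditional branch below defines
+      # 'DIST_SUBDIRS = lib gtk' so that 'make distclean' still recurses
+      # into 'gtk' even when WANT_GTK is false.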
+      if (var ('DIST_SUBDIRS'))
+	{
+	}
+      elsif ($subdirs->has_conditional_contents)
+	{
+	  define_pretty_variable
+	    ('DIST_SUBDIRS', TRUE, INTERNAL,
+	     uniq ($subdirs->value_as_list_recursive));
+	}
+      else
+	{
+	  # We always define this because that is what 'distclean'
+	  # wants.
+	  define_pretty_variable ('DIST_SUBDIRS', TRUE, INTERNAL,
+				  '$(SUBDIRS)');
+	}
+    }
+
+  # The remaining definitions are only required when a dist target is used.
+  return if option 'no-dist';
+
+  # At least one of the archive formats must be enabled.
+  if ($relative_dir eq '.')
+    {
+      my $archive_defined = option 'no-dist-gzip' ? 0 : 1;
+      $archive_defined ||=
+	grep { option "dist-$_" } qw(shar zip tarZ bzip2 lzip xz);
+      error (option 'no-dist-gzip',
+	     "no-dist-gzip specified but no dist-* specified,\n"
+	     . "at least one archive format must be enabled")
+	unless $archive_defined;
+    }
+
+  # Look for common files that should be included in distribution.
+  # If the aux dir is set, and it does not have a Makefile.am, then
+  # we check for these files there as well.
+  my $check_aux = 0;
+  if ($relative_dir eq '.'
+      && $config_aux_dir_set_in_configure_ac)
+    {
+      if (! is_make_dir ($config_aux_dir))
+	{
+	  $check_aux = 1;
+	}
+    }
+  foreach my $cfile (@common_files)
+    {
+      if (dir_has_case_matching_file ($relative_dir, $cfile)
+	  # The file might be absent, but if it can be built it's ok.
+	  || rule $cfile)
+	{
+	  push_dist_common ($cfile);
+	}
+
+      # Don't use 'elsif' here because a file might meaningfully
+      # appear in both directories.
+      if ($check_aux && dir_has_case_matching_file ($config_aux_dir, $cfile))
+	{
+	  push_dist_common ("$config_aux_dir/$cfile")
+	}
+    }
+
+  # We might copy elements from $configure_dist_common to
+  # %dist_common if we think we need to.  If the file appears in our
+  # directory, we would have discovered it already, so we don't
+  # check that.  But if the file is in a subdir without a Makefile,
+  # we want to distribute it here if we are doing '.'.  Ugly!
+  # Also, in some corner cases, it's possible that the following code
+  # will cause the same file to appear in the $(DIST_COMMON) variables
+  # of two distinct Makefiles; but this is not a problem, since the
+  # 'distdir' target in 'lib/am/distdir.am' can deal with the same
+  # file being distributed multiple times.
+  # See also automake bug#9651.
+  if ($relative_dir eq '.')
+    {
+      foreach my $file (split (' ' , $configure_dist_common))
+	{
+	  my $dir = dirname ($file);
+	  push_dist_common ($file)
+	    if ($dir eq '.' || ! is_make_dir ($dir));
+	}
+    }
+
+  # Files to be distributed.  Don't use ->value_as_list_recursive
+  # as it recursively expands '$(dist_pkgdata_DATA)' etc.
+  my @dist_common = split (' ', rvar ('DIST_COMMON')->variable_value);
+  @dist_common = uniq (@dist_common);
+  variable_delete 'DIST_COMMON';
+  define_pretty_variable ('DIST_COMMON', TRUE, INTERNAL, @dist_common);
+
+  # Now that we've processed DIST_COMMON, disallow further attempts
+  # to set it.
+  $handle_dist_run = 1;
+
+  $transform{'DISTCHECK-HOOK'} = !! rule 'distcheck-hook';
+  $transform{'GETTEXT'} = $seen_gettext && !$seen_gettext_external;
+
+  # If the target 'dist-hook' exists, make sure it is run.  This
+  # allows users to do random weird things to the distribution
+  # before it is packaged up.
+  push (@dist_targets, 'dist-hook')
+    if user_phony_rule 'dist-hook';
+  $transform{'DIST-TARGETS'} = join (' ', @dist_targets);
+
+  my $flm = option ('filename-length-max');
+  my $filename_filter = $flm ? '.' x $flm->[1] : '';
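+  # E.g. (illustrative): with the option 'filename-length-max=99' in
+  # effect, $filename_filter becomes a string of 99 dots, which the
+  # distdir rules use to flag distributed file names longer than that.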
+
+  $output_rules .= file_contents ('distdir',
+				  new Automake::Location,
+				  %transform,
+				  FILENAME_FILTER => $filename_filter);
+}
+
+
+# check_directory ($NAME, $WHERE [, $RELATIVE_DIR = "."])
+# -------------------------------------------------------
+# Ensure $NAME is a directory (in $RELATIVE_DIR), and that it uses a sane
+# name.  Use $WHERE as a location in the diagnostic, if any.
+sub check_directory
+{
+  my ($dir, $where, $reldir) = @_;
+  $reldir = '.' unless defined $reldir;
+
+  error $where, "required directory $reldir/$dir does not exist"
+    unless -d "$reldir/$dir";
+
+  # If an 'obj/' directory exists, BSD make will enter it before
+  # reading 'Makefile'.  Hence the 'Makefile' in the current directory
+  # will not be read.
+  #
+  #  % cat Makefile
+  #  all:
+  #          echo Hello
+  #  % cat obj/Makefile
+  #  all:
+  #          echo World
+  #  % make      # GNU make
+  #  echo Hello
+  #  Hello
+  #  % pmake     # BSD make
+  #  echo World
+  #  World
+  msg ('portability', $where,
+       "naming a subdirectory 'obj' causes troubles with BSD make")
+    if $dir eq 'obj';
+
+  # 'aux' is probably the most important of the following forbidden names,
+  # since it's tempting to use it as an AC_CONFIG_AUX_DIR.
+  msg ('portability', $where,
+       "name '$dir' is reserved on W32 and DOS platforms")
+    if grep (/^\Q$dir\E$/i, qw/aux lpt1 lpt2 lpt3 com1 com2 com3 com4 con prn/);
+}
+
+# check_directories_in_var ($VARIABLE)
+# ------------------------------------
+# Recursively check all items in variable $VARIABLE as directories.
+sub check_directories_in_var
+{
+  my ($var) = @_;
+  $var->traverse_recursively
+    (sub
+     {
+       my ($var, $val, $cond, $full_cond) = @_;
+       check_directory ($val, $var->rdef ($cond)->location, $relative_dir);
+       return ();
+     },
+     undef,
+     skip_ac_subst => 1);
+}
+
+
+sub handle_subdirs ()
+{
+  my $subdirs = var ('SUBDIRS');
+  return
+    unless $subdirs;
+
+  check_directories_in_var $subdirs;
+
+  my $dsubdirs = var ('DIST_SUBDIRS');
+  check_directories_in_var $dsubdirs
+    if $dsubdirs;
+
+  $output_rules .= file_contents ('subdirs', new Automake::Location);
+  rvar ('RECURSIVE_TARGETS')->rdef (TRUE)->{'pretty'} = VAR_SORTED; # Gross!
+}
+
+
+# ($REGEN, @DEPENDENCIES)
+# scan_aclocal_m4
+# ---------------
+# If aclocal.m4 creation is automated, return the list of its dependencies.
+sub scan_aclocal_m4 ()
+{
+  my $regen_aclocal = 0;
+
+  set_seen 'CONFIG_STATUS_DEPENDENCIES';
+  set_seen 'CONFIGURE_DEPENDENCIES';
+
+  if (-f 'aclocal.m4')
+    {
+      define_variable ("ACLOCAL_M4", '$(top_srcdir)/aclocal.m4', INTERNAL);
+
+      my $aclocal = new Automake::XFile "< aclocal.m4";
+      my $line = $aclocal->getline;
+      $regen_aclocal = $line =~ 'generated automatically by aclocal';
+    }
+
+  my @ac_deps = ();
+
+  if (set_seen ('ACLOCAL_M4_SOURCES'))
+    {
+      push (@ac_deps, '$(ACLOCAL_M4_SOURCES)');
+      msg_var ('obsolete', 'ACLOCAL_M4_SOURCES',
+	       "'ACLOCAL_M4_SOURCES' is obsolete.\n"
+	       . "It should be safe to simply remove it");
+    }
+
+  # Note that it might be possible that aclocal.m4 doesn't exist but
+  # should be auto-generated.  This case probably isn't very
+  # important.
+
+  return ($regen_aclocal, @ac_deps);
+}
+
+
+# Helper function for 'substitute_ac_subst_variables'.
+sub substitute_ac_subst_variables_worker
+{
+  my ($token) = @_;
+  return "\@$token\@" if var $token;
+  return "\${$token\}";
+}
+
+# substitute_ac_subst_variables ($TEXT)
+# -------------------------------------
+# Replace any occurrence of ${FOO} in $TEXT by @FOO@ if FOO is an AC_SUBST
+# variable.
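+#
+# For example (illustrative): with 'bindir' known to Automake,
+# '${bindir}/foo ${SOME_SHELL_VAR}' becomes '@bindir@/foo ${SOME_SHELL_VAR}';
+# only variables Automake knows about are rewritten.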
+sub substitute_ac_subst_variables
+{
+  my ($text) = @_;
+  $text =~ s/\${([^ \t=:+{}]+)}/substitute_ac_subst_variables_worker ($1)/ge;
+  return $text;
+}
+
+# @DEPENDENCIES
+# prepend_srcdir (@INPUTS)
+# ------------------------
+# Prepend $(srcdir) or $(top_srcdir) to all @INPUTS.  The idea is that
+# if an input file has a directory part the same as the current
+# directory, then the directory part is simply replaced by $(srcdir).
+# But if the directory part is different, then $(top_srcdir) is
+# prepended.
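+#
+# For example (illustrative): while processing 'sub/Makefile.am',
+# 'sub/foo.in' becomes '$(srcdir)/foo.in', whereas 'lib/bar.in' becomes
+# '$(top_srcdir)/lib/bar.in'.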
+sub prepend_srcdir
+{
+  my (@inputs) = @_;
+  my @newinputs;
+
+  foreach my $single (@inputs)
+    {
+      if (dirname ($single) eq $relative_dir)
+	{
+	  push (@newinputs, '$(srcdir)/' . basename ($single));
+	}
+      else
+	{
+	  push (@newinputs, '$(top_srcdir)/' . $single);
+	}
+    }
+  return @newinputs;
+}
+
+# @DEPENDENCIES
+# rewrite_inputs_into_dependencies ($OUTPUT, @INPUTS)
+# ---------------------------------------------------
+# Compute a list of dependencies appropriate for the rebuild
+# rule of
+#   AC_CONFIG_FILES($OUTPUT:$INPUTS[0]:$INPUTS[1]:...)
+# Also distribute $INPUTs which are not built by another AC_CONFIG_FOO.
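+#
+# Illustrative example (hypothetical names): for
+#    AC_CONFIG_FILES([sub/foo:sub/foo.top:sub/foo.bot])
+# processed while generating 'sub/Makefile', the inputs are rewritten to
+# '$(srcdir)/foo.top' and '$(srcdir)/foo.bot' and both are distributed;
+# an input that is itself a config output would instead be referenced
+# via '$(top_builddir)' and not distributed here.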
+sub rewrite_inputs_into_dependencies
+{
+  my ($file, @inputs) = @_;
+  my @res = ();
+
+  for my $i (@inputs)
+    {
+      # We cannot create dependencies on shell variables.
+      next if (substitute_ac_subst_variables $i) =~ /\$/;
+
+      if (exists $ac_config_files_location{$i} && $i ne $file)
+	{
+	  my $di = dirname $i;
+	  if ($di eq $relative_dir)
+	    {
+	      $i = basename $i;
+	    }
+	  # In the top-level Makefile we do not use $(top_builddir), because
+	  # we are already there, and since the targets are built without
+	  # a $(top_builddir), it helps BSD Make to match them with
+	  # dependencies.
+	  elsif ($relative_dir ne '.')
+	    {
+	      $i = '$(top_builddir)/' . $i;
+	    }
+	}
+      else
+	{
+	  msg ('error', $ac_config_files_location{$file},
+	       "required file '$i' not found")
+	    unless $i =~ /\$/ || exists $output_files{$i} || -f $i;
+	  ($i) = prepend_srcdir ($i);
+	  push_dist_common ($i);
+	}
+      push @res, $i;
+    }
+  return @res;
+}
+
+
+
+# handle_configure ($MAKEFILE_AM, $MAKEFILE_IN, $MAKEFILE, @INPUTS)
+# -----------------------------------------------------------------
+# Handle remaking and configure stuff.
+# We need the name of the input file, to do proper remaking rules.
+sub handle_configure
+{
+  my ($makefile_am, $makefile_in, $makefile, @inputs) = @_;
+
+  prog_error 'empty @inputs'
+    unless @inputs;
+
+  my ($rel_makefile_am, $rel_makefile_in) = prepend_srcdir ($makefile_am,
+							    $makefile_in);
+  my $rel_makefile = basename $makefile;
+
+  my $colon_infile = ':' . join (':', @inputs);
+  $colon_infile = '' if $colon_infile eq ":$makefile.in";
+  my @rewritten = rewrite_inputs_into_dependencies ($makefile, @inputs);
+  my ($regen_aclocal_m4, @aclocal_m4_deps) = scan_aclocal_m4;
+  define_pretty_variable ('am__aclocal_m4_deps', TRUE, INTERNAL,
+			  @configure_deps, @aclocal_m4_deps,
+			  '$(top_srcdir)/' . $configure_ac);
+  my @configuredeps = ('$(am__aclocal_m4_deps)', '$(CONFIGURE_DEPENDENCIES)');
+  push @configuredeps, '$(ACLOCAL_M4)' if -f 'aclocal.m4';
+  define_pretty_variable ('am__configure_deps', TRUE, INTERNAL,
+			  @configuredeps);
+
+  my $automake_options = '--' . $strictness_name .
+			 (global_option 'no-dependencies' ? ' --ignore-deps' : '');
+
+  $output_rules .= file_contents
+    ('configure',
+     new Automake::Location,
+     MAKEFILE              => $rel_makefile,
+     'MAKEFILE-DEPS'       => "@rewritten",
+     'CONFIG-MAKEFILE'     => ($relative_dir eq '.') ? '$@' : '$(subdir)/$@',
+     'MAKEFILE-IN'         => $rel_makefile_in,
+     'HAVE-MAKEFILE-IN-DEPS' => (@include_stack > 0),
+     'MAKEFILE-IN-DEPS'    => "@include_stack",
+     'MAKEFILE-AM'         => $rel_makefile_am,
+     'AUTOMAKE-OPTIONS'    => $automake_options,
+     'MAKEFILE-AM-SOURCES' => "$makefile$colon_infile",
+     'REGEN-ACLOCAL-M4'    => $regen_aclocal_m4,
+     VERBOSE               => verbose_flag ('GEN'));
+
+  if ($relative_dir eq '.')
+    {
+      push_dist_common ('acconfig.h')
+	if -f 'acconfig.h';
+    }
+
+  # If we have a configure header, require it.
+  my $hdr_index = 0;
+  my @distclean_config;
+  foreach my $spec (@config_headers)
+    {
+      $hdr_index += 1;
+      # $CONFIG_H_PATH: config.h from top level.
+      my ($config_h_path, @ins) = split_config_file_spec ($spec);
+      my $config_h_dir = dirname ($config_h_path);
+
+      # If the header is in the current directory we want to build
+      # the header here.  Otherwise, if we're at the topmost
+      # directory and the header's directory doesn't have a
+      # Makefile, then we also want to build the header.
+      if ($relative_dir eq $config_h_dir
+	  || ($relative_dir eq '.' && ! is_make_dir ($config_h_dir)))
+	{
+	  my ($cn_sans_dir, $stamp_dir);
+	  if ($relative_dir eq $config_h_dir)
+	    {
+	      $cn_sans_dir = basename ($config_h_path);
+	      $stamp_dir = '';
+	    }
+	  else
+	    {
+	      $cn_sans_dir = $config_h_path;
+	      if ($config_h_dir eq '.')
+		{
+		  $stamp_dir = '';
+		}
+	      else
+		{
+		  $stamp_dir = $config_h_dir . '/';
+		}
+	    }
+
+	  # This will also distribute all inputs.
+	  @ins = rewrite_inputs_into_dependencies ($config_h_path, @ins);
+
+	  # Cannot define rebuild rules for filenames with shell variables.
+	  next if (substitute_ac_subst_variables $config_h_path) =~ /\$/;
+
+	  # Header defined in this directory.
+	  my @files;
+	  if (-f $config_h_path . '.top')
+	    {
+	      push (@files, "$cn_sans_dir.top");
+	    }
+	  if (-f $config_h_path . '.bot')
+	    {
+	      push (@files, "$cn_sans_dir.bot");
+	    }
+
+	  push_dist_common (@files);
+
+	  # For now, acconfig.h can only appear in the top srcdir.
+	  if (-f 'acconfig.h')
+	    {
+	      push (@files, '$(top_srcdir)/acconfig.h');
+	    }
+
+	  my $stamp = "${stamp_dir}stamp-h${hdr_index}";
+	  $output_rules .=
+	    file_contents ('remake-hdr',
+			   new Automake::Location,
+			   FILES            => "@files",
+			   'FIRST-HDR'      => ($hdr_index == 1),
+			   CONFIG_H         => $cn_sans_dir,
+			   CONFIG_HIN       => $ins[0],
+			   CONFIG_H_DEPS    => "@ins",
+			   CONFIG_H_PATH    => $config_h_path,
+			   STAMP            => "$stamp");
+
+	  push @distclean_config, $cn_sans_dir, $stamp;
+	}
+    }
+
+  $output_rules .= file_contents ('clean-hdr',
+				  new Automake::Location,
+				  FILES => "@distclean_config")
+    if @distclean_config;
+
+  # Distribute and define mkinstalldirs only if it is already present
+  # in the package, for backward compatibility (some people may still
+  # use $(mkinstalldirs)).
+  # TODO: start warning about this in Automake 1.14, and have
+  # TODO: Automake 2.0 drop it (and the mkinstalldirs script
+  # TODO: as well).
+  my $mkidpath = "$config_aux_dir/mkinstalldirs";
+  if (-f $mkidpath)
+    {
+      # Use require_file so that any existing script gets updated
+      # by --force-missing.
+      require_conf_file ($mkidpath, FOREIGN, 'mkinstalldirs');
+      define_variable ('mkinstalldirs',
+		       "\$(SHELL) $am_config_aux_dir/mkinstalldirs", INTERNAL);
+    }
+  else
+    {
+      # Use $(install_sh), not $(MKDIR_P) because the latter requires
+      # at least one argument, and $(mkinstalldirs) used to work
+      # even without arguments (e.g. $(mkinstalldirs) $(conditional_dir)).
+      define_variable ('mkinstalldirs', '$(install_sh) -d', INTERNAL);
+    }
+
+  reject_var ('CONFIG_HEADER',
+	      "'CONFIG_HEADER' is an anachronism; now determined "
+	      . "automatically\nfrom '$configure_ac'");
+
+  my @config_h;
+  foreach my $spec (@config_headers)
+    {
+      my ($out, @ins) = split_config_file_spec ($spec);
+      # Generate CONFIG_HEADER define.
+      if ($relative_dir eq dirname ($out))
+	{
+	  push @config_h, basename ($out);
+	}
+      else
+	{
+	  push @config_h, "\$(top_builddir)/$out";
+	}
+    }
+  define_variable ("CONFIG_HEADER", "@config_h", INTERNAL)
+    if @config_h;
+
+  # Now look for other files in this directory which must be remade
+  # by config.status, and generate rules for them.
+  my @actual_other_files = ();
+  # These get cleaned only in a VPATH build.
+  my @actual_other_vpath_files = ();
+  foreach my $lfile (@other_input_files)
+    {
+      my $file;
+      my @inputs;
+      if ($lfile =~ /^([^:]*):(.*)$/)
+	{
+	  # This is the ":" syntax of AC_OUTPUT.
+	  $file = $1;
+	  @inputs = split (':', $2);
+	}
+      else
+	{
+	  # Normal usage.
+	  $file = $lfile;
+	  @inputs = $file . '.in';
+	}
+
+      # Automake files should not be stored in here, but in %MAKE_LIST.
+      prog_error ("$lfile in \@other_input_files\n"
+		  . "\@other_input_files = (@other_input_files)")
+	if -f $file . '.am';
+
+      my $local = basename ($file);
+
+      # We skip files that aren't in this directory.  However, if
+      # the file's directory does not have a Makefile, and we are
+      # currently doing '.', then we create a rule to rebuild the
+      # file in the subdir.
+      my $fd = dirname ($file);
+      if ($fd ne $relative_dir)
+	{
+	  if ($relative_dir eq '.' && ! is_make_dir ($fd))
+	    {
+	      $local = $file;
+	    }
+	  else
+	    {
+	      next;
+	    }
+	}
+
+      my @rewritten_inputs = rewrite_inputs_into_dependencies ($file, @inputs);
+
+      # Cannot output rules for shell variables.
+      next if (substitute_ac_subst_variables $local) =~ /\$/;
+
+      my $condstr = '';
+      my $cond = $ac_config_files_condition{$lfile};
+      if (defined $cond)
+        {
+	  $condstr = $cond->subst_string;
+	  Automake::Rule::define ($local, $configure_ac, RULE_AUTOMAKE, $cond,
+				  $ac_config_files_location{$file});
+        }
+      $output_rules .= ($condstr . $local . ': '
+			. '$(top_builddir)/config.status '
+			. "@rewritten_inputs\n"
+			. $condstr . "\t"
+			. 'cd $(top_builddir) && '
+			. '$(SHELL) ./config.status '
+			. ($relative_dir eq '.' ? '' : '$(subdir)/')
+			. '$@'
+			. "\n");
+      push (@actual_other_files, $local);
+    }
+
+  # For links we should clean destinations and distribute sources.
+  foreach my $spec (@config_links)
+    {
+      my ($link, $file) = split /:/, $spec;
+      # Some people do AC_CONFIG_LINKS($computed).  We only handle
+      # the DEST:SRC form.
+      next unless $file;
+      my $where = $ac_config_files_location{$link};
+
+      # Skip destinations that contain shell variables.
+      if ((substitute_ac_subst_variables $link) !~ /\$/)
+	{
+	  # We skip links that aren't in this directory.  However, if
+	  # the link's directory does not have a Makefile, and we are
+	  # currently doing '.', then we add the link to CONFIG_CLEAN_FILES
+	  # in '.'s Makefile.in.
+	  my $local = basename ($link);
+	  my $fd = dirname ($link);
+	  if ($fd ne $relative_dir)
+	    {
+	      if ($relative_dir eq '.' && ! is_make_dir ($fd))
+		{
+		  $local = $link;
+		}
+	      else
+		{
+		  $local = undef;
+		}
+	    }
+	  if ($file ne $link)
+	    {
+	      push @actual_other_files, $local if $local;
+	    }
+	  else
+	    {
+	      push @actual_other_vpath_files, $local if $local;
+	    }
+	}
+
+      # Do not process sources that contain shell variables.
+      if ((substitute_ac_subst_variables $file) !~ /\$/)
+	{
+	  my $fd = dirname ($file);
+
+	  # We distribute files that are in this directory.
+	  # At the top-level ('.') we also distribute files whose
+	  # directory does not have a Makefile.
+	  if (($fd eq $relative_dir)
+	      || ($relative_dir eq '.' && ! is_make_dir ($fd)))
+	    {
+	      # The following will distribute $file as a side-effect when
+	      # it is appropriate (i.e., when $file is not already an output).
+	      # We do not need the result, just the side-effect.
+	      rewrite_inputs_into_dependencies ($link, $file);
+	    }
+	}
+    }
+
+  # These files get removed by "make distclean".
+  define_pretty_variable ('CONFIG_CLEAN_FILES', TRUE, INTERNAL,
+			  @actual_other_files);
+  define_pretty_variable ('CONFIG_CLEAN_VPATH_FILES', TRUE, INTERNAL,
+			  @actual_other_vpath_files);
+}
+
+sub handle_headers ()
+{
+    my @r = am_install_var ('-defaultdist', 'header', 'HEADERS', 'include',
+			    'oldinclude', 'pkginclude',
+			    'noinst', 'check');
+    foreach (@r)
+    {
+      next unless $_->[1] =~ /\..*$/;
+      saw_extension ($&);
+    }
+}
+
+sub handle_gettext ()
+{
+  return if ! $seen_gettext || $relative_dir ne '.';
+
+  my $subdirs = var 'SUBDIRS';
+
+  if (! $subdirs)
+    {
+      err_ac "AM_GNU_GETTEXT used but SUBDIRS not defined";
+      return;
+    }
+
+  # Perform some sanity checks to help users get the right setup.
+  # We disable these tests when po/ doesn't exist in order not to disallow
+  # unusual gettext setups.
+  #
+  # Bruno Haible:
+  # | The idea is:
+  # |
+  # |  1) If a package doesn't have a directory po/ at top level, it
+  # |     will likely have multiple po/ directories in subpackages.
+  # |
+  # |  2) It is useful to warn for the absence of intl/ if AM_GNU_GETTEXT
+  # |     is used without 'external'. It is also useful to warn for the
+  # |     presence of intl/ if AM_GNU_GETTEXT([external]) is used. Both
+  # |     warnings apply only to the usual layout of packages, therefore
+  # |     they should both be disabled if no po/ directory is found at
+  # |     top level.
+
+  if (-d 'po')
+    {
+      my @subdirs = $subdirs->value_as_list_recursive;
+
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'po' not in SUBDIRS")
+	if ! grep ($_ eq 'po', @subdirs);
+
+      # intl/ is not required when AM_GNU_GETTEXT is called with the
+      # 'external' option and AM_GNU_GETTEXT_INTL_SUBDIR is not called.
+      msg_var ('syntax', $subdirs,
+	       "AM_GNU_GETTEXT used but 'intl' not in SUBDIRS")
+	if (! ($seen_gettext_external && ! $seen_gettext_intl)
+	    && ! grep ($_ eq 'intl', @subdirs));
+
+      # intl/ should not be used with AM_GNU_GETTEXT([external]), except
+      # if AM_GNU_GETTEXT_INTL_SUBDIR is called.
+      msg_var ('syntax', $subdirs,
+	       "'intl' should not be in SUBDIRS when "
+	       . "AM_GNU_GETTEXT([external]) is used")
+	if ($seen_gettext_external && ! $seen_gettext_intl
+	    && grep ($_ eq 'intl', @subdirs));
+    }
+
+  require_file ($ac_gettext_location, GNU, 'ABOUT-NLS');
+}
+
+# Emit makefile footer.
+sub handle_footer ()
+{
+    reject_rule ('.SUFFIXES',
+		 "use variable 'SUFFIXES', not target '.SUFFIXES'");
+
+    # Note: AIX 4.1 /bin/make will fail if any suffix rule appears
+    # before .SUFFIXES.  So we make sure that .SUFFIXES appears before
+    # anything else, by sticking it right after the default: target.
+    $output_header .= ".SUFFIXES:\n";
+    my $suffixes = var 'SUFFIXES';
+    my @suffixes = Automake::Rule::suffixes;
+    if (@suffixes || $suffixes)
+    {
+	# Make sure SUFFIXES has unique elements.  Sort them to ensure
+	# the output remains consistent.  However, $(SUFFIXES) is
+	# always at the start of the list, unsorted.  This is done
+	# because make will choose rules depending on the ordering of
+	# suffixes, and this lets the user have some control.  Push
+	# actual suffixes, and not $(SUFFIXES).  Some versions of make
+	# do not like variable substitutions on the .SUFFIXES line.
+	my @user_suffixes = ($suffixes
+			     ? $suffixes->value_as_list_recursive : ());
+
+	my %suffixes = map { $_ => 1 } @suffixes;
+	delete @suffixes{@user_suffixes};
+
+	$output_header .= (".SUFFIXES: "
+			   . join (' ', @user_suffixes, sort keys %suffixes)
+			   . "\n");
+    }
+
+    $output_trailer .= file_contents ('footer', new Automake::Location);
+}
+
+
+# Generate 'make install' rules.
+sub handle_install ()
+{
+  $output_rules .= file_contents
+    ('install',
+     new Automake::Location,
+     maybe_BUILT_SOURCES => (set_seen ('BUILT_SOURCES')
+			     ? (" \$(BUILT_SOURCES)\n"
+				. "\t\$(MAKE) \$(AM_MAKEFLAGS)")
+			     : ''),
+     'installdirs-local' => (user_phony_rule ('installdirs-local')
+			     ? ' installdirs-local' : ''),
+     am__installdirs => variable_value ('am__installdirs') || '');
+}
+
+
+# handle_all ($MAKEFILE)
+#-----------------------
+# Deal with 'all' and 'all-am'.
+sub handle_all
+{
+    my ($makefile) = @_;
+
+    # Output 'all-am'.
+
+    # Put this at the beginning for the sake of non-GNU makes.  This
+    # is still wrong if these makes can run parallel jobs.  But it is
+    # right enough.
+    unshift (@all, basename ($makefile));
+
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push (@all, basename ($out))
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    # Install 'all' hooks.
+    push (@all, "all-local")
+      if user_phony_rule "all-local";
+
+    pretty_print_rule ("all-am:", "\t\t", @all);
+    depend ('.PHONY', 'all-am', 'all');
+
+
+    # Output 'all'.
+
+    my @local_headers = ();
+    push @local_headers, '$(BUILT_SOURCES)'
+      if var ('BUILT_SOURCES');
+    foreach my $spec (@config_headers)
+      {
+	my ($out, @ins) = split_config_file_spec ($spec);
+	push @local_headers, basename ($out)
+	  if dirname ($out) eq $relative_dir;
+      }
+
+    if (@local_headers)
+      {
+	# We need to make sure config.h is built before we recurse.
+	# We also want to make sure that built sources are built
+	# before any ordinary 'all' targets are run.  We can't do this
+	# by changing the order of dependencies of the 'all' target because
+	# that breaks when using parallel makes.  Instead we handle
+	# things explicitly.
+	$output_all .= ("all: @local_headers"
+			. "\n\t"
+			. '$(MAKE) $(AM_MAKEFLAGS) '
+			. (var ('SUBDIRS') ? 'all-recursive' : 'all-am')
+			. "\n\n");
+        depend ('.MAKE', 'all');
+      }
+    else
+      {
+	$output_all .= "all: " . (var ('SUBDIRS')
+				  ? 'all-recursive' : 'all-am') . "\n\n";
+      }
+}
+
+# Generate helper targets for user-defined recursive targets, where needed.
+sub handle_user_recursion ()
+{
+  return unless @extra_recursive_targets;
+
+  define_pretty_variable ('am__extra_recursive_targets', TRUE, INTERNAL,
+                          map { "$_-recursive" } @extra_recursive_targets);
+  my $aux = var ('SUBDIRS') ? 'recursive' : 'am';
+  foreach my $target (@extra_recursive_targets)
+    {
+      # This allows the default target's rules to be overridden in
+      # Makefile.am.
+      user_phony_rule ($target);
+      depend ("$target", "$target-$aux");
+      depend ("$target-am", "$target-local");
+      # Every user-defined recursive target 'foo' *must* have a valid
+      # associated 'foo-local' rule; we define it as an empty rule by
+      # default, so that the user can transparently extend it in his
+      # own Makefile.am.
+      pretty_print_rule ("$target-local:", '', '');
+      # $target-recursive might as well be undefined, so do not add
+      # it here; it's taken care of in subdirs.am anyway.
+      depend (".PHONY", "$target-am", "$target-local");
+    }
+}
+
+
+# Handle check merge target specially.
+sub do_check_merge_target ()
+{
+  # Include user-defined local form of target.
+  push @check_tests, 'check-local'
+    if user_phony_rule 'check-local';
+
+  # The check target must depend on the local equivalent of
+  # 'all', to ensure all the primary targets are built.  Then it
+  # must build the local check rules.
+  $output_rules .= "check-am: all-am\n";
+  if (@check)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ", @check);
+      depend ('.MAKE', 'check-am');
+    }
+
+  if (@check_tests)
+    {
+      pretty_print_rule ("\t\$(MAKE) \$(AM_MAKEFLAGS)", "\t  ",
+			 @check_tests);
+      depend ('.MAKE', 'check-am');
+    }
+
+  depend '.PHONY', 'check', 'check-am';
+  # Handle recursion.  We have to honor BUILT_SOURCES like for 'all:'.
+  $output_rules .= ("check: "
+		    . (var ('BUILT_SOURCES')
+		       ? "\$(BUILT_SOURCES)\n\t\$(MAKE) \$(AM_MAKEFLAGS) "
+		       : '')
+		    . (var ('SUBDIRS') ? 'check-recursive' : 'check-am')
+		    . "\n");
+  depend ('.MAKE', 'check')
+    if var ('BUILT_SOURCES');
+}
+
+# Handle all 'clean' targets.
+sub handle_clean
+{
+  my ($makefile) = @_;
+
+  # Clean the files listed in user variables if they exist.
+  $clean_files{'$(MOSTLYCLEANFILES)'} = MOSTLY_CLEAN
+    if var ('MOSTLYCLEANFILES');
+  $clean_files{'$(CLEANFILES)'} = CLEAN
+    if var ('CLEANFILES');
+  $clean_files{'$(DISTCLEANFILES)'} = DIST_CLEAN
+    if var ('DISTCLEANFILES');
+  $clean_files{'$(MAINTAINERCLEANFILES)'} = MAINTAINER_CLEAN
+    if var ('MAINTAINERCLEANFILES');
+
+  # Built sources are automatically removed by maintainer-clean.
+  $clean_files{'$(BUILT_SOURCES)'} = MAINTAINER_CLEAN
+    if var ('BUILT_SOURCES');
+
+  # Compute a list of "rm"s to run for each target.
+  my %rms = (MOSTLY_CLEAN, [],
+	     CLEAN, [],
+	     DIST_CLEAN, [],
+	     MAINTAINER_CLEAN, []);
+
+  foreach my $file (keys %clean_files)
+    {
+      my $when = $clean_files{$file};
+      prog_error 'invalid entry in %clean_files'
+	unless exists $rms{$when};
+
+      my $rm = "rm -f $file";
+      # If file is a variable, make sure we don't call 'rm -f' without args.
+      $rm = "test -z \"$file\" || $rm"
+	if ($file =~ /^\s*\$(\(.*\)|\{.*\})\s*$/);
+
+      push @{$rms{$when}}, "\t-$rm\n";
+    }
+
+  $output_rules .= file_contents
+    ('clean',
+     new Automake::Location,
+     MOSTLYCLEAN_RMS      => join ('', sort @{$rms{&MOSTLY_CLEAN}}),
+     CLEAN_RMS            => join ('', sort @{$rms{&CLEAN}}),
+     DISTCLEAN_RMS        => join ('', sort @{$rms{&DIST_CLEAN}}),
+     MAINTAINER_CLEAN_RMS => join ('', sort @{$rms{&MAINTAINER_CLEAN}}),
+     MAKEFILE             => basename $makefile,
+     );
+}
+
+
+# Subroutine for handle_factored_dependencies() to let '.PHONY' and
+# other '.TARGETS' be last.  This is meant to be used as a comparison
+# subroutine passed to the sort built-in.
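+# Illustrative example: sorting ('all-am', '.PHONY', 'check-am') with
+# this comparator yields ('all-am', 'check-am', '.PHONY'), i.e. the
+# dot-targets sink to the end while the rest stays alphabetical.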
+sub target_cmp
+{
+  return 0 if $a eq $b;
+
+  my $a1 = substr ($a, 0, 1);
+  my $b1 = substr ($b, 0, 1);
+  if ($a1 ne $b1)
+    {
+      return -1 if $b1 eq '.';
+      return 1 if $a1 eq '.';
+    }
+  return $a cmp $b;
+}
+
+
+# Handle everything related to gathered targets.
+sub handle_factored_dependencies ()
+{
+  # Reject bad hooks.
+  foreach my $utarg ('uninstall-data-local', 'uninstall-data-hook',
+		     'uninstall-exec-local', 'uninstall-exec-hook',
+		     'uninstall-dvi-local',
+		     'uninstall-html-local',
+		     'uninstall-info-local',
+		     'uninstall-pdf-local',
+		     'uninstall-ps-local')
+    {
+      my $x = $utarg;
+      $x =~ s/-.*-/-/;
+      reject_rule ($utarg, "use '$x', not '$utarg'");
+    }
+
+  reject_rule ('install-local',
+	       "use 'install-data-local' or 'install-exec-local', "
+	       . "not 'install-local'");
+
+  reject_rule ('install-hook',
+	       "use 'install-data-hook' or 'install-exec-hook', "
+	       . "not 'install-hook'");
+
+  # Install the -local hooks.
+  foreach (keys %dependencies)
+    {
+      # Hooks are installed on the -am targets.
+      s/-am$// or next;
+      depend ("$_-am", "$_-local")
+	if user_phony_rule "$_-local";
+    }
+
+  # Install the -hook hooks.
+  # FIXME: Why not be as liberal as we are with -local hooks?
+  foreach ('install-exec', 'install-data', 'uninstall')
+    {
+      if (user_phony_rule "$_-hook")
+	{
+	  depend ('.MAKE', "$_-am");
+	  register_action("$_-am",
+			  ("\t\@\$(NORMAL_INSTALL)\n"
+			   . "\t\$(MAKE) \$(AM_MAKEFLAGS) $_-hook"));
+	}
+    }
+
+  # All the required targets are phony.
+  depend ('.PHONY', keys %required_targets);
+
+  # Actually output gathered targets.
+  foreach (sort target_cmp keys %dependencies)
+    {
+      # If there is nothing about this guy, skip it.
+      next
+	unless (@{$dependencies{$_}}
+		|| $actions{$_}
+		|| $required_targets{$_});
+
+      # Define gathered targets in undefined conditions.
+      # FIXME: Right now we must handle .PHONY as an exception,
+      # because people write things like
+      #    .PHONY: myphonytarget
+      # to append dependencies.  This would not work if Automake
+      # refrained from defining its own .PHONY target as it does
+      # with other overridden targets.
+      # Likewise for '.MAKE'.
+      my @undefined_conds = (TRUE,);
+      if ($_ ne '.PHONY' && $_ ne '.MAKE')
+	{
+	  @undefined_conds =
+	    Automake::Rule::define ($_, 'internal',
+				    RULE_AUTOMAKE, TRUE, INTERNAL);
+	}
+      my @uniq_deps = uniq (sort @{$dependencies{$_}});
+      foreach my $cond (@undefined_conds)
+	{
+	  my $condstr = $cond->subst_string;
+	  pretty_print_rule ("$condstr$_:", "$condstr\t", @uniq_deps);
+	  $output_rules .= $actions{$_} if defined $actions{$_};
+	  $output_rules .= "\n";
+	}
+    }
+}
+
+
+sub handle_tests_dejagnu ()
+{
+    push (@check_tests, 'check-DEJAGNU');
+    $output_rules .= file_contents ('dejagnu', new Automake::Location);
+}
+
+# handle_per_suffix_test ($TEST_SUFFIX, [%TRANSFORM])
+#----------------------------------------------------
+sub handle_per_suffix_test
+{
+  my ($test_suffix, %transform) = @_;
+  my ($pfx, $generic, $am_exeext);
+  if ($test_suffix eq '')
+    {
+      $pfx = '';
+      $generic = 0;
+      $am_exeext = 'FALSE';
+    }
+  else
+    {
+      prog_error ("test suffix '$test_suffix' lacks leading dot")
+        unless $test_suffix =~ m/^\.(.*)/;
+      $pfx = uc ($1) . '_';
+      $generic = 1;
+      $am_exeext = exists $configure_vars{'EXEEXT'} ? 'am__EXEEXT'
+                                                    : 'FALSE';
+    }
+  # The "test driver" program, deputed to handle the test protocol used
+  # by test scripts.  By default, it's assumed that no protocol is used, so
+  # we fall back to the old behaviour, implemented by the 'test-driver'
+  # auxiliary script.
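+  # (Illustrative: for a '.sh' test suffix the prefix computed above is
+  # 'SH_', so the variables handled below are SH_LOG_DRIVER,
+  # SH_LOG_DRIVER_FLAGS and SH_LOG_COMPILE.)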
+  if (! var "${pfx}LOG_DRIVER")
+    {
+      require_conf_file ("parallel-tests", FOREIGN, 'test-driver');
+      define_variable ("${pfx}LOG_DRIVER",
+                       "\$(SHELL) $am_config_aux_dir/test-driver",
+                       INTERNAL);
+    }
+  my $driver = '$(' . $pfx . 'LOG_DRIVER)';
+  my $driver_flags = '$(AM_' . $pfx . 'LOG_DRIVER_FLAGS)'
+                       . ' $(' . $pfx . 'LOG_DRIVER_FLAGS)';
+  my $compile = "${pfx}LOG_COMPILE";
+  define_variable ($compile,
+                   '$(' . $pfx . 'LOG_COMPILER)'
+                      . ' $(AM_' .  $pfx . 'LOG_FLAGS)'
+                      . ' $(' . $pfx . 'LOG_FLAGS)',
+                     INTERNAL);
+  $output_rules .= file_contents ('check2', new Automake::Location,
+                                   GENERIC => $generic,
+                                   DRIVER => $driver,
+                                   DRIVER_FLAGS => $driver_flags,
+                                   COMPILE => '$(' . $compile . ')',
+                                   EXT => $test_suffix,
+                                   am__EXEEXT => $am_exeext,
+                                   %transform);
+}
+
+# is_valid_test_extension ($EXT)
+# ------------------------------
+# Return true if $EXT can appear in $(TEST_EXTENSIONS), return false
+# otherwise.
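+# Illustrative examples: '.test' and '.sh' are accepted by the pattern
+# below, while '.foo-bar' is rejected (no hyphens allowed); the
+# '@EXEEXT@' substitution is also accepted when configure AC_SUBSTs
+# EXEEXT.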
+sub is_valid_test_extension
+{
+  my $ext = shift;
+  return 1
+    if ($ext =~ /^\.[a-zA-Z_][a-zA-Z0-9_]*$/);
+  return 1
+    if (exists $configure_vars{'EXEEXT'} && $ext eq subst ('EXEEXT'));
+  return 0;
+}
+
+
+sub handle_tests ()
+{
+  if (option 'dejagnu')
+    {
+      handle_tests_dejagnu;
+    }
+  else
+    {
+      foreach my $c ('DEJATOOL', 'RUNTEST', 'RUNTESTFLAGS')
+	{
+	  reject_var ($c, "'$c' defined but 'dejagnu' not in "
+		      . "'AUTOMAKE_OPTIONS'");
+	}
+    }
+
+  if (var ('TESTS'))
+    {
+      push (@check_tests, 'check-TESTS');
+      my $check_deps = "@check";
+      $output_rules .= file_contents ('check', new Automake::Location,
+                                      SERIAL_TESTS => !! option 'serial-tests',
+                                      CHECK_DEPS => $check_deps);
+
+      # Tests that are known programs should have $(EXEEXT) appended.
+      # For matching purposes, we need to adjust XFAIL_TESTS as well.
+      append_exeext { exists $known_programs{$_[0]} } 'TESTS';
+      append_exeext { exists $known_programs{$_[0]} } 'XFAIL_TESTS'
+	if (var ('XFAIL_TESTS'));
+
+      if (! option 'serial-tests')
+        {
+	  define_variable ('TEST_SUITE_LOG', 'test-suite.log', INTERNAL);
+	  my $suff = '.test';
+	  my $at_exeext = '';
+	  my $handle_exeext = exists $configure_vars{'EXEEXT'};
+	  if ($handle_exeext)
+	    {
+	      $at_exeext = subst ('EXEEXT');
+	      $suff = $at_exeext  . ' ' . $suff;
+	    }
+          if (! var 'TEST_EXTENSIONS')
+            {
+	      define_variable ('TEST_EXTENSIONS', $suff, INTERNAL);
+            }
+          my $var = var 'TEST_EXTENSIONS';
+          # Currently, we are not able to deal with conditional contents
+          # in TEST_EXTENSIONS.
+          if ($var->has_conditional_contents)
+           {
+	     msg_var 'unsupported', $var,
+                     "'TEST_EXTENSIONS' cannot have conditional contents";
+           }
+	  my @test_suffixes = $var->value_as_list_recursive;
+          if ((my @invalid_test_suffixes =
+                  grep { !is_valid_test_extension $_ } @test_suffixes) > 0)
+            {
+              error $var->rdef (TRUE)->location,
+                    "invalid test extensions: @invalid_test_suffixes";
+            }
+          @test_suffixes = grep { is_valid_test_extension $_ } @test_suffixes;
+	  if ($handle_exeext)
+	    {
+	      unshift (@test_suffixes, $at_exeext)
+	        unless $test_suffixes[0] eq $at_exeext;
+	    }
+	  unshift (@test_suffixes, '');
+
+	  transform_variable_recursively
+	    ('TESTS', 'TEST_LOGS', 'am__testlogs', 1, INTERNAL,
+	      sub {
+	        my ($subvar, $val, $cond, $full_cond) = @_;
+		my $obj = $val;
+		return $obj
+		  if $val =~ /^\@.*\@$/;
+		$obj =~ s/\$\(EXEEXT\)$//o;
+
+		if ($val =~ /(\$\((top_)?srcdir\))\//o)
+		  {
+		    msg ('error', $subvar->rdef ($cond)->location,
+			 "using '$1' in TESTS is currently broken: '$val'");
+		  }
+
+		foreach my $test_suffix (@test_suffixes)
+		  {
+		    next
+		      if $test_suffix eq $at_exeext || $test_suffix eq '';
+		    return substr ($obj, 0, length ($obj) - length ($test_suffix)) . '.log'
+		      if substr ($obj, - length ($test_suffix)) eq $test_suffix;
+		  }
+		my $base = $obj;
+		$obj .= '.log';
+                handle_per_suffix_test ('',
+                                        OBJ => $obj,
+                                        BASE => $base,
+                                        SOURCE => $val);
+	        return $obj;
+	      });
+
+	  my $nhelper=1;
+	  my $prev = 'TESTS';
+	  my $post = '';
+	  my $last_suffix = $test_suffixes[$#test_suffixes];
+	  my $cur = '';
+	  foreach my $test_suffix (@test_suffixes)
+	    {
+	      if ($test_suffix eq $last_suffix)
+	        {
+		  $cur = 'TEST_LOGS';
+		}
+	      else
+	        {
+		  $cur = 'am__test_logs' . $nhelper;
+		}
+	      define_variable ($cur,
+		'$(' . $prev . ':' . $test_suffix . $post . '=.log)', INTERNAL);
+	      $post = '.log';
+	      $prev = $cur;
+	      $nhelper++;
+	      if ($test_suffix ne $at_exeext && $test_suffix ne '')
+	        {
+                  handle_per_suffix_test ($test_suffix,
+                                          OBJ => '',
+                                          BASE => '$*',
+                                          SOURCE => '$<');
+	        }
+	    }
+	  $clean_files{'$(TEST_LOGS)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_LOGS:.log=.trs)'} = MOSTLY_CLEAN;
+	  $clean_files{'$(TEST_SUITE_LOG)'} = MOSTLY_CLEAN;
+	}
+    }
+}
+
+sub handle_emacs_lisp ()
+{
+  my @elfiles = am_install_var ('-candist', 'lisp', 'LISP',
+                                'lisp', 'noinst');
+
+  return if ! @elfiles;
+
+  define_pretty_variable ('am__ELFILES', TRUE, INTERNAL,
+			  map { $_->[1] } @elfiles);
+  define_pretty_variable ('am__ELCFILES', TRUE, INTERNAL,
+			  '$(am__ELFILES:.el=.elc)');
+  # This one can be overridden by users.
+  define_pretty_variable ('ELCFILES', TRUE, INTERNAL, '$(LISP:.el=.elc)');
+
+  push @all, '$(ELCFILES)';
+
+  require_variables ($elfiles[0][0], "Emacs Lisp sources seen", TRUE,
+		     'EMACS', 'lispdir');
+}
+
+sub handle_python ()
+{
+  my @pyfiles = am_install_var ('-defaultdist', 'python', 'PYTHON',
+                                'noinst');
+  return if ! @pyfiles;
+
+  require_variables ($pyfiles[0][0], "Python sources seen", TRUE, 'PYTHON');
+  require_conf_file ($pyfiles[0][0], FOREIGN, 'py-compile');
+  define_variable ('py_compile', "$am_config_aux_dir/py-compile", INTERNAL);
+}
+
+sub handle_java ()
+{
+    my @sourcelist = am_install_var ('-candist',
+                                     'java', 'JAVA',
+                                     'noinst', 'check');
+    return if ! @sourcelist;
+
+    my @prefixes = am_primary_prefixes ('JAVA', 1,
+				        'noinst', 'check');
+
+    my $dir;
+    my @java_sources = ();
+    foreach my $prefix (@prefixes)
+      {
+        (my $curs = $prefix) =~ s/^(?:nobase_)?(?:dist_|nodist_)?//;
+
+	next
+	  if $curs eq 'EXTRA';
+
+        push @java_sources, '$(' . $prefix . '_JAVA' . ')';
+
+	if (defined $dir)
+	  {
+	    err_var "${curs}_JAVA", "multiple _JAVA primaries in use"
+	     unless $curs eq $dir;
+	  }
+
+	$dir = $curs;
+      }
+
+    define_pretty_variable ('am__java_sources', TRUE, INTERNAL,
+                            "@java_sources");
+
+    if ($dir eq 'check')
+      {
+        push (@check, "class$dir.stamp");
+      }
+    else
+      {
+        push (@all, "class$dir.stamp");
+      }
+}
+
+
+sub handle_minor_options ()
+{
+  if (option 'readme-alpha')
+    {
+      if ($relative_dir eq '.')
+	{
+	  if ($package_version !~ /^$GNITS_VERSION_PATTERN$/)
+	    {
+	      msg ('error-gnits', $package_version_location,
+		   "version '$package_version' doesn't follow " .
+		   "Gnits standards");
+	    }
+	  if (defined $1 && -f 'README-alpha')
+	    {
+	      # This means we have an alpha release.  See
+	      # GNITS_VERSION_PATTERN for details.
+	      push_dist_common ('README-alpha');
+	    }
+	}
+    }
+}
+
+################################################################
+
+# ($OUTPUT, @INPUTS)
+# split_config_file_spec ($SPEC)
+# ------------------------------
+# Decode the Autoconf syntax for config files (files, headers, links
+# etc.).
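+# Illustrative examples:
+#   "Makefile:top.in:bot.in"  =>  ("Makefile", "top.in", "bot.in")
+#   "Makefile"                =>  ("Makefile", "Makefile.in")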
+sub split_config_file_spec
+{
+  my ($spec) = @_;
+  my ($output, @inputs) = split (/:/, $spec);
+
+  push @inputs, "$output.in"
+    unless @inputs;
+
+  return ($output, @inputs);
+}
+
+# $input
+# locate_am (@POSSIBLE_SOURCES)
+# -----------------------------
+# AC_CONFIG_FILES allows specifications such as Makefile:top.in:mid.in:bot.in.
+# This function returns the first *.in file for which a *.am exists.
+# It returns undef otherwise.
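+# Illustrative example: for ("top.in", "bot.in"), if "top.am" exists the
+# function returns "top.in"; if only "bot.am" exists it returns "bot.in".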
+sub locate_am
+{
+  my (@rest) = @_;
+  my $input;
+  foreach my $file (@rest)
+    {
+      if (($file =~ /^(.*)\.in$/) && -f "$1.am")
+	{
+	  $input = $file;
+	  last;
+	}
+    }
+  return $input;
+}
+
+my %make_list;
+
+# scan_autoconf_config_files ($WHERE, $CONFIG-FILES)
+# --------------------------------------------------
+# Study $CONFIG-FILES which is the first argument to AC_CONFIG_FILES
+# (or AC_OUTPUT).
+sub scan_autoconf_config_files
+{
+  my ($where, $config_files) = @_;
+
+  # Look at potential Makefile.am's.
+  foreach (split ' ', $config_files)
+    {
+      # Must skip empty string for Perl 4.
+      next if $_ eq "\\" || $_ eq '';
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/);
+      @rest = ("$local.in",) unless @rest;
+      # Keep in sync with test 'conffile-leading-dot.sh'.
+      msg ('unsupported', $where,
+           "omit leading './' from config file names such as '$local';"
+           . "\nremake rules might be subtly broken otherwise")
+        if ($local =~ /^\.\//);
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  # We have a file that automake should generate.
+	  $make_list{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  # We have a file that automake should cause to be
+	  # rebuilt, but shouldn't generate itself.
+	  push (@other_input_files, $_);
+	}
+      $ac_config_files_location{$local} = $where;
+      $ac_config_files_condition{$local} =
+        new Automake::Condition (@cond_stack)
+          if (@cond_stack);
+    }
+}
+
+
+sub scan_autoconf_traces
+{
+  my ($filename) = @_;
+
+  # Macros to trace, with their minimal number of arguments.
+  #
+  # IMPORTANT: If you add a macro here, you should also add this macro
+  # =========  to Automake-preselection in autoconf/lib/autom4te.in.
+  my %traced = (
+		AC_CANONICAL_BUILD => 0,
+		AC_CANONICAL_HOST => 0,
+		AC_CANONICAL_TARGET => 0,
+		AC_CONFIG_AUX_DIR => 1,
+		AC_CONFIG_FILES => 1,
+		AC_CONFIG_HEADERS => 1,
+		AC_CONFIG_LIBOBJ_DIR => 1,
+		AC_CONFIG_LINKS => 1,
+		AC_FC_SRCEXT => 1,
+		AC_INIT => 0,
+		AC_LIBSOURCE => 1,
+		AC_REQUIRE_AUX_FILE => 1,
+		AC_SUBST_TRACE => 1,
+		AM_AUTOMAKE_VERSION => 1,
+                AM_PROG_MKDIR_P => 0,
+		AM_CONDITIONAL => 2,
+		AM_EXTRA_RECURSIVE_TARGETS => 1,
+		AM_GNU_GETTEXT => 0,
+		AM_GNU_GETTEXT_INTL_SUBDIR => 0,
+		AM_INIT_AUTOMAKE => 0,
+		AM_MAINTAINER_MODE => 0,
+		AM_PROG_AR => 0,
+		_AM_SUBST_NOTMAKE => 1,
+		_AM_COND_IF => 1,
+		_AM_COND_ELSE => 1,
+		_AM_COND_ENDIF => 1,
+		LT_SUPPORTED_TAG => 1,
+		_LT_AC_TAGCONFIG => 0,
+		m4_include => 1,
+		m4_sinclude => 1,
+		sinclude => 1,
+	      );
+
+  my $traces = ($ENV{AUTOCONF} || 'autoconf') . " ";
+
+  # Use a separator unlikely to be used, not ':', the default, which
+  # has a precise meaning for AC_CONFIG_FILES and so on.
+  $traces .= join (' ',
+		   map { "--trace=$_" . ':\$f:\$l::\$d::\$n::\${::}%' }
+		   (keys %traced));
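+  # (Illustrative: each trace line produced by this command looks like
+  # "configure.ac:3::0::AC_INIT::GNU Hello::1.0", which the loop below
+  # splits on '::' into location, macro call depth, macro name, and
+  # macro arguments.)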
+
+  my $tracefh = new Automake::XFile ("$traces $filename |");
+  verb "reading $traces";
+
+  @cond_stack = ();
+  my $where;
+
+  while ($_ = $tracefh->getline)
+    {
+      chomp;
+      my ($here, $depth, @args) = split (/::/);
+      $where = new Automake::Location $here;
+      my $macro = $args[0];
+
+      prog_error ("unrequested trace '$macro'")
+	unless exists $traced{$macro};
+
+      # Skip and diagnose malformed calls.
+      if ($#args < $traced{$macro})
+	{
+	  msg ('syntax', $where, "not enough arguments for $macro");
+	  next;
+	}
+
+      # Alphabetical ordering please.
+      if ($macro eq 'AC_CANONICAL_BUILD')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_BUILD)
+	    {
+	      $seen_canonical = AC_CANONICAL_BUILD;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_HOST')
+	{
+	  if ($seen_canonical <= AC_CANONICAL_HOST)
+	    {
+	      $seen_canonical = AC_CANONICAL_HOST;
+	    }
+	}
+      elsif ($macro eq 'AC_CANONICAL_TARGET')
+	{
+	  $seen_canonical = AC_CANONICAL_TARGET;
+	}
+      elsif ($macro eq 'AC_CONFIG_AUX_DIR')
+	{
+	  if ($seen_init_automake)
+	    {
+	      error ($where, "AC_CONFIG_AUX_DIR must be called before "
+		     . "AM_INIT_AUTOMAKE ...", partial => 1);
+	      error ($seen_init_automake, "... AM_INIT_AUTOMAKE called here");
+	    }
+	  $config_aux_dir = $args[1];
+	  $config_aux_dir_set_in_configure_ac = 1;
+	  check_directory ($config_aux_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_FILES')
+	{
+	  # Look at potential Makefile.am's.
+	  scan_autoconf_config_files ($where, $args[1]);
+	}
+      elsif ($macro eq 'AC_CONFIG_HEADERS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, @src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_headers, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_CONFIG_LIBOBJ_DIR')
+	{
+	  $config_libobj_dir = $args[1];
+	  check_directory ($config_libobj_dir, $where);
+	}
+      elsif ($macro eq 'AC_CONFIG_LINKS')
+	{
+	  foreach my $spec (split (' ', $args[1]))
+	    {
+	      my ($dest, $src) = split (':', $spec);
+	      $ac_config_files_location{$dest} = $where;
+	      push @config_links, $spec;
+	    }
+	}
+      elsif ($macro eq 'AC_FC_SRCEXT')
+	{
+	  my $suffix = $args[1];
+	  # These flags are used as %SOURCEFLAG% in depend2.am,
+	  # where the trailing space is important.
+	  $sourceflags{'.' . $suffix} = '$(FCFLAGS_' . $suffix . ') '
+	    if ($suffix eq 'f90' || $suffix eq 'f95' || $suffix eq 'f03' || $suffix eq 'f08');
+	}
+      elsif ($macro eq 'AC_INIT')
+	{
+	  if (defined $args[2])
+	    {
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	}
+      elsif ($macro eq 'AC_LIBSOURCE')
+	{
+	  $libsources{$args[1]} = $here;
+	}
+      elsif ($macro eq 'AC_REQUIRE_AUX_FILE')
+	{
+	  # Only remember the first time a file is required.
+	  $required_aux_file{$args[1]} = $where
+	    unless exists $required_aux_file{$args[1]};
+	}
+      elsif ($macro eq 'AC_SUBST_TRACE')
+	{
+	  # Just check for alphanumeric in AC_SUBST_TRACE.  If you do
+	  # AC_SUBST(5), then too bad.
+	  $configure_vars{$args[1]} = $where
+	    if $args[1] =~ /^\w+$/;
+	}
+      elsif ($macro eq 'AM_AUTOMAKE_VERSION')
+	{
+	  error ($where,
+		 "version mismatch.  This is Automake $VERSION,\n" .
+		 "but the definition used by this AM_INIT_AUTOMAKE\n" .
+		 "comes from Automake $args[1].  You should recreate\n" .
+		 "aclocal.m4 with aclocal and run automake again.\n",
+		 # $? = 63 is used to indicate version mismatch to missing.
+		 exit_code => 63)
+	    if $VERSION ne $args[1];
+
+	  $seen_automake_version = 1;
+	}
+      elsif ($macro eq 'AM_PROG_MKDIR_P')
+	{
+	  msg 'obsolete', $where, <<'EOF';
+The 'AM_PROG_MKDIR_P' macro is deprecated, and its use is discouraged.
+You should use the Autoconf-provided 'AC_PROG_MKDIR_P' macro instead,
+and use '$(MKDIR_P)' instead of '$(mkdir_p)' in your Makefile.am files.
+EOF
+	}
+      elsif ($macro eq 'AM_CONDITIONAL')
+	{
+	  $configure_cond{$args[1]} = $where;
+	}
+      elsif ($macro eq 'AM_EXTRA_RECURSIVE_TARGETS')
+	{
+          # Empty leading/trailing fields might be produced by split,
+          # hence the grep is really needed.
+          push @extra_recursive_targets,
+               grep (/./, (split /\s+/, $args[1]));
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT')
+	{
+	  $seen_gettext = $where;
+	  $ac_gettext_location = $where;
+	  $seen_gettext_external = grep ($_ eq 'external', @args);
+	}
+      elsif ($macro eq 'AM_GNU_GETTEXT_INTL_SUBDIR')
+	{
+	  $seen_gettext_intl = $where;
+	}
+      elsif ($macro eq 'AM_INIT_AUTOMAKE')
+	{
+	  $seen_init_automake = $where;
+	  if (defined $args[2])
+	    {
+              msg 'obsolete', $where, <<'EOF';
+AM_INIT_AUTOMAKE: two- and three-argument forms are deprecated.  For more info, see:
+http://www.gnu.org/software/automake/manual/automake.html#Modernize-AM_005fINIT_005fAUTOMAKE-invocation
+EOF
+	      $package_version = $args[2];
+	      $package_version_location = $where;
+	    }
+	  elsif (defined $args[1])
+	    {
+	      my @opts = split (' ', $args[1]);
+	      @opts = map { { option => $_, where => $where } } @opts;
+	      exit $exit_code unless process_global_option_list (@opts);
+	    }
+	}
+      elsif ($macro eq 'AM_MAINTAINER_MODE')
+	{
+	  $seen_maint_mode = $where;
+	}
+      elsif ($macro eq 'AM_PROG_AR')
+	{
+	  $seen_ar = $where;
+	}
+      elsif ($macro eq '_AM_COND_IF')
+        {
+	  cond_stack_if ('', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ELSE')
+        {
+	  cond_stack_else ('!', $args[1], $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_COND_ENDIF')
+        {
+	  cond_stack_endif (undef, undef, $where);
+	  error ($where, "missing m4 quoting, macro depth $depth")
+	    if ($depth != 1);
+	}
+      elsif ($macro eq '_AM_SUBST_NOTMAKE')
+	{
+	  $ignored_configure_vars{$args[1]} = $where;
+	}
+      elsif ($macro eq 'm4_include'
+	     || $macro eq 'm4_sinclude'
+	     || $macro eq 'sinclude')
+	{
+	  # Skip missing 'sinclude'd files.
+	  next if $macro ne 'm4_include' && ! -f $args[1];
+
+	  # Some modified versions of Autoconf don't use
+	  # frozen files.  Consequently it's possible that we see all
+	  # m4_include's performed during Autoconf's startup.
+	  # Obviously we don't want to distribute Autoconf's files
+	  # so we skip absolute filenames here.
+	  push @configure_deps, '$(top_srcdir)/' . $args[1]
+	    unless $here =~ m,^(?:\w:)?[\\/],;
+	  # Keep track of the greatest timestamp.
+	  if (-e $args[1])
+	    {
+	      my $mtime = mtime $args[1];
+	      $configure_deps_greatest_timestamp = $mtime
+		if $mtime > $configure_deps_greatest_timestamp;
+	    }
+	}
+      elsif ($macro eq 'LT_SUPPORTED_TAG')
+	{
+	  $libtool_tags{$args[1]} = 1;
+	  $libtool_new_api = 1;
+	}
+      elsif ($macro eq '_LT_AC_TAGCONFIG')
+	{
+	  # _LT_AC_TAGCONFIG is an old macro present in Libtool 1.5.
+	  # We use it to detect whether tags are supported.  Our
+	  # preferred interface is LT_SUPPORTED_TAG, but it was
+	  # introduced in Libtool 1.6.
+	  if (0 == keys %libtool_tags)
+	    {
+	      # Hardcode the tags supported by Libtool 1.5.
+	      %libtool_tags = (CC => 1, CXX => 1, GCJ => 1, F77 => 1);
+	    }
+	}
+    }
+
+  error ($where, "condition stack not properly closed")
+    if (@cond_stack);
+
+  $tracefh->close;
+}
+
+
+# Check whether we use 'configure.ac' or 'configure.in'.
+# Scan it (and possibly 'aclocal.m4') for interesting things.
+# We must scan aclocal.m4 because there might be AC_SUBSTs and such there.
+sub scan_autoconf_files ()
+{
+  # Reinitialize libsources here.  This isn't really necessary,
+  # since we currently assume there is only one configure.ac.  But
+  # that won't always be the case.
+  %libsources = ();
+
+  # Keep track of the youngest configure dependency.
+  $configure_deps_greatest_timestamp = mtime $configure_ac;
+  if (-e 'aclocal.m4')
+    {
+      my $mtime = mtime 'aclocal.m4';
+      $configure_deps_greatest_timestamp = $mtime
+	if $mtime > $configure_deps_greatest_timestamp;
+    }
+
+  scan_autoconf_traces ($configure_ac);
+
+  @configure_input_files = sort keys %make_list;
+  # Set input and output files if not specified by user.
+  if (! @input_files)
+    {
+      @input_files = @configure_input_files;
+      %output_files = %make_list;
+    }
+
+
+  if (! $seen_init_automake)
+    {
+      err_ac ("no proper invocation of AM_INIT_AUTOMAKE was found.\nYou "
+	      . "should verify that $configure_ac invokes AM_INIT_AUTOMAKE,"
+	      . "\nthat aclocal.m4 is present in the top-level directory,\n"
+	      . "and that aclocal.m4 was recently regenerated "
+	      . "(using aclocal)");
+    }
+  else
+    {
+      if (! $seen_automake_version)
+	{
+	  if (-f 'aclocal.m4')
+	    {
+	      error ($seen_init_automake,
+		     "your implementation of AM_INIT_AUTOMAKE comes from " .
+		     "an\nold Automake version.  You should recreate " .
+		     "aclocal.m4\nwith aclocal and run automake again",
+		     # $? = 63 is used to indicate version mismatch to missing.
+		     exit_code => 63);
+	    }
+	  else
+	    {
+	      error ($seen_init_automake,
+		     "no proper implementation of AM_INIT_AUTOMAKE was " .
+		     "found,\nprobably because aclocal.m4 is missing.\n" .
+		     "You should run aclocal to create this file, then\n" .
+		     "run automake again");
+	    }
+	}
+    }
+
+  locate_aux_dir ();
+
+  # Look for some files we need.  Always check for these.  This
+  # check must be done for every run, even those where we are only
+  # looking at a subdir Makefile.  We must set relative_dir for
+  # push_required_file to work.
+  # Sort the files for stable verbose output.
+  $relative_dir = '.';
+  foreach my $file (sort keys %required_aux_file)
+    {
+      require_conf_file ($required_aux_file{$file}->get, FOREIGN, $file)
+    }
+  err_am "'install.sh' is an anachronism; use 'install-sh' instead"
+    if -f $config_aux_dir . '/install.sh';
+
+  # Preserve dist_common for later.
+  $configure_dist_common = variable_value ('DIST_COMMON') || '';
+
+}
+
+################################################################
+
+# Do any extra checking for GNU standards.
+sub check_gnu_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNU,
+		    qw/INSTALL NEWS README AUTHORS ChangeLog/);
+
+      # Accept one of these three licenses; default to COPYING.
+      # Make sure we do not overwrite an existing license.
+      my $license;
+      foreach (qw /COPYING COPYING.LIB COPYING.LESSER/)
+	{
+	  if (-f $_)
+	    {
+	      $license = $_;
+	      last;
+	    }
+	}
+      require_file ("$am_file.am", GNU, 'COPYING')
+	unless $license;
+    }
+
+  for my $opt ('no-installman', 'no-installinfo')
+    {
+      msg ('error-gnu', option $opt,
+	   "option '$opt' disallowed by GNU standards")
+	if option $opt;
+    }
+}
+
+# Do any extra checking for GNITS standards.
+sub check_gnits_standards ()
+{
+  if ($relative_dir eq '.')
+    {
+      # In top level (or only) directory.
+      require_file ("$am_file.am", GNITS, 'THANKS');
+    }
+}
+
+################################################################
+#
+# Functions to handle files of each language.
+
+# Each 'lang_X_rewrite($DIRECTORY, $BASE, $EXT)' function follows a
+# simple formula: the return value is LANG_SUBDIR if the resulting object
+# file should be in a subdir when the source file is, LANG_PROCESS if the
+# file is to be dealt with, and LANG_IGNORE otherwise.
+
+# Much of the actual processing is handled in
+# handle_single_transform.  These functions exist so that
+# auxiliary information can be recorded for a later cleanup pass.
+# Note that the calls to these functions are computed, so don't bother
+# searching for their precise names in the source.
+
+# This is just a convenience function that can be used to determine
+# when a subdir object should be used.
+sub lang_sub_obj ()
+{
+    return option 'subdir-objects' ? LANG_SUBDIR : LANG_PROCESS;
+}
+
+# Rewrite a single header file.
+sub lang_header_rewrite
+{
+    # Header files are simply ignored.
+    return LANG_IGNORE;
+}
+
+# Rewrite a single Vala source file.
+sub lang_vala_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    (my $newext = $ext) =~ s/vala$/c/;
+    return (LANG_SUBDIR, $newext);
+}
+
+# Rewrite a single yacc/yacc++ file.
+sub lang_yacc_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/y/c/;
+    return ($r, $newext);
+}
+sub lang_yaccxx_rewrite { lang_yacc_rewrite (@_); };
+
+# Rewrite a single lex/lex++ file.
+sub lang_lex_rewrite
+{
+    my ($directory, $base, $ext) = @_;
+
+    my $r = lang_sub_obj;
+    (my $newext = $ext) =~ tr/l/c/;
+    return ($r, $newext);
+}
+sub lang_lexxx_rewrite { lang_lex_rewrite (@_); };
+
+# Rewrite a single Java file.
+sub lang_java_rewrite
+{
+    return LANG_SUBDIR;
+}
+
+# The lang_X_finish functions are called after all source file
+# processing is done.  Each should handle defining rules for the
+# language, etc.  A finish function is only called if a source file of
+# the appropriate type has been seen.
+
+sub lang_vala_finish_target
+{
+  my ($self, $name) = @_;
+
+  my $derived = canonicalize ($name);
+  my $var = var "${derived}_SOURCES";
+  return unless $var;
+
+  my @vala_sources = grep { /\.(vala|vapi)$/ } ($var->value_as_list_recursive);
+
+  # For automake bug#11229.
+  return unless @vala_sources;
+
+  foreach my $vala_file (@vala_sources)
+    {
+      my $c_file = $vala_file;
+      if ($c_file =~ s/(.*)\.vala$/$1.c/)
+        {
+          $c_file = "\$(srcdir)/$c_file";
+          $output_rules .= "$c_file: \$(srcdir)/${derived}_vala.stamp\n"
+            . "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+            . "\t\@if test -f \$@; then :; else \\\n"
+            . "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+            . "\tfi\n";
+	  $clean_files{$c_file} = MAINTAINER_CLEAN;
+        }
+    }
+
+  # Add rebuild rules for generated header and vapi files
+  my $flags = var ($derived . '_VALAFLAGS');
+  if ($flags)
+    {
+      my $lastflag = '';
+      foreach my $flag ($flags->value_as_list_recursive)
+	{
+	  if (grep (/$lastflag/, ('-H', '-h', '--header', '--internal-header',
+	                          '--vapi', '--internal-vapi', '--gir')))
+	    {
+	      my $headerfile = "\$(srcdir)/$flag";
+	      $output_rules .= "$headerfile: \$(srcdir)/${derived}_vala.stamp\n"
+		. "\t\@if test -f \$@; then :; else rm -f \$(srcdir)/${derived}_vala.stamp; fi\n"
+		. "\t\@if test -f \$@; then :; else \\\n"
+		. "\t  \$(MAKE) \$(AM_MAKEFLAGS) \$(srcdir)/${derived}_vala.stamp; \\\n"
+		. "\tfi\n";
+
+	      # valac is not used when building from dist tarballs,
+	      # so distribute the generated files.
+	      push_dist_common ($headerfile);
+	      $clean_files{$headerfile} = MAINTAINER_CLEAN;
+	    }
+	  $lastflag = $flag;
+	}
+    }
+
+  my $compile = $self->compile;
+
+  # Rewrite each occurrence of 'AM_VALAFLAGS' in the compile
+  # rule into '${derived}_VALAFLAGS' if it exists.
+  my $val = "${derived}_VALAFLAGS";
+  $compile =~ s/\(AM_VALAFLAGS\)/\($val\)/
+    if set_seen ($val);
+
+  # VALAFLAGS is a user variable (per GNU Standards);
+  # it should not be overridden in the Makefile...
+  check_user_variables 'VALAFLAGS';
+
+  my $dirname = dirname ($name);
+
+  # Only generate C code, do not run C compiler
+  $compile .= " -C";
+
+  my $verbose = verbose_flag ('VALAC');
+  my $silent = silent_flag ();
+  my $stampfile = "\$(srcdir)/${derived}_vala.stamp";
+
+  $output_rules .=
+    "\$(srcdir)/${derived}_vala.stamp: @vala_sources\n".
+# Since the C files generated from the vala sources depend on the
+# ${derived}_vala.stamp file, we must ensure its timestamp is older than
+# those of the C files generated by the valac invocation below (this is
+# especially important on systems with sub-second timestamp resolution).
+# Thus we need to create the stamp file *before* invoking valac, and to
+# move it to its final location only after valac has been invoked.
+    "\t${silent}rm -f \$\@ && echo stamp > \$\@-t\n".
+    "\t${verbose}\$(am__cd) \$(srcdir) && $compile @vala_sources\n".
+    "\t${silent}mv -f \$\@-t \$\@\n";
+
+  push_dist_common ($stampfile);
+
+  $clean_files{$stampfile} = MAINTAINER_CLEAN;
+}
+
+# Add output rules to invoke valac and create a stamp file as a witness
+# to handle multiple outputs. This function is called after all source
+# file processing is done.
+sub lang_vala_finish ()
+{
+  my ($self) = @_;
+
+  foreach my $prog (keys %known_programs)
+    {
+      lang_vala_finish_target ($self, $prog);
+    }
+
+  while (my ($name) = each %known_libraries)
+    {
+      lang_vala_finish_target ($self, $name);
+    }
+}
+
+# The built .c files should be cleaned only on maintainer-clean
+# as the .c files are distributed. This function is called for each
+# .vala source file.
+sub lang_vala_target_hook
+{
+  my ($self, $aggregate, $output, $input, %transform) = @_;
+
+  $clean_files{$output} = MAINTAINER_CLEAN;
+}
+
+# This is a yacc helper which is called whenever we have decided to
+# compile a yacc file.
+sub lang_yacc_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+
+    # If some relevant *YFLAGS variable contains the '-d' flag, we'll
+    # have to generate special code.
+    my $yflags_contains_minus_d = 0;
+
+    foreach my $pfx ("", "${aggregate}_")
+      {
+	my $yflagsvar = var ("${pfx}YFLAGS");
+	next unless $yflagsvar;
+	# We cannot work reliably with conditionally-defined YFLAGS.
+	if ($yflagsvar->has_conditional_contents)
+	  {
+	    msg_var ('unsupported', $yflagsvar,
+	             "'${pfx}YFLAGS' cannot have conditional contents");
+	  }
+	else
+	  {
+	    $yflags_contains_minus_d = 1
+	      if grep (/^-d$/, $yflagsvar->value_as_list_recursive);
+	  }
+      }
+
+    if ($yflags_contains_minus_d)
+      {
+	# Found a '-d' that applies to the compilation of this file.
+	# Add a dependency for the generated header file, and arrange
+	# for that file to be included in the distribution.
+
+	# The extension of the output file (e.g., '.c' or '.cxx').
+	# We'll need it to compute the name of the generated header file.
+	(my $output_ext = basename ($output)) =~ s/.*(\.[^.]+)$/$1/;
+
+	# We know that a yacc input should be turned into either a C or
+	# C++ output file.  We depend on this fact (here and in yacc.am),
+	# so check that it really holds.
+	my $lang = $languages{$extension_map{$output_ext}};
+	prog_error "invalid output name '$output' for yacc file '$input'"
+	  if (!$lang || ($lang->name ne 'c' && $lang->name ne 'cxx'));
+
+	(my $header_ext = $output_ext) =~ s/c/h/g;
+        # Quote $output_ext in the regexp, so that dots in it are taken
+        # as literal dots, not as metacharacters.
+	(my $header = $output) =~ s/\Q$output_ext\E$/$header_ext/;
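+	# (Illustrative: an output of 'parse.c' yields the header name
+	# 'parse.h', and 'parse.cc' yields 'parse.hh'.)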
+
+	foreach my $cond (Automake::Rule::define (${header}, 'internal',
+						  RULE_AUTOMAKE, TRUE,
+						  INTERNAL))
+	  {
+	    my $condstr = $cond->subst_string;
+	    $output_rules .=
+	      "$condstr${header}: $output\n"
+	      # Recover from removal of $header
+	      . "$condstr\t\@if test ! -f \$@; then rm -f $output; else :; fi\n"
+	      . "$condstr\t\@if test ! -f \$@; then \$(MAKE) \$(AM_MAKEFLAGS) $output; else :; fi\n";
+	  }
+	# Distribute the generated file, unless its .y source was
+	# listed in a nodist_ variable.  (handle_source_transform()
+	# will set DIST_SOURCE.)
+	push_dist_common ($header)
+	  if $transform{'DIST_SOURCE'};
+
+	# The GNU rules say that yacc/lex output files should be removed
+	# by maintainer-clean.  However, if the files are not distributed,
+	# then we want to remove them with "make clean"; otherwise,
+	# "make distcheck" will fail.
+	$clean_files{$header} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+      }
+    # See the comment above for $HEADER.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a lex helper which is called whenever we have decided to
+# compile a lex file.
+sub lang_lex_target_hook
+{
+    my ($self, $aggregate, $output, $input, %transform) = @_;
+    # The GNU rules say that yacc/lex output files should be removed
+    # by maintainer-clean.  However, if the files are not distributed,
+    # then we want to remove them with "make clean"; otherwise,
+    # "make distcheck" will fail.
+    $clean_files{$output} = $transform{'DIST_SOURCE'} ? MAINTAINER_CLEAN : CLEAN;
+}
+
+# This is a helper for both lex and yacc.
+sub yacc_lex_finish_helper ()
+{
+  return if defined $language_scratch{'lex-yacc-done'};
+  $language_scratch{'lex-yacc-done'} = 1;
+
+  # FIXME: for now, no line number.
+  require_conf_file ($configure_ac, FOREIGN, 'ylwrap');
+  define_variable ('YLWRAP', "$am_config_aux_dir/ylwrap", INTERNAL);
+}
+
+sub lang_yacc_finish ()
+{
+  return if defined $language_scratch{'yacc-done'};
+  $language_scratch{'yacc-done'} = 1;
+
+  reject_var 'YACCFLAGS', "'YACCFLAGS' obsolete; use 'YFLAGS' instead";
+
+  yacc_lex_finish_helper;
+}
+
+
+sub lang_lex_finish ()
+{
+  return if defined $language_scratch{'lex-done'};
+  $language_scratch{'lex-done'} = 1;
+
+  yacc_lex_finish_helper;
+}
+
+
+# Given a hash table of linker names, pick the name that has the most
+# precedence.  This is lame, but something has to have global
+# knowledge in order to eliminate the conflict.  Add more linkers as
+# required.
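+# Illustrative example: a program mixing C and C++ objects passes a hash
+# containing both 'LINK' and 'CXXLINK'; 'CXXLINK' wins, so the C++ link
+# command is used.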
+sub resolve_linker
+{
+    my (%linkers) = @_;
+
+    foreach my $l (qw(GCJLINK OBJCXXLINK CXXLINK F77LINK FCLINK OBJCLINK UPCLINK))
+    {
+	return $l if defined $linkers{$l};
+    }
+    return 'LINK';
+}
+
+# Called to indicate that an extension was used.
+sub saw_extension
+{
+    my ($ext) = @_;
+    $extension_seen{$ext} = 1;
+}
+
+# register_language (%ATTRIBUTE)
+# ------------------------------
+# Register a single language.
+# Each %ATTRIBUTE is of the form ATTRIBUTE => VALUE.
+sub register_language
+{
+  my (%option) = @_;
+
+  # Set the defaults.
+  $option{'autodep'} = 'no'
+    unless defined $option{'autodep'};
+  $option{'linker'} = ''
+    unless defined $option{'linker'};
+  $option{'flags'} = []
+    unless defined $option{'flags'};
+  $option{'output_extensions'} = sub { return ( '.$(OBJEXT)', '.lo' ) }
+    unless defined $option{'output_extensions'};
+  $option{'nodist_specific'} = 0
+    unless defined $option{'nodist_specific'};
+
+  my $lang = new Automake::Language (%option);
+
+  # Fill indexes.
+  $extension_map{$_} = $lang->name foreach @{$lang->extensions};
+  $languages{$lang->name} = $lang;
+  my $link = $lang->linker;
+  if ($link)
+    {
+      if (exists $link_languages{$link})
+	{
+	  prog_error ("'$link' has different definitions in "
+		      . $lang->name . " and " . $link_languages{$link}->name)
+	    if $lang->link ne $link_languages{$link}->link;
+	}
+      else
+	{
+	  $link_languages{$link} = $lang;
+	}
+    }
+
+  # Update the pattern of known extensions.
+  accept_extensions (@{$lang->extensions});
+
+  # Update the suffix rules map.
+  foreach my $suffix (@{$lang->extensions})
+    {
+      foreach my $dest ($lang->output_extensions->($suffix))
+	{
+	  register_suffix_rule (INTERNAL, $suffix, $dest);
+	}
+    }
+}
+
+# derive_suffix ($EXT, $OBJ)
+# --------------------------
+# This function is used to find a path from a user-specified suffix $EXT
+# to $OBJ or to some other suffix we recognize internally, e.g. 'cc'.
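+# Illustrative example: with a user suffix rule chaining '.zoo' to '.cc',
+# derive_suffix ('.zoo', '.o') follows the chain and returns '.cc',
+# since '.cc' is an extension Automake knows how to compile.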
+sub derive_suffix
+{
+  my ($source_ext, $obj) = @_;
+
+  while (!$extension_map{$source_ext} && $source_ext ne $obj)
+    {
+      my $new_source_ext = next_in_suffix_chain ($source_ext, $obj);
+      last if not defined $new_source_ext;
+      $source_ext = $new_source_ext;
+    }
+
+  return $source_ext;
+}
+
+
+# Pretty-print something and append to '$output_rules'.
+sub pretty_print_rule
+{
+    $output_rules .= makefile_wrap (shift, shift, @_);
+}
+
+
+################################################################
+
+
+## -------------------------------- ##
+## Handling the conditional stack.  ##
+## -------------------------------- ##
+
+
+# $STRING
+# make_conditional_string ($NEGATE, $COND)
+# ----------------------------------------
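+# Illustrative examples:
+#   make_conditional_string ('',  'DEBUG')  =>  'DEBUG_TRUE'
+#   make_conditional_string ('!', 'DEBUG')  =>  'DEBUG_FALSE'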
+sub make_conditional_string
+{
+  my ($negate, $cond) = @_;
+  $cond = "${cond}_TRUE"
+    unless $cond =~ /^TRUE|FALSE$/;
+  $cond = Automake::Condition::conditional_negate ($cond)
+    if $negate;
+  return $cond;
+}
+
+
+my %_am_macro_for_cond =
+  (
+  AMDEP => "one of the compiler tests\n"
+	   . "    AC_PROG_CC, AC_PROG_CXX, AC_PROG_OBJC, AC_PROG_OBJCXX,\n"
+	   . "    AM_PROG_AS, AM_PROG_GCJ, AM_PROG_UPC",
+  am__fastdepCC => 'AC_PROG_CC',
+  am__fastdepCCAS => 'AM_PROG_AS',
+  am__fastdepCXX => 'AC_PROG_CXX',
+  am__fastdepGCJ => 'AM_PROG_GCJ',
+  am__fastdepOBJC => 'AC_PROG_OBJC',
+  am__fastdepOBJCXX => 'AC_PROG_OBJCXX',
+  am__fastdepUPC => 'AM_PROG_UPC'
+  );
+
+# $COND
+# cond_stack_if ($NEGATE, $COND, $WHERE)
+# --------------------------------------
+sub cond_stack_if
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! $configure_cond{$cond} && $cond !~ /^TRUE|FALSE$/)
+    {
+      my $text = "$cond does not appear in AM_CONDITIONAL";
+      my $scope = US_LOCAL;
+      if (exists $_am_macro_for_cond{$cond})
+	{
+	  my $mac = $_am_macro_for_cond{$cond};
+	  $text .= "\n  The usual way to define '$cond' is to add ";
+	  $text .= ($mac =~ / /) ? $mac : "'$mac'";
+	  $text .= "\n  to '$configure_ac' and run 'aclocal' and 'autoconf' again";
+	  # These warnings appear in Automake files (depend2.am),
+	  # so there is no need to display them more than once:
+	  $scope = US_GLOBAL;
+	}
+      error $where, $text, uniq_scope => $scope;
+    }
+
+  push (@cond_stack, make_conditional_string ($negate, $cond));
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_else ($NEGATE, $COND, $WHERE)
+# ----------------------------------------
+sub cond_stack_else
+{
+  my ($negate, $cond, $where) = @_;
+
+  if (! @cond_stack)
+    {
+      error $where, "else without if";
+      return FALSE;
+    }
+
+  $cond_stack[$#cond_stack] =
+    Automake::Condition::conditional_negate ($cond_stack[$#cond_stack]);
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "else reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+# $COND
+# cond_stack_endif ($NEGATE, $COND, $WHERE)
+# -----------------------------------------
+sub cond_stack_endif
+{
+  my ($negate, $cond, $where) = @_;
+  my $old_cond;
+
+  if (! @cond_stack)
+    {
+      error $where, "endif without if";
+      return TRUE;
+    }
+
+  # If $COND is given, check against it.
+  if (defined $cond)
+    {
+      $cond = make_conditional_string ($negate, $cond);
+
+      error ($where, "endif reminder ($negate$cond) incompatible with "
+	     . "current conditional: $cond_stack[$#cond_stack]")
+	if $cond_stack[$#cond_stack] ne $cond;
+    }
+
+  pop @cond_stack;
+
+  return new Automake::Condition (@cond_stack);
+}
+
+
+
+
+
+## ------------------------ ##
+## Handling the variables.  ##
+## ------------------------ ##
+
+
+# define_pretty_variable ($VAR, $COND, $WHERE, @VALUE)
+# ----------------------------------------------------
+# Like define_variable, but the value is a list, and the variable may
+# be defined conditionally.  The second argument is the condition
+# under which the value should be defined; this should be the empty
+# string to define the variable unconditionally.  The third argument
+# is a list holding the values to use for the variable.  The value is
+# pretty printed in the output file.
+sub define_pretty_variable
+{
+    my ($var, $cond, $where, @value) = @_;
+
+    if (! vardef ($var, $cond))
+    {
+	Automake::Variable::define ($var, VAR_AUTOMAKE, '', $cond, "@value",
+				    '', $where, VAR_PRETTY);
+	rvar ($var)->rdef ($cond)->set_seen;
+    }
+}
+
+
+# define_variable ($VAR, $VALUE, $WHERE)
+# --------------------------------------
+# Define a new Automake Makefile variable VAR to VALUE, but only if
+# not already defined.
+sub define_variable
+{
+    my ($var, $value, $where) = @_;
+    define_pretty_variable ($var, TRUE, $where, $value);
+}
+
+
+# define_files_variable ($VAR, \@BASENAME, $EXTENSION, $WHERE)
+# ------------------------------------------------------------
+# Define $VAR, whose content is the list of file names composed of
+# each @BASENAME and the $EXTENSION.
+sub define_files_variable ($\@$$)
+{
+  my ($var, $basename, $extension, $where) = @_;
+  define_variable ($var,
+		   join (' ', map { "$_.$extension" } @$basename),
+		   $where);
+}
+
+
+# Like define_variable, but define a variable to be the configure
+# substitution of the same name.
+sub define_configure_variable
+{
+  my ($var) = @_;
+  # Some variables we do not want to output.  For instance it
+  # would be a bad idea to output `U = @U@` when `@U@` can be
+  # substituted as `\`.
+  my $pretty = exists $ignored_configure_vars{$var} ? VAR_SILENT : VAR_ASIS;
+  Automake::Variable::define ($var, VAR_CONFIGURE, '', TRUE, subst ($var),
+			      '', $configure_vars{$var}, $pretty);
+}
+
+
+# define_compiler_variable ($LANG)
+# --------------------------------
+# Define a compiler variable.  We also handle defining the 'LT'
+# version of the command when using libtool.
+sub define_compiler_variable
+{
+    my ($lang) = @_;
+
+    my ($var, $value) = ($lang->compiler, $lang->compile);
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    define_variable ($var, $value, INTERNAL);
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	define_variable ("LT$var",
+                         "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS)"
+                         . " \$(LIBTOOLFLAGS) --mode=compile $value",
+                         INTERNAL);
+      }
+    define_verbose_tagvar ($lang->ccer || 'GEN');
+}
+
+
+sub define_linker_variable
+{
+    my ($lang) = @_;
+
+    my $libtool_tag = '';
+    $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+      if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+    # CCLD = $(CC).
+    define_variable ($lang->lder, $lang->ld, INTERNAL);
+    # CCLINK = $(CCLD) blah blah...
+    my $link = '';
+    if (var ('LIBTOOL'))
+      {
+	my $verbose = define_verbose_libtool ();
+	$link = "\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) "
+		. "\$(LIBTOOLFLAGS) --mode=link ";
+      }
+    define_variable ($lang->linker, $link . $lang->link, INTERNAL);
+    define_variable ($lang->compiler, $lang, INTERNAL);
+    define_verbose_tagvar ($lang->lder || 'GEN');
+}
+
+sub define_per_target_linker_variable
+{
+  my ($linker, $target) = @_;
+
+  # If the user wrote a custom link command, we don't define ours.
+  return "${target}_LINK"
+    if set_seen "${target}_LINK";
+
+  my $xlink = $linker ? $linker : 'LINK';
+
+  my $lang = $link_languages{$xlink};
+  prog_error "Unknown language for linker variable '$xlink'"
+    unless $lang;
+
+  my $link_command = $lang->link;
+  if (var 'LIBTOOL')
+    {
+      my $libtool_tag = '';
+      $libtool_tag = '--tag=' . $lang->libtool_tag . ' '
+	if $lang->libtool_tag && exists $libtool_tags{$lang->libtool_tag};
+
+      my $verbose = define_verbose_libtool ();
+      $link_command =
+	"\$(LIBTOOL) $verbose $libtool_tag\$(AM_LIBTOOLFLAGS) \$(LIBTOOLFLAGS) "
+	. "--mode=link " . $link_command;
+    }
+
+  # Rewrite each occurrence of 'AM_$flag' in the link
+  # command into '${derived}_$flag' if it exists.
+  my $orig_command = $link_command;
+  my @flags = (@{$lang->flags}, 'LDFLAGS');
+  push @flags, 'LIBTOOLFLAGS' if var 'LIBTOOL';
+  for my $flag (@flags)
+    {
+      my $val = "${target}_$flag";
+      $link_command =~ s/\(AM_$flag\)/\($val\)/
+	if set_seen ($val);
+    }
+
+  # If the computed command is the same as the generic command, use
+  # the command linker variable.
+  return ($lang->linker, $lang->lder)
+    if $link_command eq $orig_command;
+
+  define_variable ("${target}_LINK", $link_command, INTERNAL);
+  return ("${target}_LINK", $lang->lder);
+}
+
+################################################################
+
+# check_trailing_slash ($WHERE, $LINE)
+# ------------------------------------
+# Return 1 iff $LINE ends with a slash.
+# Might modify $LINE.
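+# Illustrative example: a continuation line with stray whitespace after
+# the trailing backslash triggers the 'syntax' warning below, has the
+# whitespace stripped, and makes the function return 1.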
+sub check_trailing_slash ($\$)
+{
+  my ($where, $line) = @_;
+
+  # Ignore '##' lines.
+  return 0 if $$line =~ /$IGNORE_PATTERN/o;
+
+  # Catch and fix a common error.
+  msg "syntax", $where, "whitespace following trailing backslash"
+    if $$line =~ s/\\\s+\n$/\\\n/;
+
+  return $$line =~ /\\$/;
+}
+
+
+# read_am_file ($AMFILE, $WHERE, $RELDIR)
+# ---------------------------------------
+# Read Makefile.am and set up %contents.  Simultaneously copy lines
+# from Makefile.am into $output_trailer, or define variables as
+# appropriate.  NOTE we put rules in the trailer section.  We want
+# user rules to come after our generated stuff.
+sub read_am_file
+{
+    my ($amfile, $where, $reldir) = @_;
+    my $canon_reldir = &canonicalize ($reldir);
+
+    my $am_file = new Automake::XFile ("< $amfile");
+    verb "reading $amfile";
+
+    # Keep track of the youngest output dependency.
+    my $mtime = mtime $amfile;
+    $output_deps_greatest_timestamp = $mtime
+      if $mtime > $output_deps_greatest_timestamp;
+
+    my $spacing = '';
+    my $comment = '';
+    my $blank = 0;
+    my $saw_bk = 0;
+    my $var_look = VAR_ASIS;
+
+    use constant IN_VAR_DEF => 0;
+    use constant IN_RULE_DEF => 1;
+    use constant IN_COMMENT => 2;
+    my $prev_state = IN_RULE_DEF;
+
+    while ($_ = $am_file->getline)
+    {
+	$where->set ("$amfile:$.");
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    $blank = 1;
+	    # Flush all comments seen so far.
+	    if ($comment ne '')
+	    {
+		$output_vars .= $comment;
+		$comment = '';
+	    }
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    # Stick comments before the incoming macro or rule.  Make
+	    # sure a blank line precedes the first block of comments.
+	    $spacing = "\n" unless $blank;
+	    $blank = 1;
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	else
+	{
+	    last;
+	}
+	$saw_bk = check_trailing_slash ($where, $_);
+    }
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    my $last_var_name = '';
+    my $last_var_type = '';
+    my $last_var_value = '';
+    my $last_where;
+    # FIXME: shouldn't use $_ in this loop; it is too big.
+    while ($_)
+    {
+	$where->set ("$amfile:$.");
+
+	# Make sure the line is \n-terminated.
+	chomp;
+	$_ .= "\n";
+
+	# Don't look at MAINTAINER_MODE_TRUE here.  That shouldn't be
+	# used by users.  @MAINT@ is an anachronism now.
+	$_ =~ s/\@MAINT\@//g
+	    unless $seen_maint_mode;
+
+	my $new_saw_bk = check_trailing_slash ($where, $_);
+
+	if ($reldir eq '.')
+	  {
+	    # If present, eat the following '_' or '/', converting
+	    # "%reldir%/foo" and "%canon_reldir%_foo" into plain "foo"
+	    # when $reldir is '.'.
+	    $_ =~ s,%(D|reldir)%/,,g;
+	    $_ =~ s,%(C|canon_reldir)%_,,g;
+	  }
+	$_ =~ s/%(D|reldir)%/${reldir}/g;
+	$_ =~ s/%(C|canon_reldir)%/${canon_reldir}/g;
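+	# (Illustrative: in sub/Makefile.am, "%reldir%/foo.c" becomes
+	# "sub/foo.c" and "%canon_reldir%_SOURCES" becomes "sub_SOURCES";
+	# when $reldir is '.' the prefixes are simply dropped above.)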
+
+	if (/$IGNORE_PATTERN/o)
+	{
+	    # Merely delete comments beginning with two hashes.
+
+	    # Keep any backslash from the previous line.
+	    $new_saw_bk = $saw_bk;
+	}
+	elsif (/$WHITE_PATTERN/o)
+	{
+	    # Stick a single white line before the incoming macro or rule.
+	    $spacing = "\n";
+	    error $where, "blank line following trailing backslash"
+	      if $saw_bk;
+	}
+	elsif (/$COMMENT_PATTERN/o)
+	{
+	    error $where, "comment following trailing backslash"
+	      if $saw_bk && $prev_state != IN_COMMENT;
+
+	    # Stick comments before the incoming macro or rule.
+	    $comment .= $spacing . $_;
+	    $spacing = '';
+	    $prev_state = IN_COMMENT;
+	}
+	elsif ($saw_bk)
+	{
+	    if ($prev_state == IN_RULE_DEF)
+	    {
+	      my $cond = new Automake::Condition @cond_stack;
+	      $output_trailer .= $cond->subst_string;
+	      $output_trailer .= $_;
+	    }
+	    elsif ($prev_state == IN_COMMENT)
+	    {
+		# If the line doesn't start with a '#', add it.
+		# We do this because a continued comment like
+		#   # A = foo \
+		#         bar \
+		#         baz
+		# is not portable.  BSD make doesn't honor
+		# escaped newlines in comments.
+		s/^#?/#/;
+		$comment .= $spacing . $_;
+	    }
+	    else # $prev_state == IN_VAR_DEF
+	    {
+	      $last_var_value .= ' '
+		unless $last_var_value =~ /\s$/;
+	      $last_var_value .= $_;
+
+	      if (!/\\$/)
+		{
+		  Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					      $last_var_type, $cond,
+					      $last_var_value, $comment,
+					      $last_where, VAR_ASIS)
+		    if $cond != FALSE;
+		  $comment = $spacing = '';
+		}
+	    }
+	}
+
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $where);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $where);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $where);
+	  }
+
+	elsif (/$RULE_PATTERN/o)
+	{
+	    # Found a rule.
+	    $prev_state = IN_RULE_DEF;
+
+	    # For now we have to output all definitions of user rules
+	    # and can't diagnose duplicates (see the comment in
+	    # Automake::Rule::define). So we go on and ignore the return value.
+	    Automake::Rule::define ($1, $amfile, RULE_USER, $cond, $where);
+
+	    check_variable_expansions ($_, $where);
+
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	}
+	elsif (/$ASSIGNMENT_PATTERN/o)
+	{
+	    # Found a macro definition.
+	    $prev_state = IN_VAR_DEF;
+	    $last_var_name = $1;
+	    $last_var_type = $2;
+	    $last_var_value = $3;
+	    $last_where = $where->clone;
+	    if ($3 ne '' && substr ($3, -1) eq "\\")
+	      {
+		# We preserve the '\' because otherwise the long lines
+		# that are generated will be truncated by broken
+		# 'sed's.
+		$last_var_value = $3 . "\n";
+	      }
+	    # Normally we try to output variable definitions in the
+	    # same format as they were input.  However, POSIX-compliant
+	    # systems are not required to support lines longer than
+	    # 2048 bytes (most notably, some sed implementations are
+	    # limited to 4000 bytes, and sed is used by config.status
+	    # to rewrite Makefile.in into Makefile).  Moreover, nobody
+	    # would really write such long lines by hand, since they are
+	    # hardly maintainable.  So if a line is longer than 1000
+	    # bytes (an arbitrary limit), assume it has been
+	    # automatically generated by some tool, and flatten the
+	    # variable definition.  Otherwise, keep the variable as it
+	    # has been input.
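+	    # Illustrative example (assumed, not from the upstream
+	    # comments): a tool-generated assignment such as
+	    #   am__objects = a.o b.o c.o ...   (value over 1000 bytes)
+	    # is flagged VAR_PRETTY here, so it gets re-wrapped on output
+	    # rather than copied verbatim into Makefile.in.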
+	    $var_look = VAR_PRETTY if length ($last_var_value) >= 1000;
+
+	    if (!/\\$/)
+	      {
+		Automake::Variable::define ($last_var_name, VAR_MAKEFILE,
+					    $last_var_type, $cond,
+					    $last_var_value, $comment,
+					    $last_where, $var_look)
+		  if $cond != FALSE;
+		$comment = $spacing = '';
+		$var_look = VAR_ASIS;
+	      }
+	}
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    my $path = $1;
+
+	    if ($path =~ s/^\$\(top_srcdir\)\///)
+	      {
+		push (@include_stack, "\$\(top_srcdir\)/$path");
+		# Distribute any included file.
+
+		# Always use the $(top_srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(top_srcdir\)/$path");
+	      }
+	    else
+	      {
+		$path =~ s/\$\(srcdir\)\///;
+		push (@include_stack, "\$\(srcdir\)/$path");
+		# Always use the $(srcdir) prefix in DIST_COMMON,
+		# otherwise OSF make will implicitly copy the included
+		# file in the build tree during "make distdir" to satisfy
+		# the dependency.
+		# (subdir-am-cond.sh and subdir-ac-cond.sh will fail)
+		push_dist_common ("\$\(srcdir\)/$path");
+		$path = $relative_dir . "/" . $path if $relative_dir ne '.';
+	      }
+	    my $new_reldir = File::Spec->abs2rel ($path, $relative_dir);
+	    $new_reldir = '.' if $new_reldir !~ s,/[^/]*$,,;
+	    $where->push_context ("'$path' included from here");
+	    read_am_file ($path, $where, $new_reldir);
+	    $where->pop_context;
+	}
+	else
+	{
+	    # This isn't an error; it is probably a continued rule.
+	    # In fact, this is what we assume.
+	    $prev_state = IN_RULE_DEF;
+	    check_variable_expansions ($_, $where);
+	    $output_trailer .= $comment . $spacing;
+	    my $cond = new Automake::Condition @cond_stack;
+	    $output_trailer .= $cond->subst_string;
+	    $output_trailer .= $_;
+	    $comment = $spacing = '';
+	    error $where, "'#' comment at start of rule is unportable"
+	      if $_ =~ /^\t\s*\#/;
+	}
+
+	$saw_bk = $new_saw_bk;
+	$_ = $am_file->getline;
+    }
+
+    $output_trailer .= $comment;
+
+    error ($where, "trailing backslash on last line")
+      if $saw_bk;
+
+    error ($where, (@cond_stack ? "unterminated conditionals: @cond_stack"
+		    : "too many conditionals closed in include file"))
+      if "@saved_cond_stack" ne "@cond_stack";
+}
+
+
+# A helper for read_main_am_file which initializes configure variables
+# and variables from header-vars.am.
+sub define_standard_variables ()
+{
+  my $saved_output_vars = $output_vars;
+  my ($comments, undef, $rules) =
+    file_contents_internal (1, "$libdir/am/header-vars.am",
+			    new Automake::Location);
+
+  foreach my $var (sort keys %configure_vars)
+    {
+      define_configure_variable ($var);
+    }
+
+  $output_vars .= $comments . $rules;
+}
+
+
+# read_main_am_file ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+sub read_main_am_file
+{
+    my ($amfile, $infile) = @_;
+
+    # This supports the strange variable tricks we are about to play.
+    prog_error ("variable defined before read_main_am_file\n" . variables_dump ())
+      if (scalar (variables) > 0);
+
+    # Generate copyright header for generated Makefile.in.
+    # We do discard the output of predefined variables, handled below.
+    $output_vars = ("# " . basename ($infile) . " generated by automake "
+		   . $VERSION . " from " . basename ($amfile) . ".\n");
+    $output_vars .= '# ' . subst ('configure_input') . "\n";
+    $output_vars .= $gen_copyright;
+
+    # We want to predefine as many variables as possible.  This lets
+    # the user set them with '+=' in Makefile.am.
+    define_standard_variables;
+
+    # Read user file, which might override some of our values.
+    read_am_file ($amfile, new Automake::Location, '.');
+}
+
+
+
+################################################################
+
+# $STRING
+# flatten ($ORIGINAL_STRING)
+# --------------------------
+sub flatten
+{
+  $_ = shift;
+
+  s/\\\n//somg;
+  s/\s+/ /g;
+  s/^ //;
+  s/ $//;
+
+  return $_;
+}
+
+
+# transform_token ($TOKEN, \%PAIRS, $KEY)
+# ---------------------------------------
+# Return the value associated with $KEY in %PAIRS, as used on $TOKEN
+# (which should be ?KEY? or any of the special %% requests).
+sub transform_token ($\%$)
+{
+  my ($token, $transform, $key) = @_;
+  my $res = $transform->{$key};
+  prog_error "Unknown key '$key' in '$token'" unless defined $res;
+  return $res;
+}
+
+
+# transform ($TOKEN, \%PAIRS)
+# ---------------------------
+# If ($TOKEN, $VAL) is in %PAIRS:
+#   - replaces %KEY% with $VAL,
+#   - enables/disables ?KEY? and ?!KEY?,
+#   - replaces %?KEY% with TRUE or FALSE.
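+#
+# Illustrative example (hypothetical values, not from any actual
+# caller): with the pairs (FOO => 'bar', COND => 0),
+#   '%FOO%'    becomes 'bar',
+#   '%?COND%'  becomes 'FALSE',
+#   '?COND?'   becomes '##%' (so the line is later stripped), and
+#   '?!COND?'  becomes ''    (so the line is kept).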
+sub transform ($\%)
+{
+  my ($token, $transform) = @_;
+
+  # %KEY%.
+  # Must be before the following pattern to exclude the case
+  # when there is neither IFTRUE nor IFFALSE.
+  if ($token =~ /^%([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1);
+    }
+  # %?KEY%.
+  elsif ($token =~ /^%\?([\w\-]+)%$/)
+    {
+      return transform_token ($token, %$transform, $1) ? 'TRUE' : 'FALSE';
+    }
+  # ?KEY? and ?!KEY?.
+  elsif ($token =~ /^ \? (!?) ([\w\-]+) \? $/x)
+    {
+      my $neg = ($1 eq '!') ? 1 : 0;
+      my $val = transform_token ($token, %$transform, $2);
+      return (!!$val == $neg) ? '##%' : '';
+    }
+  else
+    {
+      prog_error "Unknown request format: $token";
+    }
+}
+
+# $TEXT
+# preprocess_file ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return the result.
+# No extra parsing or post-processing is done (i.e., no recognition of
+# rule declarations or of make variable definitions).
+sub preprocess_file
+{
+  my ($file, %transform) = @_;
+
+  # Complete %transform with global options.
+  # Note that %transform goes last, so it overrides global options.
+  %transform = ( 'MAINTAINER-MODE'
+		 => $seen_maint_mode ? subst ('MAINTAINER_MODE_TRUE') : '',
+
+		 'XZ'          => !! option 'dist-xz',
+		 'LZIP'        => !! option 'dist-lzip',
+		 'BZIP2'       => !! option 'dist-bzip2',
+		 'COMPRESS'    => !! option 'dist-tarZ',
+		 'GZIP'        =>  ! option 'no-dist-gzip',
+		 'SHAR'        => !! option 'dist-shar',
+		 'ZIP'         => !! option 'dist-zip',
+
+		 'INSTALL-INFO' =>  ! option 'no-installinfo',
+		 'INSTALL-MAN'  =>  ! option 'no-installman',
+		 'CK-NEWS'      => !! option 'check-news',
+
+		 'SUBDIRS'      => !! var ('SUBDIRS'),
+		 'TOPDIR_P'     => $relative_dir eq '.',
+
+		 'BUILD'    => ($seen_canonical >= AC_CANONICAL_BUILD),
+		 'HOST'     => ($seen_canonical >= AC_CANONICAL_HOST),
+		 'TARGET'   => ($seen_canonical >= AC_CANONICAL_TARGET),
+
+		 'LIBTOOL'      => !! var ('LIBTOOL'),
+		 'NONLIBTOOL'   => 1,
+		%transform);
+
+  if (! defined ($_ = $am_file_cache{$file}))
+    {
+      verb "reading $file";
+      # Swallow the whole file.
+      my $fc_file = new Automake::XFile "< $file";
+      my $saved_dollar_slash = $/;
+      undef $/;
+      $_ = $fc_file->getline;
+      $/ = $saved_dollar_slash;
+      $fc_file->close;
+      # Remove ##-comments.
+      # Besides, we don't need more than two consecutive new-lines.
+      s/(?:$IGNORE_PATTERN|(?<=\n\n)\n+)//gom;
+      # Remember the contents of the just-read file.
+      $am_file_cache{$file} = $_;
+    }
+
+  # Substitute Automake template tokens.
+  s/(?: % \?? [\w\-]+ %
+      | \? !? [\w\-]+ \?
+    )/transform($&, %transform)/gex;
+  # transform() may have added some ##%-comments to strip.
+  # (we use '##%' instead of '##' so we can distinguish ##%##%##% from
+  # ####### and do not remove the latter.)
+  s/^[ \t]*(?:##%)+.*\n//gm;
+
+  return $_;
+}
+
+
+# @PARAGRAPHS
+# make_paragraphs ($MAKEFILE, [%TRANSFORM])
+# -----------------------------------------
+# Load a $MAKEFILE, apply the %TRANSFORM, and return it as a list of
+# paragraphs.
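+#
+# Illustrative example (assumed, not from the upstream comments): the
+# three input lines
+#   install-data-local:
+#   <TAB>$(MKDIR_P) "$(DESTDIR)$(pkgdatadir)"
+#   all-local: foo
+# produce two paragraphs, because the tab-indented recipe line is
+# folded into the rule that precedes it.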
+sub make_paragraphs
+{
+  my ($file, %transform) = @_;
+  $transform{FIRST} = !$transformed_files{$file};
+  $transformed_files{$file} = 1;
+
+  my @lines = split /(?<!\\)\n/, preprocess_file ($file, %transform);
+  my @res;
+
+  while (defined ($_ = shift @lines))
+    {
+      my $paragraph = $_;
+      # If we are a rule, eat lines as long as they start with a tab.
+      if (/$RULE_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines) && $_ =~ /^\t/)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      # If we are a comment, eat as many comments as we can.
+      elsif (/$COMMENT_PATTERN/smo)
+	{
+	  while (defined ($_ = shift @lines)
+		 && $_ =~ /$COMMENT_PATTERN/smo)
+	    {
+	      $paragraph .= "\n$_";
+	    }
+	  unshift (@lines, $_);
+	}
+
+      push @res, $paragraph;
+    }
+
+  return @res;
+}
+
+
+
+# ($COMMENT, $VARIABLES, $RULES)
+# file_contents_internal ($IS_AM, $FILE, $WHERE, [%TRANSFORM])
+# ------------------------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.  $IS_AM is true iff the caller
+# is reading an Automake file (as opposed to the user's Makefile.am).
+sub file_contents_internal
+{
+    my ($is_am, $file, $where, %transform) = @_;
+
+    $where->set ($file);
+
+    my $result_vars = '';
+    my $result_rules = '';
+    my $comment = '';
+    my $spacing = '';
+
+    # The following flags are used to track rules spanning across
+    # multiple paragraphs.
+    my $is_rule = 0;		# 1 if we are processing a rule.
+    my $discard_rule = 0;	# 1 if the current rule should not be output.
+
+    # We save the conditional stack on entry, and then check to make
+    # sure it is the same on exit.  This lets us conditionally include
+    # other files.
+    my @saved_cond_stack = @cond_stack;
+    my $cond = new Automake::Condition (@cond_stack);
+
+    foreach (make_paragraphs ($file, %transform))
+    {
+	# FIXME: no line number available.
+	$where->set ($file);
+
+	# Sanity checks.
+	error $where, "blank line following trailing backslash:\n$_"
+	  if /\\$/;
+	error $where, "comment following trailing backslash:\n$_"
+	  if /\\#/;
+
+	if (/^$/)
+	{
+	    $is_rule = 0;
+	    # Stick empty line before the incoming macro or rule.
+	    $spacing = "\n";
+	}
+	elsif (/$COMMENT_PATTERN/mso)
+	{
+	    $is_rule = 0;
+	    # Stick comments before the incoming macro or rule.
+	    $comment = "$_\n";
+	}
+
+	# Handle inclusion of other files.
+	elsif (/$INCLUDE_PATTERN/o)
+	{
+	    if ($cond != FALSE)
+	      {
+		my $file = ($is_am ? "$libdir/am/" : '') . $1;
+		$where->push_context ("'$file' included from here");
+		# N-ary '.=' fails.
+		my ($com, $vars, $rules)
+		  = file_contents_internal ($is_am, $file, $where, %transform);
+		$where->pop_context;
+		$comment .= $com;
+		$result_vars .= $vars;
+		$result_rules .= $rules;
+	      }
+	}
+
+	# Handling the conditionals.
+	elsif (/$IF_PATTERN/o)
+	  {
+	    $cond = cond_stack_if ($1, $2, $file);
+	  }
+	elsif (/$ELSE_PATTERN/o)
+	  {
+	    $cond = cond_stack_else ($1, $2, $file);
+	  }
+	elsif (/$ENDIF_PATTERN/o)
+	  {
+	    $cond = cond_stack_endif ($1, $2, $file);
+	  }
+
+	# Handling rules.
+	elsif (/$RULE_PATTERN/mso)
+	{
+	  $is_rule = 1;
+	  $discard_rule = 0;
+	  # Separate relationship from optional actions: the first
+	  # "newline tab" not preceded by a backslash (continuation
+	  # line).
+	  my $paragraph = $_;
+	  /^(.*?)(?:(?<!\\)\n(\t.*))?$/s;
+	  my ($relationship, $actions) = ($1, $2 || '');
+
+	  # Separate targets from dependencies: the first colon.
+	  $relationship =~ /^([^:]+\S+) *: *(.*)$/som;
+	  my ($targets, $dependencies) = ($1, $2);
+	  # Remove the escaped new lines.
+	  # I don't know why, but I have to use a tmp $flat_deps.
+	  my $flat_deps = flatten ($dependencies);
+	  my @deps = split (' ', $flat_deps);
+
+	  foreach (split (' ', $targets))
+	    {
+	      # FIXME: 1. We are not robust to people defining several targets
+	      # at once, only some of them being in %dependencies.  The
+	      # actions from the targets in %dependencies are usually generated
+	      # from the content of %actions, but if some targets in $targets
+	      # are not in %dependencies the ELSE branch will output
+	      # a rule for all $targets (i.e. the targets which are both
+	      # in %dependencies and $targets will have two rules).
+
+	      # FIXME: 2. The logic here is not able to output a
+	      # multi-paragraph rule several times (e.g. for each condition
+	      # it is defined for) because it only knows the first paragraph.
+
+	      # FIXME: 3. We are not robust to people defining a subset
+	      # of a previously defined "multiple-target" rule.  E.g.
+	      # 'foo:' after 'foo bar:'.
+
+	      # Output only if not in FALSE.
+	      if (defined $dependencies{$_} && $cond != FALSE)
+		{
+		  depend ($_, @deps);
+		  register_action ($_, $actions);
+		}
+	      else
+		{
+		  # Free-lance dependency.  Output the rule for all the
+		  # targets instead of one by one.
+		  my @undefined_conds =
+		    Automake::Rule::define ($targets, $file,
+					    $is_am ? RULE_AUTOMAKE : RULE_USER,
+					    $cond, $where);
+		  for my $undefined_cond (@undefined_conds)
+		    {
+		      my $condparagraph = $paragraph;
+		      $condparagraph =~ s/^/$undefined_cond->subst_string/gme;
+		      $result_rules .= "$spacing$comment$condparagraph\n";
+		    }
+		  if (scalar @undefined_conds == 0)
+		    {
+		      # Remember to discard next paragraphs
+		      # if they belong to this rule.
+		      # (but see also FIXME: #2 above.)
+		      $discard_rule = 1;
+		    }
+		  $comment = $spacing = '';
+		  last;
+		}
+	    }
+	}
+
+	elsif (/$ASSIGNMENT_PATTERN/mso)
+	{
+	    my ($var, $type, $val) = ($1, $2, $3);
+	    error $where, "variable '$var' with trailing backslash"
+	      if /\\$/;
+
+	    $is_rule = 0;
+
+	    Automake::Variable::define ($var,
+					$is_am ? VAR_AUTOMAKE : VAR_MAKEFILE,
+					$type, $cond, $val, $comment, $where,
+					VAR_ASIS)
+	      if $cond != FALSE;
+
+	    $comment = $spacing = '';
+	}
+	else
+	{
+	    # This isn't an error; it is probably some tokens which
+	    # configure is supposed to replace, such as '@SET-MAKE@',
+	    # or some part of a rule cut by an if/endif.
+	    if (! $cond->false && ! ($is_rule && $discard_rule))
+	      {
+		s/^/$cond->subst_string/gme;
+		$result_rules .= "$spacing$comment$_\n";
+	      }
+	    $comment = $spacing = '';
+	}
+    }
+
+    error ($where, @cond_stack ?
+	   "unterminated conditionals: @cond_stack" :
+	   "too many conditionals closed in include file")
+      if "@saved_cond_stack" ne "@cond_stack";
+
+    return ($comment, $result_vars, $result_rules);
+}
+
+
+# $CONTENTS
+# file_contents ($BASENAME, $WHERE, [%TRANSFORM])
+# -----------------------------------------------
+# Return contents of a file from $libdir/am, automatically skipping
+# macros or rules which are already known.
+sub file_contents
+{
+    my ($basename, $where, %transform) = @_;
+    my ($comments, $variables, $rules) =
+      file_contents_internal (1, "$libdir/am/$basename.am", $where,
+			      %transform);
+    return "$comments$variables$rules";
+}
+
+
+# @PREFIX
+# am_primary_prefixes ($PRIMARY, $CAN_DIST, @PREFIXES)
+# ----------------------------------------------------
+# Find all variable prefixes that are used for install directories.  A
+# prefix 'zar' qualifies iff:
+#
+# * 'zardir' is a variable.
+# * 'zar_PRIMARY' is a variable.
+#
+# As a side effect, it looks for misspellings.  It is an error to have
+# a variable ending in a "reserved" suffix whose prefix is unknown, e.g.
+# "bni_PROGRAMS".  However, unusual prefixes are allowed if a variable
+# of the same name (with "dir" appended) exists.  For instance, if the
+# variable "zardir" is defined, then "zar_PROGRAMS" becomes valid.
+# This is to provide a little extra flexibility in those cases which
+# need it.
+sub am_primary_prefixes
+{
+  my ($primary, $can_dist, @prefixes) = @_;
+
+  local $_;
+  my %valid = map { $_ => 0 } @prefixes;
+  $valid{'EXTRA'} = 0;
+  foreach my $var (variables $primary)
+    {
+      # Automake is allowed to define variables that look like primaries
+      # but which aren't.  E.g. INSTALL_sh_DATA.
+      # Autoconf can also define variables like INSTALL_DATA, so
+      # ignore all configure variables (at least those which are not
+      # redefined in Makefile.am).
+      # FIXME: We should make sure that these variables are not
+      # conditionally defined (or else adjust the condition below).
+      my $def = $var->def (TRUE);
+      next if $def && $def->owner != VAR_MAKEFILE;
+
+      my $varname = $var->name;
+
+      if ($varname =~ /^(nobase_)?(dist_|nodist_)?(.*)_[[:alnum:]]+$/)
+	{
+	  my ($base, $dist, $X) = ($1 || '', $2 || '', $3 || '');
+	  if ($dist ne '' && ! $can_dist)
+	    {
+	      err_var ($var,
+		       "invalid variable '$varname': 'dist' is forbidden");
+	    }
+	  # Standard directories must be explicitly allowed.
+	  elsif (! defined $valid{$X} && exists $standard_prefix{$X})
+	    {
+	      err_var ($var,
+		       "'${X}dir' is not a legitimate directory " .
+		       "for '$primary'");
+	    }
+	  # A directory that is not explicitly valid is allowed if ${X}dir
+	  # is defined.
+	  elsif (! defined $valid{$X} &&
+		 $var->requires_variables ("'$varname' is used", "${X}dir"))
+	    {
+	      # Nothing to do.  Any error message has been output
+	      # by $var->requires_variables.
+	    }
+	  else
+	    {
+	      # Ensure all extended prefixes are actually used.
+	      $valid{"$base$dist$X"} = 1;
+	    }
+	}
+      else
+	{
+	  prog_error "unexpected variable name: $varname";
+	}
+    }
+
+  # Return only those which are actually defined.
+  return sort grep { var ($_ . '_' . $primary) } keys %valid;
+}
+
+
+# am_install_var (-OPTION..., file, HOW, where...)
+# ------------------------------------------------
+#
+# Handle 'where_HOW' variable magic.  Does all lookups, generates
+# install code, and possibly generates code to define the primary
+# variable.  The first argument is the name of the .am file to munge,
+# the second argument is the primary variable (e.g. HEADERS), and all
+# subsequent arguments are possible installation locations.
+#
+# Returns a list of [$location, $value] pairs, where the
+# $value's are the values of all where_HOW variables, and $location is
+# their associated location (the place where their parent variables were
+# defined).
+#
+# FIXME: this should be rewritten to be cleaner.  It should be broken
+# up into multiple functions.
+#
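+# A hypothetical sketch of the flow (not a literal trace of any caller
+# in this file): with $primary = 'SCRIPTS' and a Makefile.am containing
+# 'bin_SCRIPTS = foo bar', the prefix 'bin' is validated by
+# am_primary_prefixes(), install rules from the named .am fragment are
+# emitted for '$(bin_SCRIPTS)', 'SCRIPTS' is defined to collect it, and
+# the return value is a uniquified, sorted list of [$location, 'bar'],
+# [$location, 'foo'] pairs.
+#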
+sub am_install_var
+{
+  my (@args) = @_;
+
+  my $do_require = 1;
+  my $can_dist = 0;
+  my $default_dist = 0;
+  while (@args)
+    {
+      if ($args[0] eq '-noextra')
+	{
+	  $do_require = 0;
+	}
+      elsif ($args[0] eq '-candist')
+	{
+	  $can_dist = 1;
+	}
+      elsif ($args[0] eq '-defaultdist')
+	{
+	  $default_dist = 1;
+	  $can_dist = 1;
+	}
+      elsif ($args[0] !~ /^-/)
+	{
+	  last;
+	}
+      shift (@args);
+    }
+
+  my ($file, $primary, @prefix) = @args;
+
+  # Now that configure substitutions are allowed in where_HOW
+  # variables, it is an error to actually define the primary.  We
+  # allow 'JAVA', as it is customarily used to mean the Java
+  # interpreter.  This is but one of several Java hacks.  Similarly,
+  # 'PYTHON' is customarily used to mean the Python interpreter.
+  reject_var $primary, "'$primary' is an anachronism"
+    unless $primary eq 'JAVA' || $primary eq 'PYTHON';
+
+  # Get the prefixes which are valid and actually used.
+  @prefix = am_primary_prefixes ($primary, $can_dist, @prefix);
+
+  # If a primary includes a configure substitution, then the EXTRA_
+  # form is required.  Otherwise we can't properly do our job.
+  my $require_extra;
+
+  my @used = ();
+  my @result = ();
+
+  foreach my $X (@prefix)
+    {
+      my $nodir_name = $X;
+      my $one_name = $X . '_' . $primary;
+      my $one_var = var $one_name;
+
+      my $strip_subdir = 1;
+      # If subdir prefix should be preserved, do so.
+      if ($nodir_name =~ /^nobase_/)
+	{
+	  $strip_subdir = 0;
+	  $nodir_name =~ s/^nobase_//;
+	}
+
+      # If files should be distributed, do so.
+      my $dist_p = 0;
+      if ($can_dist)
+	{
+	  $dist_p = (($default_dist && $nodir_name !~ /^nodist_/)
+		     || (! $default_dist && $nodir_name =~ /^dist_/));
+	  $nodir_name =~ s/^(dist|nodist)_//;
+	}
+
+
+      # Use the location of the currently processed variable.
+      # We are not processing a particular condition, so pick the first
+      # available.
+      my $tmpcond = $one_var->conditions->one_cond;
+      my $where = $one_var->rdef ($tmpcond)->location->clone;
+
+      # Append actual contents of where_PRIMARY variable to
+      # @result, skipping @substitutions@.
+      foreach my $locvals ($one_var->value_as_list_recursive (location => 1))
+	{
+	  my ($loc, $value) = @$locvals;
+	  # Skip configure substitutions.
+	  if ($value =~ /^\@.*\@$/)
+	    {
+	      if ($nodir_name eq 'EXTRA')
+		{
+		  error ($where,
+			 "'$one_name' contains configure substitution, "
+			 . "but shouldn't");
+		}
+	      # Check here to make sure variables defined in
+	      # configure.ac do not imply that EXTRA_PRIMARY
+	      # must be defined.
+	      elsif (! defined $configure_vars{$one_name})
+		{
+		  $require_extra = $one_name
+		    if $do_require;
+		}
+	    }
+	  else
+	    {
+	      # Strip any $(EXEEXT) suffix the user might have added,
+              # or this will confuse handle_source_transform() and
+              # check_canonical_spelling().
+	      # We'll add $(EXEEXT) back later anyway.
+	      # Do it here rather than in handle_programs so the
+              # uniquifying at the end of this function works.
+	      ${$locvals}[1] =~ s/\$\(EXEEXT\)$//
+	        if $primary eq 'PROGRAMS';
+
+	      push (@result, $locvals);
+	    }
+	}
+      # A blatant hack: we rewrite each _PROGRAMS primary to include
+      # EXEEXT.
+      append_exeext { 1 } $one_name
+	if $primary eq 'PROGRAMS';
+      # "EXTRA" shouldn't be used when generating clean targets,
+      # all, or install targets.  We used to warn if EXTRA_FOO was
+      # defined uselessly, but this was annoying.
+      next
+	if $nodir_name eq 'EXTRA';
+
+      if ($nodir_name eq 'check')
+	{
+	  push (@check, '$(' . $one_name . ')');
+	}
+      else
+	{
+	  push (@used, '$(' . $one_name . ')');
+	}
+
+      # Is this to be installed?
+      my $install_p = $nodir_name ne 'noinst' && $nodir_name ne 'check';
+
+      # If so, with install-exec? (or install-data?).
+      my $exec_p = ($nodir_name =~ /$EXEC_DIR_PATTERN/o);
+
+      my $check_options_p = $install_p && !! option 'std-options';
+
+      # Use the location of the currently processed variable as context.
+      $where->push_context ("while processing '$one_name'");
+
+      # The variable containing all files to distribute.
+      my $distvar = "\$($one_name)";
+      $distvar = shadow_unconditionally ($one_name, $where)
+	if ($dist_p && $one_var->has_conditional_contents);
+
+      # Singular form of $PRIMARY.
+      (my $one_primary = $primary) =~ s/S$//;
+      $output_rules .= file_contents ($file, $where,
+                                      PRIMARY     => $primary,
+                                      ONE_PRIMARY => $one_primary,
+                                      DIR         => $X,
+                                      NDIR        => $nodir_name,
+                                      BASE        => $strip_subdir,
+                                      EXEC        => $exec_p,
+                                      INSTALL     => $install_p,
+                                      DIST        => $dist_p,
+                                      DISTVAR     => $distvar,
+                                      'CK-OPTS'   => $check_options_p);
+    }
+
+  # The JAVA variable is used as the name of the Java interpreter.
+  # The PYTHON variable is used as the name of the Python interpreter.
+  if (@used && $primary ne 'JAVA' && $primary ne 'PYTHON')
+    {
+      # Define it.
+      define_pretty_variable ($primary, TRUE, INTERNAL, @used);
+      $output_vars .= "\n";
+    }
+
+  err_var ($require_extra,
+	   "'$require_extra' contains configure substitution,\n"
+	   . "but 'EXTRA_$primary' not defined")
+    if ($require_extra && ! var ('EXTRA_' . $primary));
+
+  # Push here because PRIMARY might be determined at configure time.
+  push (@all, '$(' . $primary . ')')
+    if @used && $primary ne 'JAVA' && $primary ne 'PYTHON';
+
+  # Make the result unique.  This lets the user use conditionals in
+  # a natural way, but still lets us program lazily -- we don't have
+  # to worry about handling a particular object more than once.
+  # We will keep only one location per object.
+  my %result = ();
+  for my $pair (@result)
+    {
+      my ($loc, $val) = @$pair;
+      $result{$val} = $loc;
+    }
+  my @l = sort keys %result;
+  return map { [$result{$_}->clone, $_] } @l;
+}
+
+
+################################################################
+
+# Each key in this hash is the name of a directory holding a
+# Makefile.in.  These variables are local to 'is_make_dir'.
+my %make_dirs = ();
+my $make_dirs_set = 0;
+
+# is_make_dir ($DIRECTORY)
+# ------------------------
+sub is_make_dir
+{
+    my ($dir) = @_;
+    if (! $make_dirs_set)
+    {
+	foreach my $iter (@configure_input_files)
+	{
+	    $make_dirs{dirname ($iter)} = 1;
+	}
+	# We also want to notice Makefile.in's.
+	foreach my $iter (@other_input_files)
+	{
+	    if ($iter =~ /Makefile\.in$/)
+	    {
+		$make_dirs{dirname ($iter)} = 1;
+	    }
+	}
+	$make_dirs_set = 1;
+    }
+    return defined $make_dirs{$dir};
+}
+
+################################################################
+
+# Find the aux dir.  This should match the algorithm used by
+# ./configure. (See the Autoconf documentation for
+# AC_CONFIG_AUX_DIR.)
+sub locate_aux_dir ()
+{
+  if (! $config_aux_dir_set_in_configure_ac)
+    {
+      # The default auxiliary directory is the first
+      # of ., .., or ../.. that contains install-sh.
+      # Assume . if install-sh doesn't exist yet.
+      for my $dir (qw (. .. ../..))
+	{
+	  if (-f "$dir/install-sh")
+	    {
+	      $config_aux_dir = $dir;
+	      last;
+	    }
+	}
+      $config_aux_dir = '.' unless $config_aux_dir;
+    }
+  # Avoid unsightly '/.'s.
+  $am_config_aux_dir =
+    '$(top_srcdir)' . ($config_aux_dir eq '.' ? "" : "/$config_aux_dir");
+  $am_config_aux_dir =~ s,/*$,,;
+}
+
+
+# push_required_file ($DIR, $FILE, $FULLFILE)
+# -------------------------------------------
+# Push the given file onto DIST_COMMON.
+sub push_required_file
+{
+  my ($dir, $file, $fullfile) = @_;
+
+  # If the file to be distributed is in the same directory as the
+  # currently processed Makefile.am, then we want to distribute it
+  # from this same Makefile.am.
+  if ($dir eq $relative_dir)
+    {
+      push_dist_common ($file);
+    }
+  # This is needed to allow a construct in a non-top-level Makefile.am
+  # to require a file in the build-aux directory (see at least the test
+  # script 'test-driver-is-distributed.sh').  This is related to the
+  # automake bug#9546.  Note that the use of $config_aux_dir instead
+  # of $am_config_aux_dir here is deliberate and necessary.
+  elsif ($dir eq $config_aux_dir)
+    {
+      push_dist_common ("$am_config_aux_dir/$file");
+    }
+  # FIXME: another special case, for AC_LIBOBJ/AC_LIBSOURCE support.
+  # We probably need some refactoring of this function and its callers,
+  # to have a more explicit and systematic handling of all the special
+  # cases; but, since there are only two of them, this is low-priority
+  # ATM.
+  elsif ($config_libobj_dir && $dir eq $config_libobj_dir)
+    {
+      # Avoid unsightly '/.'s.
+      my $am_config_libobj_dir =
+        '$(top_srcdir)' .
+        ($config_libobj_dir eq '.' ? "" : "/$config_libobj_dir");
+      $am_config_libobj_dir =~ s|/*$||;
+      push_dist_common ("$am_config_libobj_dir/$file");
+    }
+  elsif ($relative_dir eq '.' && ! is_make_dir ($dir))
+    {
+      # If we are doing the topmost directory, and the file is in a
+      # subdir which does not have a Makefile, then we distribute it
+      # here.
+
+      # If a required file is above the source tree, it is important
+      # to prefix it with '$(srcdir)' so that no VPATH search is
+      # performed.  Otherwise problems occur with Make implementations
+      # that rewrite and simplify rules whose dependencies are found in a
+      # VPATH location.  Here is an example with OSF1/Tru64 Make.
+      #
+      #   % cat Makefile
+      #   VPATH = sub
+      #   distdir: ../a
+      #	          echo ../a
+      #   % ls
+      #   Makefile a
+      #   % make
+      #   echo a
+      #   a
+      #
+      # Dependency '../a' was found in 'sub/../a', but this make
+      # implementation simplified it as 'a'.  (Note that the sub/
+      # directory does not even exist.)
+      #
+      # This kind of VPATH rewriting seems hard to cancel.  The
+      # distdir.am hack against VPATH rewriting works only when no
+      # simplification is done, i.e., for dependencies which are in
+      # subdirectories, not in enclosing directories.  Hence, in
+      # the latter case we use a full path to make sure no VPATH
+      # search occurs.
+      $fullfile = '$(srcdir)/' . $fullfile
+	if $dir =~ m,^\.\.(?:$|/),;
+
+      push_dist_common ($fullfile);
+    }
+  else
+    {
+      prog_error "a Makefile in relative directory $relative_dir " .
+                 "can't add files in directory $dir to DIST_COMMON";
+    }
+}
+
+
+# If a file name appears as a key in this hash, then it has already
+# been checked for.  This allows us not to report the same error more
+# than once.
+my %required_file_not_found = ();
+
+# required_file_check_or_copy ($WHERE, $DIRECTORY, $FILE)
+# -------------------------------------------------------
+# Verify that $FILE exists in $DIRECTORY, or install it there.
+sub required_file_check_or_copy
+{
+  my ($where, $dir, $file) = @_;
+
+  my $fullfile = "$dir/$file";
+  my $found_it = 0;
+  my $dangling_sym = 0;
+
+  if (-l $fullfile && ! -f $fullfile)
+    {
+      $dangling_sym = 1;
+    }
+  elsif (dir_has_case_matching_file ($dir, $file))
+    {
+      $found_it = 1;
+    }
+
+  # '--force-missing' only has an effect if '--add-missing' is
+  # specified.
+  return
+    if $found_it && (! $add_missing || ! $force_missing);
+
+  # If we've already looked for it, we're done.  You might
+  # wonder why we don't do this before searching for the
+  # file.  If we do that, then something like
+  # AC_OUTPUT(subdir/foo foo) will fail to put foo.in into
+  # DIST_COMMON.
+  if (! $found_it)
+    {
+      return if defined $required_file_not_found{$fullfile};
+      $required_file_not_found{$fullfile} = 1;
+    }
+  if ($dangling_sym && $add_missing)
+    {
+      unlink ($fullfile);
+    }
+
+  my $trailer = '';
+  my $trailer2 = '';
+  my $suppress = 0;
+
+  # Only install missing files according to our desired
+  # strictness level.
+  my $message = "required file '$fullfile' not found";
+  if ($add_missing)
+    {
+      if (-f "$libdir/$file")
+        {
+          $suppress = 1;
+
+          # Install the missing file.  Symlink if we
+          # can, copy if we must.  Note: delete the file
+          # first, in case it is a dangling symlink.
+          $message = "installing '$fullfile'";
+
+          # The license file should not be volatile.
+          if ($file eq "COPYING")
+            {
+              $message .= " using GNU General Public License v3 file";
+              $trailer2 = "\n    Consider adding the COPYING file"
+                        . " to the version control system"
+                        . "\n    for your code, to avoid questions"
+                        . " about which license your project uses";
+            }
+
+          # Windows Perl will hang if we try to delete a
+          # file that doesn't exist.
+          unlink ($fullfile) if -f $fullfile;
+          if ($symlink_exists && ! $copy_missing)
+            {
+              if (! symlink ("$libdir/$file", $fullfile)
+                  || ! -e $fullfile)
+                {
+                  $suppress = 0;
+                  $trailer = "; error while making link: $!";
+                }
+            }
+          elsif (system ('cp', "$libdir/$file", $fullfile))
+            {
+              $suppress = 0;
+              $trailer = "\n    error while copying";
+            }
+          set_dir_cache_file ($dir, $file);
+        }
+    }
+  else
+    {
+      $trailer = "\n  'automake --add-missing' can install '$file'"
+        if -f "$libdir/$file";
+    }
+
+  # If --force-missing was specified, and we have
+  # actually found the file, then do nothing.
+  return
+    if $found_it && $force_missing;
+
+  # If we couldn't install the file, but it is a target in
+  # the Makefile, don't print anything.  This allows files
+  # like README, AUTHORS, or THANKS to be generated.
+  return
+    if !$suppress && rule $file;
+
+  msg ($suppress ? 'note' : 'error', $where, "$message$trailer$trailer2");
+}
+
+
+# require_file_internal ($WHERE, $MYSTRICT, $DIRECTORY, $QUEUE, @FILES)
+# ---------------------------------------------------------------------
+# Verify that each of @FILES exists in $DIRECTORY, or install it there.
+# $MYSTRICT is the strictness level at which these files become required.
+# Worker threads may queue up the action to be serialized by the master
+# thread, if $QUEUE is true.
+sub require_file_internal
+{
+  my ($where, $mystrict, $dir, $queue, @files) = @_;
+
+  return
+    unless $strictness >= $mystrict;
+
+  foreach my $file (@files)
+    {
+      push_required_file ($dir, $file, "$dir/$file");
+      if ($queue)
+        {
+          queue_required_file_check_or_copy ($required_conf_file_queue,
+                                             QUEUE_CONF_FILE, $relative_dir,
+                                             $where, $mystrict, @files);
+        }
+      else
+        {
+          required_file_check_or_copy ($where, $dir, $file);
+        }
+    }
+}
+
+# require_file ($WHERE, $MYSTRICT, @FILES)
+# ----------------------------------------
+sub require_file
+{
+    my ($where, $mystrict, @files) = @_;
+    require_file_internal ($where, $mystrict, $relative_dir, 0, @files);
+}
+
+# require_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ----------------------------------------------------------
+sub require_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+}
+
+# require_libsource_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+# Require an AC_LIBSOURCEd file.  If AC_CONFIG_LIBOBJ_DIR was called, it
+# must be in that directory.  Otherwise expect it in the current directory.
+sub require_libsource_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    $macro = rvar ($macro) unless ref $macro;
+    if ($config_libobj_dir)
+      {
+	require_file_internal ($macro->rdef ($cond)->location, $mystrict,
+			       $config_libobj_dir, 0, @files);
+      }
+    else
+      {
+	require_file ($macro->rdef ($cond)->location, $mystrict, @files);
+      }
+}
+
+# queue_required_file_check_or_copy ($QUEUE, $KEY, $DIR, $WHERE,
+#                                    $MYSTRICT, @FILES)
+# --------------------------------------------------------------
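+# Illustrative example (assumed values): a call with a plain-string
+# $where, $dir = 'build-aux' and @files = ('install-sh', 'missing')
+# enqueues, in order: $key, 'build-aux', QUEUE_STRING, the string,
+# $mystrict, 2, 'install-sh', 'missing'.  The leading key is consumed
+# by the dispatch loop in handle_makefiles_threaded(), and
+# require_queued_file_check_or_copy() below unpacks the rest in the
+# same order.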
+sub queue_required_file_check_or_copy
+{
+    my ($queue, $key, $dir, $where, $mystrict, @files) = @_;
+    my @serial_loc;
+    if (ref $where)
+      {
+        @serial_loc = (QUEUE_LOCATION, $where->serialize ());
+      }
+    else
+      {
+        @serial_loc = (QUEUE_STRING, $where);
+      }
+    $queue->enqueue ($key, $dir, @serial_loc, $mystrict, 0 + @files, @files);
+}
+
+# require_queued_file_check_or_copy ($QUEUE)
+# ------------------------------------------
+sub require_queued_file_check_or_copy
+{
+    my ($queue) = @_;
+    my $where;
+    my $dir = $queue->dequeue ();
+    my $loc_key = $queue->dequeue ();
+    if ($loc_key eq QUEUE_LOCATION)
+      {
+	$where = Automake::Location::deserialize ($queue);
+      }
+    elsif ($loc_key eq QUEUE_STRING)
+      {
+	$where = $queue->dequeue ();
+      }
+    else
+      {
+	prog_error "unexpected key $loc_key";
+      }
+    my $mystrict = $queue->dequeue ();
+    my $nfiles = $queue->dequeue ();
+    my @files;
+    push @files, $queue->dequeue ()
+      foreach (1 .. $nfiles);
+    return
+      unless $strictness >= $mystrict;
+    foreach my $file (@files)
+      {
+        required_file_check_or_copy ($where, $config_aux_dir, $file);
+      }
+}
+
+# require_conf_file ($WHERE, $MYSTRICT, @FILES)
+# ---------------------------------------------
+# Looks in configuration path, as specified by AC_CONFIG_AUX_DIR.
+sub require_conf_file
+{
+    my ($where, $mystrict, @files) = @_;
+    my $queue = defined $required_conf_file_queue ? 1 : 0;
+    require_file_internal ($where, $mystrict, $config_aux_dir,
+                           $queue, @files);
+}
+
+
+# require_conf_file_with_macro ($COND, $MACRO, $MYSTRICT, @FILES)
+# ---------------------------------------------------------------
+sub require_conf_file_with_macro
+{
+    my ($cond, $macro, $mystrict, @files) = @_;
+    require_conf_file (rvar ($macro)->rdef ($cond)->location,
+		       $mystrict, @files);
+}
+
+################################################################
+
+# require_build_directory ($DIRECTORY)
+# ------------------------------------
+# Emit rules to create $DIRECTORY if needed, and return
+# the file that any target requiring this directory should be made
+# dependent upon.
+# We don't want to emit the rule twice, and want to reuse it
+# for directories with equivalent names (e.g., 'foo/bar' and './foo//bar').
+sub require_build_directory
+{
+  my $directory = shift;
+
+  return $directory_map{$directory} if exists $directory_map{$directory};
+
+  my $cdir = File::Spec->canonpath ($directory);
+
+  if (exists $directory_map{$cdir})
+    {
+      my $stamp = $directory_map{$cdir};
+      $directory_map{$directory} = $stamp;
+      return $stamp;
+    }
+
+  my $dirstamp = "$cdir/\$(am__dirstamp)";
+
+  $directory_map{$directory} = $dirstamp;
+  $directory_map{$cdir} = $dirstamp;
+
+  # Set a variable for the dirstamp basename.
+  define_pretty_variable ('am__dirstamp', TRUE, INTERNAL,
+			  '$(am__leading_dot)dirstamp');
+
+  # Directory must be removed by 'make distclean'.
+  $clean_files{$dirstamp} = DIST_CLEAN;
+
+  $output_rules .= ("$dirstamp:\n"
+		    . "\t\@\$(MKDIR_P) $directory\n"
+		    . "\t\@: > $dirstamp\n");
+
+  return $dirstamp;
+}
+
+# require_build_directory_maybe ($FILE)
+# -------------------------------------
+# If $FILE lies in a subdirectory, emit a rule to create this
+# directory and return the file that $FILE should be made
+# dependent upon.  Otherwise, just return the empty string.
+sub require_build_directory_maybe
+{
+    my $file = shift;
+    my $directory = dirname ($file);
+
+    if ($directory ne '.')
+    {
+	return require_build_directory ($directory);
+    }
+    else
+    {
+	return '';
+    }
+}
+
+################################################################
+
+# Push a list of files onto '@dist_common'.
+sub push_dist_common
+{
+  prog_error "push_dist_common run after handle_dist"
+    if $handle_dist_run;
+  Automake::Variable::define ('DIST_COMMON', VAR_AUTOMAKE, '+', TRUE, "@_",
+			      '', INTERNAL, VAR_PRETTY);
+}
+
+
+################################################################
+
+# generate_makefile ($MAKEFILE_AM, $MAKEFILE_IN)
+# ----------------------------------------------
+# Generate a Makefile.in given the name of the corresponding Makefile and
+# the name of the file output by config.status.
+sub generate_makefile
+{
+  my ($makefile_am, $makefile_in) = @_;
+
+  # Reset all the Makefile.am related variables.
+  initialize_per_input;
+
+  # AUTOMAKE_OPTIONS can contain -W flags to disable or enable
+  # warnings for this file.  So hold any warning issued before
+  # we have processed AUTOMAKE_OPTIONS.
+  buffer_messages ('warning');
+
+  # $OUTPUT is encoded.  If it contains a ":" then the first element
+  # is the real output file, and all remaining elements are input
+  # files.  We don't scan or otherwise deal with these input files,
+  # other than to mark them as dependencies.  See the subroutine
+  # 'scan_autoconf_files' for details.
+  my ($makefile, @inputs) = split (/:/, $output_files{$makefile_in});
+
+  $relative_dir = dirname ($makefile);
+
+  read_main_am_file ($makefile_am, $makefile_in);
+  if (not handle_options)
+    {
+      # Process buffered warnings.
+      flush_messages;
+      # Fatal error.  Just return, so we can continue with next file.
+      return;
+    }
+  # Process buffered warnings.
+  flush_messages;
+
+  # There are a few install-related variables that you should not define.
+  foreach my $var ('PRE_INSTALL', 'POST_INSTALL', 'NORMAL_INSTALL')
+    {
+      my $v = var $var;
+      if ($v)
+	{
+	  my $def = $v->def (TRUE);
+	  prog_error "$var not defined in condition TRUE"
+	    unless $def;
+	  reject_var $var, "'$var' should not be defined"
+	    if $def->owner != VAR_AUTOMAKE;
+	}
+    }
+
+  # Catch some obsolete variables.
+  msg_var ('obsolete', 'INCLUDES',
+	   "'INCLUDES' is the old name for 'AM_CPPFLAGS' (or '*_CPPFLAGS')")
+    if var ('INCLUDES');
+
+  # Must do this after reading .am file.
+  define_variable ('subdir', $relative_dir, INTERNAL);
+
+  # If DIST_SUBDIRS is defined, make sure SUBDIRS is, so that
+  # recursive rules are enabled.
+  define_pretty_variable ('SUBDIRS', TRUE, INTERNAL, '')
+    if var 'DIST_SUBDIRS' && ! var 'SUBDIRS';
+
+  # Check first, because we might modify some state.
+  check_gnu_standards;
+  check_gnits_standards;
+
+  handle_configure ($makefile_am, $makefile_in, $makefile, @inputs);
+  handle_gettext;
+  handle_libraries;
+  handle_ltlibraries;
+  handle_programs;
+  handle_scripts;
+
+  handle_silent;
+
+  # These must be run after all the sources are scanned.  They use
+  # variables defined by handle_libraries(), handle_ltlibraries(),
+  # or handle_programs().
+  handle_compile;
+  handle_languages;
+  handle_libtool;
+
+  # Variables used by distdir.am and tags.am.
+  define_pretty_variable ('SOURCES', TRUE, INTERNAL, @sources);
+  if (! option 'no-dist')
+    {
+      define_pretty_variable ('DIST_SOURCES', TRUE, INTERNAL, @dist_sources);
+    }
+
+  handle_texinfo;
+  handle_emacs_lisp;
+  handle_python;
+  handle_java;
+  handle_man_pages;
+  handle_data;
+  handle_headers;
+  handle_subdirs;
+  handle_user_recursion;
+  handle_tags;
+  handle_minor_options;
+  # Must come after handle_programs so that %known_programs is up-to-date.
+  handle_tests;
+
+  # This must come after most other rules.
+  handle_dist;
+
+  handle_footer;
+  do_check_merge_target;
+  handle_all ($makefile);
+
+  # FIXME: Gross!
+  if (var ('lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-libLTLIBRARIES\n\n";
+    }
+  if (var ('nobase_lib_LTLIBRARIES') && var ('bin_PROGRAMS'))
+    {
+      $output_rules .= "install-binPROGRAMS: install-nobase_libLTLIBRARIES\n\n";
+    }
+
+  handle_install;
+  handle_clean ($makefile);
+  handle_factored_dependencies;
+
+  # Comes last, because all the above procedures may have
+  # defined or overridden variables.
+  $output_vars .= output_variables;
+
+  check_typos;
+
+  if ($exit_code != 0)
+    {
+      verb "not writing $makefile_in because of earlier errors";
+      return;
+    }
+
+  my $am_relative_dir = dirname ($makefile_am);
+  mkdir ($am_relative_dir, 0755) if ! -d $am_relative_dir;
+
+  # We make sure that 'all:' is the first target.
+  my $output =
+    "$output_vars$output_all$output_header$output_rules$output_trailer";
+
+  # Decide whether we must update the output file or not.
+  # We have to update in the following situations.
+  #  * $force_generation is set.
+  #  * any of the output dependencies is younger than the output,
+  #  * the contents of the output are different (this can happen
+  #    if the project has been populated with a file listed in
+  #    @common_files since the last run).
+  # The output's dependencies are split into two sets:
+  #  * dependencies which are also configure dependencies;
+  #    these do not change from one Makefile.am to the next,
+  #  * other dependencies, specific to the Makefile.am being processed
+  #    (such as the Makefile.am itself, or any Makefile fragment
+  #    it includes).
+  my $timestamp = mtime $makefile_in;
+  if (! $force_generation
+      && $configure_deps_greatest_timestamp < $timestamp
+      && $output_deps_greatest_timestamp < $timestamp
+      && $output eq contents ($makefile_in))
+    {
+      verb "$makefile_in unchanged";
+      # No need to update.
+      return;
+    }
+
+  if (-e $makefile_in)
+    {
+      unlink ($makefile_in)
+	or fatal "cannot remove $makefile_in: $!";
+    }
+
+  my $gm_file = new Automake::XFile "> $makefile_in";
+  verb "creating $makefile_in";
+  print $gm_file $output;
+}
+
+
+################################################################
+
+
+# Helper function for usage().
+sub print_autodist_files
+{
+  # NOTE: we need to call our 'uniq' function with the leading '&'
+  # here, because otherwise perl complains that "Unquoted string
+  # 'uniq' may clash with future reserved word".
+  my @lcomm = sort (&uniq (@_));
+
+  my @four;
+  format USAGE_FORMAT =
+  @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<   @<<<<<<<<<<<<<<<<
+  $four[0],           $four[1],           $four[2],           $four[3]
+.
+  local $~ = "USAGE_FORMAT";
+
+  my $cols = 4;
+  my $rows = int(@lcomm / $cols);
+  my $rest = @lcomm % $cols;
+
+  if ($rest)
+    {
+      $rows++;
+    }
+  else
+    {
+      $rest = $cols;
+    }
+
+  for (my $y = 0; $y < $rows; $y++)
+    {
+      @four = ("", "", "", "");
+      for (my $x = 0; $x < $cols; $x++)
+        {
+          last if $y + 1 == $rows && $x == $rest;
+
+          my $idx = (($x > $rest)
+               ?  ($rows * $rest + ($rows - 1) * ($x - $rest))
+               : ($rows * $x));
+
+          $idx += $y;
+          $four[$x] = $lcomm[$idx];
+        }
+      write;
+    }
+}
+
+
+sub usage ()
+{
+    print "Usage: $0 [OPTION]... [Makefile]...
+
+Generate Makefile.in for configure from Makefile.am.
+
+Operation modes:
+      --help               print this help, then exit
+      --version            print version number, then exit
+  -v, --verbose            verbosely list files processed
+      --no-force           only update Makefile.in's that are out of date
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY
+
+Dependency tracking:
+  -i, --ignore-deps      disable dependency tracking code
+      --include-deps     enable dependency tracking code
+
+Flavors:
+      --foreign          set strictness to foreign
+      --gnits            set strictness to gnits
+      --gnu              set strictness to gnu
+
+Library files:
+  -a, --add-missing      add missing standard files to package
+      --libdir=DIR       set directory storing library files
+      --print-libdir     print directory storing library files
+  -c, --copy             with -a, copy missing files (default is symlink)
+  -f, --force-missing    force update of standard files
+
+";
+    Automake::ChannelDefs::usage;
+
+    print "\nFiles automatically distributed if found " .
+          "(always):\n";
+    print_autodist_files @common_files;
+    print "\nFiles automatically distributed if found " .
+          "(under certain conditions):\n";
+    print_autodist_files @common_sometimes;
+
+    print '
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <http://www.gnu.org/software/automake/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+';
+
+    # --help always returns 0 per GNU standards.
+    exit 0;
+}
+
+
+sub version ()
+{
+  print <<EOF;
+automake (GNU $PACKAGE) $VERSION
+Copyright (C) $RELEASE_YEAR Free Software Foundation, Inc.
+License GPLv2+: GNU GPL version 2 or later <http://gnu.org/licenses/gpl-2.0.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by Tom Tromey <tromey\@redhat.com>
+       and Alexandre Duret-Lutz <adl\@gnu.org>.
+EOF
+  # --version always returns 0 per GNU standards.
+  exit 0;
+}
+
+################################################################
+
+# Parse command line.
+sub parse_arguments ()
+{
+  my $strict = 'gnu';
+  my $ignore_deps = 0;
+  my @warnings = ();
+
+  my %cli_options =
+    (
+     'version' => \&version,
+     'help'    => \&usage,
+     'libdir=s'	=> \$libdir,
+     'print-libdir'     => sub { print "$libdir\n"; exit 0; },
+     'gnu'		=> sub { $strict = 'gnu'; },
+     'gnits'		=> sub { $strict = 'gnits'; },
+     'foreign'		=> sub { $strict = 'foreign'; },
+     'include-deps'	=> sub { $ignore_deps = 0; },
+     'i|ignore-deps'	=> sub { $ignore_deps = 1; },
+     'no-force'	=> sub { $force_generation = 0; },
+     'f|force-missing'  => \$force_missing,
+     'a|add-missing'	=> \$add_missing,
+     'c|copy'		=> \$copy_missing,
+     'v|verbose'	=> sub { setup_channel 'verb', silent => 0; },
+     'W|warnings=s'     => \@warnings,
+     );
+
+  use Automake::Getopt ();
+  Automake::Getopt::parse_options %cli_options;
+
+  set_strictness ($strict);
+  my $cli_where = new Automake::Location;
+  set_global_option ('no-dependencies', $cli_where) if $ignore_deps;
+  for my $warning (@warnings)
+    {
+      parse_warnings ('-W', $warning);
+    }
+
+  return unless @ARGV;
+
+  my $errspec = 0;
+  foreach my $arg (@ARGV)
+    {
+      fatal ("empty argument\nTry '$0 --help' for more information")
+	if ($arg eq '');
+
+      # Handle $local:$input syntax.
+      my ($local, @rest) = split (/:/, $arg);
+      @rest = ("$local.in",) unless @rest;
+      my $input = locate_am @rest;
+      if ($input)
+	{
+	  push @input_files, $input;
+	  $output_files{$input} = join (':', ($local, @rest));
+	}
+      else
+	{
+	  error "no Automake input file found for '$arg'";
+	  $errspec = 1;
+	}
+    }
+  fatal "no input file found among supplied arguments"
+    if $errspec && ! @input_files;
+}
+
+
+# handle_makefile ($MAKEFILE)
+# ---------------------------
+sub handle_makefile
+{
+  my ($file) =  @_;
+  ($am_file = $file) =~ s/\.in$//;
+  if (! -f ($am_file . '.am'))
+    {
+      error "'$am_file.am' does not exist";
+    }
+  else
+    {
+      # Any warning setting now local to this Makefile.am.
+      dup_channel_setup;
+
+      generate_makefile ($am_file . '.am', $file);
+
+      # Back out any warning setting.
+      drop_channel_setup;
+    }
+}
+
+# Deal with all makefiles, without threads.
+sub handle_makefiles_serial ()
+{
+  foreach my $file (@input_files)
+    {
+      handle_makefile ($file);
+    }
+}
+
+# Logic for deciding how many worker threads to use.
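+# Illustrative usage (assumed): with 'AUTOMAKE_JOBS=4' in the
+# environment and ten input Makefiles, four worker threads are used;
+# with only two input Makefiles the count is capped at two; an unset or
+# non-numeric AUTOMAKE_JOBS yields 0, i.e. serial processing (threads
+# also require a Perl built with ithreads support, see $perl_threads).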
+sub get_number_of_threads ()
+{
+  my $nthreads = $ENV{'AUTOMAKE_JOBS'} || 0;
+
+  $nthreads = 0
+    unless $nthreads =~ /^[0-9]+$/;
+
+  # It doesn't make sense to use more threads than makefiles.
+  my $max_threads = @input_files;
+
+  if ($nthreads > $max_threads)
+    {
+      $nthreads = $max_threads;
+    }
+  return $nthreads;
+}
+
+# handle_makefiles_threaded ($NTHREADS)
+# -------------------------------------
+# Deal with all makefiles, using threads.  The general strategy is to
+# spawn NTHREADS worker threads, dispatch makefiles to them, and let the
+# worker threads push back everything that needs serialization:
+# * warning and (normal) error messages, for stable stderr output
+#   order and content (avoiding duplicates, for example),
+# * races when installing aux files (and respective messages),
+# * races when collecting aux files for distribution.
+#
+# The latter requires that the makefile that deals with the aux dir
+# files be handled last, done by the master thread.
+sub handle_makefiles_threaded
+{
+  my ($nthreads) = @_;
+
+  # The file queue distributes all makefiles, the message queues
+  # collect all serializations needed for respective files.
+  my $file_queue = Thread::Queue->new;
+  my %msg_queues;
+  foreach my $file (@input_files)
+    {
+      $msg_queues{$file} = Thread::Queue->new;
+    }
+
+  verb "spawning $nthreads worker threads";
+  my @threads = (1 .. $nthreads);
+  foreach my $t (@threads)
+    {
+      $t = threads->new (sub
+	{
+	  while (my $file = $file_queue->dequeue)
+	    {
+	      verb "handling $file";
+	      my $queue = $msg_queues{$file};
+	      setup_channel_queue ($queue, QUEUE_MESSAGE);
+	      $required_conf_file_queue = $queue;
+	      handle_makefile ($file);
+	      $queue->enqueue (undef);
+	      setup_channel_queue (undef, undef);
+	      $required_conf_file_queue = undef;
+	    }
+	  return $exit_code;
+	});
+    }
+
+  # Queue all makefiles.
+  verb "queuing " . @input_files . " input files";
+  $file_queue->enqueue (@input_files, (undef) x @threads);
+
+  # Collect and process serializations.
+  foreach my $file (@input_files)
+    {
+      verb "dequeuing messages for " . $file;
+      reset_local_duplicates ();
+      my $queue = $msg_queues{$file};
+      while (my $key = $queue->dequeue)
+	{
+	  if ($key eq QUEUE_MESSAGE)
+	    {
+	      pop_channel_queue ($queue);
+	    }
+	  elsif ($key eq QUEUE_CONF_FILE)
+	    {
+	      require_queued_file_check_or_copy ($queue);
+	    }
+	  else
+	    {
+	      prog_error "unexpected key $key";
+	    }
+	}
+    }
+
+  foreach my $t (@threads)
+    {
+      my @exit_thread = $t->join;
+      $exit_code = $exit_thread[0]
+	if ($exit_thread[0] > $exit_code);
+    }
+}
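+
+# Minimal sketch of the worker/master pattern used above, relying only on
+# the Thread::Queue and threads calls already present in this file
+# (illustrative, not part of the original script):
+#
+#   my $q = Thread::Queue->new;
+#   my $worker = threads->new (sub { while (my $item = $q->dequeue) { } });
+#   $q->enqueue (@work, undef);  # a trailing undef tells the worker to stop
+#   $worker->join;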
+
+################################################################
+
+# Parse the WARNINGS environment variable.
+parse_WARNINGS;
+
+# Parse command line.
+parse_arguments;
+
+$configure_ac = require_configure_ac;
+
+# Do configure.ac scan only once.
+scan_autoconf_files;
+
+if (! @input_files)
+  {
+    my $msg = '';
+    $msg = "\nDid you forget AC_CONFIG_FILES([Makefile]) in $configure_ac?"
+      if -f 'Makefile.am';
+    fatal ("no 'Makefile.am' found for any configure output$msg");
+  }
+
+my $nthreads = get_number_of_threads ();
+
+if ($perl_threads && $nthreads >= 1)
+  {
+    handle_makefiles_threaded ($nthreads);
+  }
+else
+  {
+    handle_makefiles_serial ();
+  }
+
+exit $exit_code;
+
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoreconf b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoreconf
new file mode 100755
index 0000000..1ef55af
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoreconf
@@ -0,0 +1,718 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoreconf.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# autoreconf - install the GNU Build System in a directory tree
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David J. MacKenzie.
+# Extended and rewritten in Perl by Akim Demaille.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+# Do not use Cwd::chdir, since it might hang.
+use Cwd 'cwd';
+use strict;
+
+## ----------- ##
+## Variables.  ##
+## ----------- ##
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [DIRECTORY]...
+
+Run `autoconf' (and `autoheader', `aclocal', `automake', `autopoint'
+(formerly `gettextize'), and `libtoolize' where appropriate)
+repeatedly to remake the GNU Build System files in specified
+DIRECTORIES and their subdirectories (defaulting to `.').
+
+By default, it only remakes those files that are older than their
+sources.  If you install new versions of the GNU Build System,
+you can make `autoreconf' remake all of the files by giving it the
+`--force' option.
+
+Operation modes:
+  -h, --help               print this help, then exit
+  -V, --version            print version number, then exit
+  -v, --verbose            verbosely report processing
+  -d, --debug              don't remove temporary files
+  -f, --force              consider all files obsolete
+  -i, --install            copy missing auxiliary files
+      --no-recursive       don't rebuild sub-packages
+  -s, --symlink            with -i, install symbolic links instead of copies
+  -m, --make               when applicable, re-run ./configure && make
+  -W, --warnings=CATEGORY  report the warnings falling in CATEGORY [syntax]
+
+" . Autom4te::ChannelDefs::usage . "
+
+The environment variable \`WARNINGS\' is honored.  Some subtools might
+support other warning types; using \`all' is encouraged.
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+The environment variables AUTOM4TE, AUTOCONF, AUTOHEADER, AUTOMAKE,
+ACLOCAL, AUTOPOINT, LIBTOOLIZE, M4, and MAKE are honored.
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
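+# Typical invocations, for illustration (both follow directly from the
+# option list above):
+#
+#   autoreconf --install           # bootstrap a fresh checkout, copying
+#                                  # missing auxiliary files
+#   autoreconf --force --verbose   # consider every file obsolete and
+#                                  # report each subtool as it runs
+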
+# $VERSION
+# --------
+$version = "autoreconf (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+# Lib files.
+my $autoconf   = $ENV{'AUTOCONF'}   || '/x86_64-unknown-linux-gnu/bin/autoconf';
+my $autoheader = $ENV{'AUTOHEADER'} || '/x86_64-unknown-linux-gnu/bin/autoheader';
+my $autom4te   = $ENV{'AUTOM4TE'}   || '/x86_64-unknown-linux-gnu/bin/autom4te';
+my $automake   = $ENV{'AUTOMAKE'}   || 'automake';
+my $aclocal    = $ENV{'ACLOCAL'}    || 'aclocal';
+my $libtoolize = $ENV{'LIBTOOLIZE'} || 'libtoolize';
+my $autopoint  = $ENV{'AUTOPOINT'}  || 'autopoint';
+my $make       = $ENV{'MAKE'}       || 'make';
+
+# --install -- as --add-missing in other tools.
+my $install = 0;
+# symlink -- when --install, use symlinks instead.
+my $symlink = 0;
+# Does aclocal support --force?
+my $aclocal_supports_force = 0;
+# Does aclocal support -Wfoo?
+my $aclocal_supports_warnings = 0;
+# Does automake support --force-missing?
+my $automake_supports_force_missing = 0;
+# Does automake support -Wfoo?
+my $automake_supports_warnings = 0;
+
+my @prepend_include;
+my @include;
+
+# List of command line warning requests.
+my @warning;
+
+# Rerun `./configure && make'?
+my $run_make = 0;
+
+# Recurse into subpackages
+my $recursive = 1;
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ("W|warnings=s"         => \@warning,
+	  'I|include=s'          => \@include,
+	  'B|prepend-include=s'	 => \@prepend_include,
+	  'i|install'            => \$install,
+	  's|symlink'            => \$symlink,
+	  'm|make'               => \$run_make,
+	  'recursive!'           => \$recursive);
+
+  # Split the warnings as a list of elements instead of a list of
+  # lists.
+  @warning = map { split /,/ } @warning;
+  parse_WARNINGS;
+  parse_warnings '--warnings', @warning;
+
+  # Even if the user specified a configure.ac, trim to get the
+  # directory, and look for configure.ac again.  Because (i) the code
+  # is simpler, and (ii) we are still able to diagnose simultaneous
+  # presence of configure.ac and configure.in.
+  @ARGV = map { /configure\.(ac|in)$/ ? dirname ($_) : $_ } @ARGV;
+  push @ARGV, '.' unless @ARGV;
+
+  if ($verbose && $debug)
+    {
+      for my $prog ($autoconf, $autoheader,
+		    $automake, $aclocal,
+		    $autopoint,
+		    $libtoolize)
+	{
+	  xsystem ("$prog --version | sed 1q >&2");
+	  print STDERR "\n";
+	}
+    }
+
+  my $aclocal_help = `$aclocal --help 2>/dev/null`;
+  my $automake_help = `$automake --help 2>/dev/null`;
+  $aclocal_supports_force = $aclocal_help =~ /--force/;
+  $aclocal_supports_warnings = $aclocal_help =~ /--warnings/;
+  $automake_supports_force_missing = $automake_help =~ /--force-missing/;
+  $automake_supports_warnings = $automake_help =~ /--warnings/;
+
+  # Dispatch autoreconf's option to the tools.
+  # --include;
+  $aclocal    .= join (' -I ', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoconf   .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+  $autoheader .= join (' --include=', '', map { shell_quote ($_) } @include);
+  $autoheader .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+  # --install and --symlink;
+  if ($install)
+    {
+      $automake   .= ' --add-missing';
+      $automake   .= ' --copy' unless $symlink;
+      $libtoolize .= ' --copy' unless $symlink;
+    }
+  # --force;
+  if ($force)
+    {
+      $aclocal    .= ' --force'
+	if $aclocal_supports_force;
+      $autoconf   .= ' --force';
+      $autoheader .= ' --force';
+      $automake   .= ' --force-missing'
+	if $automake_supports_force_missing;
+      $autopoint  .= ' --force';
+      $libtoolize .= ' --force';
+    }
+  else
+    {
+      # The implementation of --no-force is bogus in all implementations
+      # of Automake up to 1.8, so we avoid it in these cases.  (Automake
+      # 1.8 is the first version where aclocal supports force, hence
+      # the condition.)
+      $automake .= ' --no-force'
+	if $aclocal_supports_force;
+    }
+  # --verbose --verbose or --debug;
+  if ($verbose > 1 || $debug)
+    {
+      $autoconf   .= ' --verbose';
+      $autoheader .= ' --verbose';
+      $automake   .= ' --verbose';
+      $aclocal    .= ' --verbose';
+    }
+  if ($debug)
+    {
+      $autoconf   .= ' --debug';
+      $autoheader .= ' --debug';
+      $libtoolize .= ' --debug';
+    }
+  # --warnings;
+  if (@warning)
+    {
+      my $warn = ' --warnings=' . join (',', @warning);
+      $autoconf   .= $warn;
+      $autoheader .= $warn;
+      $automake   .= $warn
+	if $automake_supports_warnings;
+      $aclocal    .= $warn
+        if $aclocal_supports_warnings;
+    }
+}
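+
+# For illustration: after the dispatching above, a run such as
+# `autoreconf --install --force -I m4' leaves the subtool command
+# strings looking roughly like
+#
+#   aclocal -I m4 --force               (when aclocal advertises --force)
+#   autoconf --include=m4 --force
+#   automake --add-missing --copy --force-missing
+#
+# the exact strings depend on what the installed aclocal and automake
+# advertise in their --help output, as probed a few lines earlier.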
+
+
+# &run_aclocal ($ACLOCAL, $FLAGS)
+# -------------------------------
+# Update aclocal.m4 as lazily as possible, as aclocal pre-1.8 always
+# overwrites aclocal.m4, hence triggers autoconf, autoheader, automake
+# etc. uselessly.  aclocal 1.8+ does not need this.
+sub run_aclocal ($$)
+{
+  my ($aclocal, $flags) = @_;
+
+  # aclocal 1.8+ does all this for free.  It can be recognized by its
+  # --force support.
+  if ($aclocal_supports_force)
+    {
+      xsystem ("$aclocal $flags");
+    }
+  else
+    {
+      xsystem ("$aclocal $flags --output=aclocal.m4t");
+      # aclocal may produce no output.
+      if (-f 'aclocal.m4t')
+	{
+	  update_file ('aclocal.m4t', 'aclocal.m4');
+	  # Make sure that the local m4 files are older than
+	  # aclocal.m4.
+	  #
+	  # Why is that not always the case?  Because we already run
+	  # aclocal at first (before tracing), which, for instance,
+	  # can find Gettext's macros in .../share/aclocal, so we may
+	  # have had the right aclocal.m4 already.  Then autopoint is
+	  # run, and installs locally these M4 files.  Then
+	  # autoreconf, via update_file, sees it is the _same_
+	  # aclocal.m4, and doesn't change its timestamp.  But later,
+	  # Automake's Makefile expresses that aclocal.m4 depends on
+	  # these local files, which are newer, so it triggers aclocal
+	  # again.
+	  #
+	  # To make sure aclocal.m4 is no older, we change the
+	  # modification times of the local M4 files to be not newer
+	  # than it.
+	  #
+	  # First, where are the local files?
+	  my $aclocal_local_dir = '.';
+	  if ($flags =~ /-I\s+(\S+)/)
+	    {
+	      $aclocal_local_dir = $1;
+	    }
+	  # All the local files newer than aclocal.m4 are to be
+	  # made not newer than it.
+	  my $aclocal_m4_mtime = mtime ('aclocal.m4');
+	  for my $file (glob ("$aclocal_local_dir/*.m4"), 'acinclude.m4')
+	    {
+	      if ($aclocal_m4_mtime < mtime ($file))
+		{
+		  debug "aging $file to be not newer than aclocal.m4";
+		  utime $aclocal_m4_mtime, $aclocal_m4_mtime, $file;
+		}
+	    }
+	}
+    }
+}
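+
+# Worked example of the aging step above (illustrative file names): if
+# aclocal.m4 ends up with mtime T while m4/libtool.m4 was just installed
+# with mtime T+2, the utime call rewinds m4/libtool.m4 to T, so Automake's
+# aclocal.m4 dependency does not needlessly retrigger aclocal on the next
+# make run.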
+
+# &autoreconf_current_directory
+# -----------------------------
+sub autoreconf_current_directory ()
+{
+  my $configure_ac = find_configure_ac;
+
+  # ---------------------- #
+  # Is it using Autoconf?  #
+  # ---------------------- #
+
+  my $uses_autoconf;
+  my $uses_gettext;
+  if (-f $configure_ac)
+    {
+      my $configure_ac_file = new Autom4te::XFile "< $configure_ac";
+      while ($_ = $configure_ac_file->getline)
+	{
+	  s/#.*//;
+	  s/dnl.*//;
+	  $uses_autoconf = 1 if /AC_INIT/;
+	  # See below for why we look for gettext here.
+	  $uses_gettext = 1  if /^AM_GNU_GETTEXT_VERSION/;
+	}
+    }
+  if (!$uses_autoconf)
+    {
+      verb "$configure_ac: not using Autoconf";
+      return;
+    }
+
+
+  # ------------------- #
+  # Running autopoint.  #
+  # ------------------- #
+
+  # Gettext is a bit of a problem: its macros are not necessarily
+  # visible to aclocal, so if we start with a completely stripped down
+  # package (think of a fresh CVS checkout), running `aclocal' first
+  # will fail: the Gettext macros are missing.
+  #
+  # Therefore, we can't use the traces to decide if we use Gettext or
+  # not.  I guess that once Gettext moves to 2.5x we will be able to,
+  # but in the meanwhile forget it.
+  #
+  # We can only grep for AM_GNU_GETTEXT_VERSION in configure.ac.  You
+  # might think this approach is naive, and indeed it is, as it
+  # prevents one from embedding AM_GNU_GETTEXT_VERSION in another *.m4, but
+  # anyway we don't limit the generality, since... that's what
+  # autopoint does.  Actually, it is even more restrictive, as it
+  # greps for `^AM_GNU_GETTEXT_VERSION('.  We did this above, while
+  # scanning configure.ac.
+  if (!$uses_gettext)
+    {
+      verb "$configure_ac: not using Gettext";
+    }
+  elsif (!$install)
+    {
+      verb "$configure_ac: not running autopoint: --install not given";
+    }
+  else
+    {
+      xsystem_hint ("autopoint is needed because this package uses Gettext", "$autopoint");
+    }
+
+
+  # ----------------- #
+  # Running aclocal.  #
+  # ----------------- #
+
+  # Run it first: it might discover new macros to add, e.g.,
+  # AC_PROG_LIBTOOL, which we will trace later to see if Libtool is
+  # used.
+  #
+  # Always run it.  Tracking its sources for up-to-dateness is too
+  # complex and too error prone.  The best we can do is to avoid
+  # nuking the time stamp.
+  my $uses_aclocal = 1;
+
+  # Nevertheless, if aclocal.m4 exists and is not made by aclocal,
+  # don't run aclocal.
+
+  if (-f 'aclocal.m4')
+    {
+      my $aclocal_m4 = new Autom4te::XFile 'aclocal.m4';
+      $_ = $aclocal_m4->getline;
+      $uses_aclocal = 0
+	unless defined ($_) && /generated.*by aclocal/;
+    }
+
+  # If there are flags for aclocal in Makefile.am, use them.
+  my $aclocal_flags = '';
+  if ($uses_aclocal && -f 'Makefile.am')
+    {
+      my $makefile = new Autom4te::XFile 'Makefile.am';
+      while ($_ = $makefile->getline)
+	{
+	  if (/^ACLOCAL_[A-Z_]*FLAGS\s*=\s*(.*)/)
+	    {
+	      $aclocal_flags = $1;
+	      last;
+	    }
+	}
+    }
+
+  if (!$uses_aclocal)
+    {
+      verb "$configure_ac: not using aclocal";
+    }
+  else
+    {
+      # Some file systems have sub-second time stamps, and if so we may
+      # run into trouble later, after we rerun autoconf and set the
+      # time stamps of input files to be no greater than aclocal.m4,
+      # because the time-stamp-setting operation (utime) has a
+      # resolution of only 1 second.  Work around the problem by
+      # ensuring that there is at least a one-second window before the
+      # time stamp of aclocal.m4t in which no file time stamps can
+      # fall.
+      sleep 1;
+
+      run_aclocal ($aclocal, $aclocal_flags);
+    }
+
+  # We might have to rerun aclocal if Libtool (or others) imports new
+  # macros.
+  my $rerun_aclocal = 0;
+
+
+
+  # ------------------------------- #
+  # See what tools will be needed.  #
+  # ------------------------------- #
+
+  # Perform a single trace reading to avoid --force forcing a rerun
+  # between two --trace invocations, which would be useless.  If there is
+  # no AC_INIT, then we are not interested: it looks like a Cygnus thingy.
+  my $aux_dir;
+  my $uses_gettext_via_traces;
+  my $uses_libtool;
+  my $uses_libltdl;
+  my $uses_autoheader;
+  my $uses_automake;
+  my @subdir;
+  verb "$configure_ac: tracing";
+  my $traces = new Autom4te::XFile
+    ("$autoconf"
+     . join (' ',
+	     map { ' --trace=' . $_ . ':\$n::\${::}%' }
+	     # If you change this list, update the
+	     # `Autoreconf-preselections' section of autom4te.in.
+	     'AC_CONFIG_AUX_DIR',
+	     'AC_CONFIG_HEADERS',
+	     'AC_CONFIG_SUBDIRS',
+	     'AC_INIT',
+	     'AC_PROG_LIBTOOL',
+	     'LT_INIT',
+	     'LT_CONFIG_LTDL_DIR',
+	     'AM_GNU_GETTEXT',
+	     'AM_INIT_AUTOMAKE',
+	    )
+     . ' |');
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($macro, @args) = split (/::/);
+      $aux_dir = $args[0]           if $macro eq "AC_CONFIG_AUX_DIR";
+      $uses_autoconf = 1            if $macro eq "AC_INIT";
+      $uses_gettext_via_traces = 1  if $macro eq "AM_GNU_GETTEXT";
+      $uses_libtool = 1             if $macro eq "AC_PROG_LIBTOOL"
+                                       || $macro eq "LT_INIT";
+      $uses_libltdl = 1             if $macro eq "LT_CONFIG_LTDL_DIR";
+      $uses_autoheader = 1          if $macro eq "AC_CONFIG_HEADERS";
+      $uses_automake = 1            if $macro eq "AM_INIT_AUTOMAKE";
+      push @subdir, split (' ', $args[0])
+                                    if $macro eq "AC_CONFIG_SUBDIRS" && $recursive;
+    }
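+
+  # The --trace format requested above ('$n::${::}%') yields one line
+  # per traced call, for example (illustrative output only):
+  #
+  #   AC_INIT::my-package::1.0::bugs@example.org
+  #   AC_CONFIG_AUX_DIR::build-aux
+  #   AC_CONFIG_SUBDIRS::lib/foo lib/bar
+  #
+  # which the split on '::' above turns into ($macro, @args).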
+
+  # The subdirs are *optional*; they may not exist.
+  foreach (@subdir)
+    {
+      if (-d)
+	{
+	  verb "$configure_ac: adding subdirectory $_ to autoreconf";
+	  autoreconf ($_);
+	}
+      else
+	{
+	  verb "$configure_ac: subdirectory $_ not present";
+	}
+    }
+
+  # Gettext consistency checks...
+  error "$configure_ac: AM_GNU_GETTEXT is used, but not AM_GNU_GETTEXT_VERSION"
+    if $uses_gettext_via_traces && ! $uses_gettext;
+  error "$configure_ac: AM_GNU_GETTEXT_VERSION is used, but not AM_GNU_GETTEXT"
+    if $uses_gettext && ! $uses_gettext_via_traces;
+
+
+  # ---------------------------- #
+  # Setting up the source tree.  #
+  # ---------------------------- #
+
+  # libtoolize, automake --add-missing etc. will drop files in the
+  # $AUX_DIR.  But these tools fail to install these files if the
+  # directory itself does not exist, which is valid: just imagine a CVS
+  # repository with hand-written code only (there is not even a need
+  # for a Makefile.am!).
+
+  if (defined $aux_dir && ! -d $aux_dir)
+    {
+      verb "$configure_ac: creating directory $aux_dir";
+      mkdir $aux_dir, 0755
+	or error "cannot create $aux_dir: $!";
+    }
+
+
+  # -------------------- #
+  # Running libtoolize.  #
+  # -------------------- #
+
+  if (!$uses_libtool)
+    {
+      verb "$configure_ac: not using Libtool";
+    }
+  elsif ($install)
+    {
+      if ($uses_libltdl)
+	{
+	  $libtoolize .= " --ltdl";
+	}
+      xsystem_hint ("libtoolize is needed because this package uses Libtool", $libtoolize);
+      $rerun_aclocal = 1;
+    }
+  else
+    {
+      verb "$configure_ac: not running libtoolize: --install not given";
+    }
+
+
+
+  # ------------------- #
+  # Rerunning aclocal.  #
+  # ------------------- #
+
+  # If we re-installed Libtool or Gettext, the macros might have changed.
+  # Automake also needs an up-to-date aclocal.m4.
+  if ($rerun_aclocal)
+    {
+      if (!$uses_aclocal)
+	{
+	  verb "$configure_ac: not using aclocal";
+	}
+      else
+	{
+	  run_aclocal ($aclocal, $aclocal_flags);
+	}
+    }
+
+
+  # ------------------ #
+  # Running autoconf.  #
+  # ------------------ #
+
+  # Don't try to be smarter than `autoconf', which does its own
+  # up-to-date checks.
+  #
+  # We prefer running autoconf before autoheader, because (i) the
+  # latter runs the former, and (ii) autoconf is stricter than
+  # autoheader.  So all in all, autoconf should give better error
+  # messages.
+  xsystem ($autoconf);
+
+
+  # -------------------- #
+  # Running autoheader.  #
+  # -------------------- #
+
+  # We now consider that if AC_CONFIG_HEADERS is used, then autoheader
+  # is used too.
+  #
+  # Just as for autoconf, the up-to-date check is performed by the
+  # tool itself.
+  #
+  # Run it before automake, since the latter checks the presence of
+  # config.h.in when it sees an AC_CONFIG_HEADERS.
+  if (!$uses_autoheader)
+    {
+      verb "$configure_ac: not using Autoheader";
+    }
+  else
+    {
+      xsystem ($autoheader);
+    }
+
+
+  # ------------------ #
+  # Running automake.  #
+  # ------------------ #
+
+  if (!$uses_automake)
+    {
+      verb "$configure_ac: not using Automake";
+    }
+  else
+    {
+      # We should always run automake, and let it decide whether it shall
+      # update the file or not.  In fact, the effect of `$force' is already
+      # included in `$automake' via `--no-force'.
+      xsystem ($automake);
+    }
+
+
+  # -------------- #
+  # Running make.  #
+  # -------------- #
+
+  if ($run_make)
+    {
+      if (!-f "config.status")
+	{
+	  verb "no config.status: cannot re-make";
+	}
+      else
+	{
+	  xsystem ("./config.status --recheck");
+	  xsystem ("./config.status");
+	  if (!-f "Makefile")
+	    {
+	      verb "no Makefile: cannot re-make";
+	    }
+	  else
+	    {
+	      xsystem ("$make");
+	    }
+	}
+    }
+}
+
+
+# &autoreconf ($DIRECTORY)
+# ------------------------
+# Reconf the $DIRECTORY.
+sub autoreconf ($)
+{
+  my ($directory) = @_;
+  my $cwd = cwd;
+
+  # The format for this message is not free: taken from Emacs, itself
+  # using GNU Make's format.
+  verb "Entering directory `$directory'";
+  chdir $directory
+    or error "cannot chdir to $directory: $!";
+
+  autoreconf_current_directory;
+
+  # The format is not free: taken from Emacs, itself using GNU Make's
+  # format.
+  verb "Leaving directory `$directory'";
+  chdir $cwd
+    or error "cannot chdir to $cwd: $!";
+}
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+# When debugging, it is convenient that all the related temporary
+# files be at the same place.
+mktmpdir ('ar');
+$ENV{'TMPDIR'} = $tmp;
+parse_args;
+
+# Autoreconf all the given configure.ac.  Unless `--no-recursive' is passed,
+# AC_CONFIG_SUBDIRS will be traversed in &autoreconf_current_directory.
+$ENV{'AUTOM4TE'} = $autom4te;
+for my $directory (@ARGV)
+  {
+    require_configure_ac ($directory);
+    autoreconf ($directory);
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoscan b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoscan
new file mode 100755
index 0000000..0d9ecdd
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoscan
@@ -0,0 +1,679 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoscan.in; do not edit by hand.
+
+# autoscan - Create configure.scan (a preliminary configure.ac) for a package.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Configure_ac;
+use Autom4te::General;
+use Autom4te::FileUtils;
+use Autom4te::XFile;
+use File::Basename;
+use File::Find;
+use strict;
+
+use vars qw(@cfiles @makefiles @shfiles @subdirs %printed);
+
+# The kind of the words we are looking for.
+my @kinds = qw (function header identifier program
+		makevar librarie);
+
+# For each kind, the default macro.
+my %generic_macro =
+  (
+   'function'   => 'AC_CHECK_FUNCS',
+   'header'     => 'AC_CHECK_HEADERS',
+   'identifier' => 'AC_CHECK_TYPES',
+   'program'    => 'AC_CHECK_PROGS',
+   'library'    => 'AC_CHECK_LIB'
+  );
+
+my %kind_comment =
+  (
+   'function'   => 'Checks for library functions.',
+   'header'     => 'Checks for header files.',
+   'identifier' => 'Checks for typedefs, structures, and compiler characteristics.',
+   'program'    => 'Checks for programs.',
+  );
+
+# $USED{KIND}{ITEM} is the list of locations where the ITEM (of KIND) was used
+# in the user package.
+# For instance $USED{function}{alloca} is the list of `file:line' where
+# `alloca (...)' appears.
+my %used = ();
+
+# $MACRO{KIND}{ITEM} is the list of macros to use to test ITEM.
+# Initialized from lib/autoscan/*.  E.g., $MACRO{function}{alloca} contains
+# the singleton AC_FUNC_ALLOCA.  Some require several checks.
+my %macro = ();
+
+# $NEEDED_MACROS{MACRO} is an array of locations requiring MACRO.
+# E.g., $NEEDED_MACROS{AC_FUNC_ALLOCA} is the list of `file:line' containing
+# `alloca (...)'.
+my %needed_macros =
+  (
+   'AC_PREREQ' => [$me],
+  );
+
+my $configure_scan = 'configure.scan';
+my $log;
+
+# Autoconf and lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/x86_64-unknown-linux-gnu/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+my @prepend_include;
+my @include = ('//share/autoconf');
+
+# $help
+# -----
+$help = "Usage: $0 [OPTION]... [SRCDIR]
+
+Examine source files in the directory tree rooted at SRCDIR, or the
+current directory if none is given.  Search the source files for
+common portability problems, check for incompleteness of
+`configure.ac', and create a file `$configure_scan' which is a
+preliminary `configure.ac' for that package.
+
+  -h, --help          print this help, then exit
+  -V, --version       print version number, then exit
+  -v, --verbose       verbosely report processing
+  -d, --debug         don't remove temporary files
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $version
+# --------
+$version = "autoscan (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+
+
+
+## ------------------------ ##
+## Command line interface.  ##
+## ------------------------ ##
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ('I|include=s' => \@include,
+	  'B|prepend-include=s' => \@prepend_include);
+
+  die "$me: too many arguments
+Try `$me --help' for more information.\n"
+    if @ARGV > 1;
+
+  my $srcdir = $ARGV[0] || ".";
+
+  verb "srcdir = $srcdir";
+  chdir $srcdir || error "cannot cd to $srcdir: $!";
+}
+
+
+# init_tables ()
+# --------------
+# Put values in the tables of what to do with each token.
+sub init_tables ()
+{
+  # The data file format supports only one line of macros per function.
+  # If more than that is required for a common portability problem,
+  # a new Autoconf macro should probably be written for that case,
+  # instead of duplicating the code in lots of configure.ac files.
+  my $file = find_file ("autoscan/autoscan.list",
+			reverse (@prepend_include), @include);
+  my $table = new Autom4te::XFile "< " . open_quote ($file);
+  my $tables_are_consistent = 1;
+
+  while ($_ = $table->getline)
+    {
+      # Ignore blank lines and comments.
+      next
+	if /^\s*$/ || /^\s*\#/;
+
+      # '<kind>: <word> <macro invocation>' or...
+      # '<kind>: <word> warn: <message>'.
+      if (/^(\S+):\s+(\S+)\s+(\S.*)$/)
+	{
+	  my ($kind, $word, $macro) = ($1, $2, $3);
+	  error "$file:$.: invalid kind: $_"
+	    unless grep { $_ eq $kind } @kinds;
+	  push @{$macro{$kind}{$word}}, $macro;
+	}
+      else
+	{
+	  error "$file:$.: invalid definition: $_";
+	}
+    }
+
+  if ($debug)
+    {
+      foreach my $kind (@kinds)
+	{
+	  foreach my $word (sort keys %{$macro{$kind}})
+	    {
+	      print "$kind: $word: @{$macro{$kind}{$word}}\n";
+	    }
+	}
+
+    }
+}
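+
+# Illustrative autoscan.list entries in the format parsed above (the
+# alloca line mirrors the %macro example near the top of this file; the
+# shipped data files may differ):
+#
+#   function: alloca AC_FUNC_ALLOCA
+#   header: stdlib.h AC_CHECK_HEADERS
+#   program: cc warn: consider checking for a C compiler with AC_PROG_CC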
+
+
+# used ($KIND, $WORD, [$WHERE])
+# -----------------------------
+# $WORD is used as a $KIND.
+sub used ($$;$)
+{
+  my ($kind, $word, $where) = @_;
+  $where ||= "$File::Find::name:$.";
+  if (
+      # Check for all the libraries.  But `-links' is certainly a
+      # `find' argument, and `-le', a `test' argument.
+      ($kind eq 'library' && $word !~ /^(e|inks)$/)
+      # Other than libraries are to be checked only if listed in
+      # the Autoscan library files.
+      || defined $macro{$kind}{$word}
+     )
+    {
+      push (@{$used{$kind}{$word}}, $where);
+    }
+}
+
+
+
+## ----------------------- ##
+## Scanning source files.  ##
+## ----------------------- ##
+
+
+# scan_c_file ($FILE-NAME)
+# ------------------------
+sub scan_c_file ($)
+{
+  my ($file_name) = @_;
+  push @cfiles, $File::Find::name;
+
+  # Nonzero if in a multiline comment.
+  my $in_comment = 0;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      if ($in_comment && s,^.*?\*/,,)
+	{
+	  $in_comment = 0;
+	}
+      # The whole line is inside a comment.
+      next if $in_comment;
+      # All on one line.
+      s,/\*.*?\*/,,g;
+
+      # Starting on this line.
+      if (s,/\*.*$,,)
+	{
+	  $in_comment = 1;
+	}
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*//)
+	{
+	  if (/^include\s*<([^>]*)>/)
+	    {
+	      used ('header', $1);
+	    }
+	  if (s/^(if|ifdef|ifndef|elif)\s+//)
+	    {
+	      foreach my $word (split (/\W+/))
+		{
+		  used ('identifier', $word)
+		    unless $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+		}
+	    }
+	  # Ignore other preprocessor directives.
+	  next;
+	}
+
+      # Remove string and character constants.
+      s,\"[^\"]*\",,g;
+      s,\'[^\']*\',,g;
+
+      # Tokens in the code.
+      # Maybe we should ignore function definitions (in column 0)?
+      while (s/\b([a-zA-Z_]\w*)\s*\(/ /)
+	{
+	  used ('function', $1);
+	}
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('identifier', $1);
+	}
+    }
+
+  $file->close;
+}
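+
+# For illustration, a C line such as `#include <unistd.h>' records
+# used ('header', 'unistd.h'), and a call like `getline (fp)' in the
+# body records used ('function', 'getline'); whether either leads to a
+# check in configure.scan depends on autoscan.list, as filtered by &used
+# above.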
+
+
+# scan_makefile($MAKEFILE-NAME)
+# -----------------------------
+sub scan_makefile ($)
+{
+  my ($file_name) = @_;
+  push @makefiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments.
+      s/#.*//;
+
+      # Variable assignments.
+      while (s/\b([a-zA-Z_]\w*)\s*=/ /)
+	{
+	  used ('makevar', $1);
+	}
+      # Be sure to catch a whole word.  For instance `lex$U.$(OBJEXT)'
+      # is a single token.  Otherwise we might believe `lex' is needed.
+      foreach my $word (split (/\s+/))
+	{
+	  # Libraries.
+	  if ($word =~ /^-l([a-zA-Z_]\w*)$/)
+	    {
+	      used ('library', $1);
+	    }
+	  # Tokens in the code.
+	  # We allow some additional characters, e.g., `+', since
+	  # autoscan/programs includes `c++'.
+	  if ($word =~ /^[a-zA-Z_][\w+]*$/)
+	    {
+	      used ('program', $word);
+	    }
+	}
+    }
+
+  $file->close;
+}
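+
+# Illustration: a Makefile line `LIBS = -lm' records both
+# used ('makevar', 'LIBS') and used ('library', 'm'), while a command
+# mentioning `gcc' records used ('program', 'gcc').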
+
+
+# scan_sh_file($SHELL-SCRIPT-NAME)
+# --------------------------------
+sub scan_sh_file ($)
+{
+  my ($file_name) = @_;
+  push @shfiles, $File::Find::name;
+
+  my $file = new Autom4te::XFile "< " . open_quote ($file_name);
+
+  while ($_ = $file->getline)
+    {
+      # Strip out comments and variable references.
+      s/#.*//;
+      s/\${[^\}]*}//g;
+      s/@[^@]*@//g;
+
+      # Tokens in the code.
+      while (s/\b([a-zA-Z_]\w*)\b/ /)
+	{
+	  used ('program', $1);
+	}
+    }
+
+  $file->close;
+}
+
+
+# scan_file ()
+# ------------
+# Called by &find on each file.  $_ contains the current file name,
+# within the current directory of the walk.
+sub scan_file ()
+{
+  # Wanted only if there is no corresponding FILE.in.
+  return
+    if -f "$_.in";
+
+  # Save $_ as File::Find requires it to be preserved.
+  local $_ = $_;
+
+  # Strip a useless leading `./'.
+  $File::Find::name =~ s,^\./,,;
+
+  if ($_ ne '.' and -d $_ and
+      -f "$_/configure.in"  ||
+      -f "$_/configure.ac"  ||
+      -f "$_/configure.gnu" ||
+      -f "$_/configure")
+    {
+      $File::Find::prune = 1;
+      push @subdirs, $File::Find::name;
+    }
+  if (/\.[chlym](\.in)?$/)
+    {
+      used 'program', 'cc', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif (/\.(cc|cpp|cxx|CC|C|hh|hpp|hxx|HH|H|yy|ypp|ll|lpp)(\.in)?$/)
+    {
+      used 'program', 'c++', $File::Find::name;
+      scan_c_file ($_);
+    }
+  elsif ((/^((?:GNUm|M|m)akefile)(\.in)?$/ && ! -f "$1.am")
+	 || /^(?:GNUm|M|m)akefile(\.am)?$/)
+    {
+      scan_makefile ($_);
+    }
+  elsif (/\.sh(\.in)?$/)
+    {
+      scan_sh_file ($_);
+    }
+}
+
+
+# scan_files ()
+# -------------
+# Read through the files and collect lists of tokens in them
+# that might create nonportabilities.
+sub scan_files ()
+{
+  find (\&scan_file, '.');
+
+  if ($verbose)
+    {
+      print "cfiles: @cfiles\n";
+      print "makefiles: @makefiles\n";
+      print "shfiles: @shfiles\n";
+
+      foreach my $kind (@kinds)
+	{
+	  print "\n$kind:\n";
+	  foreach my $word (sort keys %{$used{$kind}})
+	    {
+	      print "$word: @{$used{$kind}{$word}}\n";
+	    }
+	}
+    }
+}
+
+
+## ----------------------- ##
+## Output configure.scan.  ##
+## ----------------------- ##
+
+
+# output_kind ($FILE, $KIND)
+# --------------------------
+sub output_kind ($$)
+{
+  my ($file, $kind) = @_;
+  # Lists of words to be checked with the generic macro.
+  my @have;
+
+  print $file "\n# $kind_comment{$kind}\n"
+    if exists $kind_comment{$kind};
+  foreach my $word (sort keys %{$used{$kind}})
+    {
+      # Output the needed macro invocations in $configure_scan if not
+      # already printed, and remember these macros are needed.
+      foreach my $macro (@{$macro{$kind}{$word}})
+	{
+	  if ($macro =~ /^warn:\s+(.*)/)
+	    {
+	      my $message = $1;
+	      foreach my $location (@{$used{$kind}{$word}})
+		{
+		  warn "$location: warning: $message\n";
+		}
+	    }
+	  elsif (exists $generic_macro{$kind}
+	      && $macro eq $generic_macro{$kind})
+	    {
+	      push (@have, $word);
+	      push (@{$needed_macros{"$generic_macro{$kind}([$word])"}},
+		    @{$used{$kind}{$word}});
+	    }
+	  else
+	    {
+	      if (! $printed{$macro})
+		{
+		  print $file "$macro\n";
+		  $printed{$macro} = 1;
+		}
+	      push (@{$needed_macros{$macro}},
+		    @{$used{$kind}{$word}});
+	    }
+	}
+    }
+  print $file "$generic_macro{$kind}([" . join(' ', sort(@have)) . "])\n"
+    if @have;
+}
+
+
+# output_libraries ($FILE)
+# ------------------------
+sub output_libraries ($)
+{
+  my ($file) = @_;
+
+  print $file "\n# Checks for libraries.\n";
+  foreach my $word (sort keys %{$used{'library'}})
+    {
+      print $file "# FIXME: Replace `main' with a function in `-l$word':\n";
+      print $file "AC_CHECK_LIB([$word], [main])\n";
+    }
+}
+
+
+# output ($CONFIGURE_SCAN)
+# ------------------------
+# Print a proto configure.ac.
+sub output ($)
+{
+  my $configure_scan = shift;
+  my %unique_makefiles;
+
+  my $file = new Autom4te::XFile "> " . open_quote ($configure_scan);
+
+  print $file
+    ("#                                               -*- Autoconf -*-\n" .
+     "# Process this file with autoconf to produce a configure script.\n" .
+     "\n" .
+     "AC_PREREQ([2.68])\n" .
+     "AC_INIT([FULL-PACKAGE-NAME], [VERSION], [BUG-REPORT-ADDRESS])\n");
+  if (defined $cfiles[0])
+    {
+      print $file "AC_CONFIG_SRCDIR([$cfiles[0]])\n";
+      print $file "AC_CONFIG_HEADERS([config.h])\n";
+    }
+
+  output_kind ($file, 'program');
+  output_kind ($file, 'makevar');
+  output_libraries ($file);
+  output_kind ($file, 'header');
+  output_kind ($file, 'identifier');
+  output_kind ($file, 'function');
+
+  print $file "\n";
+  if (@makefiles)
+    {
+      # Change DIR/Makefile.in to DIR/Makefile.
+      foreach my $m (@makefiles)
+	{
+	  $m =~ s/\.(?:in|am)$//;
+	  $unique_makefiles{$m}++;
+	}
+      print $file ("AC_CONFIG_FILES([",
+		   join ("\n                 ",
+			 sort keys %unique_makefiles), "])\n");
+    }
+  if (@subdirs)
+    {
+      print $file ("AC_CONFIG_SUBDIRS([",
+		   join ("\n                   ",
+			 sort @subdirs), "])\n");
+    }
+  print $file "AC_OUTPUT\n";
+
+  $file->close;
+}
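+
+# For a package with a single C source file and one Makefile.am, the
+# configure.scan emitted above looks roughly like this (illustrative):
+#
+#   AC_PREREQ([2.68])
+#   AC_INIT([FULL-PACKAGE-NAME], [VERSION], [BUG-REPORT-ADDRESS])
+#   AC_CONFIG_SRCDIR([foo.c])
+#   AC_CONFIG_HEADERS([config.h])
+#   ... per-kind checks from output_kind and output_libraries ...
+#   AC_CONFIG_FILES([Makefile])
+#   AC_OUTPUT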
+
+
+
+## --------------------------------------- ##
+## Checking the accuracy of configure.ac.  ##
+## --------------------------------------- ##
+
+
+# &check_configure_ac ($CONFIGURE_AC)
+# -----------------------------------
+# Use autoconf to check if all the suggested macros are included
+# in CONFIGURE_AC.
+sub check_configure_ac ($)
+{
+  my ($configure_ac) = @_;
+
+  # Find what needed macros are invoked in CONFIGURE_AC.
+  # I'd be very happy if someone could explain to me why sort (uniq ...)
+  # doesn't work properly: I need `uniq (sort ...)'.  --akim
+  my $trace_option =
+    join (' --trace=', '',
+	  uniq (sort (map { s/\(.*//; $_ } keys %needed_macros)));
+
+  verb "running: $autoconf $trace_option $configure_ac";
+  my $traces =
+    new Autom4te::XFile "$autoconf $trace_option $configure_ac |";
+
+  while ($_ = $traces->getline)
+    {
+      chomp;
+      my ($file, $line, $macro, @args) = split (/:/, $_);
+      if ($macro =~ /^AC_CHECK_(HEADER|FUNC|TYPE|MEMBER)S$/)
+	{
+	  # To be rigorous, we should distinguish between space and comma
+	  # separated macros.  But there is no point.
+	  foreach my $word (split (/\s|,/, $args[0]))
+	    {
+	      # AC_CHECK_MEMBERS wants `struct' or `union'.
+	      if ($macro eq "AC_CHECK_MEMBERS"
+		  && $word =~ /^stat.st_/)
+		{
+		  $word = "struct " . $word;
+		}
+	      delete $needed_macros{"$macro([$word])"};
+	    }
+	}
+      else
+	{
+	  delete $needed_macros{$macro};
+	}
+    }
+
+  $traces->close;
+
+  # Report the missing macros.
+  foreach my $macro (sort keys %needed_macros)
+    {
+      warn ("$configure_ac: warning: missing $macro wanted by: "
+	    . (${$needed_macros{$macro}}[0])
+	    . "\n");
+      print $log "$me: warning: missing $macro wanted by: \n";
+      foreach my $need (@{$needed_macros{$macro}})
+	{
+	  print $log "\t$need\n";
+	}
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$log = new Autom4te::XFile "> " . open_quote ("$me.log");
+
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+my $configure_ac = find_configure_ac;
+init_tables;
+scan_files;
+output ('configure.scan');
+if (-f $configure_ac)
+  {
+    check_configure_ac ($configure_ac);
+  }
+# This close is really needed.  For some reason, probably best named
+# a bug, it seems that the dtor of $LOG is not called automatically
+# at END.  It results in a truncated file.
+$log->close;
+exit 0;
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoupdate b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoupdate
new file mode 100755
index 0000000..f0939c5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/autoupdate
@@ -0,0 +1,1064 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from autoupdate.in; do not edit by hand.
+
+# autoupdate - modernize an Autoconf file.
+# Copyright (C) 1994, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Originally written by David MacKenzie <djm@gnu.ai.mit.edu>.
+# Rewritten by Akim Demaille <akim@freefriends.org>.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::ChannelDefs;
+use Autom4te::Channels;
+use Autom4te::Configure_ac;
+use Autom4te::FileUtils;
+use Autom4te::General;
+use Autom4te::XFile;
+use File::Basename;
+use strict;
+
+# Lib files.
+my $autom4te = $ENV{'AUTOM4TE'} || '/x86_64-unknown-linux-gnu/bin/autom4te';
+my $autoconf = "$autom4te --language=autoconf";
+# We need to find m4sugar.
+my @prepend_include;
+my @include = ('//share/autoconf');
+my $force = 0;
+# m4.
+my $m4 = $ENV{"M4"} || '/usr/bin/m4';
+
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [TEMPLATE-FILE]...
+
+Update each TEMPLATE-FILE if given, or `configure.ac' if present,
+or else `configure.in', to the syntax of the current version of
+Autoconf.  The original files are backed up.
+
+Operation modes:
+  -h, --help                 print this help, then exit
+  -V, --version              print version number, then exit
+  -v, --verbose              verbosely report processing
+  -d, --debug                don't remove temporary files
+  -f, --force                consider all files obsolete
+
+Library directories:
+  -B, --prepend-include=DIR  prepend directory DIR to search path
+  -I, --include=DIR          append directory DIR to search path
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+# $VERSION
+# --------
+$version = "autoupdate (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Akim Demaille.
+";
+
+## ---------- ##
+## Routines.  ##
+## ---------- ##
+
+
+# parse_args ()
+# -------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  my $srcdir;
+
+  getopt ('I|include=s'         => \@include,
+	  'B|prepend-include=s' => \@prepend_include,
+	  'f|force'             => \$force);
+
+  if (! @ARGV)
+    {
+      my $configure_ac = require_configure_ac;
+      push @ARGV, $configure_ac;
+    }
+}
+
+
+
+# ----------------- #
+# Autoconf macros.  #
+# ----------------- #
+
+my (%ac_macros, %au_macros, %m4_builtins);
+
+# HANDLE_AUTOCONF_MACROS ()
+# -------------------------
+# @M4_BUILTINS -- M4 builtins and a useful comment.
+sub handle_autoconf_macros ()
+{
+  # Get the builtins.
+  xsystem ("echo dumpdef | $m4 2>" . shell_quote ("$tmp/m4.defs") . " >/dev/null");
+  my $m4_defs = new Autom4te::XFile "< " . open_quote ("$tmp/m4.defs");
+  while ($_ = $m4_defs->getline)
+    {
+      $m4_builtins{$1} = 1
+	if /^(\w+):/;
+    }
+  $m4_defs->close;
+
+  my $macros = new Autom4te::XFile ("$autoconf"
+				    . " --trace AU_DEFINE:'AU:\$f:\$1'"
+				    . " --trace define:'AC:\$f:\$1'"
+				    . " --melt /dev/null |");
+  while ($_ = $macros->getline)
+    {
+      chomp;
+      my ($domain, $file, $macro) = /^(AC|AU):(.*):([^:]*)$/ or next;
+      if ($domain eq "AU")
+	{
+	  $au_macros{$macro} = 1;
+	}
+      elsif ($file =~ /(^|\/)m4sugar\/(m4sugar|version)\.m4$/)
+	{
+	  # Add the m4sugar macros to m4_builtins.
+	  $m4_builtins{$macro} = 1;
+	}
+      else
+	{
+	  # Autoconf, aclocal, and m4sh macros.
+	  $ac_macros{$macro} = 1;
+	}
+    }
+  $macros->close;
+
+
+  # Don't keep AU macros in @AC_MACROS.
+  delete $ac_macros{$_}
+    foreach (keys %au_macros);
+  # Don't keep M4sugar macros which are redefined by Autoconf,
+  # such as `builtin', `changequote' etc.  See autoconf/autoconf.m4.
+  delete $ac_macros{$_}
+    foreach (keys %m4_builtins);
+  error "no current Autoconf macros found"
+    unless keys %ac_macros;
+  error "no obsolete Autoconf macros found"
+    unless keys %au_macros;
+
+  if ($debug)
+    {
+      print STDERR "Current Autoconf macros:\n";
+      print STDERR join (' ', sort keys %ac_macros) . "\n\n";
+      print STDERR "Obsolete Autoconf macros:\n";
+      print STDERR join (' ', sort keys %au_macros) . "\n\n";
+    }
+
+  # ac.m4 -- autoquoting definitions of the AC macros (M4sugar excluded).
+  # unac.m4 -- undefine the AC macros.
+  my $ac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/ac.m4");
+  print $ac_m4 "# ac.m4 -- autoquoting definitions of the AC macros.\n";
+  my $unac_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unac.m4");
+  print $unac_m4 "# unac.m4 -- undefine the AC macros.\n";
+  foreach (sort keys %ac_macros)
+    {
+      print $ac_m4   "_au_m4_define([$_], [m4_if(\$#, 0, [[\$0]], [[\$0(\$\@)]])])\n";
+      print $unac_m4 "_au_m4_undefine([$_])\n";
+    }
+
+  # m4save.m4 -- save the m4 builtins.
+  # unm4.m4 -- disable the m4 builtins.
+  # m4.m4 -- enable the m4 builtins.
+  my $m4save_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4save.m4");
+  print $m4save_m4 "# m4save.m4 -- save the m4 builtins.\n";
+  my $unm4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/unm4.m4");
+  print $unm4_m4 "# unm4.m4 -- disable the m4 builtins.\n";
+  my $m4_m4 = new Autom4te::XFile "> " . open_quote ("$tmp/m4.m4");
+  print $m4_m4 "# m4.m4 -- enable the m4 builtins.\n";
+  foreach (sort keys %m4_builtins)
+    {
+      print $m4save_m4 "_au__save([$_])\n";
+      print $unm4_m4   "_au__undefine([$_])\n";
+      print $m4_m4     "_au__restore([$_])\n";
+    }
+}
+
+
+## -------------- ##
+## Main program.  ##
+## -------------- ##
+
+parse_args;
+$autoconf .= " --debug" if $debug;
+$autoconf .= " --force" if $force;
+$autoconf .= " --verbose" if $verbose;
+$autoconf .= join (' --include=', '', map { shell_quote ($_) } @include);
+$autoconf .= join (' --prepend-include=', '', map { shell_quote ($_) } @prepend_include);
+
+mktmpdir ('au');
+handle_autoconf_macros;
+
+# $au_changequote -- enable the quote `[', `]' right before any AU macro.
+my $au_changequote =
+  's/\b(' . join ('|', keys %au_macros) . ')\b/_au_m4_changequote([,])$1/g';
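+
+# Illustration of the substitution built above: with an obsolete macro
+# OLD in %au_macros, an input line
+#
+#   OLD(1, 2)
+#
+# is rewritten on the fly (see the eval over each input line below) to
+#
+#   _au_m4_changequote([,])OLD(1, 2)
+#
+# so quoting is re-enabled just before the AU macro expands; the OLD/NEW
+# walkthrough at the end of this file shows the full picture.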
+
+# au.m4 -- definitions of the AU macros.
+xsystem ("$autoconf --trace AU_DEFINE:'_au_defun(\@<:\@\$1\@:>\@,
+\@<:\@\$2\@:>\@)' --melt /dev/null "
+	. ">" . shell_quote ("$tmp/au.m4"));
+
+
+
+## ------------------- ##
+## Process the files.  ##
+## ------------------- ##
+
+foreach my $file (@ARGV)
+  {
+    # We need an actual file.
+    if ($file eq '-')
+      {
+	$file = "$tmp/stdin";
+	system "cat >" . shell_quote ($file);
+      }
+    elsif (! -r "$file")
+      {
+	die "$me: $file: No such file or directory";
+      }
+
+    # input.m4 -- m4 program to produce the updated file.
+    # Load the values, the dispatcher, neutralize m4, and the prepared
+    # input file.
+    my $input_m4 = <<\EOF;
+      divert(-1)                                            -*- Autoconf -*-
+      changequote([,])
+
+      # Define our special macros:
+      define([_au__defn], defn([defn]))
+      define([_au__divert], defn([divert]))
+      define([_au__ifdef], defn([ifdef]))
+      define([_au__include], defn([include]))
+      define([_au___undefine], defn([undefine]))
+      define([_au__undefine], [_au__ifdef([$1], [_au___undefine([$1])])])
+      define([_au__save], [m4_ifdef([$1],
+	[m4_define([_au_$1], _m4_defn([$1]))])])
+      define([_au__restore],
+	[_au_m4_ifdef([_au_$1],
+	  [_au_m4_define([$1], _au__defn([_au_$1]))])])
+
+      # Set up m4sugar.
+      include(m4sugar/m4sugar.m4)
+
+      # Redefine __file__ to make warnings nicer; $file is replaced below.
+      m4_define([__file__], [$file])
+
+      # Redefine m4_location to fix the line number.
+      m4_define([m4_location], [__file__:m4_eval(__line__ - _au__first_line)])
+
+      # Move all the builtins into the `_au_' pseudo namespace
+      m4_include([m4save.m4])
+
+      # _au_defun(NAME, BODY)
+      # ---------------------
+      # Define NAME to BODY, plus AU activation/deactivation.
+      _au_m4_define([_au_defun],
+      [_au_m4_define([$1],
+      [_au_enable()dnl
+      $2[]dnl
+      _au_disable()])])
+
+      # Import the definition of the obsolete macros.
+      _au__include([au.m4])
+
+
+      ## ------------------------ ##
+      ## _au_enable/_au_disable.  ##
+      ## ------------------------ ##
+
+      # They work as a pair: each time an AU macro is activated, it runs
+      # _au_enable, and at its end it runs _au_disable (see _au_defun
+      # above).  AU macros might use AU macros, which should
+      # enable/disable only for the outer AU macros.
+      #
+      # `_au_enabled' is used to this end, determining whether we really
+      # enable/disable.
+
+
+      # __au_enable
+      # -----------
+      # Reenable the builtins, m4sugar, and the autoquoting AC macros.
+      _au_m4_define([__au_enable],
+      [_au__divert(-1)
+      # Enable special characters.
+      _au_m4_changecom([#])
+
+      _au__include([m4.m4])
+      _au__include([ac.m4])
+
+      _au__divert(0)])
+
+      # _au_enable
+      # ----------
+      # Called at the beginning of all the obsolete macros.  If this is the
+      # outermost level, call __au_enable.
+      _au_m4_define([_au_enable],
+      [_au_m4_ifdef([_au_enabled],
+		 [],
+		 [__au_enable()])_au_dnl
+      _au_m4_pushdef([_au_enabled])])
+
+
+      # __au_disable
+      # ------------
+      # Disable the AC autoquoting macros, m4sugar, and m4.
+      _au_m4_define([__au_disable],
+      [_au__divert(-1)
+      _au__include([unac.m4])
+      _au__include([unm4.m4])
+
+      # Disable special characters.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au__divert(0)])
+
+      # _au_disable
+      # -----------
+      # Called at the end of all the obsolete macros.  If we are at the
+      # outermost level, call __au_disable.
+      _au_m4_define([_au_disable],
+      [_au_m4_popdef([_au_enabled])_au_dnl
+      _au_m4_ifdef([_au_enabled],
+		[],
+		[__au_disable()])])
+
+
+      ## ------------------------------- ##
+      ## Disable, and process the file.  ##
+      ## ------------------------------- ##
+      # The AC autoquoting macros are not loaded yet, hence invoking
+      # `_au_disable' would be wrong.
+      _au__include([unm4.m4])
+
+      # Disable special characters, and set the first line number.
+      _au_m4_changequote()
+      _au_m4_changecom()
+
+      _au_m4_define(_au__first_line, _au___line__)_au__divert(0)_au_dnl
+EOF
+
+    $input_m4 =~ s/^      //mg;
+    $input_m4 =~ s/\$file/$file/g;
+
+    # prepared input -- input, but reenables the quote before each AU macro.
+    open INPUT_M4, "> " . open_quote ("$tmp/input.m4")
+       or error "cannot open: $!";
+    open FILE, "< " . open_quote ($file)
+       or error "cannot open: $!";
+    print INPUT_M4 "$input_m4";
+    while (<FILE>)
+       {
+	 eval $au_changequote;
+	 print INPUT_M4;
+       }
+    close FILE
+       or error "cannot close $file: $!";
+    close INPUT_M4
+       or error "cannot close $tmp/input.m4: $!";
+
+    # Now ask m4 to perform the update.
+    xsystem ("$m4 --include=" . shell_quote ($tmp)
+	     . join (' --include=', '', map { shell_quote ($_) } reverse (@prepend_include))
+	     . join (' --include=', '', map { shell_quote ($_) } @include)
+	     . " " . shell_quote ("$tmp/input.m4") . " > " . shell_quote ("$tmp/updated"));
+    update_file ("$tmp/updated",
+		 "$file" eq "$tmp/stdin" ? '-' : "$file");
+  }
+exit 0;
+
+
+#		  ## ---------------------------- ##
+#		  ## How `autoupdate' functions.  ##
+#		  ## ---------------------------- ##
+#
+# The task of `autoupdate' is not trivial: the biggest difficulty being
+# that you must limit the changes to the parts that really need to be
+# updated.  Finding a satisfying implementation proved to be quite hard,
+# as this is the fifth implementation of `autoupdate'.
+#
+# Below, we will use a simple example of an obsolete macro:
+#
+#     AU_DEFUN([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))])
+#     AC_DEFUN([NEW], [echo "sum($1) = $2"])
+#
+# the input file contains
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Of course the expected output is
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0], [0])
+#
+#
+# # First implementation: sed
+# # =========================
+#
+# The first implementation was only able to change the name of obsolete
+# macros.
+#
+# The file `acoldnames.m4' defined the old names based on the new names.
+# It was simple then to produce a sed script such as:
+#
+#     s/OLD/NEW/g
+#
+# Updating merely consisted in running this script on the file to
+# update.
+#
+# This scheme suffers from an obvious limitation: that `autoupdate' was
+# unable to cope with new macros that just swap some of its arguments
+# compared to the old macro.  Fortunately, that was enough to upgrade
+# from Autoconf 1 to Autoconf 2.  (But I have no idea whether the
+# changes in Autoconf 2 were precisely limited by this constraint.)
+#
+#
+# # Second implementation: hooks
+# # ============================
+#
+# Version 2.15 of Autoconf brought a vast number of changes compared
+# to 2.13, so a solution was needed.  One could think of extending the
+# `sed' scripts with specialized code for complex macros.  However, this
+# approach is of course full of flaws:
+#
+# a. the Autoconf maintainers have to write these snippets, which we
+#    simply don't want to do,
+#
+# b. I really don't think you'll ever manage to handle the quoting of
+#    m4 with a sed script.
+#
+# To satisfy a., let's remark that the code which implements the old
+# features in terms of the new features is exactly the code which should
+# replace the old code.
+#
+# To answer point b., as usual in the history of Autoconf, the answer,
+# at least on paper, is simple: m4 is the best tool to parse m4, so
+# let's use m4.
+#
+# Therefore the specification is:
+#
+#     I want to be able to tell Autoconf, well, m4, that the macro I
+#     am currently defining is an obsolete macro (so that the user is
+#     warned), and its code is the code to use when running autoconf,
+#     but that the very same code has to be used when running
+#     autoupdate.  To summarize, the interface I want is
+#     `AU_DEFUN(OLD-NAME, NEW-CODE)'.
+#
+#
+# Now for the technical details.
+#
+# When running autoconf, except for the warning, AU_DEFUN is basically
+# AC_DEFUN.
+#
+# When running autoupdate, we want *only* OLD-NAMEs to be expanded.
+# This obviously means that acgeneral.m4 and acspecific.m4 must not be
+# loaded.  Nonetheless, because we want to use a rich set of m4
+# features, m4sugar.m4 is needed.  Please note that not loading
+# Autoconf's macros is a good thing on two counts:
+#
+# - we do get an updated `configure.ac', not a `configure'!
+#
+# - the old macros are replaced by *calls* to the new macros, not by the
+#   bodies of the new macros, since those bodies are not defined!!!
+#   (Whoa, that's really beautiful!)
+#
+# Additionally we need to disable the quotes when reading the input for
+# two reasons: first because otherwise `m4' will swallow the quotes of
+# other macros:
+#
+#     NEW([1, 2], 3)
+#     => NEW(1, 2, 3)
+#
+# and second, because we want to update the macro calls which are
+# quoted, i.e., we want
+#
+#     FOO([OLD(1, 2)])
+#     => FOO([NEW([1, 2], [3])])
+#
+# If we didn't disable the quotes, only the macros called at the top
+# level would be updated.
+#
+# So, let's disable the quotes.
+#
+# Well, not quite: m4sugar.m4 still needs to use quotes for some macros.
+# In that case, when running in autoupdate code, each macro first
+# reestablishes the quotes, expands itself, and disables the quotes.
+#
+# Thinking a bit more, you realize that in fact people may use `define',
+# `ifelse' etc. in their files, and you certainly don't want to process
+# them.  Another example is `dnl': you don't want to remove the
+# comments.  You then realize you don't exactly want to import m4sugar:
+# you want to specify when it is enabled (macros active) and disabled.
+# m4sugar provides m4_disable/m4_enable to this end.
+#
+# You're getting close to it.  Now one task remains: how to handle
+# twofold definitions?
+#
+# Remember that the same AU_DEFUN must be understood in two different
+# ways, the AC way, and the AU way.
+#
+# A first solution is to check whether acgeneral.m4 was loaded, but
+# that's definitely not elegant.  Another is simply to install `hooks',
+# that is to say, to stash away, somewhere m4 knows about, late
+# `define's to be triggered *only* in AU mode.
+#
+# You first think of designing AU_DEFUN like this:
+#
+# 1. AC_DEFUN(OLD-NAME,
+#	      [Warn the user OLD-NAME is obsolete.
+#	       NEW-CODE])
+#
+# 2. Store for late AU binding([define(OLD_NAME,
+#				[Reestablish the quotes.
+#				 NEW-CODE
+#				 Disable the quotes.])])
+#
+# but this will not work: NEW-CODE probably uses $1, $2 etc., and these
+# guys will be replaced with the arguments of `Store for late AU binding'
+# when you call it.
+#
+# I don't think there is a way to avoid this with this technology
+# (remember that $1 etc. are *always* expanded in m4).  You may also try
+# to replace them with $[1] to preserve them for a later evaluation, but
+# if `Store for late AU binding' is properly written, it will remain
+# quoted till the end...
+#
+# You have to change technology.  Since the problem is that `$1'
+# etc. should be `consumed' right away, one solution is to define, right
+# now, a second macro, `AU_OLD-NAME', and to install a hook that binds
+# OLD-NAME to AU_OLD-NAME.  Then autoupdate.m4 just needs to run the
+# hooks.  By the way, the same method was used in autoheader.
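+#
+# (As a rough sketch only -- not the actual historical code -- such an
+# AU_DEFUN could look like this, with `_au_hooks' accumulating the late
+# bindings and `warn-obsolete' standing in for the real warning code:
+#
+#     define([AU_DEFUN],
+#     [AC_DEFUN([$1], [warn-obsolete([$1])$2])
+#      define([AU_$1], [$2])
+#      define([_au_hooks],
+#             defn([_au_hooks])[define([$1], defn([AU_$1]))])])
+#
+# so that expanding `_au_hooks' in AU mode rebinds each OLD-NAME to its
+# saved AU_OLD-NAME definition.)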
+#
+#
+# # Third implementation: m4 namespaces by m4sugar
+# # ==============================================
+#
+# Actually, this implementation was just a cleanup of the previous
+# one: instead of defining hooks by hand, m4sugar was equipped
+# with `namespaces'.  What are they?
+#
+# Sometimes we want to disable some *set* of macros, and restore them
+# later.  We provide support for this via namespaces.
+#
+# There are basically three characters playing this scene: defining a
+# macro in a namespace, disabling a namespace, and restoring a namespace
+# (i.e., all the definitions it holds).
+#
+# Technically, defining a MACRO in NAMESPACE means defining the macro
+# named `NAMESPACE::MACRO' to the VALUE.  At the same time, we append
+# `undefine(MACRO)' to the macro named `m4_disable(NAMESPACE)', and
+# similarly a binding of MACRO to the value of `NAMESPACE::MACRO' to
+# `m4_enable(NAMESPACE)'.  These mechanisms allow us to bind the macros
+# of NAMESPACE and to unbind them at will.
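+#
+# (A minimal sketch -- with `_disable_AU' and `_enable_AU' as invented
+# spellings for the accumulated disable/enable code of namespace `AU',
+# both assumed to start out empty -- of what defining `OLD' in that
+# namespace amounts to:
+#
+#     define([AU::OLD], [NEW([$1, $2], m4_eval([$1 + $2]))])
+#     define([_disable_AU], defn([_disable_AU])[undefine([OLD])
+#     ])
+#     define([_enable_AU], defn([_enable_AU])[define([OLD], defn([AU::OLD]))
+#     ])
+#
+# so expanding the disabler unbinds every macro of the namespace, and
+# expanding the enabler binds them all again.)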
+#
+# Of course this implementation is really inefficient: m4 has to grow
+# strings which can quickly become huge, which slows it down significantly.
+#
+# In particular, one should avoid using `define' for temporaries as much
+# as possible.  Now that `define' has quite a complex meaning, it is an
+# expensive operation that should be limited to macros.  Use
+# `m4_define' for temporaries.
+#
+# We used private copies of the macros when entering / exiting the
+# m4sugar namespace; that is much more convenient than fighting with the
+# renamed versions of `define' etc.
+#
+#
+#
+# These two implementations suffered from serious problems:
+#
+# - namespaces were really expensive, and incurred a major performance
+#   loss on `autoconf' itself, not only on `autoupdate'.  One solution
+#   would have been to limit the use of namespaces to `autoupdate', but
+#   that again adds complications to m4sugar, which really doesn't need
+#   them.  So we wanted to get rid of the namespaces.
+#
+# - since the quotes were disabled, autoupdate was sometimes making
+#   wrong guesses, for instance on:
+#
+#     foo([1, 2])
+#
+#   m4 saw 2 arguments: `[1' and `2]'.  A simple solution, somewhat
+#   fragile, is to reestablish the quotes right before all the obsolete
+#   macros, i.e., to use sed so that the previous text becomes
+#
+#     changequote([, ])foo([1, 2])
+#
+#   To this end, one wants to trace the definition of obsolete macros.
+#
+# It was there that the limitations of the namespace approach became
+# painful: because it was complex machinery playing a lot with the
+# builtins of m4 (hence quite fragile), tracing was almost impossible.
+#
+#
+# So this approach was dropped.
+#
+#
+# # The fourth implementation: two steps
+# # ====================================
+#
+# If you drop the use of namespaces, you can no longer compute the
+# updated value and replace the old call with it at the same time.
+#
+# Obviously you will use m4 to compute the updated values, but you may
+# use some other tool to achieve the replacement.  Personally, I trust
+# nobody but m4 to parse m4, so below, m4 will perform both tasks.
+#
+# How can m4 be used to replace *some* macro calls with newer values?
+# Well, that's dead simple: m4 should learn the definitions of the
+# obsolete macros, forget its builtins, disable the quotes, and then run
+# on the input file, which amounts to doing this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# which will result in
+#
+#     dnl The Unbelievable Truth
+#     NEW(1, 2, m4_eval(1 + 2))
+#     NEW([0, 0],
+#	  0)
+#
+# Grpmh.  Two problems.  A minor problem: it would have been much better
+# to have the `m4_eval' computed, and a major problem: you lost the
+# quotation in the result.
+#
+# Let's address the big problem first.  One solution is to have each
+# modern macro rewrite its own calls with the proper quotation, thanks
+# to `$@'.  Again, tracing the `define's makes it possible to know which
+# these macros are, so your input is:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([$1, $2], m4_eval([$1 + $2]))changequote()])
+#     define([NEW], [[NEW($@)]changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     changequote([, ])NEW([0, 0],
+#	  0)
+#
+# which results in
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2],[m4_eval(1 + 2)])
+#     NEW([0, 0],[0])
+#
+# Our problem is solved, i.e., the first call to `NEW' is properly
+# quoted, but we introduced another problem: we changed the layout of
+# the second call, which can be a disaster in the case of huge macro
+# calls (think of `AC_TRY_RUN' for instance).  This example doesn't show
+# it, but we also added parentheses to macros which did not have any:
+#
+#     AC_INIT
+#     => AC_INIT()
+#
+# No big deal for the semantics (unless the macro depends upon $#, which
+# is bad), but the users would not be happy.
+#
+# Additionally, we introduced quotes that were not there before, which is
+# OK in most cases, but could change the semantics of the file.
+#
+# Cruel dilemma: we do want the auto-quoting definition of `NEW' when
+# evaluating `OLD', but we don't when we evaluate the second `NEW'.
+# Back to namespaces?
+#
+# No.
+#
+#
+# # Second step: replacement
+# # ------------------------
+#
+# No, as announced above, we will work in two steps: in a first step we
+# compute the updated values, and in a second step we replace them.  Our
+# goal is something like this:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD], [NEW([1, 2], [3])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., the new value of `OLD' is precomputed using the auto-quoting
+# definition of `NEW' and the m4 builtins.  We'll see how afterwards;
+# for now, let's finish with the replacement.
+#
+# Of course the solution above is wrong: if there were other calls to
+# `OLD' with different arguments, we would smash them all to the same
+# value.  But it is quite easy to generalize the scheme above:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+#     define([OLD], [defn([OLD($@)])changequote()])
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# i.e., for each call to an obsolete macro, we build a mapping `call =>
+# value', and use a macro to dispatch these values.  This results in:
+#
+#     dnl The Unbelievable Truth
+#     NEW([1, 2], [3])
+#     NEW([0, 0],
+#	  0)
+#
+# In French, we say `Youpi !', which you might roughly translate as
+# `Yippee!'.
+#
+#
+# # First step: computation
+# # -----------------------
+#
+# Let's study the anatomy of the file, and name its sections:
+#
+# prologue
+#     divert(-1)dnl
+#     changequote([, ])
+# values
+#     define([OLD([1],[2])], [NEW([1, 2], [3])])
+# dispatcher
+#     define([OLD], [defn([OLD($@)])changequote()])
+# disabler
+#     undefine([dnl])
+#     undefine([m4_eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+# input
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+#
+# # Computing the `values' section
+# # ..............................
+#
+# First we need to get the list of all the AU macro uses.  To this end,
+# first get the list of all the AU macro names by tracing `AU_DEFUN' in
+# the initialization of autoconf.  This list is computed in the file
+# `au.txt' below.
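+#
+# (For instance -- a sketch, not the exact invocation this script uses --
+# the list of AU macro names could be gathered with something like
+#
+#     autoconf --trace='AU_DEFUN:$1' | sort -u >au.txt
+#
+# since `--trace=MACRO:FORMAT' reports every traced call using FORMAT.)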
+#
+# Then use this list to trace all the AU macro uses in the input.  The
+# goal is to obtain, in the case of our example:
+#
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# This is the file `values.in' below.
+#
+# We want to evaluate this with only the builtins (in fact m4sugar), the
+# auto-quoting definitions of the new macros (`new.m4'), and the
+# definitions of the old macros (`old.m4').  Computing these last two
+# files is easy: it's just a matter of using the right `--trace' option.
+#
+# So the content of `values.in' is:
+#
+#     include($autoconf_dir/m4sugar.m4)
+#     m4_include(new.m4)
+#     m4_include(old.m4)
+#     divert(0)dnl
+#     [define([OLD([1],[2])],]@<<@OLD([1],[2])@>>@[)]
+#
+# We run m4 on it, which yields:
+#
+#     define([OLD([1],[2])],@<<@NEW([1, 2], [3])@>>@)
+#
+# Transform `@<<@' and `@>>@' into quotes and we get
+#
+#     define([OLD([1],[2])],[NEW([1, 2], [3])])
+#
+# This is `values.m4'.
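+#
+# (The final transformation is trivial; as a sketch, something like
+#
+#     sed -e 's/@<<@/[/g' -e 's/@>>@/]/g' <values.out >values.m4
+#
+# where `values.out' is just a name invented here for the raw m4 output.)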
+#
+#
+# # Computing the `dispatcher' section
+# # ..................................
+#
+# The `prologue' and the `disabler' are simple and need no commenting.
+#
+# To compute the `dispatcher' (`dispatch.m4'), again, it is a simple
+# matter of using the right `--trace'.
+#
+# Finally, the input is not exactly the input file; rather, it is the
+# input file with the `changequote's added.  To this end, we build
+# `quote.sed'.
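+#
+# (`quote.sed' boils down to one naive substitution per obsolete macro;
+# as a sketch, for our example:
+#
+#     s/OLD(/_au_changequote([, ])&/g
+#
+# In the Perl code above, the same job is done by evaluating the
+# `$au_changequote' substitution on each input line.)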
+#
+#
+# # Putting it all together
+# # .......................
+#
+# We build the file `input.m4' which contains:
+#
+#     divert(-1)dnl
+#     changequote([, ])
+#     include(values.m4)
+#     include(dispatch.m4)
+#     undefine([dnl])
+#     undefine([eval])
+#     # Some more undefines...
+#     changequote()
+#     divert(0)dnl
+#     dnl The Unbelievable Truth
+#     changequote([, ])OLD(1, 2)
+#     NEW([0, 0],
+#	  0)
+#
+# And we just run m4 on it.  Et voila`, Monsieur !  Mais oui, mais oui.
+#
+# Well, there are a few additional technicalities.  For instance, we
+# rely on `changequote', `ifelse' and `defn', but we don't want to
+# interpret the user's own changequotes, so we simply use other names:
+# `_au_changequote' etc.
+#
+#
+# # Failure of the fourth approach
+# # ------------------------------
+#
+# This approach is heavily based on traces, but then there is an obvious
+# problem: non-expanded code will never be seen.  In particular, the
+# body of a `define' definition is not seen, so on the input
+#
+#	  define([idem], [OLD(0, [$1])])
+#
+# autoupdate would never see the `OLD', and would not update it.
+# Worse yet, if `idem(0)' were used later, then autoupdate would see
+# that `OLD' is used, compute the result for `OLD(0, 0)', and set up a
+# dispatcher for `OLD'.  Since there was no computed value for `OLD(0,
+# [$1])', the dispatcher would have replaced it with... nothing, leading
+# to
+#
+#	  define([idem], [])
+#
+# With some more thinking, you see that the two-step approach is wrong;
+# the namespace approach was much saner.
+#
+# But you learned a lot; in particular, you realized that using traces
+# can make it possible to simulate namespaces!
+#
+#
+#
+# # The fifth implementation: m4 namespaces by files
+# # ================================================
+#
+# The fourth implementation demonstrated something unsurprising: you
+# cannot precompute, i.e., the namespace approach was the right one.
+# Still, we no longer want namespaces: they're too expensive.  Let's
+# have a look at the way they worked.
+#
+# When updating
+#
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# you evaluate `input.m4':
+#
+#     divert(-1)
+#     changequote([, ])
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#     ...
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# where `m4_disable' undefines the m4 builtins and m4sugar, and disables
+# the quotes and comments:
+#
+#     define([m4_disable],
+#     [undefine([__file__])
+#     ...
+#     changecom(#)
+#     changequote()])
+#
+# `m4_enable' does the converse: reestablish the quotes and comments
+# --easy--, reestablish m4sugar --easy: just load `m4sugar.m4' again--
+# and reenable the builtins.  This latter task requires that you first
+# save the builtins.  And BTW, the definition of `m4_disable' above
+# cannot work: you undefined `changequote' before using it!  So you need
+# to use your private copies of the builtins.  Let's introduce three
+# files for this:
+#
+#  `m4save.m4'
+#    moves the m4 builtins into the `_au_' pseudo namespace,
+#  `unm4.m4'
+#    undefines the builtins,
+#  `m4.m4'
+#    restores them.
+#
+# So `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#
+#     # Import AU.
+#     define([OLD],
+#     [m4_enable()NEW([$1, $2], m4_eval([$1 + $2]))m4_disable()])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)])
+#
+#     define([_au_disable],
+#     [# Disable m4sugar.
+#     # Disable the m4 builtins.
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Based on what we learned in the fourth implementation, we know that we
+# have to enable the quotes *before* any AU macro, and we know we need
+# to build autoquoting versions of the AC macros.  But the autoquoting
+# AC definitions must be disabled in the rest of the file, and enabled
+# only inside AU macros.
+#
+# Using `autoconf --trace', it is easy to build the files
+#
+#   `ac.m4'
+#     defines the autoquoting AC fake macros,
+#   `disable.m4'
+#     undefines the m4sugar and AC autoquoting macros,
+#   `au.m4'
+#     defines the AU macros (such as `OLD' above).
+#
+# Now, `input.m4' is:
+#
+#     divert(-1)
+#     changequote([, ])
+#
+#     include([m4save.m4])
+#     # Import AU.
+#     include([au.m4])
+#
+#     define([_au_enable],
+#     [_au_changecom([#])
+#     _au_include([m4.m4])
+#     _au_include(m4sugar.m4)
+#     _au_include(ac.m4)])
+#
+#     define([_au_disable],
+#     [_au_include([disable.m4])
+#     _au_include([unm4.m4])
+#     # 1. Disable special characters.
+#     _au_changequote()
+#     _au_changecom()])
+#
+#     m4_disable()
+#     dnl The Unbelievable Truth
+#     _au_changequote([, ])OLD(1, 2)
+#     NEW([0, 0], [0])
+#
+# Finally, version V is ready.
+#
+# Well... almost.
+#
+# One slight problem remains: if an AU macro OUTER includes an AU macro
+# INNER, then _au_enable will be run when entering OUTER and again when
+# entering INNER (not good, but not too bad yet).  But when getting out
+# of INNER, _au_disable will disable everything while we are still
+# inside OUTER.  Badaboom.
+#
+# Therefore _au_enable and _au_disable have to be written to work in
+# pairs: each _au_enable pushdef's _au_enabled, and each _au_disable
+# popdef's _au_enabled.  And of course _au_enable and _au_disable are
+# only effective when _au_enabled is *not* defined.
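+#
+# As a sketch, the enable side of such a pair can look like the mirror
+# image of the _au_disable spelled out in the here-document near the
+# top of this script:
+#
+#     _au_m4_define([_au_enable],
+#     [_au_m4_ifdef([_au_enabled],
+#           [],
+#           [__au_enable()])_au_dnl
+#     _au_m4_pushdef([_au_enabled])])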
+#
+# Finally, version V' is ready.  And there is much rejoicing.  (And I
+# have free time again.  I think.  Yeah, right.)
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/cp b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/cp
new file mode 100755
index 0000000..4bddfd1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/cp
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/ifnames b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/ifnames
new file mode 100755
index 0000000..69bffb2
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/ifnames
@@ -0,0 +1,153 @@
+#! /usr/bin/perl -w
+# -*- perl -*-
+# Generated from ifnames.in; do not edit by hand.
+
+eval 'case $# in 0) exec /usr/bin/perl -S "$0";; *) exec /usr/bin/perl -S "$0" "$@";; esac'
+    if 0;
+
+# ifnames - print the identifiers used in C preprocessor conditionals
+
+# Copyright (C) 1994, 1995, 1999, 2000, 2001, 2002, 2003, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Reads from stdin if no files are given.
+# Writes to stdout.
+
+# Written by David MacKenzie <djm@gnu.ai.mit.edu>
+# and Paul Eggert <eggert@twinsun.com>.
+
+BEGIN
+{
+  my $pkgdatadir = $ENV{'autom4te_perllibdir'} || '//share/autoconf';
+  unshift @INC, $pkgdatadir;
+
+  # Override SHELL.  On DJGPP SHELL may not be set to a shell
+  # that can handle redirection and quote arguments correctly,
+  # e.g.: COMMAND.COM.  For DJGPP always use the shell that configure
+  # has detected.
+  $ENV{'SHELL'} = '/bin/sh' if ($^O eq 'dos');
+}
+
+use Autom4te::General;
+use Autom4te::XFile;
+use Autom4te::FileUtils;
+
+# $HELP
+# -----
+$help = "Usage: $0 [OPTION]... [FILE]...
+
+Scan all of the C source FILES (or the standard input, if none are
+given) and write to the standard output a sorted list of all the
+identifiers that appear in those files in `#if', `#elif', `#ifdef', or
+`#ifndef' directives.  Print each identifier on a line, followed by a
+space-separated list of the files in which that identifier occurs.
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+
+Report bugs to <bug-autoconf\@gnu.org>.
+GNU Autoconf home page: <http://www.gnu.org/software/autoconf/>.
+General help using GNU software: <http://www.gnu.org/gethelp/>.
+";
+
+
+# $VERSION
+# --------
+$version = "ifnames (GNU Autoconf) 2.68
+Copyright (C) 2010 Free Software Foundation, Inc.
+License GPLv3+/Autoconf: GNU GPL version 3 or later
+<http://gnu.org/licenses/gpl.html>, <http://gnu.org/licenses/exceptions.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+Written by David J. MacKenzie and Paul Eggert.
+";
+
+
+# &parse_args ()
+# --------------
+# Process any command line arguments.
+sub parse_args ()
+{
+  getopt ();
+}
+
+
+# %OCCURRENCE
+# -----------
+my %occurrence;
+
+
+# &scan_file ($FILE-NAME)
+# -----------------------
+sub scan_file ($)
+{
+  my ($file_name) = @_;
+  my $file = new Autom4te::XFile ("< " . open_quote ($file_name));
+  while ($_ = $file->getline)
+    {
+      # Continuation lines.
+      $_ .= $file->getline
+	while (s/\\$//);
+
+      # Preprocessor directives.
+      if (s/^\s*\#\s*(if|ifdef|ifndef|elif)\s+//)
+	{
+	  # Remove comments.  Not perfect, but close enough.
+	  s(/\*.*?\*/)();
+	  s(/\*.*)();
+	  s(//.*)();
+	  foreach my $word (split (/\W+/))
+	    {
+	      next
+		if $word eq 'defined' || $word !~ /^[a-zA-Z_]/;
+	      $occurrence{$word}{$file_name} = 1;
+	    }
+	}
+    }
+}
+
+
+## ------ ##
+## Main.  ##
+## ------ ##
+
+parse_args();
+foreach (@ARGV)
+  {
+    scan_file ($_);
+  }
+foreach (sort keys %occurrence)
+  {
+    print "$_ ", join (' ', sort keys %{$occurrence{$_}}), "\n";
+  }
+
+### Setup "GNU" style for perl-mode and cperl-mode.
+## Local Variables:
+## perl-indent-level: 2
+## perl-continued-statement-offset: 2
+## perl-continued-brace-offset: 0
+## perl-brace-offset: 0
+## perl-brace-imaginary-offset: 0
+## perl-label-offset: -2
+## cperl-indent-level: 2
+## cperl-brace-offset: 0
+## cperl-continued-brace-offset: 0
+## cperl-label-offset: -2
+## cperl-extra-newline-before-brace: t
+## cperl-merge-trailing-else: nil
+## cperl-continued-statement-offset: 2
+## End:
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/libtool b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/libtool
new file mode 100755
index 0000000..7f521ba
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/libtool
@@ -0,0 +1,10988 @@
+#! /bin/sh
+
+# libtool - Provide generalized library-building support services.
+# Generated automatically by config.status (libtool) 2.4.2
+# Libtool was configured on host amethyst:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+#                 Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+# The names of the tagged configurations supported by this script.
+available_tags="CXX F77 FC GO GCJ RC "
+
+# ### BEGIN LIBTOOL CONFIG
+
+# Which release of libtool.m4 was used?
+macro_version=2.4.2
+macro_revision=1.3337
+
+# Assembler program.
+AS="as"
+
+# DLL creation program.
+DLLTOOL="false"
+
+# Object dumper program.
+OBJDUMP="objdump"
+
+# Whether or not to build shared libraries.
+build_libtool_libs=yes
+
+# Whether or not to build static libraries.
+build_old_libs=yes
+
+# What type of objects to build.
+pic_mode=default
+
+# Whether or not to optimize for fast installation.
+fast_install=yes
+
+# Shell to use when invoking shell scripts.
+SHELL="/bin/sh"
+
+# An echo program that protects backslashes.
+ECHO="printf %s\\n"
+
+# The PATH separator for the build system.
+PATH_SEPARATOR=":"
+
+# The host system.
+host_alias=
+host=x86_64-unknown-linux-gnu
+host_os=linux-gnu
+
+# The build system.
+build_alias=
+build=x86_64-unknown-linux-gnu
+build_os=linux-gnu
+
+# A sed program that does not truncate output.
+SED="/bin/sed"
+
+# Sed that helps us avoid accidentally triggering echo(1) options like -n.
+Xsed="$SED -e 1s/^X//"
+
+# A grep program that handles long lines.
+GREP="/bin/grep"
+
+# An ERE matcher.
+EGREP="/bin/grep -E"
+
+# A literal string matcher.
+FGREP="/bin/grep -F"
+
+# A BSD- or MS-compatible name lister.
+NM="/usr/bin/nm -B"
+
+# Whether we need soft or hard links.
+LN_S="ln -s"
+
+# What is the maximum length of a command?
+max_cmd_len=1572864
+
+# Object file suffix (normally "o").
+objext=o
+
+# Executable file suffix (normally "").
+exeext=
+
+# whether the shell understands "unset".
+lt_unset=unset
+
+# turn spaces into newlines.
+SP2NL="tr \\040 \\012"
+
+# turn newlines into spaces.
+NL2SP="tr \\015\\012 \\040\\040"
+
+# convert $build file names to $host format.
+to_host_file_cmd=func_convert_file_noop
+
+# convert $build files to toolchain format.
+to_tool_file_cmd=func_convert_file_noop
+
+# Method to check whether dependent libraries are shared objects.
+deplibs_check_method="pass_all"
+
+# Command to use when deplibs_check_method = "file_magic".
+file_magic_cmd="\$MAGIC_CMD"
+
+# How to find potential files when deplibs_check_method = "file_magic".
+file_magic_glob=""
+
+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
+want_nocaseglob="no"
+
+# Command to associate shared and link libraries.
+sharedlib_from_linklib_cmd="printf %s\\n"
+
+# The archiver.
+AR="ar"
+
+# Flags to create an archive.
+AR_FLAGS="cru"
+
+# How to feed a file listing to the archiver.
+archiver_list_spec="@"
+
+# A symbol stripping program.
+STRIP="strip"
+
+# Commands used to install an old-style archive.
+RANLIB="ranlib"
+old_postinstall_cmds="chmod 644 \$oldlib~\$RANLIB \$tool_oldlib"
+old_postuninstall_cmds=""
+
+# Whether to use a lock for old archive extraction.
+lock_old_archive_extraction=no
+
+# A C compiler.
+LTCC="gcc"
+
+# LTCC compiler flags.
+LTCFLAGS="-g -O2"
+
+# Take the output of nm and produce a listing of raw symbols and C names.
+global_symbol_pipe="sed -n -e 's/^.*[	 ]\\([ABCDGIRSTW][ABCDGIRSTW]*\\)[	 ][	 ]*\\([_A-Za-z][_A-Za-z0-9]*\\)\$/\\1 \\2 \\2/p' | sed '/ __gnu_lto/d'"
+
+# Transform the output of nm in a proper C declaration.
+global_symbol_to_cdecl="sed -n -e 's/^T .* \\(.*\\)\$/extern int \\1();/p' -e 's/^[ABCDGIRSTW]* .* \\(.*\\)\$/extern char \\1;/p'"
+
+# Transform the output of nm in a C name address pair.
+global_symbol_to_c_name_address="sed -n -e 's/^: \\([^ ]*\\)[ ]*\$/  {\\\"\\1\\\", (void *) 0},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\([^ ]*\\)\$/  {\"\\2\", (void *) \\&\\2},/p'"
+
+# Transform the output of nm in a C name address pair when lib prefix is needed.
+global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \\([^ ]*\\)[ ]*\$/  {\\\"\\1\\\", (void *) 0},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\(lib[^ ]*\\)\$/  {\"\\2\", (void *) \\&\\2},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\([^ ]*\\)\$/  {\"lib\\2\", (void *) \\&\\2},/p'"
+
+# Specify filename containing input files for $NM.
+nm_file_list_spec="@"
+
+# The root where to search for dependent libraries, and in which our libraries should be installed.
+lt_sysroot=
+
+# The name of the directory that contains temporary libtool files.
+objdir=.libs
+
+# Used to examine libraries when file_magic_cmd begins with "file".
+MAGIC_CMD=file
+
+# Must we lock files when doing compilation?
+need_locks="no"
+
+# Manifest tool.
+MANIFEST_TOOL=":"
+
+# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+DSYMUTIL=""
+
+# Tool to change global to local symbols on Mac OS X.
+NMEDIT=""
+
+# Tool to manipulate fat objects and archives on Mac OS X.
+LIPO=""
+
+# ldd/readelf like tool for Mach-O binaries on Mac OS X.
+OTOOL=""
+
+# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
+OTOOL64=""
+
+# Old archive suffix (normally "a").
+libext=a
+
+# Shared library suffix (normally ".so").
+shrext_cmds=".so"
+
+# The commands to extract the exported symbol list from a shared archive.
+extract_expsyms_cmds=""
+
+# Variables whose values should be saved in libtool wrapper scripts and
+# restored at link time.
+variables_saved_for_relink="PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+
+# Do we need the "lib" prefix for modules?
+need_lib_prefix=no
+
+# Do we need a version for libraries?
+need_version=no
+
+# Library versioning type.
+version_type=linux
+
+# Shared library runtime path variable.
+runpath_var=LD_RUN_PATH
+
+# Shared library path variable.
+shlibpath_var=LD_LIBRARY_PATH
+
+# Is shlibpath searched before the hard-coded library search path?
+shlibpath_overrides_runpath=no
+
+# Format of library name prefix.
+libname_spec="lib\$name"
+
+# List of archive names.  First name is the real one, the rest are links.
+# The last name is the one that the linker finds with -lNAME
+library_names_spec="\${libname}\${release}\${shared_ext}\$versuffix \${libname}\${release}\${shared_ext}\$major \$libname\${shared_ext}"
+
+# The coded name of the library, if different from the real name.
+soname_spec="\${libname}\${release}\${shared_ext}\$major"
+
+# Permission mode override for installation of shared libraries.
+install_override_mode=""
+
+# Command to use after installation of a shared archive.
+postinstall_cmds=""
+
+# Command to use after uninstallation of a shared archive.
+postuninstall_cmds=""
+
+# Commands used to finish a libtool library installation in a directory.
+finish_cmds="PATH=\\\"\\\$PATH:/sbin\\\" ldconfig -n \$libdir"
+
+# As "finish_cmds", except a single script fragment to be evaled but
+# not shown.
+finish_eval=""
+
+# Whether we should hardcode library paths into libraries.
+hardcode_into_libs=yes
+
+# Compile-time system search path for libraries.
+sys_lib_search_path_spec="/usr/lib/gcc/x86_64-linux-gnu/4.6 /usr/lib/x86_64-linux-gnu /usr/lib /lib/x86_64-linux-gnu /lib "
+
+# Run-time system search path for libraries.
+sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/lib/i386-linux-gnu/mesa /lib/i386-linux-gnu /usr/lib/i386-linux-gnu /lib/i686-linux-gnu /usr/lib/i686-linux-gnu /usr/local/lib /usr/lib/vmware-tools/lib32/libvmGuestLib.so /usr/lib/vmware-tools/lib64/libvmGuestLib.so /usr/lib/vmware-tools/lib32/libvmGuestLibJava.so /usr/lib/vmware-tools/lib64/libvmGuestLibJava.so /usr/lib/vmware-tools/lib32/libDeployPkg.so /usr/lib/vmware-tools/lib64/libDeployPkg.so /lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu/mesa-egl /usr/lib/x86_64-linux-gnu/mesa /lib32 /usr/lib32 "
+
+# Whether dlopen is supported.
+dlopen_support=yes
+
+# Whether dlopen of programs is supported.
+dlopen_self=yes
+
+# Whether dlopen of statically linked programs is supported.
+dlopen_self_static=no
+
+# Commands to strip libraries.
+old_striplib="strip --strip-debug"
+striplib="strip --strip-unneeded"
+
+
+# The linker used to build libraries.
+LD="/usr/bin/ld -m elf_x86_64"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC="gcc"
+
+# Is the compiler the GNU compiler?
+with_gcc=yes
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=" -fno-builtin"
+
+# Additional compiler flags for building library objects.
+pic_flag=" -fPIC -DPIC"
+
+# How to pass a linker flag through the compiler.
+wl="-Wl,"
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag="-static"
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec="\${wl}--export-dynamic"
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec="\${wl}--whole-archive\$convenience \${wl}--no-whole-archive"
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object="no"
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds="\$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \${wl}-soname \$wl\$soname -o \$lib"
+archive_expsym_cmds="echo \\\"{ global:\\\" > \$output_objdir/\$libname.ver~
+	    cat \$export_symbols | sed -e \\\"s/\\\\(.*\\\\)/\\\\1;/\\\" >> \$output_objdir/\$libname.ver~
+	    echo \\\"local: *; };\\\" >> \$output_objdir/\$libname.ver~
+	    \$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \${wl}-soname \$wl\$soname \${wl}-version-script \${wl}\$output_objdir/\$libname.ver -o \$lib"
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld="yes"
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec="\${wl}-rpath \${wl}\$libdir"
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=unsupported
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=immediate
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL CONFIG
+
+
+# libtool (GNU libtool) 2.4.2
+# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]... [MODE-ARG]...
+#
+# Provide generalized library-building support services.
+#
+#       --config             show all configuration variables
+#       --debug              enable verbose shell tracing
+#   -n, --dry-run            display commands without modifying any files
+#       --features           display basic configuration information and exit
+#       --mode=MODE          use operation mode MODE
+#       --preserve-dup-deps  don't remove duplicate dependency libraries
+#       --quiet, --silent    don't print informational messages
+#       --no-quiet, --no-silent
+#                            print informational messages (default)
+#       --no-warn            don't display warning messages
+#       --tag=TAG            use configuration variables from tag TAG
+#   -v, --verbose            print more informational messages than default
+#       --no-verbose         don't print the extra informational messages
+#       --version            print version information
+#   -h, --help, --help-all   print short, long, or detailed help message
+#
+# MODE must be one of the following:
+#
+#         clean              remove files from the build directory
+#         compile            compile a source file into a libtool object
+#         execute            automatically set library path, then run a program
+#         finish             complete the installation of libtool libraries
+#         install            install libraries or executables
+#         link               create a library or an executable
+#         uninstall          remove libraries from an installed directory
+#
+# MODE-ARGS vary depending on the MODE.  When passed as first option,
+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#         host-triplet:	$host
+#         shell:		$SHELL
+#         compiler:		$LTCC
+#         compiler flags:		$LTCFLAGS
+#         linker:		$LD (gnu? $with_gnu_ld)
+#         $progname:	(GNU libtool) 2.4.2
+#         automake:	$automake_version
+#         autoconf:	$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
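+#
+# For example, a typical compile-then-link cycle through libtool looks
+# roughly like this (paths and flags are illustrative only):
+#
+#   libtool --mode=compile gcc -g -O -c foo.c
+#   libtool --mode=link gcc -g -O -o libfoo.la foo.lo -rpath /usr/local/lib
+#   libtool --mode=install cp libfoo.la /usr/local/lib/libfoo.la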
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.4.2
+TIMESTAMP=""
+package_revision=1.3337
+
+# Be Bourne compatible
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# NLS nuisances: We save the old values to restore during execute mode.
+lt_user_locale=
+lt_safe_locale=
+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+  eval "if test \"\${$lt_var+set}\" = set; then
+          save_$lt_var=\$$lt_var
+          $lt_var=C
+	  export $lt_var
+	  lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
+	  lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+	fi"
+done
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+$lt_unset CDPATH
+
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+} # Extended-shell func_dirname implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result="${1##*/}"
+} # Extended-shell func_basename implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    case ${1} in
+      */*) func_dirname_result="${1%/*}${2}" ;;
+      *  ) func_dirname_result="${3}" ;;
+    esac
+    func_basename_result="${1##*/}"
+} # Extended-shell func_dirname_and_basename implementation
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+    # positional parameters, so assign one to ordinary parameter first.
+    func_stripname_result=${3}
+    func_stripname_result=${func_stripname_result#"${1}"}
+    func_stripname_result=${func_stripname_result%"${2}"}
+} # Extended-shell func_stripname implementation
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in the output of double_quote_subst that
+# were `\'-ed in its input.  If an odd number of `\' preceded a '$' in
+# the input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$', and `\' that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon-delimited
+        # list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
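+
+# Illustrative example (not part of upstream libtool): create a nested
+# directory tree, including any missing parents:
+#
+#   func_mkdir_p "$objdir/deps/sub"   # hypothetical path
+#   # behaves like `mkdir -p', but honours $opt_dry_run and calls
+#   # func_fatal_error if the directory still does not exist afterwards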
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
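+
+# Illustrative example (not part of upstream libtool): reserve a private
+# scratch directory and remove it when done ("my_workdir" is hypothetical):
+#
+#   my_workdir=`func_mktempdir`
+#   # ... use "$my_workdir" ...
+#   $RM -r "$my_workdir"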
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution, and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
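+
+# Illustrative example (not part of upstream libtool): quote an argument
+# containing whitespace and a '$' so that a later eval sees it as a single
+# literal word again:
+#
+#   func_quote_for_eval 'a b $c'
+#   eval "set dummy $func_quote_for_eval_result"; shift
+#   # "$1" is now the literal string: a b $c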
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
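+
+# Illustrative example (not part of upstream libtool): echo (unless
+# --silent) and run a command, with a clean-up expression for failure:
+#
+#   func_show_eval '$MKDIR "$objdir"' 'exit $EXIT_FAILURE'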
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
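+
+# Illustrative example (not part of upstream libtool): derive a safe shell
+# variable name from a path:
+#
+#   func_tr_sh '3rd-party/libfoo.la'
+#   # func_tr_sh_result is now: _3rd_party_libfoo_la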
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    func_split_short_opt_arg=${1#??}
+    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}
+} # Extended-shell func_split_short_opt implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    func_split_long_opt_name=${1%%=*}
+    func_split_long_opt_arg=${1#*=}
+} # Extended-shell func_split_long_opt implementation
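+
+# Illustrative example (not part of upstream libtool): splitting a long
+# option at its `=' sign:
+#
+#   func_split_long_opt '--mode=link'
+#   # func_split_long_opt_name=--mode  func_split_long_opt_arg=link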
+
+exit_cmd=:
+
+
+
+
+
+magic="%%%MAGIC variable%%%"
+magic_exe="%%%MAGIC EXE variable%%%"
+
+# Global variables.
+nonopt=
+preserve_args=
+lo2o="s/\\.lo\$/.${objext}/"
+o2lo="s/\\.${objext}\$/.lo/"
+extracted_archives=
+extracted_serial=0
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end.  This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+    eval "${1}+=\${2}"
+} # Extended-shell func_append implementation
+
+# func_append_quoted var value
+# Quote VALUE and append to the end of shell variable VAR, separated
+# by a space.
+func_append_quoted ()
+{
+    func_quote_for_eval "${2}"
+    eval "${1}+=\\ \$func_quote_for_eval_result"
+} # Extended-shell func_append_quoted implementation
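+
+# Illustrative example (not part of upstream libtool): build up a command
+# line word by word, keeping arguments that contain spaces intact
+# ("compile_cmd" is a hypothetical variable):
+#
+#   compile_cmd="gcc -c"
+#   func_append_quoted compile_cmd 'my file.c'
+#   # compile_cmd is now: gcc -c "my file.c"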
+
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+    func_arith_result=$(( $* ))
+} # Extended-shell func_arith implementation
+
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+    func_len_result=${#1}
+} # Extended-shell func_len implementation
+
+
+# func_lo2o object
+func_lo2o ()
+{
+    case ${1} in
+      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+      *)    func_lo2o_result=${1} ;;
+    esac
+} # Extended-shell func_lo2o implementation
+
+
+# func_xform libobj-or-source
+func_xform ()
+{
+    func_xform_result=${1%.*}.lo
+} # Extended-shell func_xform implementation
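+
+# Illustrative example (not part of upstream libtool): the suffix transforms
+# used throughout this script:
+#
+#   func_lo2o  'foo.lo'   # func_lo2o_result  = foo.$objext (e.g. foo.o)
+#   func_xform 'foo.c'    # func_xform_result = foo.lo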
+
+
+# func_fatal_configuration arg...
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+    func_error ${1+"$@"}
+    func_error "See the $PACKAGE documentation for more information."
+    func_fatal_error "Fatal configuration error."
+}
+
+
+# func_config
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+    re_begincf='^# ### BEGIN LIBTOOL'
+    re_endcf='^# ### END LIBTOOL'
+
+    # Default configuration.
+    $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+    # Now print the configurations for the tags.
+    for tagname in $taglist; do
+      $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+    done
+
+    exit $?
+}
+
+# func_features
+# Display the features supported by this script.
+func_features ()
+{
+    echo "host: $host"
+    if test "$build_libtool_libs" = yes; then
+      echo "enable shared libraries"
+    else
+      echo "disable shared libraries"
+    fi
+    if test "$build_old_libs" = yes; then
+      echo "enable static libraries"
+    else
+      echo "disable static libraries"
+    fi
+
+    exit $?
+}
+
+# func_enable_tag tagname
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag.  We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+  # Global variable:
+  tagname="$1"
+
+  re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+  re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+  sed_extractcf="/$re_begincf/,/$re_endcf/p"
+
+  # Validate tagname.
+  case $tagname in
+    *[!-_A-Za-z0-9,/]*)
+      func_fatal_error "invalid tag name: $tagname"
+      ;;
+  esac
+
+  # Don't test for the "default" C tag, as we know it's
+  # there but not specially marked.
+  case $tagname in
+    CC) ;;
+    *)
+      if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+	taglist="$taglist $tagname"
+
+	# Evaluate the configuration.  Be careful to quote the path
+	# and the sed script, to avoid splitting on whitespace, but
+	# also don't use non-portable quotes within backquotes within
+	# quotes; that is why we do it in 2 steps:
+	extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+	eval "$extractedcf"
+      else
+	func_error "ignoring unknown tag $tagname"
+      fi
+      ;;
+  esac
+}
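+
+# Illustrative note (not part of upstream libtool): `func_enable_tag CXX'
+# extracts the block between `# ### BEGIN LIBTOOL TAG CONFIG: CXX' and the
+# matching END marker in this script, evaluates it, and appends CXX to
+# $taglist.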
+
+# func_check_version_match
+# Ensure that we are using m4 macros, and libtool script from the same
+# release of libtool.
+func_check_version_match ()
+{
+  if test "$package_revision" != "$macro_revision"; then
+    if test "$VERSION" != "$macro_version"; then
+      if test -z "$macro_version"; then
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      else
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      fi
+    else
+      cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+    fi
+
+    exit $EXIT_MISMATCH
+  fi
+}
+
+
+# Shorthand for --mode=foo, only valid as the first argument
+case $1 in
+clean|clea|cle|cl)
+  shift; set dummy --mode clean ${1+"$@"}; shift
+  ;;
+compile|compil|compi|comp|com|co|c)
+  shift; set dummy --mode compile ${1+"$@"}; shift
+  ;;
+execute|execut|execu|exec|exe|ex|e)
+  shift; set dummy --mode execute ${1+"$@"}; shift
+  ;;
+finish|finis|fini|fin|fi|f)
+  shift; set dummy --mode finish ${1+"$@"}; shift
+  ;;
+install|instal|insta|inst|ins|in|i)
+  shift; set dummy --mode install ${1+"$@"}; shift
+  ;;
+link|lin|li|l)
+  shift; set dummy --mode link ${1+"$@"}; shift
+  ;;
+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+  shift; set dummy --mode uninstall ${1+"$@"}; shift
+  ;;
+esac
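+
+# Illustrative note (not part of upstream libtool): with the shorthand above,
+# `$progname co ...' is rewritten to `$progname --mode compile ...' before
+# option parsing proper begins.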
+
+
+
+# Option defaults:
+opt_debug=:
+opt_dry_run=false
+opt_config=false
+opt_preserve_dup_deps=false
+opt_features=false
+opt_finish=false
+opt_help=false
+opt_help_all=false
+opt_silent=:
+opt_warning=:
+opt_verbose=:
+opt_silent=false
+opt_verbose=false
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+			;;
+      --config)
+			opt_config=:
+func_config
+			;;
+      --dlopen|-dlopen)
+			optarg="$1"
+			opt_dlopen="${opt_dlopen+$opt_dlopen
+}$optarg"
+			shift
+			;;
+      --preserve-dup-deps)
+			opt_preserve_dup_deps=:
+			;;
+      --features)
+			opt_features=:
+func_features
+			;;
+      --finish)
+			opt_finish=:
+set dummy --mode finish ${1+"$@"}; shift
+			;;
+      --help)
+			opt_help=:
+			;;
+      --help-all)
+			opt_help_all=:
+opt_help=': help-all'
+			;;
+      --mode)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_mode="$optarg"
+case $optarg in
+  # Valid mode arguments:
+  clean|compile|execute|finish|install|link|relink|uninstall) ;;
+
+  # Catch anything else as an error
+  *) func_error "invalid argument for $opt"
+     exit_cmd=exit
+     break
+     ;;
+esac
+			shift
+			;;
+      --no-silent|--no-quiet)
+			opt_silent=false
+preserve_args+=" $opt"
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+preserve_args+=" $opt"
+			;;
+      --no-verbose)
+			opt_verbose=false
+preserve_args+=" $opt"
+			;;
+      --silent|--quiet)
+			opt_silent=:
+preserve_args+=" $opt"
+        opt_verbose=false
+			;;
+      --verbose|-v)
+			opt_verbose=:
+preserve_args+=" $opt"
+opt_silent=false
+			;;
+      --tag)
+			test $# = 0 && func_missing_arg $opt && break
+			optarg="$1"
+			opt_tag="$optarg"
+preserve_args+=" $opt $optarg"
+func_enable_tag "$optarg"
+			shift
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-n*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # save first non-option argument
+  if test "$#" -gt 0; then
+    nonopt="$opt"
+    shift
+  fi
+
+  # preserve --debug
+  test "$opt_debug" = : || preserve_args+=" --debug"
+
+  case $host in
+    *cygwin* | *mingw* | *pw32* | *cegcc*)
+      # don't eliminate duplications in $postdeps and $predeps
+      opt_duplicate_compiler_generated_deps=:
+      ;;
+    *)
+      opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+      ;;
+  esac
+
+  $opt_help || {
+    # Sanity checks first:
+    func_check_version_match
+
+    if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+      func_fatal_configuration "not configured to build any kind of library"
+    fi
+
+    # Darwin sucks
+    eval std_shrext=\"$shrext_cmds\"
+
+    # Only execute mode is allowed to have -dlopen flags.
+    if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
+      func_error "unrecognized option \`-dlopen'"
+      $ECHO "$help" 1>&2
+      exit $EXIT_FAILURE
+    fi
+
+    # Change the help message to a mode-specific one.
+    generic_help="$help"
+    help="Try \`$progname --help --mode=$opt_mode' for more information."
+  }
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+# func_lalib_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+    test -f "$1" &&
+      $SED -e 4q "$1" 2>/dev/null \
+        | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs.  To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway.  Works if `file' does not exist.
+func_lalib_unsafe_p ()
+{
+    lalib_p=no
+    if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+	for lalib_p_l in 1 2 3 4
+	do
+	    read lalib_p_line
+	    case "$lalib_p_line" in
+		\#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+	    esac
+	done
+	exec 0<&5 5<&-
+    fi
+    test "$lalib_p" = yes
+}
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+    func_lalib_p "$1"
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+    func_ltwrapper_exec_suffix=
+    case $1 in
+    *.exe) ;;
+    *) func_ltwrapper_exec_suffix=.exe ;;
+    esac
+    $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes FILE is an ltwrapper executable; uses FILE to determine the
+# appropriate filename for a temporary ltwrapper script.
+func_ltwrapper_scriptname ()
+{
+    func_dirname_and_basename "$1" "" "."
+    func_stripname '' '.exe' "$func_basename_result"
+    func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+}
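+
+# Illustrative example (not part of upstream libtool): for a hypothetical
+# wrapper executable `./tests/foo.exe' with objdir=.libs,
+# func_ltwrapper_scriptname_result becomes `./tests/.libs/foo_ltshwrapper'.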
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+    func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+    $opt_debug
+    save_ifs=$IFS; IFS='~'
+    for cmd in $1; do
+      IFS=$save_ifs
+      eval cmd=\"$cmd\"
+      func_show_eval "$cmd" "${2-:}"
+    done
+    IFS=$save_ifs
+}
+
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)!  Also, sourcing
+# `FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+    $opt_debug
+    case $1 in
+    */* | *\\*)	. "$1" ;;
+    *)		. "./$1" ;;
+    esac
+}
+
+
+# func_resolve_sysroot PATH
+# Replace a leading = in PATH with a sysroot.  Store the result into
+# func_resolve_sysroot_result
+func_resolve_sysroot ()
+{
+  func_resolve_sysroot_result=$1
+  case $func_resolve_sysroot_result in
+  =*)
+    func_stripname '=' '' "$func_resolve_sysroot_result"
+    func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
+    ;;
+  esac
+}
+
+# func_replace_sysroot PATH
+# If PATH begins with the sysroot, replace it with = and
+# store the result into func_replace_sysroot_result.
+func_replace_sysroot ()
+{
+  case "$lt_sysroot:$1" in
+  ?*:"$lt_sysroot"*)
+    func_stripname "$lt_sysroot" '' "$1"
+    func_replace_sysroot_result="=$func_stripname_result"
+    ;;
+  *)
+    # Including no sysroot.
+    func_replace_sysroot_result=$1
+    ;;
+  esac
+}
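+
+# Illustrative example (not part of upstream libtool), assuming a
+# hypothetical lt_sysroot=/opt/sysroot:
+#
+#   func_resolve_sysroot '=/usr/lib/libfoo.la'
+#   # func_resolve_sysroot_result = /opt/sysroot/usr/lib/libfoo.la
+#   func_replace_sysroot '/opt/sysroot/usr/lib/libfoo.la'
+#   # func_replace_sysroot_result = =/usr/lib/libfoo.la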
+
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+    $opt_debug
+    if test -n "$available_tags" && test -z "$tagname"; then
+      CC_quoted=
+      for arg in $CC; do
+	func_append_quoted CC_quoted "$arg"
+      done
+      CC_expanded=`func_echo_all $CC`
+      CC_quoted_expanded=`func_echo_all $CC_quoted`
+      case $@ in
+      # Blanks in the command may have been stripped by the calling shell,
+      # but not from the CC environment variable when configure was run.
+      " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+      " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+      # Blanks at the start of $base_compile will cause this to fail
+      # if we don't check for them as well.
+      *)
+	for z in $available_tags; do
+	  if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+	    # Evaluate the configuration.
+	    eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+	    CC_quoted=
+	    for arg in $CC; do
+	      # Double-quote args containing other shell metacharacters.
+	      func_append_quoted CC_quoted "$arg"
+	    done
+	    CC_expanded=`func_echo_all $CC`
+	    CC_quoted_expanded=`func_echo_all $CC_quoted`
+	    case "$@ " in
+	    " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+	    " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+	      # The compiler in the base compile command matches
+	      # the one in the tagged configuration.
+	      # Assume this is the tagged configuration we want.
+	      tagname=$z
+	      break
+	      ;;
+	    esac
+	  fi
+	done
+	# If $tagname still isn't set, then no tagged configuration
+	# was found; let the user know that the "--tag" command
+	# line option must be used.
+	if test -z "$tagname"; then
+	  func_echo "unable to infer tagged configuration"
+	  func_fatal_error "specify a tag with \`--tag'"
+#	else
+#	  func_verbose "using $tagname tagged configuration"
+	fi
+	;;
+      esac
+    fi
+}
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+    write_libobj=${1}
+    if test "$build_libtool_libs" = yes; then
+      write_lobj=\'${2}\'
+    else
+      write_lobj=none
+    fi
+
+    if test "$build_old_libs" = yes; then
+      write_oldobj=\'${3}\'
+    else
+      write_oldobj=none
+    fi
+
+    $opt_dry_run || {
+      cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+      $MV "${write_libobj}T" "${write_libobj}"
+    }
+}
+
+
+##################################################
+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
+##################################################
+
+# func_convert_core_file_wine_to_w32 ARG
+# Helper function used by file name conversion functions when $build is *nix,
+# and $host is mingw, cygwin, or some other w32 environment. Relies on a
+# correctly configured wine environment available, with the winepath program
+# in $build's $PATH.
+#
+# ARG is the $build file name to be converted to w32 format.
+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
+# be empty on error (or when ARG is empty)
+func_convert_core_file_wine_to_w32 ()
+{
+  $opt_debug
+  func_convert_core_file_wine_to_w32_result="$1"
+  if test -n "$1"; then
+    # Unfortunately, winepath does not exit with a non-zero error code, so we
+    # are forced to check the contents of stdout. On the other hand, if the
+    # command is not found, the shell will set an exit code of 127 and print
+    # *an error message* to stdout. So we must check for both error code of
+    # zero AND non-empty stdout, which explains the odd construction:
+    func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
+    if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
+      func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
+        $SED -e "$lt_sed_naive_backslashify"`
+    else
+      func_convert_core_file_wine_to_w32_result=
+    fi
+  fi
+}
+# end: func_convert_core_file_wine_to_w32
+
+
+# func_convert_core_path_wine_to_w32 ARG
+# Helper function used by path conversion functions when $build is *nix, and
+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
+# configured wine environment available, with the winepath program in $build's
+# $PATH. Assumes ARG has no leading or trailing path separator characters.
+#
+# ARG is path to be converted from $build format to win32.
+# Result is available in $func_convert_core_path_wine_to_w32_result.
+# Unconvertible file (directory) names in ARG are skipped; if no directory names
+# are convertible, then the result may be empty.
+func_convert_core_path_wine_to_w32 ()
+{
+  $opt_debug
+  # unfortunately, winepath doesn't convert paths, only file names
+  func_convert_core_path_wine_to_w32_result=""
+  if test -n "$1"; then
+    oldIFS=$IFS
+    IFS=:
+    for func_convert_core_path_wine_to_w32_f in $1; do
+      IFS=$oldIFS
+      func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
+      if test -n "$func_convert_core_file_wine_to_w32_result" ; then
+        if test -z "$func_convert_core_path_wine_to_w32_result"; then
+          func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
+        else
+          func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
+        fi
+      fi
+    done
+    IFS=$oldIFS
+  fi
+}
+# end: func_convert_core_path_wine_to_w32
+
+
+# func_cygpath ARGS...
+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used
+# when (1) $build is *nix and Cygwin is hosted via a wine environment; (2)
+# $build is MSYS and $host is Cygwin; or (3) $build is Cygwin. In case (1) or
+# (2), returns the Cygwin file name or path in func_cygpath_result (input
+# file name or path is assumed to be in w32 format, as previously converted
+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
+# or path in func_cygpath_result (input file name or path is assumed to be in
+# Cygwin format). Returns an empty string on error.
+#
+# ARGS are passed to cygpath, with the last one being the file name or path to
+# be converted.
+#
+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
+# environment variable; do not put it in $PATH.
+func_cygpath ()
+{
+  $opt_debug
+  if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
+    func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
+    if test "$?" -ne 0; then
+      # on failure, ensure result is empty
+      func_cygpath_result=
+    fi
+  else
+    func_cygpath_result=
+    func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
+  fi
+}
+#end: func_cygpath
+
+
+# func_convert_core_msys_to_w32 ARG
+# Convert file name or path ARG from MSYS format to w32 format.  Return
+# result in func_convert_core_msys_to_w32_result.
+func_convert_core_msys_to_w32 ()
+{
+  $opt_debug
+  # awkward: cmd appends spaces to result
+  func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
+    $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+}
+#end: func_convert_core_msys_to_w32
+
+
+# func_convert_file_check ARG1 ARG2
+# Verify that ARG1 (a file name in $build format) was converted to $host
+# format in ARG2. Otherwise, emit an error message, but continue (resetting
+# func_to_host_file_result to ARG1).
+func_convert_file_check ()
+{
+  $opt_debug
+  if test -z "$2" && test -n "$1" ; then
+    func_error "Could not determine host file name corresponding to"
+    func_error "  \`$1'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback:
+    func_to_host_file_result="$1"
+  fi
+}
+# end func_convert_file_check
+
+
+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
+# Verify that FROM_PATH (a path in $build format) was converted to $host
+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
+# func_to_host_path_result to a simplistic fallback value (see below).
+func_convert_path_check ()
+{
+  $opt_debug
+  if test -z "$4" && test -n "$3"; then
+    func_error "Could not determine the host path corresponding to"
+    func_error "  \`$3'"
+    func_error "Continuing, but uninstalled executables may not work."
+    # Fallback.  This is a deliberately simplistic "conversion" and
+    # should not be "improved".  See libtool.info.
+    if test "x$1" != "x$2"; then
+      lt_replace_pathsep_chars="s|$1|$2|g"
+      func_to_host_path_result=`echo "$3" |
+        $SED -e "$lt_replace_pathsep_chars"`
+    else
+      func_to_host_path_result="$3"
+    fi
+  fi
+}
+# end func_convert_path_check
+
+
+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
+# and appending REPL if ORIG matches BACKPAT.
+func_convert_path_front_back_pathsep ()
+{
+  $opt_debug
+  case $4 in
+  $1 ) func_to_host_path_result="$3$func_to_host_path_result"
+    ;;
+  esac
+  case $4 in
+  $2 ) func_to_host_path_result+="$3"
+    ;;
+  esac
+}
+# end func_convert_path_front_back_pathsep
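+
+# Illustrative example (not part of upstream libtool): preserve a leading or
+# trailing `:' of the original path as `;' in the converted result:
+#
+#   func_convert_path_front_back_pathsep ":*" "*:" ";" ":/usr/lib:"
+#   # prepends and appends `;' to $func_to_host_path_result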
+
+
+##################################################
+# $build to $host FILE NAME CONVERSION FUNCTIONS #
+##################################################
+# invoked via `$to_host_file_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# Result will be available in $func_to_host_file_result.
+
+
+# func_to_host_file ARG
+# Converts the file name ARG from $build format to $host format. Return result
+# in func_to_host_file_result.
+func_to_host_file ()
+{
+  $opt_debug
+  $to_host_file_cmd "$1"
+}
+# end func_to_host_file
+
+
+# func_to_tool_file ARG LAZY
+# converts the file name ARG from $build format to toolchain format. Return
+# result in func_to_tool_file_result.  If the conversion in use is listed
+# in (the comma separated) LAZY, no conversion takes place.
+func_to_tool_file ()
+{
+  $opt_debug
+  case ,$2, in
+    *,"$to_tool_file_cmd",*)
+      func_to_tool_file_result=$1
+      ;;
+    *)
+      $to_tool_file_cmd "$1"
+      func_to_tool_file_result=$func_to_host_file_result
+      ;;
+  esac
+}
+# end func_to_tool_file
+
+
+# func_convert_file_noop ARG
+# Copy ARG to func_to_host_file_result.
+func_convert_file_noop ()
+{
+  func_to_host_file_result="$1"
+}
+# end func_convert_file_noop
+
+
+# func_convert_file_msys_to_w32 ARG
+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_file_result.
+func_convert_file_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_msys_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_w32
+
+
+# func_convert_file_cygwin_to_w32 ARG
+# Convert file name ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_file_result.
+func_convert_file_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
+    # LT_CYGPATH in this case.
+    func_to_host_file_result=`cygpath -m "$1"`
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_cygwin_to_w32
+
+
+# func_convert_file_nix_to_w32 ARG
+# Convert file name ARG from *nix to w32 format.  Requires a wine environment
+# and a working winepath. Returns result in func_to_host_file_result.
+func_convert_file_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_file_wine_to_w32 "$1"
+    func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_w32
+
+
+# func_convert_file_msys_to_cygwin ARG
+# Convert file name ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_file_result.
+func_convert_file_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    func_convert_core_msys_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_msys_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_cygwin
+
+
+# func_convert_file_nix_to_cygwin ARG
+# Convert file name ARG from *nix to Cygwin format.  Requires Cygwin installed
+# in a wine environment, working winepath, and LT_CYGPATH set.  Returns result
+# in func_to_host_file_result.
+func_convert_file_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_file_result="$1"
+  if test -n "$1"; then
+    # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
+    func_convert_core_file_wine_to_w32 "$1"
+    func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
+    func_to_host_file_result="$func_cygpath_result"
+  fi
+  func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_cygwin
+
+
+#############################################
+# $build to $host PATH CONVERSION FUNCTIONS #
+#############################################
+# invoked via `$to_host_path_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# The result will be available in $func_to_host_path_result.
+#
+# Path separators are also converted from $build format to $host format.  If
+# ARG begins or ends with a path separator character, it is preserved (but
+# converted to $host format) on output.
+#
+# All path conversion functions are named using the following convention:
+#   file name conversion function    : func_convert_file_X_to_Y ()
+#   path conversion function         : func_convert_path_X_to_Y ()
+# where, for any given $build/$host combination the 'X_to_Y' value is the
+# same.  If conversion functions are added for new $build/$host combinations,
+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
+# will break.
+
+
+# func_init_to_host_path_cmd
+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
+# appropriate value, based on the value of $to_host_file_cmd.
+to_host_path_cmd=
+func_init_to_host_path_cmd ()
+{
+  $opt_debug
+  if test -z "$to_host_path_cmd"; then
+    func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
+    to_host_path_cmd="func_convert_path_${func_stripname_result}"
+  fi
+}
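+
+# Illustrative note (not part of upstream libtool): if
+# to_host_file_cmd=func_convert_file_cygwin_to_w32, then
+# func_init_to_host_path_cmd sets
+# to_host_path_cmd=func_convert_path_cygwin_to_w32, which is why the naming
+# convention described above must be followed.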
+
+
+# func_to_host_path ARG
+# Converts the path ARG from $build format to $host format. Return result
+# in func_to_host_path_result.
+func_to_host_path ()
+{
+  $opt_debug
+  func_init_to_host_path_cmd
+  $to_host_path_cmd "$1"
+}
+# end func_to_host_path
+
+
+# func_convert_path_noop ARG
+# Copy ARG to func_to_host_path_result.
+func_convert_path_noop ()
+{
+  func_to_host_path_result="$1"
+}
+# end func_convert_path_noop
+
+
+# func_convert_path_msys_to_w32 ARG
+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper.  Returns result in
+# func_to_host_path_result.
+func_convert_path_msys_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from ARG.  MSYS
+    # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
+    # and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_msys_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_msys_to_w32
+
+
+# func_convert_path_cygwin_to_w32 ARG
+# Convert path ARG from Cygwin to w32 format.  Returns result in
+# func_to_host_path_result.
+func_convert_path_cygwin_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_cygwin_to_w32
+
+
+# func_convert_path_nix_to_w32 ARG
+# Convert path ARG from *nix to w32 format.  Requires a wine environment and
+# a working winepath.  Returns result in func_to_host_path_result.
+func_convert_path_nix_to_w32 ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
+    func_convert_path_check : ";" \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+  fi
+}
+# end func_convert_path_nix_to_w32
+
+
+# func_convert_path_msys_to_cygwin ARG
+# Convert path ARG from MSYS to Cygwin format.  Requires LT_CYGPATH set.
+# Returns result in func_to_host_path_result.
+func_convert_path_msys_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # See func_convert_path_msys_to_w32:
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_msys_to_cygwin
+
+
+# func_convert_path_nix_to_cygwin ARG
+# Convert path ARG from *nix to Cygwin format.  Requires Cygwin installed in
+# a wine environment, a working winepath, and LT_CYGPATH set.  Returns result
+# in func_to_host_path_result.
+func_convert_path_nix_to_cygwin ()
+{
+  $opt_debug
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    # Remove leading and trailing path separator characters from ARG.
+    # MSYS behavior is inconsistent here; cygpath turns them into '.;'
+    # and ';.'; and winepath ignores them completely.
+    func_stripname : : "$1"
+    func_to_host_path_tmp1=$func_stripname_result
+    func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+    func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
+    func_to_host_path_result="$func_cygpath_result"
+    func_convert_path_check : : \
+      "$func_to_host_path_tmp1" "$func_to_host_path_result"
+    func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+  fi
+}
+# end func_convert_path_nix_to_cygwin
+
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+    $opt_debug
+    # Get the compilation command and the source file.
+    base_compile=
+    srcfile="$nonopt"  #  always keep a non-empty value in "srcfile"
+    suppress_opt=yes
+    suppress_output=
+    arg_mode=normal
+    libobj=
+    later=
+    pie_flag=
+
+    for arg
+    do
+      case $arg_mode in
+      arg  )
+	# do not "continue".  Instead, add this to base_compile
+	lastarg="$arg"
+	arg_mode=normal
+	;;
+
+      target )
+	libobj="$arg"
+	arg_mode=normal
+	continue
+	;;
+
+      normal )
+	# Accept any command-line options.
+	case $arg in
+	-o)
+	  test -n "$libobj" && \
+	    func_fatal_error "you cannot specify \`-o' more than once"
+	  arg_mode=target
+	  continue
+	  ;;
+
+	-pie | -fpie | -fPIE)
+          pie_flag+=" $arg"
+	  continue
+	  ;;
+
+	-shared | -static | -prefer-pic | -prefer-non-pic)
+	  later+=" $arg"
+	  continue
+	  ;;
+
+	-no-suppress)
+	  suppress_opt=no
+	  continue
+	  ;;
+
+	-Xcompiler)
+	  arg_mode=arg  #  the next one goes into the "base_compile" arg list
+	  continue      #  The current "srcfile" will either be retained or
+	  ;;            #  replaced later.  I would guess that would be a bug.
+
+	-Wc,*)
+	  func_stripname '-Wc,' '' "$arg"
+	  args=$func_stripname_result
+	  lastarg=
+	  save_ifs="$IFS"; IFS=','
+	  for arg in $args; do
+	    IFS="$save_ifs"
+	    func_append_quoted lastarg "$arg"
+	  done
+	  IFS="$save_ifs"
+	  func_stripname ' ' '' "$lastarg"
+	  lastarg=$func_stripname_result
+
+	  # Add the arguments to base_compile.
+	  base_compile+=" $lastarg"
+	  continue
+	  ;;
+
+	*)
+	  # Accept the current argument as the source file.
+	  # The previous "srcfile" becomes the current argument.
+	  #
+	  lastarg="$srcfile"
+	  srcfile="$arg"
+	  ;;
+	esac  #  case $arg
+	;;
+      esac    #  case $arg_mode
+
+      # Aesthetically quote the previous argument.
+      func_append_quoted base_compile "$lastarg"
+    done # for arg
+
+    case $arg_mode in
+    arg)
+      func_fatal_error "you must specify an argument for -Xcompile"
+      ;;
+    target)
+      func_fatal_error "you must specify a target with \`-o'"
+      ;;
+    *)
+      # Get the name of the library object.
+      test -z "$libobj" && {
+	func_basename "$srcfile"
+	libobj="$func_basename_result"
+      }
+      ;;
+    esac
+
+    # Recognize several different file suffixes.
+    # If the user specifies -o file.o, it is replaced with file.lo
+    case $libobj in
+    *.[cCFSifmso] | \
+    *.ada | *.adb | *.ads | *.asm | \
+    *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+    *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
+      func_xform "$libobj"
+      libobj=$func_xform_result
+      ;;
+    esac
+
+    case $libobj in
+    *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+    *)
+      func_fatal_error "cannot determine name of library object from \`$libobj'"
+      ;;
+    esac
+
+    func_infer_tag $base_compile
+
+    for arg in $later; do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	continue
+	;;
+
+      -static)
+	build_libtool_libs=no
+	build_old_libs=yes
+	continue
+	;;
+
+      -prefer-pic)
+	pic_mode=yes
+	continue
+	;;
+
+      -prefer-non-pic)
+	pic_mode=no
+	continue
+	;;
+      esac
+    done
+
+    func_quote_for_eval "$libobj"
+    test "X$libobj" != "X$func_quote_for_eval_result" \
+      && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"'	 &()|`$[]' \
+      && func_warning "libobj name \`$libobj' may not contain shell special characters."
+    func_dirname_and_basename "$obj" "/" ""
+    objname="$func_basename_result"
+    xdir="$func_dirname_result"
+    lobj=${xdir}$objdir/$objname
+
+    test -z "$base_compile" && \
+      func_fatal_help "you must specify a compilation command"
+
+    # Delete any leftover library objects.
+    if test "$build_old_libs" = yes; then
+      removelist="$obj $lobj $libobj ${libobj}T"
+    else
+      removelist="$lobj $libobj ${libobj}T"
+    fi
+
+    # On Cygwin there's no "real" PIC flag so we must build both object types
+    case $host_os in
+    cygwin* | mingw* | pw32* | os2* | cegcc*)
+      pic_mode=default
+      ;;
+    esac
+    if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+      # non-PIC code in shared libraries is not supported
+      pic_mode=default
+    fi
+
+    # Calculate the filename of the output object if compiler does
+    # not support -o with -c
+    if test "$compiler_c_o" = no; then
+      output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext}
+      lockfile="$output_obj.lock"
+    else
+      output_obj=
+      need_locks=no
+      lockfile=
+    fi
+
+    # Lock this critical section if it is needed
+    # We use this script file to make the link, it avoids creating a new file
+    if test "$need_locks" = yes; then
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    elif test "$need_locks" = warn; then
+      if test -f "$lockfile"; then
+	$ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+      removelist+=" $output_obj"
+      $ECHO "$srcfile" > "$lockfile"
+    fi
+
+    $opt_dry_run || $RM $removelist
+    removelist+=" $lockfile"
+    trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+    func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+    srcfile=$func_to_tool_file_result
+    func_quote_for_eval "$srcfile"
+    qsrcfile=$func_quote_for_eval_result
+
+    # Only build a PIC object if we are building libtool libraries.
+    if test "$build_libtool_libs" = yes; then
+      # Without this assignment, base_compile gets emptied.
+      fbsd_hideous_sh_bug=$base_compile
+
+      if test "$pic_mode" != no; then
+	command="$base_compile $qsrcfile $pic_flag"
+      else
+	# Don't build PIC code
+	command="$base_compile $qsrcfile"
+      fi
+
+      func_mkdir_p "$xdir$objdir"
+
+      if test -z "$output_obj"; then
+	# Place PIC objects in $objdir
+	command+=" -o $lobj"
+      fi
+
+      func_show_eval_locale "$command"	\
+          'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed, then go on to compile the next one
+      if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+	func_show_eval '$MV "$output_obj" "$lobj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+
+      # Allow error messages only from the first compilation.
+      if test "$suppress_opt" = yes; then
+	suppress_output=' >/dev/null 2>&1'
+      fi
+    fi
+
+    # Only build a position-dependent object if we build old libraries.
+    if test "$build_old_libs" = yes; then
+      if test "$pic_mode" != yes; then
+	# Don't build PIC code
+	command="$base_compile $qsrcfile$pie_flag"
+      else
+	command="$base_compile $qsrcfile $pic_flag"
+      fi
+      if test "$compiler_c_o" = yes; then
+	command+=" -o $obj"
+      fi
+
+      # Suppress compiler output if we already did a PIC compilation.
+      command+="$suppress_output"
+      func_show_eval_locale "$command" \
+        '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed
+      if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+	func_show_eval '$MV "$output_obj" "$obj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+    fi
+
+    $opt_dry_run || {
+      func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+      # Unlock the critical section if it was locked
+      if test "$need_locks" != no; then
+	removelist=$lockfile
+        $RM "$lockfile"
+      fi
+    }
+
+    exit $EXIT_SUCCESS
+}
+
+$opt_help || {
+  test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
+}
+
+func_mode_help ()
+{
+    # We need to display help for each of the modes.
+    case $opt_mode in
+      "")
+        # Generic help is extracted from the usage comments
+        # at the start of this file.
+        func_help
+        ;;
+
+      clean)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      compile)
+      $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+  -o OUTPUT-FILE    set the output file name to OUTPUT-FILE
+  -no-suppress      do not suppress compiler output for multiple passes
+  -prefer-pic       try to build PIC objects only
+  -prefer-non-pic   try to build non-PIC objects only
+  -shared           do not build a \`.o' file suitable for static linking
+  -static           only build a \`.o' file suitable for static linking
+  -Wc,FLAG          pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a \`standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix \`.c' with the
+library object suffix, \`.lo'."
+        ;;
+
+      execute)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+  -dlopen FILE      add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to \`-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+        ;;
+
+      finish)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges.  Use
+the \`--dry-run' option if you just want to see what would be executed."
+        ;;
+
+      install)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command.  The first component should be
+either the \`install' or \`cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+  -inst-prefix-dir PREFIX-DIR  Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+        ;;
+
+      link)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+  -all-static       do not do any dynamic linking at all
+  -avoid-version    do not add a version suffix if possible
+  -bindir BINDIR    specify path to binaries directory (for systems where
+                    libraries must be found in the PATH setting at runtime)
+  -dlopen FILE      \`-dlpreopen' FILE if it cannot be dlopened at runtime
+  -dlpreopen FILE   link in FILE and add its symbols to lt_preloaded_symbols
+  -export-dynamic   allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+  -export-symbols SYMFILE
+                    try to export only the symbols listed in SYMFILE
+  -export-symbols-regex REGEX
+                    try to export only the symbols matching REGEX
+  -LLIBDIR          search LIBDIR for required installed libraries
+  -lNAME            OUTPUT-FILE requires the installed library libNAME
+  -module           build a library that can be dlopened
+  -no-fast-install  disable the fast-install mode
+  -no-install       link a non-installable executable
+  -no-undefined     declare that a library does not refer to external symbols
+  -o OUTPUT-FILE    create OUTPUT-FILE from the specified objects
+  -objectlist FILE  Use a list of object files found in FILE to specify objects
+  -precious-files-regex REGEX
+                    don't remove output files matching REGEX
+  -release RELEASE  specify package release information
+  -rpath LIBDIR     the created library will eventually be installed in LIBDIR
+  -R[ ]LIBDIR       add LIBDIR to the runtime path of programs and libraries
+  -shared           only do dynamic linking of libtool libraries
+  -shrext SUFFIX    override the standard shared library file extension
+  -static           do not do any dynamic linking of uninstalled libtool libraries
+  -static-libtool-libs
+                    do not do any dynamic linking of libtool libraries
+  -version-info CURRENT[:REVISION[:AGE]]
+                    specify library version info [each variable defaults to 0]
+  -weak LIBNAME     declare that the target provides the LIBNAME interface
+  -Wc,FLAG
+  -Xcompiler FLAG   pass linker-specific FLAG directly to the compiler
+  -Wl,FLAG
+  -Xlinker FLAG     pass linker-specific FLAG directly to the linker
+  -XCClinker FLAG   pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with \`-') are ignored.
+
+Every other argument is treated as a filename.  Files ending in \`.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
+only library objects (\`.lo' files) may be specified, and \`-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
+using \`ar' and \`ranlib', or on Windows using \`lib'.
+
+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
+is created, otherwise an executable program is created."
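+        # Illustrative only (hypothetical objects and paths):
+        #   libtool --mode=link gcc -o libhello.la foo.lo bar.lo \
+        #       -rpath /usr/local/lib -version-info 1:0:0
+        # Because OUTPUT-FILE ends in `.la' and `-rpath' is given, a libtool
+        # library is built from the listed `.lo' objects.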
+        ;;
+
+      uninstall)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
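+        # Illustrative only (hypothetical path):
+        #   libtool --mode=uninstall rm -f /usr/local/lib/libhello.la
+        # removes the `.la' file along with the shared and static libraries
+        # it describes.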
+        ;;
+
+      *)
+        func_fatal_help "invalid operation mode \`$opt_mode'"
+        ;;
+    esac
+
+    echo
+    $ECHO "Try \`$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+  if test "$opt_help" = :; then
+    func_mode_help
+  else
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	func_mode_help
+      done
+    } | sed -n '1p; 2,$s/^Usage:/  or: /p'
+    {
+      func_help noexit
+      for opt_mode in compile link execute install finish uninstall clean; do
+	echo
+	func_mode_help
+      done
+    } |
+    sed '1d
+      /^When reporting/,/^Report/{
+	H
+	d
+      }
+      $x
+      /information about other modes/d
+      /more detailed .*MODE/d
+      s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+  fi
+  exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+    $opt_debug
+    # The first argument is the command name.
+    cmd="$nonopt"
+    test -z "$cmd" && \
+      func_fatal_help "you must specify a COMMAND"
+
+    # Handle -dlopen flags immediately.
+    for file in $opt_dlopen; do
+      test -f "$file" \
+	|| func_fatal_help "\`$file' is not a file"
+
+      dir=
+      case $file in
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$lib' is not a valid libtool archive"
+
+	# Read the libtool library.
+	dlname=
+	library_names=
+	func_source "$file"
+
+	# Skip this library if it cannot be dlopened.
+	if test -z "$dlname"; then
+	  # Warn if it was a shared library.
+	  test -n "$library_names" && \
+	    func_warning "\`$file' was not linked with \`-export-dynamic'"
+	  continue
+	fi
+
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+
+	if test -f "$dir/$objdir/$dlname"; then
+	  dir+="/$objdir"
+	else
+	  if test ! -f "$dir/$dlname"; then
+	    func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+	  fi
+	fi
+	;;
+
+      *.lo)
+	# Just add the directory containing the .lo file.
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+	;;
+
+      *)
+	func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
+	continue
+	;;
+      esac
+
+      # Get the absolute pathname.
+      absdir=`cd "$dir" && pwd`
+      test -n "$absdir" && dir="$absdir"
+
+      # Now add the directory to shlibpath_var.
+      if eval "test -z \"\$$shlibpath_var\""; then
+	eval "$shlibpath_var=\"\$dir\""
+      else
+	eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+      fi
+    done
+
+    # This variable tells wrapper scripts just to set shlibpath_var
+    # rather than running their programs.
+    libtool_execute_magic="$magic"
+
+    # Check if any of the arguments is a wrapper script.
+    args=
+    for file
+    do
+      case $file in
+      -* | *.la | *.lo ) ;;
+      *)
+	# Do a test to see if this is really a libtool program.
+	if func_ltwrapper_script_p "$file"; then
+	  func_source "$file"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	elif func_ltwrapper_executable_p "$file"; then
+	  func_ltwrapper_scriptname "$file"
+	  func_source "$func_ltwrapper_scriptname_result"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	fi
+	;;
+      esac
+      # Quote arguments (to preserve shell metacharacters).
+      func_append_quoted args "$file"
+    done
+
+    if test "X$opt_dry_run" = Xfalse; then
+      if test -n "$shlibpath_var"; then
+	# Export the shlibpath_var.
+	eval "export $shlibpath_var"
+      fi
+
+      # Restore saved environment variables
+      for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+      do
+	eval "if test \"\${save_$lt_var+set}\" = set; then
+                $lt_var=\$save_$lt_var; export $lt_var
+	      else
+		$lt_unset $lt_var
+	      fi"
+      done
+
+      # Now prepare to actually exec the command.
+      exec_cmd="\$cmd$args"
+    else
+      # Display what would be done.
+      if test -n "$shlibpath_var"; then
+	eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+	echo "export $shlibpath_var"
+      fi
+      $ECHO "$cmd$args"
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
+
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+    $opt_debug
+    libs=
+    libdirs=
+    admincmds=
+
+    for opt in "$nonopt" ${1+"$@"}
+    do
+      if test -d "$opt"; then
+	libdirs+=" $opt"
+
+      elif test -f "$opt"; then
+	if func_lalib_unsafe_p "$opt"; then
+	  libs+=" $opt"
+	else
+	  func_warning "\`$opt' is not a valid libtool archive"
+	fi
+
+      else
+	func_fatal_error "invalid argument \`$opt'"
+      fi
+    done
+
+    if test -n "$libs"; then
+      if test -n "$lt_sysroot"; then
+        sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
+        sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
+      else
+        sysroot_cmd=
+      fi
+
+      # Remove sysroot references
+      if $opt_dry_run; then
+        for lib in $libs; do
+          echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
+        done
+      else
+        tmpdir=`func_mktempdir`
+        for lib in $libs; do
+	  sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
+	    > $tmpdir/tmp-la
+	  mv -f $tmpdir/tmp-la $lib
+	done
+        ${RM}r "$tmpdir"
+      fi
+    fi
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      for libdir in $libdirs; do
+	if test -n "$finish_cmds"; then
+	  # Do each command in the finish commands.
+	  func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+	fi
+	if test -n "$finish_eval"; then
+	  # Do the single finish_eval.
+	  eval cmds=\"$finish_eval\"
+	  $opt_dry_run || eval "$cmds" || admincmds+="
+       $cmds"
+	fi
+      done
+    fi
+
+    # Exit here if they wanted silent mode.
+    $opt_silent && exit $EXIT_SUCCESS
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      echo "----------------------------------------------------------------------"
+      echo "Libraries have been installed in:"
+      for libdir in $libdirs; do
+	$ECHO "   $libdir"
+      done
+      echo
+      echo "If you ever happen to want to link against installed libraries"
+      echo "in a given directory, LIBDIR, you must either use libtool, and"
+      echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+      echo "flag during linking and do at least one of the following:"
+      if test -n "$shlibpath_var"; then
+	echo "   - add LIBDIR to the \`$shlibpath_var' environment variable"
+	echo "     during execution"
+      fi
+      if test -n "$runpath_var"; then
+	echo "   - add LIBDIR to the \`$runpath_var' environment variable"
+	echo "     during linking"
+      fi
+      if test -n "$hardcode_libdir_flag_spec"; then
+	libdir=LIBDIR
+	eval flag=\"$hardcode_libdir_flag_spec\"
+
+	$ECHO "   - use the \`$flag' linker flag"
+      fi
+      if test -n "$admincmds"; then
+	$ECHO "   - have your system administrator run these commands:$admincmds"
+      fi
+      if test -f /etc/ld.so.conf; then
+	echo "   - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+      fi
+      echo
+
+      echo "See any operating system documentation about shared libraries for"
+      case $host in
+	solaris2.[6789]|solaris2.1[0-9])
+	  echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+	  echo "pages."
+	  ;;
+	*)
+	  echo "more information, such as the ld(1) and ld.so(8) manual pages."
+	  ;;
+      esac
+      echo "----------------------------------------------------------------------"
+    fi
+    exit $EXIT_SUCCESS
+}
+
+test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
+
+
+# func_mode_install arg...
+func_mode_install ()
+{
+    $opt_debug
+    # There may be an optional sh(1) argument at the beginning of
+    # install_prog (especially on Windows NT).
+    if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
+       # Allow the use of GNU shtool's install command.
+       case $nonopt in *shtool*) :;; *) false;; esac; then
+      # Aesthetically quote it.
+      func_quote_for_eval "$nonopt"
+      install_prog="$func_quote_for_eval_result "
+      arg=$1
+      shift
+    else
+      install_prog=
+      arg=$nonopt
+    fi
+
+    # The real first argument should be the name of the installation program.
+    # Aesthetically quote it.
+    func_quote_for_eval "$arg"
+    install_prog+="$func_quote_for_eval_result"
+    install_shared_prog=$install_prog
+    case " $install_prog " in
+      *[\\\ /]cp\ *) install_cp=: ;;
+      *) install_cp=false ;;
+    esac
+
+    # We need to accept at least all the BSD install flags.
+    dest=
+    files=
+    opts=
+    prev=
+    install_type=
+    isdir=no
+    stripme=
+    no_mode=:
+    for arg
+    do
+      arg2=
+      if test -n "$dest"; then
+	files+=" $dest"
+	dest=$arg
+	continue
+      fi
+
+      case $arg in
+      -d) isdir=yes ;;
+      -f)
+	if $install_cp; then :; else
+	  prev=$arg
+	fi
+	;;
+      -g | -m | -o)
+	prev=$arg
+	;;
+      -s)
+	stripme=" -s"
+	continue
+	;;
+      -*)
+	;;
+      *)
+	# If the previous option needed an argument, then skip it.
+	if test -n "$prev"; then
+	  if test "x$prev" = x-m && test -n "$install_override_mode"; then
+	    arg2=$install_override_mode
+	    no_mode=false
+	  fi
+	  prev=
+	else
+	  dest=$arg
+	  continue
+	fi
+	;;
+      esac
+
+      # Aesthetically quote the argument.
+      func_quote_for_eval "$arg"
+      install_prog+=" $func_quote_for_eval_result"
+      if test -n "$arg2"; then
+	func_quote_for_eval "$arg2"
+      fi
+      install_shared_prog+=" $func_quote_for_eval_result"
+    done
+
+    test -z "$install_prog" && \
+      func_fatal_help "you must specify an install program"
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prev' option requires an argument"
+
+    if test -n "$install_override_mode" && $no_mode; then
+      if $install_cp; then :; else
+	func_quote_for_eval "$install_override_mode"
+	install_shared_prog+=" -m $func_quote_for_eval_result"
+      fi
+    fi
+
+    if test -z "$files"; then
+      if test -z "$dest"; then
+	func_fatal_help "no file or destination specified"
+      else
+	func_fatal_help "you must specify a destination"
+      fi
+    fi
+
+    # Strip any trailing slash from the destination.
+    func_stripname '' '/' "$dest"
+    dest=$func_stripname_result
+
+    # Check to see that the destination is a directory.
+    test -d "$dest" && isdir=yes
+    if test "$isdir" = yes; then
+      destdir="$dest"
+      destname=
+    else
+      func_dirname_and_basename "$dest" "" "."
+      destdir="$func_dirname_result"
+      destname="$func_basename_result"
+
+      # Not a directory, so check to see that there is only one file specified.
+      set dummy $files; shift
+      test "$#" -gt 1 && \
+	func_fatal_help "\`$dest' is not a directory"
+    fi
+    case $destdir in
+    [\\/]* | [A-Za-z]:[\\/]*) ;;
+    *)
+      for file in $files; do
+	case $file in
+	*.lo) ;;
+	*)
+	  func_fatal_help "\`$destdir' must be an absolute directory name"
+	  ;;
+	esac
+      done
+      ;;
+    esac
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    staticlibs=
+    future_libdirs=
+    current_libdirs=
+    for file in $files; do
+
+      # Do each installation.
+      case $file in
+      *.$libext)
+	# Do the static libraries later.
+	staticlibs+=" $file"
+	;;
+
+      *.la)
+	func_resolve_sysroot "$file"
+	file=$func_resolve_sysroot_result
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$file' is not a valid libtool archive"
+
+	library_names=
+	old_library=
+	relink_command=
+	func_source "$file"
+
+	# Add the libdir to current_libdirs if it is the destination.
+	if test "X$destdir" = "X$libdir"; then
+	  case "$current_libdirs " in
+	  *" $libdir "*) ;;
+	  *) current_libdirs+=" $libdir" ;;
+	  esac
+	else
+	  # Note the libdir as a future libdir.
+	  case "$future_libdirs " in
+	  *" $libdir "*) ;;
+	  *) future_libdirs+=" $libdir" ;;
+	  esac
+	fi
+
+	func_dirname "$file" "/" ""
+	dir="$func_dirname_result"
+	dir+="$objdir"
+
+	if test -n "$relink_command"; then
+	  # Determine the prefix the user has applied to our future dir.
+	  inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+	  # Don't allow the user to place us outside of our expected
+	  # location b/c this prevents finding dependent libraries that
+	  # are installed to the same prefix.
+	  # At present, this check doesn't affect windows .dll's that
+	  # are installed into $libdir/../bin (currently, that works fine)
+	  # but it's something to keep an eye on.
+	  test "$inst_prefix_dir" = "$destdir" && \
+	    func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
+
+	  if test -n "$inst_prefix_dir"; then
+	    # Stick the inst_prefix_dir data into the link command.
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+	  else
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+	  fi
+
+	  func_warning "relinking \`$file'"
+	  func_show_eval "$relink_command" \
+	    'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
+	fi
+
+	# See the names of the shared library.
+	set dummy $library_names; shift
+	if test -n "$1"; then
+	  realname="$1"
+	  shift
+
+	  srcname="$realname"
+	  test -n "$relink_command" && srcname="$realname"T
+
+	  # Install the shared library and build the symlinks.
+	  func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+	      'exit $?'
+	  tstripme="$stripme"
+	  case $host_os in
+	  cygwin* | mingw* | pw32* | cegcc*)
+	    case $realname in
+	    *.dll.a)
+	      tstripme=""
+	      ;;
+	    esac
+	    ;;
+	  esac
+	  if test -n "$tstripme" && test -n "$striplib"; then
+	    func_show_eval "$striplib $destdir/$realname" 'exit $?'
+	  fi
+
+	  if test "$#" -gt 0; then
+	    # Delete the old symlinks, and create new ones.
+	    # Try `ln -sf' first, because the `ln' binary might depend on
+	    # the symlink we replace!  Solaris /bin/ln does not understand -f,
+	    # so we also need to try rm && ln -s.
+	    for linkname
+	    do
+	      test "$linkname" != "$realname" \
+		&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+	    done
+	  fi
+
+	  # Do each command in the postinstall commands.
+	  lib="$destdir/$realname"
+	  func_execute_cmds "$postinstall_cmds" 'exit $?'
+	fi
+
+	# Install the pseudo-library for information purposes.
+	func_basename "$file"
+	name="$func_basename_result"
+	instname="$dir/$name"i
+	func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+	# Maybe install the static library, too.
+	test -n "$old_library" && staticlibs+=" $dir/$old_library"
+	;;
+
+      *.lo)
+	# Install (i.e. copy) a libtool object.
+
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# Deduce the name of the destination old-style object file.
+	case $destfile in
+	*.lo)
+	  func_lo2o "$destfile"
+	  staticdest=$func_lo2o_result
+	  ;;
+	*.$objext)
+	  staticdest="$destfile"
+	  destfile=
+	  ;;
+	*)
+	  func_fatal_help "cannot copy a libtool object to \`$destfile'"
+	  ;;
+	esac
+
+	# Install the libtool object if requested.
+	test -n "$destfile" && \
+	  func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+	# Install the old object if enabled.
+	if test "$build_old_libs" = yes; then
+	  # Deduce the name of the old-style object file.
+	  func_lo2o "$file"
+	  staticobj=$func_lo2o_result
+	  func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+	fi
+	exit $EXIT_SUCCESS
+	;;
+
+      *)
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# If the file is missing, and there is a .exe on the end, strip it
+	# because it is most likely a libtool script we actually want to
+	# install
+	stripped_ext=""
+	case $file in
+	  *.exe)
+	    if test ! -f "$file"; then
+	      func_stripname '' '.exe' "$file"
+	      file=$func_stripname_result
+	      stripped_ext=".exe"
+	    fi
+	    ;;
+	esac
+
+	# Do a test to see if this is really a libtool program.
+	case $host in
+	*cygwin* | *mingw*)
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      wrapper=$func_ltwrapper_scriptname_result
+	    else
+	      func_stripname '' '.exe' "$file"
+	      wrapper=$func_stripname_result
+	    fi
+	    ;;
+	*)
+	    wrapper=$file
+	    ;;
+	esac
+	if func_ltwrapper_script_p "$wrapper"; then
+	  notinst_deplibs=
+	  relink_command=
+
+	  func_source "$wrapper"
+
+	  # Check the variables that should have been set.
+	  test -z "$generated_by_libtool_version" && \
+	    func_fatal_error "invalid libtool wrapper script \`$wrapper'"
+
+	  finalize=yes
+	  for lib in $notinst_deplibs; do
+	    # Check to see that each library is installed.
+	    libdir=
+	    if test -f "$lib"; then
+	      func_source "$lib"
+	    fi
+	    libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test
+	    if test -n "$libdir" && test ! -f "$libfile"; then
+	      func_warning "\`$lib' has not been installed in \`$libdir'"
+	      finalize=no
+	    fi
+	  done
+
+	  relink_command=
+	  func_source "$wrapper"
+
+	  outputname=
+	  if test "$fast_install" = no && test -n "$relink_command"; then
+	    $opt_dry_run || {
+	      if test "$finalize" = yes; then
+	        tmpdir=`func_mktempdir`
+		func_basename "$file$stripped_ext"
+		file="$func_basename_result"
+	        outputname="$tmpdir/$file"
+	        # Replace the output file specification.
+	        relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+	        $opt_silent || {
+	          func_quote_for_expand "$relink_command"
+		  eval "func_echo $func_quote_for_expand_result"
+	        }
+	        if eval "$relink_command"; then :
+	          else
+		  func_error "error: relink \`$file' with the above command before installing it"
+		  $opt_dry_run || ${RM}r "$tmpdir"
+		  continue
+	        fi
+	        file="$outputname"
+	      else
+	        func_warning "cannot relink \`$file'"
+	      fi
+	    }
+	  else
+	    # Install the binary that we compiled earlier.
+	    file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+	  fi
+	fi
+
+	# remove .exe since cygwin /usr/bin/install will append another
+	# one anyway
+	case $install_prog,$host in
+	*/usr/bin/install*,*cygwin*)
+	  case $file:$destfile in
+	  *.exe:*.exe)
+	    # this is ok
+	    ;;
+	  *.exe:*)
+	    destfile=$destfile.exe
+	    ;;
+	  *:*.exe)
+	    func_stripname '' '.exe' "$destfile"
+	    destfile=$func_stripname_result
+	    ;;
+	  esac
+	  ;;
+	esac
+	func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+	$opt_dry_run || if test -n "$outputname"; then
+	  ${RM}r "$tmpdir"
+	fi
+	;;
+      esac
+    done
+
+    for file in $staticlibs; do
+      func_basename "$file"
+      name="$func_basename_result"
+
+      # Set up the ranlib parameters.
+      oldlib="$destdir/$name"
+      func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+      tool_oldlib=$func_to_tool_file_result
+
+      func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+      if test -n "$stripme" && test -n "$old_striplib"; then
+	func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
+      fi
+
+      # Do each command in the postinstall commands.
+      func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+    done
+
+    test -n "$future_libdirs" && \
+      func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+    if test -n "$current_libdirs"; then
+      # Maybe just do a dry run.
+      $opt_dry_run && current_libdirs=" -n$current_libdirs"
+      exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+    else
+      exit $EXIT_SUCCESS
+    fi
+}
+
+test "$opt_mode" = install && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+    $opt_debug
+    my_outputname="$1"
+    my_originator="$2"
+    my_pic_p="${3-no}"
+    my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
+    my_dlsyms=
+
+    if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+      if test -n "$NM" && test -n "$global_symbol_pipe"; then
+	my_dlsyms="${my_outputname}S.c"
+      else
+	func_error "not configured to extract global symbols from dlpreopened files"
+      fi
+    fi
+
+    if test -n "$my_dlsyms"; then
+      case $my_dlsyms in
+      "") ;;
+      *.c)
+	# Discover the nlist of each of the dlfiles.
+	nlist="$output_objdir/${my_outputname}.nm"
+
+	func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+	# Parse the name list into a source file.
+	func_verbose "creating $output_objdir/$my_dlsyms"
+
+	$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data.  */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* External symbol declarations for the compiler. */\
+"
+
+	if test "$dlself" = yes; then
+	  func_verbose "generating symbol list for \`$output'"
+
+	  $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
+
+	  # Add our own program objects to the symbol list.
+	  progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	  for progfile in $progfiles; do
+	    func_to_tool_file "$progfile" func_convert_file_msys_to_w32
+	    func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
+	    $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+	  done
+
+	  if test -n "$exclude_expsyms"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  if test -n "$export_symbols_regex"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  # Prepare the list of exported symbols
+	  if test -z "$export_symbols"; then
+	    export_symbols="$output_objdir/$outputname.exp"
+	    $opt_dry_run || {
+	      $RM $export_symbols
+	      eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+	      case $host in
+	      *cygwin* | *mingw* | *cegcc* )
+                eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+                eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+	        ;;
+	      esac
+	    }
+	  else
+	    $opt_dry_run || {
+	      eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+	      eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	      case $host in
+	        *cygwin* | *mingw* | *cegcc* )
+	          eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+	          eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+	          ;;
+	      esac
+	    }
+	  fi
+	fi
+
+	for dlprefile in $dlprefiles; do
+	  func_verbose "extracting global C symbols from \`$dlprefile'"
+	  func_basename "$dlprefile"
+	  name="$func_basename_result"
+          case $host in
+	    *cygwin* | *mingw* | *cegcc* )
+	      # if an import library, we need to obtain dlname
+	      if func_win32_import_lib_p "$dlprefile"; then
+	        func_tr_sh "$dlprefile"
+	        eval "curr_lafile=\$libfile_$func_tr_sh_result"
+	        dlprefile_dlbasename=""
+	        if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
+	          # Use subshell, to avoid clobbering current variable values
+	          dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
+	          if test -n "$dlprefile_dlname" ; then
+	            func_basename "$dlprefile_dlname"
+	            dlprefile_dlbasename="$func_basename_result"
+	          else
+	            # no lafile. user explicitly requested -dlpreopen <import library>.
+	            $sharedlib_from_linklib_cmd "$dlprefile"
+	            dlprefile_dlbasename=$sharedlib_from_linklib_result
+	          fi
+	        fi
+	        $opt_dry_run || {
+	          if test -n "$dlprefile_dlbasename" ; then
+	            eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
+	          else
+	            func_warning "Could not compute DLL name from $name"
+	            eval '$ECHO ": $name " >> "$nlist"'
+	          fi
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
+	            $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
+	        }
+	      else # not an import lib
+	        $opt_dry_run || {
+	          eval '$ECHO ": $name " >> "$nlist"'
+	          func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	          eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	        }
+	      fi
+	    ;;
+	    *)
+	      $opt_dry_run || {
+	        eval '$ECHO ": $name " >> "$nlist"'
+	        func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+	        eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	      }
+	    ;;
+          esac
+	done
+
+	$opt_dry_run || {
+	  # Make sure we have at least an empty file.
+	  test -f "$nlist" || : > "$nlist"
+
+	  if test -n "$exclude_expsyms"; then
+	    $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+	    $MV "$nlist"T "$nlist"
+	  fi
+
+	  # Try sorting and uniquifying the output.
+	  if $GREP -v "^: " < "$nlist" |
+	      if sort -k 3 </dev/null >/dev/null 2>&1; then
+		sort -k 3
+	      else
+		sort +2
+	      fi |
+	      uniq > "$nlist"S; then
+	    :
+	  else
+	    $GREP -v "^: " < "$nlist" > "$nlist"S
+	  fi
+
+	  if test -f "$nlist"S; then
+	    eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+	  else
+	    echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+	  fi
+
+	  echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols.  */
+typedef struct {
+  const char *name;
+  void *address;
+} lt_dlsymlist;
+extern LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];
+LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{\
+  { \"$my_originator\", (void *) 0 },"
+
+	  case $need_lib_prefix in
+	  no)
+	    eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  *)
+	    eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  esac
+	  echo >> "$output_objdir/$my_dlsyms" "\
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+	} # !$opt_dry_run
+
+	pic_flag_for_symtable=
+	case "$compile_command " in
+	*" -static "*) ;;
+	*)
+	  case $host in
+	  # compiling the symbol table file with pic_flag works around
+	  # a FreeBSD bug that causes programs to crash when -lm is
+	  # linked before any other PIC object.  But we must not use
+	  # pic_flag when linking with -static.  The problem exists in
+	  # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+	  *-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+	    pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+	  *-*-hpux*)
+	    pic_flag_for_symtable=" $pic_flag"  ;;
+	  *)
+	    if test "X$my_pic_p" != Xno; then
+	      pic_flag_for_symtable=" $pic_flag"
+	    fi
+	    ;;
+	  esac
+	  ;;
+	esac
+	symtab_cflags=
+	for arg in $LTCFLAGS; do
+	  case $arg in
+	  -pie | -fpie | -fPIE) ;;
+	  *) symtab_cflags+=" $arg" ;;
+	  esac
+	done
+
+	# Now compile the dynamic symbol file.
+	func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+	# Clean up the generated files.
+	func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
+
+	# Transform the symbol file into the correct name.
+	symfileobj="$output_objdir/${my_outputname}S.$objext"
+	case $host in
+	*cygwin* | *mingw* | *cegcc* )
+	  if test -f "$output_objdir/$my_outputname.def"; then
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	  else
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  fi
+	  ;;
+	*)
+	  compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  ;;
+	esac
+	;;
+      *)
+	func_fatal_error "unknown suffix for \`$my_dlsyms'"
+	;;
+      esac
+    else
+      # We keep going just in case the user didn't refer to
+      # lt_preloaded_symbols.  The linker will fail if global_symbol_pipe
+      # really was required.
+
+      # Nullify the symbol file.
+      compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+      finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+    fi
+}
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, it also deals with 64-bit binaries.
+func_win32_libid ()
+{
+  $opt_debug
+  win32_libid_type="unknown"
+  win32_fileres=`file -L $1 2>/dev/null`
+  case $win32_fileres in
+  *ar\ archive\ import\ library*) # definitely import
+    win32_libid_type="x86 archive import"
+    ;;
+  *ar\ archive*) # could be an import, or static
+    # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+    if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+       $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+      func_to_tool_file "$1" func_convert_file_msys_to_w32
+      win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+	$SED -n -e '
+	    1,100{
+		/ I /{
+		    s,.*,import,
+		    p
+		    q
+		}
+	    }'`
+      case $win32_nmres in
+      import*)  win32_libid_type="x86 archive import";;
+      *)        win32_libid_type="x86 archive static";;
+      esac
+    fi
+    ;;
+  *DLL*)
+    win32_libid_type="x86 DLL"
+    ;;
+  *executable*) # but shell scripts are "executable" too...
+    case $win32_fileres in
+    *MS\ Windows\ PE\ Intel*)
+      win32_libid_type="x86 DLL"
+      ;;
+    esac
+    ;;
+  esac
+  $ECHO "$win32_libid_type"
+}
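+
+# Illustrative only, assuming a cygwin/mingw host where `file', $NM and
+# $OBJDUMP are available: `func_win32_libid c:/path/libhello.dll.a' prints a
+# classification such as "x86 archive import" for the $file_magic_command
+# caller to match on.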
+
+# func_cygming_dll_for_implib ARG
+#
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib ()
+{
+  $opt_debug
+  sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
+}
+
+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
+#
+# This is the core of a fallback implementation of a
+# platform-specific function to extract the name of the
+# DLL associated with the specified import library LIBNAME.
+#
+# SECTION_NAME is either .idata$6 or .idata$7, depending
+# on the platform and compiler that created the implib.
+#
+# Echoes the name of the DLL associated with the
+# specified import library.
+func_cygming_dll_for_implib_fallback_core ()
+{
+  $opt_debug
+  match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
+  $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
+    $SED '/^Contents of section '"$match_literal"':/{
+      # Place marker at beginning of archive member dllname section
+      s/.*/====MARK====/
+      p
+      d
+    }
+    # These lines can sometimes be longer than 43 characters, but
+    # are always uninteresting
+    /:[	 ]*file format pe[i]\{,1\}-/d
+    /^In archive [^:]*:/d
+    # Ensure marker is printed
+    /^====MARK====/p
+    # Remove all lines with less than 43 characters
+    /^.\{43\}/!d
+    # From remaining lines, remove first 43 characters
+    s/^.\{43\}//' |
+    $SED -n '
+      # Join marker and all lines until next marker into a single line
+      /^====MARK====/ b para
+      H
+      $ b para
+      b
+      :para
+      x
+      s/\n//g
+      # Remove the marker
+      s/^====MARK====//
+      # Remove trailing dots and whitespace
+      s/[\. \t]*$//
+      # Print
+      /./p' |
+    # we now have a list, one entry per line, of the stringified
+    # contents of the appropriate section of all members of the
+    # archive which possess that section. Heuristic: eliminate
+    # all those which have a first or second character that is
+    # a '.' (that is, objdump's representation of an unprintable
+    # character.) This should work for all archives with less than
+    # 0x302f exports -- but will fail for DLLs whose name actually
+    # begins with a literal '.' or a single character followed by
+    # a '.'.
+    #
+    # Of those that remain, print the first one.
+    $SED -e '/^\./d;/^.\./d;q'
+}
+
+# func_cygming_gnu_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is a GNU/binutils-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_gnu_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
+  test -n "$func_cygming_gnu_implib_tmp"
+}
+
+# func_cygming_ms_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is an MS-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_ms_implib_p ()
+{
+  $opt_debug
+  func_to_tool_file "$1" func_convert_file_msys_to_w32
+  func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
+  test -n "$func_cygming_ms_implib_tmp"
+}
+
+# func_cygming_dll_for_implib_fallback ARG
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+#
+# This fallback implementation is for use when $DLLTOOL
+# does not support the --identify-strict option.
+# Invoked by eval'ing the libtool variable
+#    $sharedlib_from_linklib_cmd
+# Result is available in the variable
+#    $sharedlib_from_linklib_result
+func_cygming_dll_for_implib_fallback ()
+{
+  $opt_debug
+  if func_cygming_gnu_implib_p "$1" ; then
+    # binutils import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
+  elif func_cygming_ms_implib_p "$1" ; then
+    # ms-generated import library
+    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
+  else
+    # unknown
+    sharedlib_from_linklib_result=""
+  fi
+}
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+    $opt_debug
+    f_ex_an_ar_dir="$1"; shift
+    f_ex_an_ar_oldlib="$1"
+    if test "$lock_old_archive_extraction" = yes; then
+      lockfile=$f_ex_an_ar_oldlib.lock
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    fi
+    func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+		   'stat=$?; rm -f "$lockfile"; exit $stat'
+    if test "$lock_old_archive_extraction" = yes; then
+      $opt_dry_run || rm -f "$lockfile"
+    fi
+    if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+     :
+    else
+      func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+    fi
+}
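+
+# Illustrative only (hypothetical paths): extract the members of an old
+# archive into a scratch directory, e.g.
+#   func_extract_an_archive "$my_gentop/libhello" "`pwd`/libhello.a"
+# Callers pass an absolute archive path because the extraction cd's into
+# the target directory first.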
+
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+    $opt_debug
+    my_gentop="$1"; shift
+    my_oldlibs=${1+"$@"}
+    my_oldobjs=""
+    my_xlib=""
+    my_xabs=""
+    my_xdir=""
+
+    for my_xlib in $my_oldlibs; do
+      # Extract the objects.
+      case $my_xlib in
+	[\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
+	*) my_xabs=`pwd`"/$my_xlib" ;;
+      esac
+      func_basename "$my_xlib"
+      my_xlib="$func_basename_result"
+      my_xlib_u=$my_xlib
+      while :; do
+        case " $extracted_archives " in
+	*" $my_xlib_u "*)
+	  func_arith $extracted_serial + 1
+	  extracted_serial=$func_arith_result
+	  my_xlib_u=lt$extracted_serial-$my_xlib ;;
+	*) break ;;
+	esac
+      done
+      extracted_archives="$extracted_archives $my_xlib_u"
+      my_xdir="$my_gentop/$my_xlib_u"
+
+      func_mkdir_p "$my_xdir"
+
+      case $host in
+      *-darwin*)
+	func_verbose "Extracting $my_xabs"
+	# Do not bother doing anything if just a dry run
+	$opt_dry_run || {
+	  darwin_orig_dir=`pwd`
+	  cd $my_xdir || exit $?
+	  darwin_archive=$my_xabs
+	  darwin_curdir=`pwd`
+	  darwin_base_archive=`basename "$darwin_archive"`
+	  darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+	  if test -n "$darwin_arches"; then
+	    darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+	    darwin_arch=
+	    func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+	    for darwin_arch in  $darwin_arches ; do
+	      func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
+	      cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      func_extract_an_archive "`pwd`" "${darwin_base_archive}"
+	      cd "$darwin_curdir"
+	      $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
+	    done # $darwin_arches
+            ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+	    darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
+	    darwin_file=
+	    darwin_files=
+	    for darwin_file in $darwin_filelist; do
+	      darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+	      $LIPO -create -output "$darwin_file" $darwin_files
+	    done # $darwin_filelist
+	    $RM -rf unfat-$$
+	    cd "$darwin_orig_dir"
+	  else
+	    cd $darwin_orig_dir
+	    func_extract_an_archive "$my_xdir" "$my_xabs"
+	  fi # $darwin_arches
+	} # !$opt_dry_run
+	;;
+      *)
+        func_extract_an_archive "$my_xdir" "$my_xabs"
+	;;
+      esac
+      my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+    done
+
+    func_extract_archives_result="$my_oldobjs"
+}
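+
+# Illustrative only (hypothetical scratch directory):
+#   func_extract_archives "$my_scratch" /path/to/libfoo.a /path/to/libbar.a
+# afterwards the extracted objects are listed, space-separated, in
+# $func_extract_archives_result.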
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw
+# wrapper executable.  Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take.  If 'yes', then the emitted script
+# will assume that the directory in which it is stored is
+# the $objdir directory.  This is a cygwin/mingw-specific
+# behavior.
+func_emit_wrapper ()
+{
+	func_emit_wrapper_arg1=${1-no}
+
+	$ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+  # install mode needs the following variables:
+  generated_by_libtool_version='$macro_version'
+  notinst_deplibs='$notinst_deplibs'
+else
+  # When we are sourced in execute mode, \$file and \$ECHO are already set.
+  if test \"\$libtool_execute_magic\" != \"$magic\"; then
+    file=\"\$0\""
+
+    qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
+    $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+    ECHO=\"$qECHO\"
+  fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) are identical between the wrapper
+# /script/ and the wrapper /executable/ which is used only on
+# windows platforms, and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options which match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
+lt_option_debug=
+func_parse_lt_options ()
+{
+  lt_script_arg0=\$0
+  shift
+  for lt_opt
+  do
+    case \"\$lt_opt\" in
+    --lt-debug) lt_option_debug=1 ;;
+    --lt-dump-script)
+        lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+        test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+        lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+        cat \"\$lt_dump_D/\$lt_dump_F\"
+        exit 0
+      ;;
+    --lt-*)
+        \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+        exit 1
+      ;;
+    esac
+  done
+
+  # Print the debug banner immediately:
+  if test -n \"\$lt_option_debug\"; then
+    echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
+  fi
+}
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+  lt_dump_args_N=1;
+  for lt_arg
+  do
+    \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
+    lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+  done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+  case $host in
+  # Backslashes separate directories on plain windows
+  *-*-mingw | *-*-os2* | *-cegcc*)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+    ;;
+
+  *)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+    ;;
+  esac
+  $ECHO "\
+      \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+      exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+  case \" \$* \" in
+  *\\ --lt-*)
+    for lt_wr_arg
+    do
+      case \$lt_wr_arg in
+      --lt-*) ;;
+      *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+      esac
+      shift
+    done ;;
+  esac
+  func_exec_program_core \${1+\"\$@\"}
+}
+
+  # Parse options
+  func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+  # Find the directory that this script lives in.
+  thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+  test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+  # Follow symbolic links until we get to the real thisdir.
+  file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+  while test -n \"\$file\"; do
+    destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+    # If there was a directory component, then change thisdir.
+    if test \"x\$destdir\" != \"x\$file\"; then
+      case \"\$destdir\" in
+      [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+      *) thisdir=\"\$thisdir/\$destdir\" ;;
+      esac
+    fi
+
+    file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+    file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+  done
+
+  # Usually 'no', except on cygwin/mingw when embedded into
+  # the cwrapper.
+  WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+  if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+    # special case for '.'
+    if test \"\$thisdir\" = \".\"; then
+      thisdir=\`pwd\`
+    fi
+    # remove .libs from thisdir
+    case \"\$thisdir\" in
+    *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+    $objdir )   thisdir=. ;;
+    esac
+  fi
+
+  # Try to get the absolute directory name.
+  absdir=\`cd \"\$thisdir\" && pwd\`
+  test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+	if test "$fast_install" = yes; then
+	  $ECHO "\
+  program=lt-'$outputname'$exeext
+  progdir=\"\$thisdir/$objdir\"
+
+  if test ! -f \"\$progdir/\$program\" ||
+     { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
+       test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+    file=\"\$\$-\$program\"
+
+    if test ! -d \"\$progdir\"; then
+      $MKDIR \"\$progdir\"
+    else
+      $RM \"\$progdir/\$file\"
+    fi"
+
+	  $ECHO "\
+
+    # relink executable if necessary
+    if test -n \"\$relink_command\"; then
+      if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+      else
+	$ECHO \"\$relink_command_output\" >&2
+	$RM \"\$progdir/\$file\"
+	exit 1
+      fi
+    fi
+
+    $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+    { $RM \"\$progdir/\$program\";
+      $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+    $RM \"\$progdir/\$file\"
+  fi"
+	else
+	  $ECHO "\
+  program='$outputname'
+  progdir=\"\$thisdir/$objdir\"
+"
+	fi
+
+	$ECHO "\
+
+  if test -f \"\$progdir/\$program\"; then"
+
+	# Fix the DLL searchpath if we need to.  Do this before prepending
+	# to shlibpath, because on Windows, both are PATH and uninstalled
+	# libraries must come first.
+	if test -n "$dllsearchpath"; then
+	  $ECHO "\
+    # Add the dll search path components to the executable PATH
+    PATH=$dllsearchpath:\$PATH
+"
+	fi
+
+	# Export our shlibpath_var if we have one.
+	if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+	  $ECHO "\
+    # Add our own library path to $shlibpath_var
+    $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+    # Some systems cannot cope with colon-terminated $shlibpath_var
+    # The second colon is a workaround for a bug in BeOS R4 sed
+    $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+    export $shlibpath_var
+"
+	fi
+
+	$ECHO "\
+    if test \"\$libtool_execute_magic\" != \"$magic\"; then
+      # Run the actual program with our arguments.
+      func_exec_program \${1+\"\$@\"}
+    fi
+  else
+    # The program doesn't exist.
+    \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
+    \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+    \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+    exit 1
+  fi
+fi\
+"
+}
+
+
+# func_emit_cwrapperexe_src
+# emit the source code for a wrapper executable on stdout
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+	cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+   Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+
+   The $output program cannot be directly executed until all the libtool
+   libraries that it depends on are installed.
+
+   This wrapper executable should never be moved out of the build directory.
+   If it is, it will not operate correctly.
+*/
+EOF
+	    cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _MSC_VER
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+#  include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+/* declarations of non-ANSI functions */
+#if defined(__MINGW32__)
+# ifdef __STRICT_ANSI__
+int _putenv (const char *);
+# endif
+#elif defined(__CYGWIN__)
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined (other platforms) ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined(_MSC_VER)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+# define S_IXUSR _S_IEXEC
+# ifndef _INTPTR_T_DEFINED
+#  define _INTPTR_T_DEFINED
+#  define intptr_t int
+# endif
+#elif defined(__MINGW32__)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+#elif defined(__CYGWIN__)
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined (other platforms) ... */
+#endif
+
+#if defined(PATH_MAX)
+# define LT_PATHMAX PATH_MAX
+#elif defined(MAXPATHLEN)
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
+  defined (__OS2__)
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+#  define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+#  define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+	(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num)      ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+  if (stale) { free ((void *) stale); stale = 0; } \
+} while (0)
+
+#if defined(LT_DEBUGWRAPPER)
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+	    cat <<EOF
+volatile const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+	    if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+              func_to_host_path "$temp_rpath"
+	      cat <<EOF
+const char * LIB_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * LIB_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test -n "$dllsearchpath"; then
+              func_to_host_path "$dllsearchpath:"
+	      cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE   = "$func_to_host_path_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test "$fast_install" = yes; then
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+	    else
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+	    fi
+
+
+	    cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX         "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt       = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt            = LTWRAPPER_OPTION_PREFIX "debug";
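+
+/* Illustrative usage of the options above (the program name ./prog is
+   hypothetical):
+     ./prog --lt-debug        turn on the wrapper's debug output
+     ./prog --lt-dump-script  print the equivalent shell wrapper script  */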
+
+int
+main (int argc, char *argv[])
+{
+  char **newargz;
+  int  newargc;
+  char *tmp_pathspec;
+  char *actual_cwrapper_path;
+  char *actual_cwrapper_name;
+  char *target_name;
+  char *lt_argv_zero;
+  intptr_t rval = 127;
+
+  int i;
+
+  program_name = (char *) xstrdup (base_name (argv[0]));
+  newargz = XMALLOC (char *, argc + 1);
+
+  /* very simple arg parsing; don't want to rely on getopt
+   * also, copy all non-cwrapper options to newargz, except
+   * argv[0], which is handled differently
+   */
+  newargc=0;
+  for (i = 1; i < argc; i++)
+    {
+      if (strcmp (argv[i], dumpscript_opt) == 0)
+	{
+EOF
+	    case "$host" in
+	      *mingw* | *cygwin* )
+		# make stdout use "unix" line endings
+		echo "          setmode(1,_O_BINARY);"
+		;;
+	      esac
+
+	    cat <<"EOF"
+	  lt_dump_script (stdout);
+	  return 0;
+	}
+      if (strcmp (argv[i], debug_opt) == 0)
+	{
+          lt_debug = 1;
+          continue;
+	}
+      if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
+        {
+          /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+             namespace, but it is not one of the ones we know about and
+             have already dealt with, above (including dump-script), then
+             report an error. Otherwise, targets might begin to believe
+             they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+             namespace. The first time any user complains about this, we'll
+             need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+             or a configure.ac-settable value.
+           */
+          lt_fatal (__FILE__, __LINE__,
+		    "unrecognized %s option: '%s'",
+                    ltwrapper_option_prefix, argv[i]);
+        }
+      /* otherwise ... */
+      newargz[++newargc] = xstrdup (argv[i]);
+    }
+  newargz[++newargc] = NULL;
+
+EOF
+	    cat <<EOF
+  /* The GNU banner must be the first non-error debug message */
+  lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
+EOF
+	    cat <<"EOF"
+  lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+  tmp_pathspec = find_executable (argv[0]);
+  if (tmp_pathspec == NULL)
+    lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (before symlink chase) at: %s\n",
+		  tmp_pathspec);
+
+  actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (after symlink chase) at: %s\n",
+		  actual_cwrapper_path);
+  XFREE (tmp_pathspec);
+
+  actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+  strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+  /* wrapper name transforms */
+  strendzap (actual_cwrapper_name, ".exe");
+  tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+  XFREE (actual_cwrapper_name);
+  actual_cwrapper_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  /* target_name transforms -- use actual target program name; might have lt- prefix */
+  target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+  strendzap (target_name, ".exe");
+  tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+  XFREE (target_name);
+  target_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(main) libtool target name: %s\n",
+		  target_name);
+EOF
+
+	    cat <<EOF
+  newargz[0] =
+    XMALLOC (char, (strlen (actual_cwrapper_path) +
+		    strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+  strcpy (newargz[0], actual_cwrapper_path);
+  strcat (newargz[0], "$objdir");
+  strcat (newargz[0], "/");
+EOF
+
+	    cat <<"EOF"
+  /* stop here, and copy so we don't have to do this twice */
+  tmp_pathspec = xstrdup (newargz[0]);
+
+  /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+  strcat (newargz[0], actual_cwrapper_name);
+
+  /* DO want the lt- prefix here if it exists, so use target_name */
+  lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+  XFREE (tmp_pathspec);
+  tmp_pathspec = NULL;
+EOF
+
+	    case $host_os in
+	      mingw*)
+	    cat <<"EOF"
+  {
+    char* p;
+    while ((p = strchr (newargz[0], '\\')) != NULL)
+      {
+	*p = '/';
+      }
+    while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+      {
+	*p = '/';
+      }
+  }
+EOF
+	    ;;
+	    esac
+
+	    cat <<"EOF"
+  XFREE (target_name);
+  XFREE (actual_cwrapper_path);
+  XFREE (actual_cwrapper_name);
+
+  lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+  lt_setenv ("DUALCASE", "1");  /* for MKS sh */
+  /* Update the DLL searchpath.  EXE_PATH_VALUE ($dllsearchpath) must be
+     prepended first, so that it ends up appearing after LIB_PATH_VALUE
+     ($temp_rpath), because on Windows both *_VARNAMEs are PATH and
+     uninstalled libraries must come first. */
+  lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+  lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+  lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+		  nonnull (lt_argv_zero));
+  for (i = 0; i < newargc; i++)
+    {
+      lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+		      i, nonnull (newargz[i]));
+    }
+
+EOF
+
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+  /* execv doesn't actually work on mingw as expected on unix */
+  newargz = prepare_spawn (newargz);
+  rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+  if (rval == -1)
+    {
+      /* failed to start process */
+      lt_debugprintf (__FILE__, __LINE__,
+		      "(main) failed to launch target \"%s\": %s\n",
+		      lt_argv_zero, nonnull (strerror (errno)));
+      return 127;
+    }
+  return rval;
+EOF
+		;;
+	      *)
+		cat <<"EOF"
+  execv (lt_argv_zero, newargz);
+  return rval; /* =127, but avoids unused variable warning */
+EOF
+		;;
+	    esac
+
+	    cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+  void *p = (void *) malloc (num);
+  if (!p)
+    lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+  return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+  return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+			  string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+  const char *base;
+
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  /* Skip over the disk name in MSDOS pathnames. */
+  if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+    name += 2;
+#endif
+
+  for (base = name; *name; name++)
+    if (IS_DIR_SEPARATOR (*name))
+      base = name + 1;
+  return base;
+}
+
+int
+check_executable (const char *path)
+{
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if ((stat (path, &st) >= 0)
+      && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+    return 1;
+  else
+    return 0;
+}
+
+int
+make_executable (const char *path)
+{
+  int rval = 0;
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if (stat (path, &st) >= 0)
+    {
+      rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+    }
+  return rval;
+}
+
+/* Searches for the full path of the wrapper.  Returns
+   a newly allocated full path name if found, NULL otherwise.
+   Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+  int has_slash = 0;
+  const char *p;
+  const char *p_next;
+  /* fixed-size buffer for getcwd */
+  char tmp[LT_PATHMAX + 1];
+  int tmp_len;
+  char *concat_name;
+
+  lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+                  nonempty (wrapper));
+
+  if ((wrapper == NULL) || (*wrapper == '\0'))
+    return NULL;
+
+  /* Absolute path? */
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+    {
+      concat_name = xstrdup (wrapper);
+      if (check_executable (concat_name))
+	return concat_name;
+      XFREE (concat_name);
+    }
+  else
+    {
+#endif
+      if (IS_DIR_SEPARATOR (wrapper[0]))
+	{
+	  concat_name = xstrdup (wrapper);
+	  if (check_executable (concat_name))
+	    return concat_name;
+	  XFREE (concat_name);
+	}
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+    }
+#endif
+
+  for (p = wrapper; *p; p++)
+    if (*p == '/')
+      {
+	has_slash = 1;
+	break;
+      }
+  if (!has_slash)
+    {
+      /* no slashes; search PATH */
+      const char *path = getenv ("PATH");
+      if (path != NULL)
+	{
+	  for (p = path; *p; p = p_next)
+	    {
+	      const char *q;
+	      size_t p_len;
+	      for (q = p; *q; q++)
+		if (IS_PATH_SEPARATOR (*q))
+		  break;
+	      p_len = q - p;
+	      p_next = (*q == '\0' ? q : q + 1);
+	      if (p_len == 0)
+		{
+		  /* empty path: current directory */
+		  if (getcwd (tmp, LT_PATHMAX) == NULL)
+		    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+                              nonnull (strerror (errno)));
+		  tmp_len = strlen (tmp);
+		  concat_name =
+		    XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, tmp, tmp_len);
+		  concat_name[tmp_len] = '/';
+		  strcpy (concat_name + tmp_len + 1, wrapper);
+		}
+	      else
+		{
+		  concat_name =
+		    XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, p, p_len);
+		  concat_name[p_len] = '/';
+		  strcpy (concat_name + p_len + 1, wrapper);
+		}
+	      if (check_executable (concat_name))
+		return concat_name;
+	      XFREE (concat_name);
+	    }
+	}
+      /* not found in PATH; assume curdir */
+    }
+  /* Relative path | not found in path: prepend cwd */
+  if (getcwd (tmp, LT_PATHMAX) == NULL)
+    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+              nonnull (strerror (errno)));
+  tmp_len = strlen (tmp);
+  concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+  memcpy (concat_name, tmp, tmp_len);
+  concat_name[tmp_len] = '/';
+  strcpy (concat_name + tmp_len + 1, wrapper);
+
+  if (check_executable (concat_name))
+    return concat_name;
+  XFREE (concat_name);
+  return NULL;
+}
+
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+  return xstrdup (pathspec);
+#else
+  char buf[LT_PATHMAX];
+  struct stat s;
+  char *tmp_pathspec = xstrdup (pathspec);
+  char *p;
+  int has_symlinks = 0;
+  while (strlen (tmp_pathspec) && !has_symlinks)
+    {
+      lt_debugprintf (__FILE__, __LINE__,
+		      "checking path component for symlinks: %s\n",
+		      tmp_pathspec);
+      if (lstat (tmp_pathspec, &s) == 0)
+	{
+	  if (S_ISLNK (s.st_mode) != 0)
+	    {
+	      has_symlinks = 1;
+	      break;
+	    }
+
+	  /* search backwards for last DIR_SEPARATOR */
+	  p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+	  while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    p--;
+	  if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    {
+	      /* no more DIR_SEPARATORS left */
+	      break;
+	    }
+	  *p = '\0';
+	}
+      else
+	{
+	  lt_fatal (__FILE__, __LINE__,
+		    "error accessing file \"%s\": %s",
+		    tmp_pathspec, nonnull (strerror (errno)));
+	}
+    }
+  XFREE (tmp_pathspec);
+
+  if (!has_symlinks)
+    {
+      return xstrdup (pathspec);
+    }
+
+  tmp_pathspec = realpath (pathspec, buf);
+  if (tmp_pathspec == 0)
+    {
+      lt_fatal (__FILE__, __LINE__,
+		"could not follow symlinks for %s", pathspec);
+    }
+  return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+  size_t len, patlen;
+
+  assert (str != NULL);
+  assert (pat != NULL);
+
+  len = strlen (str);
+  patlen = strlen (pat);
+
+  if (patlen <= len)
+    {
+      str += len - patlen;
+      if (strcmp (str, pat) == 0)
+	*str = '\0';
+    }
+  return str;
+}
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+  va_list args;
+  if (lt_debug)
+    {
+      (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+      va_start (args, fmt);
+      (void) vfprintf (stderr, fmt, args);
+      va_end (args);
+    }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+	       int line, const char *mode,
+	       const char *message, va_list ap)
+{
+  fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+  vfprintf (stderr, message, ap);
+  fprintf (stderr, ".\n");
+
+  if (exit_status >= 0)
+    exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+  va_list ap;
+  va_start (ap, message);
+  lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+  va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+  return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+  return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_setenv) setting '%s' to '%s'\n",
+                  nonnull (name), nonnull (value));
+  {
+#ifdef HAVE_SETENV
+    /* always make a copy, for consistency with !HAVE_SETENV */
+    char *str = xstrdup (value);
+    setenv (name, str, 1);
+#else
+    int len = strlen (name) + 1 + strlen (value) + 1;
+    char *str = XMALLOC (char, len);
+    sprintf (str, "%s=%s", name, value);
+    if (putenv (str) != EXIT_SUCCESS)
+      {
+        XFREE (str);
+      }
+#endif
+  }
+}
+
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+  char *new_value;
+  if (orig_value && *orig_value)
+    {
+      int orig_value_len = strlen (orig_value);
+      int add_len = strlen (add);
+      new_value = XMALLOC (char, add_len + orig_value_len + 1);
+      if (to_end)
+        {
+          strcpy (new_value, orig_value);
+          strcpy (new_value + orig_value_len, add);
+        }
+      else
+        {
+          strcpy (new_value, add);
+          strcpy (new_value + add_len, orig_value);
+        }
+    }
+  else
+    {
+      new_value = xstrdup (add);
+    }
+  return new_value;
+}
+
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      /* some systems can't cope with a ':'-terminated path #' */
+      int len = strlen (new_value);
+      while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+        {
+          new_value[len-1] = '\0';
+        }
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+EOF
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+   Note that spawn() does not by itself call the command interpreter
+     (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+      ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+         GetVersionEx(&v);
+         v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+      }) ? "cmd.exe" : "command.com").
+   Instead it simply concatenates the arguments, separated by ' ', and calls
+   CreateProcess().  We must quote the arguments since Win32 CreateProcess()
+   interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+   special way:
+   - Space and tab are interpreted as delimiters. They are not treated as
+     delimiters if they are surrounded by double quotes: "...".
+   - Unescaped double quotes are removed from the input. Their only effect is
+     that within double quotes, space and tab are treated like normal
+     characters.
+   - Backslashes not followed by double quotes are not special.
+   - But 2*n+1 backslashes followed by a double quote become
+     n backslashes followed by a double quote (n >= 0):
+       \" -> "
+       \\\" -> \"
+       \\\\\" -> \\"
+ */
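+
+/* A worked example of the rules above (the argument is hypothetical, not
+   something the wrapper itself builds): an argument consisting of
+   abc, a space, "def" in double quotes, a space, and g followed by one
+   backslash is emitted by prepare_spawn() as
+       "abc \"def\" g\\"
+   i.e. the whole argument is wrapped in double quotes, each embedded quote
+   gains one backslash, and the final backslash is doubled so that
+   CreateProcess() reconstructs the original argument exactly.  */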
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+  size_t argc;
+  char **new_argv;
+  size_t i;
+
+  /* Count number of arguments.  */
+  for (argc = 0; argv[argc] != NULL; argc++)
+    ;
+
+  /* Allocate new argument vector.  */
+  new_argv = XMALLOC (char *, argc + 1);
+
+  /* Put quoted arguments into the new argument vector.  */
+  for (i = 0; i < argc; i++)
+    {
+      const char *string = argv[i];
+
+      if (string[0] == '\0')
+	new_argv[i] = xstrdup ("\"\"");
+      else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+	{
+	  int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+	  size_t length;
+	  unsigned int backslashes;
+	  const char *s;
+	  char *quoted_string;
+	  char *p;
+
+	  length = 0;
+	  backslashes = 0;
+	  if (quote_around)
+	    length++;
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		length += backslashes + 1;
+	      length++;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    length += backslashes + 1;
+
+	  quoted_string = XMALLOC (char, length + 1);
+
+	  p = quoted_string;
+	  backslashes = 0;
+	  if (quote_around)
+	    *p++ = '"';
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		{
+		  unsigned int j;
+		  for (j = backslashes + 1; j > 0; j--)
+		    *p++ = '\\';
+		}
+	      *p++ = c;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    {
+	      unsigned int j;
+	      for (j = backslashes; j > 0; j--)
+		*p++ = '\\';
+	      *p++ = '"';
+	    }
+	  *p = '\0';
+
+	  new_argv[i] = quoted_string;
+	}
+      else
+	new_argv[i] = (char *) string;
+    }
+  new_argv[argc] = NULL;
+
+  return new_argv;
+}
+EOF
+		;;
+	    esac
+
+            cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
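+	    # Convert the wrapper script into the body of lt_dump_script():
+	    # each line is emitted as one or more fputs() calls (over-long
+	    # lines are split), with backslashes and double quotes escaped
+	    # and a \n appended at the end of every original line.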
+	    func_emit_wrapper yes |
+	      $SED -n -e '
+s/^\(.\{79\}\)\(..*\)/\1\
+\2/
+h
+s/\([\\"]\)/\\\1/g
+s/$/\\n/
+s/\([^\n]*\).*/  fputs ("\1", f);/p
+g
+D'
+            cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+    $opt_debug
+    case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+    *import*) : ;;
+    *) false ;;
+    esac
+}
+
+# func_mode_link arg...
+func_mode_link ()
+{
+    $opt_debug
+    case $host in
+    *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+      # It is impossible to link a dll without this setting, and
+      # we shouldn't force the makefile maintainer to figure out
+      # which system we are compiling for in order to pass an extra
+      # flag for every libtool invocation.
+      # allow_undefined=no
+
+      # FIXME: Unfortunately, there are problems with the above when trying
+      # to make a dll which has undefined symbols, in which case not
+      # even a static library is built.  For now, we need to specify
+      # -no-undefined on the libtool link line when we can be certain
+      # that all symbols are satisfied, otherwise we get a static library.
+      allow_undefined=yes
+      ;;
+    *)
+      allow_undefined=yes
+      ;;
+    esac
+    libtool_args=$nonopt
+    base_compile="$nonopt $@"
+    compile_command=$nonopt
+    finalize_command=$nonopt
+
+    compile_rpath=
+    finalize_rpath=
+    compile_shlibpath=
+    finalize_shlibpath=
+    convenience=
+    old_convenience=
+    deplibs=
+    old_deplibs=
+    compiler_flags=
+    linker_flags=
+    dllsearchpath=
+    lib_search_path=`pwd`
+    inst_prefix_dir=
+    new_inherited_linker_flags=
+
+    avoid_version=no
+    bindir=
+    dlfiles=
+    dlprefiles=
+    dlself=no
+    export_dynamic=no
+    export_symbols=
+    export_symbols_regex=
+    generated=
+    libobjs=
+    ltlibs=
+    module=no
+    no_install=no
+    objs=
+    non_pic_objects=
+    precious_files_regex=
+    prefer_static_libs=no
+    preload=no
+    prev=
+    prevarg=
+    release=
+    rpath=
+    xrpath=
+    perm_rpath=
+    temp_rpath=
+    thread_safe=no
+    vinfo=
+    vinfo_number=no
+    weak_libs=
+    single_module="${wl}-single_module"
+    func_infer_tag $base_compile
+
+    # We need to know -static, to get the right output filenames.
+    for arg
+    do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	break
+	;;
+      -all-static | -static | -static-libtool-libs)
+	case $arg in
+	-all-static)
+	  if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
+	    func_warning "complete static linking is impossible in this configuration"
+	  fi
+	  if test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	-static)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=built
+	  ;;
+	-static-libtool-libs)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	esac
+	build_libtool_libs=no
+	build_old_libs=yes
+	break
+	;;
+      esac
+    done
+
+    # See if our shared archives depend on static archives.
+    test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+    # Go through the arguments, transforming them on the way.
+    while test "$#" -gt 0; do
+      arg="$1"
+      shift
+      func_quote_for_eval "$arg"
+      qarg=$func_quote_for_eval_unquoted_result
+      libtool_args+=" $func_quote_for_eval_result"
+
+      # If the previous option needs an argument, assign it.
+      if test -n "$prev"; then
+	case $prev in
+	output)
+	  compile_command+=" @OUTPUT@"
+	  finalize_command+=" @OUTPUT@"
+	  ;;
+	esac
+
+	case $prev in
+	bindir)
+	  bindir="$arg"
+	  prev=
+	  continue
+	  ;;
+	dlfiles|dlprefiles)
+	  if test "$preload" = no; then
+	    # Add the symbol object into the linking commands.
+	    compile_command+=" @SYMFILE@"
+	    finalize_command+=" @SYMFILE@"
+	    preload=yes
+	  fi
+	  case $arg in
+	  *.la | *.lo) ;;  # We handle these cases below.
+	  force)
+	    if test "$dlself" = no; then
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  self)
+	    if test "$prev" = dlprefiles; then
+	      dlself=yes
+	    elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
+	      dlself=yes
+	    else
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  *)
+	    if test "$prev" = dlfiles; then
+	      dlfiles+=" $arg"
+	    else
+	      dlprefiles+=" $arg"
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  esac
+	  ;;
+	expsyms)
+	  export_symbols="$arg"
+	  test -f "$arg" \
+	    || func_fatal_error "symbol file \`$arg' does not exist"
+	  prev=
+	  continue
+	  ;;
+	expsyms_regex)
+	  export_symbols_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	framework)
+	  case $host in
+	    *-*-darwin*)
+	      case "$deplibs " in
+		*" $qarg.ltframework "*) ;;
+		*) deplibs+=" $qarg.ltframework" # this is fixed later
+		   ;;
+	      esac
+	      ;;
+	  esac
+	  prev=
+	  continue
+	  ;;
+	inst_prefix)
+	  inst_prefix_dir="$arg"
+	  prev=
+	  continue
+	  ;;
+	objectlist)
+	  if test -f "$arg"; then
+	    save_arg=$arg
+	    moreargs=
+	    for fil in `cat "$save_arg"`
+	    do
+#	      moreargs+=" $fil"
+	      arg=$fil
+	      # A libtool-controlled object.
+
+	      # Check to see that this really is a libtool object.
+	      if func_lalib_unsafe_p "$arg"; then
+		pic_object=
+		non_pic_object=
+
+		# Read the .lo file
+		func_source "$arg"
+
+		if test -z "$pic_object" ||
+		   test -z "$non_pic_object" ||
+		   test "$pic_object" = none &&
+		   test "$non_pic_object" = none; then
+		  func_fatal_error "cannot find name of object for \`$arg'"
+		fi
+
+		# Extract subdirectory from the argument.
+		func_dirname "$arg" "/" ""
+		xdir="$func_dirname_result"
+
+		if test "$pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  pic_object="$xdir$pic_object"
+
+		  if test "$prev" = dlfiles; then
+		    if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		      dlfiles+=" $pic_object"
+		      prev=
+		      continue
+		    else
+		      # If libtool objects are unsupported, then we need to preload.
+		      prev=dlprefiles
+		    fi
+		  fi
+
+		  # CHECK ME:  I think I busted this.  -Ossama
+		  if test "$prev" = dlprefiles; then
+		    # Preload the old-style object.
+		    dlprefiles+=" $pic_object"
+		    prev=
+		  fi
+
+		  # A PIC object.
+		  libobjs+=" $pic_object"
+		  arg="$pic_object"
+		fi
+
+		# Non-PIC object.
+		if test "$non_pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  non_pic_object="$xdir$non_pic_object"
+
+		  # A standard non-PIC object
+		  non_pic_objects+=" $non_pic_object"
+		  if test -z "$pic_object" || test "$pic_object" = none ; then
+		    arg="$non_pic_object"
+		  fi
+		else
+		  # If the PIC object exists, use it instead.
+		  # $xdir was prepended to $pic_object above.
+		  non_pic_object="$pic_object"
+		  non_pic_objects+=" $non_pic_object"
+		fi
+	      else
+		# Only an error if not doing a dry-run.
+		if $opt_dry_run; then
+		  # Extract subdirectory from the argument.
+		  func_dirname "$arg" "/" ""
+		  xdir="$func_dirname_result"
+
+		  func_lo2o "$arg"
+		  pic_object=$xdir$objdir/$func_lo2o_result
+		  non_pic_object=$xdir$func_lo2o_result
+		  libobjs+=" $pic_object"
+		  non_pic_objects+=" $non_pic_object"
+	        else
+		  func_fatal_error "\`$arg' is not a valid libtool object"
+		fi
+	      fi
+	    done
+	  else
+	    func_fatal_error "link input file \`$arg' does not exist"
+	  fi
+	  arg=$save_arg
+	  prev=
+	  continue
+	  ;;
+	precious_regex)
+	  precious_files_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	release)
+	  release="-$arg"
+	  prev=
+	  continue
+	  ;;
+	rpath | xrpath)
+	  # We need an absolute path.
+	  case $arg in
+	  [\\/]* | [A-Za-z]:[\\/]*) ;;
+	  *)
+	    func_fatal_error "only absolute run-paths are allowed"
+	    ;;
+	  esac
+	  if test "$prev" = rpath; then
+	    case "$rpath " in
+	    *" $arg "*) ;;
+	    *) rpath+=" $arg" ;;
+	    esac
+	  else
+	    case "$xrpath " in
+	    *" $arg "*) ;;
+	    *) xrpath+=" $arg" ;;
+	    esac
+	  fi
+	  prev=
+	  continue
+	  ;;
+	shrext)
+	  shrext_cmds="$arg"
+	  prev=
+	  continue
+	  ;;
+	weak)
+	  weak_libs+=" $arg"
+	  prev=
+	  continue
+	  ;;
+	xcclinker)
+	  linker_flags+=" $qarg"
+	  compiler_flags+=" $qarg"
+	  prev=
+	  compile_command+=" $qarg"
+	  finalize_command+=" $qarg"
+	  continue
+	  ;;
+	xcompiler)
+	  compiler_flags+=" $qarg"
+	  prev=
+	  compile_command+=" $qarg"
+	  finalize_command+=" $qarg"
+	  continue
+	  ;;
+	xlinker)
+	  linker_flags+=" $qarg"
+	  compiler_flags+=" $wl$qarg"
+	  prev=
+	  compile_command+=" $wl$qarg"
+	  finalize_command+=" $wl$qarg"
+	  continue
+	  ;;
+	*)
+	  eval "$prev=\"\$arg\""
+	  prev=
+	  continue
+	  ;;
+	esac
+      fi # test -n "$prev"
+
+      prevarg="$arg"
+
+      case $arg in
+      -all-static)
+	if test -n "$link_static_flag"; then
+	  # See comment for -static flag below, for more details.
+	  compile_command+=" $link_static_flag"
+	  finalize_command+=" $link_static_flag"
+	fi
+	continue
+	;;
+
+      -allow-undefined)
+	# FIXME: remove this flag sometime in the future.
+	func_fatal_error "\`-allow-undefined' must not be used because it is the default"
+	;;
+
+      -avoid-version)
+	avoid_version=yes
+	continue
+	;;
+
+      -bindir)
+	prev=bindir
+	continue
+	;;
+
+      -dlopen)
+	prev=dlfiles
+	continue
+	;;
+
+      -dlpreopen)
+	prev=dlprefiles
+	continue
+	;;
+
+      -export-dynamic)
+	export_dynamic=yes
+	continue
+	;;
+
+      -export-symbols | -export-symbols-regex)
+	if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+	  func_fatal_error "more than one -export-symbols argument is not allowed"
+	fi
+	if test "X$arg" = "X-export-symbols"; then
+	  prev=expsyms
+	else
+	  prev=expsyms_regex
+	fi
+	continue
+	;;
+
+      -framework)
+	prev=framework
+	continue
+	;;
+
+      -inst-prefix-dir)
+	prev=inst_prefix
+	continue
+	;;
+
+      # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+      # so, if we see these flags, be careful not to treat them like -L
+      -L[A-Z][A-Z]*:*)
+	case $with_gcc/$host in
+	no/*-*-irix* | /*-*-irix*)
+	  compile_command+=" $arg"
+	  finalize_command+=" $arg"
+	  ;;
+	esac
+	continue
+	;;
+
+      -L*)
+	func_stripname "-L" '' "$arg"
+	if test -z "$func_stripname_result"; then
+	  if test "$#" -gt 0; then
+	    func_fatal_error "require no space between \`-L' and \`$1'"
+	  else
+	    func_fatal_error "need path for \`-L' option"
+	  fi
+	fi
+	func_resolve_sysroot "$func_stripname_result"
+	dir=$func_resolve_sysroot_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	*)
+	  absdir=`cd "$dir" && pwd`
+	  test -z "$absdir" && \
+	    func_fatal_error "cannot determine absolute directory name of \`$dir'"
+	  dir="$absdir"
+	  ;;
+	esac
+	case "$deplibs " in
+	*" -L$dir "* | *" $arg "*)
+	  # Will only happen for absolute or sysroot arguments
+	  ;;
+	*)
+	  # Preserve sysroot, but never include relative directories
+	  case $dir in
+	    [\\/]* | [A-Za-z]:[\\/]* | =*) deplibs+=" $arg" ;;
+	    *) deplibs+=" -L$dir" ;;
+	  esac
+	  lib_search_path+=" $dir"
+	  ;;
+	esac
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$dir:"*) ;;
+	  ::) dllsearchpath=$dir;;
+	  *) dllsearchpath+=":$dir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath+=":$testbindir";;
+	  esac
+	  ;;
+	esac
+	continue
+	;;
+
+      -l*)
+	if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # These systems don't actually have a C or math library (as such)
+	    continue
+	    ;;
+	  *-*-os2*)
+	    # These systems don't actually have a C library (as such)
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C and math libraries are in the System framework
+	    deplibs+=" System.ltframework"
+	    continue
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  esac
+	elif test "X$arg" = "X-lc_r"; then
+	 case $host in
+	 *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	   # Do not include libc_r directly, use -pthread flag.
+	   continue
+	   ;;
+	 esac
+	fi
+	deplibs+=" $arg"
+	continue
+	;;
+
+      -module)
+	module=yes
+	continue
+	;;
+
+      # Tru64 UNIX uses -model [arg] to determine the layout of C++
+      # classes, name mangling, and exception handling.
+      # Darwin uses the -arch flag to determine output architecture.
+      -model|-arch|-isysroot|--sysroot)
+	compiler_flags+=" $arg"
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+	prev=xcompiler
+	continue
+	;;
+
+      -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+      |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	compiler_flags+=" $arg"
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+	case "$new_inherited_linker_flags " in
+	    *" $arg "*) ;;
+	    * ) new_inherited_linker_flags+=" $arg" ;;
+	esac
+	continue
+	;;
+
+      -multi_module)
+	single_module="${wl}-multi_module"
+	continue
+	;;
+
+      -no-fast-install)
+	fast_install=no
+	continue
+	;;
+
+      -no-install)
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+	  # The PATH hackery in wrapper scripts is required on Windows
+	  # and Darwin in order for the loader to find any dlls it needs.
+	  func_warning "\`-no-install' is ignored for $host"
+	  func_warning "assuming \`-no-fast-install' instead"
+	  fast_install=no
+	  ;;
+	*) no_install=yes ;;
+	esac
+	continue
+	;;
+
+      -no-undefined)
+	allow_undefined=no
+	continue
+	;;
+
+      -objectlist)
+	prev=objectlist
+	continue
+	;;
+
+      -o) prev=output ;;
+
+      -precious-files-regex)
+	prev=precious_regex
+	continue
+	;;
+
+      -release)
+	prev=release
+	continue
+	;;
+
+      -rpath)
+	prev=rpath
+	continue
+	;;
+
+      -R)
+	prev=xrpath
+	continue
+	;;
+
+      -R*)
+	func_stripname '-R' '' "$arg"
+	dir=$func_stripname_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	=*)
+	  func_stripname '=' '' "$dir"
+	  dir=$lt_sysroot$func_stripname_result
+	  ;;
+	*)
+	  func_fatal_error "only absolute run-paths are allowed"
+	  ;;
+	esac
+	case "$xrpath " in
+	*" $dir "*) ;;
+	*) xrpath+=" $dir" ;;
+	esac
+	continue
+	;;
+
+      -shared)
+	# The effects of -shared are defined in a previous loop.
+	continue
+	;;
+
+      -shrext)
+	prev=shrext
+	continue
+	;;
+
+      -static | -static-libtool-libs)
+	# The effects of -static are defined in a previous loop.
+	# We used to do the same as -all-static on platforms that
+	# didn't have a PIC flag, but the assumption that the effects
+	# would be equivalent was wrong.  It would break on at least
+	# Digital Unix and AIX.
+	continue
+	;;
+
+      -thread-safe)
+	thread_safe=yes
+	continue
+	;;
+
+      -version-info)
+	prev=vinfo
+	continue
+	;;
+
+      -version-number)
+	prev=vinfo
+	vinfo_number=yes
+	continue
+	;;
+
+      -weak)
+        prev=weak
+	continue
+	;;
+
+      -Wc,*)
+	func_stripname '-Wc,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg+=" $func_quote_for_eval_result"
+	  compiler_flags+=" $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Wl,*)
+	func_stripname '-Wl,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg+=" $wl$func_quote_for_eval_result"
+	  compiler_flags+=" $wl$func_quote_for_eval_result"
+	  linker_flags+=" $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
+      -Xcompiler)
+	prev=xcompiler
+	continue
+	;;
+
+      -Xlinker)
+	prev=xlinker
+	continue
+	;;
+
+      -XCClinker)
+	prev=xcclinker
+	continue
+	;;
+
+      # -msg_* for osf cc
+      -msg_*)
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      # Flags to be passed through unchanged, with rationale:
+      # -64, -mips[0-9]      enable 64-bit mode for the SGI compiler
+      # -r[0-9][0-9]*        specify processor for the SGI compiler
+      # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+      # +DA*, +DD*           enable 64-bit mode for the HP compiler
+      # -q*                  compiler args for the IBM compiler
+      # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+      # -F/path              path to uninstalled frameworks, gcc on darwin
+      # -p, -pg, --coverage, -fprofile-*  profiling flags for GCC
+      # @file                GCC response files
+      # -tp=*                Portland pgcc target processor selection
+      # --sysroot=*          for sysroot support
+      # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+      -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+      -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
+      -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+        compile_command+=" $arg"
+        finalize_command+=" $arg"
+        compiler_flags+=" $arg"
+        continue
+        ;;
+
+      # Some other compiler flag.
+      -* | +*)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      *.$objext)
+	# A standard object.
+	objs+=" $arg"
+	;;
+
+      *.lo)
+	# A libtool-controlled object.
+
+	# Check to see that this really is a libtool object.
+	if func_lalib_unsafe_p "$arg"; then
+	  pic_object=
+	  non_pic_object=
+
+	  # Read the .lo file
+	  func_source "$arg"
+
+	  if test -z "$pic_object" ||
+	     test -z "$non_pic_object" ||
+	     test "$pic_object" = none &&
+	     test "$non_pic_object" = none; then
+	    func_fatal_error "cannot find name of object for \`$arg'"
+	  fi
+
+	  # Extract subdirectory from the argument.
+	  func_dirname "$arg" "/" ""
+	  xdir="$func_dirname_result"
+
+	  if test "$pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    pic_object="$xdir$pic_object"
+
+	    if test "$prev" = dlfiles; then
+	      if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		dlfiles+=" $pic_object"
+		prev=
+		continue
+	      else
+		# If libtool objects are unsupported, then we need to preload.
+		prev=dlprefiles
+	      fi
+	    fi
+
+	    # CHECK ME:  I think I busted this.  -Ossama
+	    if test "$prev" = dlprefiles; then
+	      # Preload the old-style object.
+	      dlprefiles+=" $pic_object"
+	      prev=
+	    fi
+
+	    # A PIC object.
+	    libobjs+=" $pic_object"
+	    arg="$pic_object"
+	  fi
+
+	  # Non-PIC object.
+	  if test "$non_pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    non_pic_object="$xdir$non_pic_object"
+
+	    # A standard non-PIC object
+	    non_pic_objects+=" $non_pic_object"
+	    if test -z "$pic_object" || test "$pic_object" = none ; then
+	      arg="$non_pic_object"
+	    fi
+	  else
+	    # If the PIC object exists, use it instead.
+	    # $xdir was prepended to $pic_object above.
+	    non_pic_object="$pic_object"
+	    non_pic_objects+=" $non_pic_object"
+	  fi
+	else
+	  # Only an error if not doing a dry-run.
+	  if $opt_dry_run; then
+	    # Extract subdirectory from the argument.
+	    func_dirname "$arg" "/" ""
+	    xdir="$func_dirname_result"
+
+	    func_lo2o "$arg"
+	    pic_object=$xdir$objdir/$func_lo2o_result
+	    non_pic_object=$xdir$func_lo2o_result
+	    libobjs+=" $pic_object"
+	    non_pic_objects+=" $non_pic_object"
+	  else
+	    func_fatal_error "\`$arg' is not a valid libtool object"
+	  fi
+	fi
+	;;
+
+      *.$libext)
+	# An archive.
+	deplibs+=" $arg"
+	old_deplibs+=" $arg"
+	continue
+	;;
+
+      *.la)
+	# A libtool-controlled library.
+
+	func_resolve_sysroot "$arg"
+	if test "$prev" = dlfiles; then
+	  # This library was specified with -dlopen.
+	  dlfiles+=" $func_resolve_sysroot_result"
+	  prev=
+	elif test "$prev" = dlprefiles; then
+	  # The library was specified with -dlpreopen.
+	  dlprefiles+=" $func_resolve_sysroot_result"
+	  prev=
+	else
+	  deplibs+=" $func_resolve_sysroot_result"
+	fi
+	continue
+	;;
+
+      # Some other compiler argument.
+      *)
+	# Unknown arguments in both finalize_command and compile_command need
+	# to be aesthetically quoted because they are evaled later.
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+      esac # arg
+
+      # Now actually substitute the argument into the commands.
+      if test -n "$arg"; then
+	compile_command+=" $arg"
+	finalize_command+=" $arg"
+      fi
+    done # argument parsing loop
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prevarg' option requires an argument"
+
+    if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+      eval arg=\"$export_dynamic_flag_spec\"
+      compile_command+=" $arg"
+      finalize_command+=" $arg"
+    fi
+
+    oldlibs=
+    # calculate the name of the file, without its directory
+    func_basename "$output"
+    outputname="$func_basename_result"
+    libobjs_save="$libobjs"
+
+    if test -n "$shlibpath_var"; then
+      # get the directories listed in $shlibpath_var
+      eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\`
+    else
+      shlib_search_path=
+    fi
+    eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+    eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+    func_dirname "$output" "/" ""
+    output_objdir="$func_dirname_result$objdir"
+    func_to_tool_file "$output_objdir/"
+    tool_output_objdir=$func_to_tool_file_result
+    # Create the object directory.
+    func_mkdir_p "$output_objdir"
+
+    # Determine the type of output
+    case $output in
+    "")
+      func_fatal_help "you must specify an output file"
+      ;;
+    *.$libext) linkmode=oldlib ;;
+    *.lo | *.$objext) linkmode=obj ;;
+    *.la) linkmode=lib ;;
+    *) linkmode=prog ;; # Anything else should be a program.
+    esac
+
+    specialdeplibs=
+
+    libs=
+    # Find all interdependent deplibs by searching for libraries
+    # that are linked more than once (e.g. -la -lb -la)
+    for deplib in $deplibs; do
+      if $opt_preserve_dup_deps ; then
+	case "$libs " in
+	*" $deplib "*) specialdeplibs+=" $deplib" ;;
+	esac
+      fi
+      libs+=" $deplib"
+    done
+
+    if test "$linkmode" = lib; then
+      libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+      # Compute libraries that are listed more than once in $predeps
+      # $postdeps and mark them as special (i.e., whose duplicates are
+      # not to be eliminated).
+      pre_post_deps=
+      if $opt_duplicate_compiler_generated_deps; then
+	for pre_post_dep in $predeps $postdeps; do
+	  case "$pre_post_deps " in
+	  *" $pre_post_dep "*) specialdeplibs+=" $pre_post_deps" ;;
+	  esac
+	  pre_post_deps+=" $pre_post_dep"
+	done
+      fi
+      pre_post_deps=
+    fi
+
+    deplibs=
+    newdependency_libs=
+    newlib_search_path=
+    need_relink=no # whether we're linking any uninstalled libtool libraries
+    notinst_deplibs= # not-installed libtool libraries
+    notinst_path= # paths that contain not-installed libtool libraries
+
+    case $linkmode in
+    lib)
+	passes="conv dlpreopen link"
+	for file in $dlfiles $dlprefiles; do
+	  case $file in
+	  *.la) ;;
+	  *)
+	    func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
+	    ;;
+	  esac
+	done
+	;;
+    prog)
+	compile_deplibs=
+	finalize_deplibs=
+	alldeplibs=no
+	newdlfiles=
+	newdlprefiles=
+	passes="conv scan dlopen dlpreopen link"
+	;;
+    *)  passes="conv"
+	;;
+    esac
+
+    for pass in $passes; do
+      # The preopen pass in lib mode reverses $deplibs; put it back here
+      # so that, for instance, -L comes before libs that need it.
+      if test "$linkmode,$pass" = "lib,link"; then
+	## FIXME: Find the place where the list is rebuilt in the wrong
+	##        order, and fix it there properly
+        tmp_deplibs=
+	for deplib in $deplibs; do
+	  tmp_deplibs="$deplib $tmp_deplibs"
+	done
+	deplibs="$tmp_deplibs"
+      fi
+
+      if test "$linkmode,$pass" = "lib,link" ||
+	 test "$linkmode,$pass" = "prog,scan"; then
+	libs="$deplibs"
+	deplibs=
+      fi
+      if test "$linkmode" = prog; then
+	case $pass in
+	dlopen) libs="$dlfiles" ;;
+	dlpreopen) libs="$dlprefiles" ;;
+	link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+	esac
+      fi
+      if test "$linkmode,$pass" = "lib,dlpreopen"; then
+	# Collect and forward deplibs of preopened libtool libs
+	for lib in $dlprefiles; do
+	  # Ignore non-libtool-libs
+	  dependency_libs=
+	  func_resolve_sysroot "$lib"
+	  case $lib in
+	  *.la)	func_source "$func_resolve_sysroot_result" ;;
+	  esac
+
+	  # Collect preopened libtool deplibs, except any this library
+	  # has declared as weak libs
+	  for deplib in $dependency_libs; do
+	    func_basename "$deplib"
+            deplib_base=$func_basename_result
+	    case " $weak_libs " in
+	    *" $deplib_base "*) ;;
+	    *) deplibs+=" $deplib" ;;
+	    esac
+	  done
+	done
+	libs="$dlprefiles"
+      fi
+      if test "$pass" = dlopen; then
+	# Collect dlpreopened libraries
+	save_deplibs="$deplibs"
+	deplibs=
+      fi
+
+      for deplib in $libs; do
+	lib=
+	found=no
+	case $deplib in
+	-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+        |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    compiler_flags+=" $deplib"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags+=" $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-l*)
+	  if test "$linkmode" != lib && test "$linkmode" != prog; then
+	    func_warning "\`-l' is ignored for archives/objects"
+	    continue
+	  fi
+	  func_stripname '-l' '' "$deplib"
+	  name=$func_stripname_result
+	  if test "$linkmode" = lib; then
+	    searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+	  else
+	    searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+	  fi
+	  for searchdir in $searchdirs; do
+	    for search_ext in .la $std_shrext .so .a; do
+	      # Search the libtool library
+	      lib="$searchdir/lib${name}${search_ext}"
+	      if test -f "$lib"; then
+		if test "$search_ext" = ".la"; then
+		  found=yes
+		else
+		  found=no
+		fi
+		break 2
+	      fi
+	    done
+	  done
+	  if test "$found" != yes; then
+	    # deplib doesn't seem to be a libtool library
+	    if test "$linkmode,$pass" = "prog,link"; then
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      deplibs="$deplib $deplibs"
+	      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    continue
+	  else # deplib is a libtool library
+	    # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
+	    # we need to do some special things here, and not later.
+	    if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	      case " $predeps $postdeps " in
+	      *" $deplib "*)
+		if func_lalib_p "$lib"; then
+		  library_names=
+		  old_library=
+		  func_source "$lib"
+		  for l in $old_library $library_names; do
+		    ll="$l"
+		  done
+		  if test "X$ll" = "X$old_library" ; then # only static version available
+		    found=no
+		    func_dirname "$lib" "" "."
+		    ladir="$func_dirname_result"
+		    lib=$ladir/$old_library
+		    if test "$linkmode,$pass" = "prog,link"; then
+		      compile_deplibs="$deplib $compile_deplibs"
+		      finalize_deplibs="$deplib $finalize_deplibs"
+		    else
+		      deplibs="$deplib $deplibs"
+		      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+		    fi
+		    continue
+		  fi
+		fi
+		;;
+	      *) ;;
+	      esac
+	    fi
+	  fi
+	  ;; # -l
+	*.ltframework)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    deplibs="$deplib $deplibs"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags+=" $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-L*)
+	  case $linkmode in
+	  lib)
+	    deplibs="$deplib $deplibs"
+	    test "$pass" = conv && continue
+	    newdependency_libs="$deplib $newdependency_libs"
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    newlib_search_path+=" $func_resolve_sysroot_result"
+	    ;;
+	  prog)
+	    if test "$pass" = conv; then
+	      deplibs="$deplib $deplibs"
+	      continue
+	    fi
+	    if test "$pass" = scan; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    func_stripname '-L' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    newlib_search_path+=" $func_resolve_sysroot_result"
+	    ;;
+	  *)
+	    func_warning "\`-L' is ignored for archives/objects"
+	    ;;
+	  esac # linkmode
+	  continue
+	  ;; # -L
+	-R*)
+	  if test "$pass" = link; then
+	    func_stripname '-R' '' "$deplib"
+	    func_resolve_sysroot "$func_stripname_result"
+	    dir=$func_resolve_sysroot_result
+	    # Make sure the xrpath contains only unique directories.
+	    case "$xrpath " in
+	    *" $dir "*) ;;
+	    *) xrpath+=" $dir" ;;
+	    esac
+	  fi
+	  deplibs="$deplib $deplibs"
+	  continue
+	  ;;
+	*.la)
+	  func_resolve_sysroot "$deplib"
+	  lib=$func_resolve_sysroot_result
+	  ;;
+	*.$libext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	    continue
+	  fi
+	  case $linkmode in
+	  lib)
+	    # Linking convenience modules into shared libraries is allowed,
+	    # but linking other static libraries is non-portable.
+	    case " $dlpreconveniencelibs " in
+	    *" $deplib "*) ;;
+	    *)
+	      valid_a_lib=no
+	      case $deplibs_check_method in
+		match_pattern*)
+		  set dummy $deplibs_check_method; shift
+		  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+		  if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+		    | $EGREP "$match_pattern_regex" > /dev/null; then
+		    valid_a_lib=yes
+		  fi
+		;;
+		pass_all)
+		  valid_a_lib=yes
+		;;
+	      esac
+	      if test "$valid_a_lib" != yes; then
+		echo
+		$ECHO "*** Warning: Trying to link with static lib archive $deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because the file extension .$libext of this argument makes me believe"
+		echo "*** that it is just a static archive that I should not use here."
+	      else
+		echo
+		$ECHO "*** Warning: Linking the shared library $output against the"
+		$ECHO "*** static library $deplib is not portable!"
+		deplibs="$deplib $deplibs"
+	      fi
+	      ;;
+	    esac
+	    continue
+	    ;;
+	  prog)
+	    if test "$pass" != link; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    continue
+	    ;;
+	  esac # linkmode
+	  ;; # *.$libext
+	*.lo | *.$objext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	  elif test "$linkmode" = prog; then
+	    if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+	      # If there is no dlopen support or we're linking statically,
+	      # we need to preload.
+	      newdlprefiles+=" $deplib"
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      newdlfiles+=" $deplib"
+	    fi
+	  fi
+	  continue
+	  ;;
+	%DEPLIBS%)
+	  alldeplibs=yes
+	  continue
+	  ;;
+	esac # case $deplib
+
+	if test "$found" = yes || test -f "$lib"; then :
+	else
+	  func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
+	fi
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$lib" \
+	  || func_fatal_error "\`$lib' is not a valid libtool archive"
+
+	func_dirname "$lib" "" "."
+	ladir="$func_dirname_result"
+
+	dlname=
+	dlopen=
+	dlpreopen=
+	libdir=
+	library_names=
+	old_library=
+	inherited_linker_flags=
+	# If the library was installed with an old release of libtool,
+	# it will not redefine variables installed, or shouldnotlink
+	# it will not redefine the variables `installed' or `shouldnotlink'
+	shouldnotlink=no
+	avoidtemprpath=
+
+
+	# Read the .la file
+	func_source "$lib"
+
+	# Convert "-framework foo" to "foo.ltframework"
+	if test -n "$inherited_linker_flags"; then
+	  tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+	  for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+	    case " $new_inherited_linker_flags " in
+	      *" $tmp_inherited_linker_flag "*) ;;
+	      *) new_inherited_linker_flags+=" $tmp_inherited_linker_flag";;
+	    esac
+	  done
+	fi
+	dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	if test "$linkmode,$pass" = "lib,link" ||
+	   test "$linkmode,$pass" = "prog,scan" ||
+	   { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+	  test -n "$dlopen" && dlfiles+=" $dlopen"
+	  test -n "$dlpreopen" && dlprefiles+=" $dlpreopen"
+	fi
+
+	if test "$pass" = conv; then
+	  # Only check for convenience libraries
+	  deplibs="$lib $deplibs"
+	  if test -z "$libdir"; then
+	    if test -z "$old_library"; then
+	      func_fatal_error "cannot find name of link library for \`$lib'"
+	    fi
+	    # It is a libtool convenience library, so add in its objects.
+	    convenience+=" $ladir/$objdir/$old_library"
+	    old_convenience+=" $ladir/$objdir/$old_library"
+	  elif test "$linkmode" != prog && test "$linkmode" != lib; then
+	    func_fatal_error "\`$lib' is not a convenience library"
+	  fi
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    deplibs="$deplib $deplibs"
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs+=" $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $deplib"
+	  done
+	  continue
+	fi # $pass = conv
+
+
+	# Get the name of the library we link against.
+	linklib=
+	if test -n "$old_library" &&
+	   { test "$prefer_static_libs" = yes ||
+	     test "$prefer_static_libs,$installed" = "built,no"; }; then
+	  linklib=$old_library
+	else
+	  for l in $old_library $library_names; do
+	    linklib="$l"
+	  done
+	fi
+	if test -z "$linklib"; then
+	  func_fatal_error "cannot find name of link library for \`$lib'"
+	fi
+
+	# This library was specified with -dlopen.
+	if test "$pass" = dlopen; then
+	  if test -z "$libdir"; then
+	    func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
+	  fi
+	  if test -z "$dlname" ||
+	     test "$dlopen_support" != yes ||
+	     test "$build_libtool_libs" = no; then
+	    # If there is no dlname, no dlopen support or we're linking
+	    # statically, we need to preload.  We also need to preload any
+	    # dependent libraries so libltdl's deplib preloader doesn't
+	    # bomb out in the load deplibs phase.
+	    dlprefiles+=" $lib $dependency_libs"
+	  else
+	    newdlfiles+=" $lib"
+	  fi
+	  continue
+	fi # $pass = dlopen
+
+	# We need an absolute path.
+	case $ladir in
+	[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
+	*)
+	  abs_ladir=`cd "$ladir" && pwd`
+	  if test -z "$abs_ladir"; then
+	    func_warning "cannot determine absolute directory name of \`$ladir'"
+	    func_warning "passing it literally to the linker, although it might fail"
+	    abs_ladir="$ladir"
+	  fi
+	  ;;
+	esac
+	func_basename "$lib"
+	laname="$func_basename_result"
+
+	# Find the relevant object directory and library name.
+	if test "X$installed" = Xyes; then
+	  if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    func_warning "library \`$lib' was moved."
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    libdir="$abs_ladir"
+	  else
+	    dir="$lt_sysroot$libdir"
+	    absdir="$lt_sysroot$libdir"
+	  fi
+	  test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+	else
+	  if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    # Remove this search path later
+	    notinst_path+=" $abs_ladir"
+	  else
+	    dir="$ladir/$objdir"
+	    absdir="$abs_ladir/$objdir"
+	    # Remove this search path later
+	    notinst_path+=" $abs_ladir"
+	  fi
+	fi # $installed = yes
+	func_stripname 'lib' '.la' "$laname"
+	name=$func_stripname_result
+
+	# This library was specified with -dlpreopen.
+	if test "$pass" = dlpreopen; then
+	  if test -z "$libdir" && test "$linkmode" = prog; then
+	    func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+	  fi
+	  case "$host" in
+	    # special handling for platforms with PE-DLLs.
+	    *cygwin* | *mingw* | *cegcc* )
+	      # Linker will automatically link against shared library if both
+	      # static and shared are present.  Therefore, ensure we extract
+	      # symbols from the import library if a shared library is present
+	      # (otherwise, the dlopen module name will be incorrect).  We do
+	      # this by putting the import library name into $newdlprefiles.
+	      # We recover the dlopen module name by 'saving' the la file
+	      # name in a special purpose variable, and (later) extracting the
+	      # dlname from the la file.
+	      if test -n "$dlname"; then
+	        func_tr_sh "$dir/$linklib"
+	        eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
+	        newdlprefiles+=" $dir/$linklib"
+	      else
+	        newdlprefiles+=" $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          dlpreconveniencelibs+=" $dir/$old_library"
+	      fi
+	    ;;
+	    * )
+	      # Prefer using a static library (so that no silly _DYNAMIC symbols
+	      # are required to link).
+	      if test -n "$old_library"; then
+	        newdlprefiles+=" $dir/$old_library"
+	        # Keep a list of preopened convenience libraries to check
+	        # that they are being used correctly in the link pass.
+	        test -z "$libdir" && \
+	          dlpreconveniencelibs+=" $dir/$old_library"
+	      # Otherwise, use the dlname, so that lt_dlopen finds it.
+	      elif test -n "$dlname"; then
+	        newdlprefiles+=" $dir/$dlname"
+	      else
+	        newdlprefiles+=" $dir/$linklib"
+	      fi
+	    ;;
+	  esac
+	fi # $pass = dlpreopen
+
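On PE hosts (cygwin/mingw/cegcc) the branch above remembers, for each preopened import library, which `.la' file it came from by storing the path in a dynamically named libfile_* variable; the dlname is recovered from that `.la' file later. A small sketch of this bookkeeping idiom, with a simplified stand-in for func_tr_sh and hypothetical paths:

    # Stand-in for func_tr_sh: reduce a path to a shell-safe variable suffix.
    tr_sh () { echo "$1" | sed 's/[^A-Za-z0-9_]/_/g'; }

    implib=".libs/libfoo.dll.a"        # hypothetical import library
    lafile="/src/foo/libfoo.la"        # hypothetical libtool archive
    key=`tr_sh "$implib"`
    eval "libfile_$key=\$lafile"       # record where the import library came from
    eval "echo \${libfile_$key}"       # -> /src/foo/libfoo.la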
+	if test -z "$libdir"; then
+	  # Link the convenience library
+	  if test "$linkmode" = lib; then
+	    deplibs="$dir/$old_library $deplibs"
+	  elif test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$dir/$old_library $compile_deplibs"
+	    finalize_deplibs="$dir/$old_library $finalize_deplibs"
+	  else
+	    deplibs="$lib $deplibs" # used for prog,scan pass
+	  fi
+	  continue
+	fi
+
+
+	if test "$linkmode" = prog && test "$pass" != link; then
+	  newlib_search_path+=" $ladir"
+	  deplibs="$lib $deplibs"
+
+	  linkalldeplibs=no
+	  if test "$link_all_deplibs" != no || test -z "$library_names" ||
+	     test "$build_libtool_libs" = no; then
+	    linkalldeplibs=yes
+	  fi
+
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    case $deplib in
+	    -L*) func_stripname '-L' '' "$deplib"
+	         func_resolve_sysroot "$func_stripname_result"
+	         newlib_search_path+=" $func_resolve_sysroot_result"
+		 ;;
+	    esac
+	    # Need to link against all dependency_libs?
+	    if test "$linkalldeplibs" = yes; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      # Need to hardcode shared library paths
+	      # or/and link against static libraries
+	      newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs+=" $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $deplib"
+	  done # for deplib
+	  continue
+	fi # $linkmode = prog...
+
+	if test "$linkmode,$pass" = "prog,link"; then
+	  if test -n "$library_names" &&
+	     { { test "$prefer_static_libs" = no ||
+	         test "$prefer_static_libs,$installed" = "built,yes"; } ||
+	       test -z "$old_library"; }; then
+	    # We need to hardcode the library path
+	    if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
+	      # Make sure the rpath contains only unique directories.
+	      case "$temp_rpath:" in
+	      *"$absdir:"*) ;;
+	      *) temp_rpath+="$absdir:" ;;
+	      esac
+	    fi
+
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath+=" $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath+=" $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi # $linkmode,$pass = prog,link...
+
+	  if test "$alldeplibs" = yes &&
+	     { test "$deplibs_check_method" = pass_all ||
+	       { test "$build_libtool_libs" = yes &&
+		 test -n "$library_names"; }; }; then
+	    # We only need to search for static libraries
+	    continue
+	  fi
+	fi
+
+	link_static=no # Whether the deplib will be linked statically
+	use_static_libs=$prefer_static_libs
+	if test "$use_static_libs" = built && test "$installed" = yes; then
+	  use_static_libs=no
+	fi
+	if test -n "$library_names" &&
+	   { test "$use_static_libs" = no || test -z "$old_library"; }; then
+	  case $host in
+	  *cygwin* | *mingw* | *cegcc*)
+	      # No point in relinking DLLs because paths are not encoded
+	      notinst_deplibs+=" $lib"
+	      need_relink=no
+	    ;;
+	  *)
+	    if test "$installed" = no; then
+	      notinst_deplibs+=" $lib"
+	      need_relink=yes
+	    fi
+	    ;;
+	  esac
+	  # This is a shared library
+
+	  # Warn about portability, can't link against -module's on some
+	  # systems (darwin).  Don't bleat about dlopened modules though!
+	  dlopenmodule=""
+	  for dlpremoduletest in $dlprefiles; do
+	    if test "X$dlpremoduletest" = "X$lib"; then
+	      dlopenmodule="$dlpremoduletest"
+	      break
+	    fi
+	  done
+	  if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
+	    echo
+	    if test "$linkmode" = prog; then
+	      $ECHO "*** Warning: Linking the executable $output against the loadable module"
+	    else
+	      $ECHO "*** Warning: Linking the shared library $output against the loadable module"
+	    fi
+	    $ECHO "*** $linklib is not portable!"
+	  fi
+	  if test "$linkmode" = lib &&
+	     test "$hardcode_into_libs" = yes; then
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath+=" $absdir" ;;
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath+=" $libdir" ;;
+	      esac
+	      ;;
+	    esac
+	  fi
+
+	  if test -n "$old_archive_from_expsyms_cmds"; then
+	    # figure out the soname
+	    set dummy $library_names
+	    shift
+	    realname="$1"
+	    shift
+	    libname=`eval "\\$ECHO \"$libname_spec\""`
+	    # use dlname if we got it. it's perfectly good, no?
+	    if test -n "$dlname"; then
+	      soname="$dlname"
+	    elif test -n "$soname_spec"; then
+	      # bleh windows
+	      case $host in
+	      *cygwin* | mingw* | *cegcc*)
+	        func_arith $current - $age
+		major=$func_arith_result
+		versuffix="-$major"
+		;;
+	      esac
+	      eval soname=\"$soname_spec\"
+	    else
+	      soname="$realname"
+	    fi
+
+	    # Make a new name for the extract_expsyms_cmds to use
+	    soroot="$soname"
+	    func_basename "$soroot"
+	    soname="$func_basename_result"
+	    func_stripname 'lib' '.dll' "$soname"
+	    newlib=libimp-$func_stripname_result.a
+
+	    # If the library has no export list, then create one now
+	    if test -f "$output_objdir/$soname-def"; then :
+	    else
+	      func_verbose "extracting exported symbol list from \`$soname'"
+	      func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+	    fi
+
+	    # Create $newlib
+	    if test -f "$output_objdir/$newlib"; then :; else
+	      func_verbose "generating import library for \`$soname'"
+	      func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+	    fi
+	    # make sure the library variables are pointing to the new library
+	    dir=$output_objdir
+	    linklib=$newlib
+	  fi # test -n "$old_archive_from_expsyms_cmds"
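The block above derives the name of the generated import archive from the DLL's soname by stripping the `lib' prefix and `.dll' suffix (what func_stripname does here). A sketch of the same derivation with plain sed, on a hypothetical soname:

    soname="libfoo-1.dll"                                   # hypothetical
    stem=`echo "$soname" | sed -e 's/^lib//' -e 's/\.dll$//'`
    newlib="libimp-$stem.a"
    echo "$newlib"                                          # libimp-foo-1.a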
+
+	  if test "$linkmode" = prog || test "$opt_mode" != relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    lib_linked=yes
+	    case $hardcode_action in
+	    immediate | unsupported)
+	      if test "$hardcode_direct" = no; then
+		add="$dir/$linklib"
+		case $host in
+		  *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
+		  *-*-sysv4*uw2*) add_dir="-L$dir" ;;
+		  *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+		    *-*-unixware7*) add_dir="-L$dir" ;;
+		  *-*-darwin* )
+		    # If the lib is a (non-dlopened) module then we cannot
+		    # link against it; someone is ignoring the earlier warnings.
+		    if /usr/bin/file -L $add 2> /dev/null |
+			 $GREP ": [^:]* bundle" >/dev/null ; then
+		      if test "X$dlopenmodule" != "X$lib"; then
+			$ECHO "*** Warning: lib $linklib is a module, not a shared library"
+			if test -z "$old_library" ; then
+			  echo
+			  echo "*** And there doesn't seem to be a static archive available"
+			  echo "*** The link will probably fail, sorry"
+			else
+			  add="$dir/$old_library"
+			fi
+		      elif test -n "$old_library"; then
+			add="$dir/$old_library"
+		      fi
+		    fi
+		esac
+	      elif test "$hardcode_minus_L" = no; then
+		case $host in
+		*-*-sunos*) add_shlibpath="$dir" ;;
+		esac
+		add_dir="-L$dir"
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = no; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    relink)
+	      if test "$hardcode_direct" = yes &&
+	         test "$hardcode_direct_absolute" = no; then
+		add="$dir/$linklib"
+	      elif test "$hardcode_minus_L" = yes; then
+		add_dir="-L$absdir"
+		# Try looking first in the location we're being installed to.
+		if test -n "$inst_prefix_dir"; then
+		  case $libdir in
+		    [\\/]*)
+		      add_dir+=" -L$inst_prefix_dir$libdir"
+		      ;;
+		  esac
+		fi
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = yes; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    *) lib_linked=no ;;
+	    esac
+
+	    if test "$lib_linked" != yes; then
+	      func_fatal_configuration "unsupported hardcode properties"
+	    fi
+
+	    if test -n "$add_shlibpath"; then
+	      case :$compile_shlibpath: in
+	      *":$add_shlibpath:"*) ;;
+	      *) compile_shlibpath+="$add_shlibpath:" ;;
+	      esac
+	    fi
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+	      test -n "$add" && compile_deplibs="$add $compile_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	      if test "$hardcode_direct" != yes &&
+		 test "$hardcode_minus_L" != yes &&
+		 test "$hardcode_shlibpath_var" = yes; then
+		case :$finalize_shlibpath: in
+		*":$libdir:"*) ;;
+		*) finalize_shlibpath+="$libdir:" ;;
+		esac
+	      fi
+	    fi
+	  fi
+
+	  if test "$linkmode" = prog || test "$opt_mode" = relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    # Finalize command for both is simple: just hardcode it.
+	    if test "$hardcode_direct" = yes &&
+	       test "$hardcode_direct_absolute" = no; then
+	      add="$libdir/$linklib"
+	    elif test "$hardcode_minus_L" = yes; then
+	      add_dir="-L$libdir"
+	      add="-l$name"
+	    elif test "$hardcode_shlibpath_var" = yes; then
+	      case :$finalize_shlibpath: in
+	      *":$libdir:"*) ;;
+	      *) finalize_shlibpath+="$libdir:" ;;
+	      esac
+	      add="-l$name"
+	    elif test "$hardcode_automatic" = yes; then
+	      if test -n "$inst_prefix_dir" &&
+		 test -f "$inst_prefix_dir$libdir/$linklib" ; then
+		add="$inst_prefix_dir$libdir/$linklib"
+	      else
+		add="$libdir/$linklib"
+	      fi
+	    else
+	      # We cannot seem to hardcode it, guess we'll fake it.
+	      add_dir="-L$libdir"
+	      # Try looking first in the location we're being installed to.
+	      if test -n "$inst_prefix_dir"; then
+		case $libdir in
+		  [\\/]*)
+		    add_dir+=" -L$inst_prefix_dir$libdir"
+		    ;;
+		esac
+	      fi
+	      add="-l$name"
+	    fi
+
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+	      test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	    fi
+	  fi
+	elif test "$linkmode" = prog; then
+	  # Here we assume that one of hardcode_direct or hardcode_minus_L
+	  # is not unsupported.  This is valid on all known static and
+	  # shared platforms.
+	  if test "$hardcode_direct" != unsupported; then
+	    test -n "$old_library" && linklib="$old_library"
+	    compile_deplibs="$dir/$linklib $compile_deplibs"
+	    finalize_deplibs="$dir/$linklib $finalize_deplibs"
+	  else
+	    compile_deplibs="-l$name -L$dir $compile_deplibs"
+	    finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+	  fi
+	elif test "$build_libtool_libs" = yes; then
+	  # Not a shared library
+	  if test "$deplibs_check_method" != pass_all; then
+	    # We're trying to link a shared library against a static one
+	    # but the system doesn't support it.
+
+	    # Just print a warning and add the library to dependency_libs so
+	    # that the program can be linked against the static library.
+	    echo
+	    $ECHO "*** Warning: This system can not link to static lib archive $lib."
+	    echo "*** I have the capability to make that library automatically link in when"
+	    echo "*** you link to this library.  But I can only do this if you have a"
+	    echo "*** shared version of the library, which you do not appear to have."
+	    if test "$module" = yes; then
+	      echo "*** But as you try to build a module library, libtool will still create "
+	      echo "*** a static module, that should work as long as the dlopening application"
+	      echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
+	      if test -z "$global_symbol_pipe"; then
+		echo
+		echo "*** However, this would only work if libtool was able to extract symbol"
+		echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+		echo "*** not find such a program.  So, this module is probably useless."
+		echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	      fi
+	      if test "$build_old_libs" = no; then
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  else
+	    deplibs="$dir/$old_library $deplibs"
+	    link_static=yes
+	  fi
+	fi # link shared/static library?
+
+	if test "$linkmode" = lib; then
+	  if test -n "$dependency_libs" &&
+	     { test "$hardcode_into_libs" != yes ||
+	       test "$build_old_libs" = yes ||
+	       test "$link_static" = yes; }; then
+	    # Extract -R from dependency_libs
+	    temp_deplibs=
+	    for libdir in $dependency_libs; do
+	      case $libdir in
+	      -R*) func_stripname '-R' '' "$libdir"
+	           temp_xrpath=$func_stripname_result
+		   case " $xrpath " in
+		   *" $temp_xrpath "*) ;;
+		   *) xrpath+=" $temp_xrpath";;
+		   esac;;
+	      *) temp_deplibs+=" $libdir";;
+	      esac
+	    done
+	    dependency_libs="$temp_deplibs"
+	  fi
+
+	  newlib_search_path+=" $absdir"
+	  # Link against this library
+	  test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+	  # ... and its dependency_libs
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    newdependency_libs="$deplib $newdependency_libs"
+	    case $deplib in
+              -L*) func_stripname '-L' '' "$deplib"
+                   func_resolve_sysroot "$func_stripname_result";;
+              *) func_resolve_sysroot "$deplib" ;;
+            esac
+	    if $opt_preserve_dup_deps ; then
+	      case "$tmp_libs " in
+	      *" $func_resolve_sysroot_result "*)
+                specialdeplibs+=" $func_resolve_sysroot_result" ;;
+	      esac
+	    fi
+	    tmp_libs+=" $func_resolve_sysroot_result"
+	  done
+
+	  if test "$link_all_deplibs" != no; then
+	    # Add the search paths of all dependency libraries
+	    for deplib in $dependency_libs; do
+	      path=
+	      case $deplib in
+	      -L*) path="$deplib" ;;
+	      *.la)
+	        func_resolve_sysroot "$deplib"
+	        deplib=$func_resolve_sysroot_result
+	        func_dirname "$deplib" "" "."
+		dir=$func_dirname_result
+		# We need an absolute path.
+		case $dir in
+		[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+		*)
+		  absdir=`cd "$dir" && pwd`
+		  if test -z "$absdir"; then
+		    func_warning "cannot determine absolute directory name of \`$dir'"
+		    absdir="$dir"
+		  fi
+		  ;;
+		esac
+		if $GREP "^installed=no" $deplib > /dev/null; then
+		case $host in
+		*-*-darwin*)
+		  depdepl=
+		  eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+		  if test -n "$deplibrary_names" ; then
+		    for tmp in $deplibrary_names ; do
+		      depdepl=$tmp
+		    done
+		    if test -f "$absdir/$objdir/$depdepl" ; then
+		      depdepl="$absdir/$objdir/$depdepl"
+		      darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+                      if test -z "$darwin_install_name"; then
+                          darwin_install_name=`${OTOOL64} -L $depdepl  | awk '{if (NR == 2) {print $1;exit}}'`
+                      fi
+		      compiler_flags+=" ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+		      linker_flags+=" -dylib_file ${darwin_install_name}:${depdepl}"
+		      path=
+		    fi
+		  fi
+		  ;;
+		*)
+		  path="-L$absdir/$objdir"
+		  ;;
+		esac
+		else
+		  eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+		  test -z "$libdir" && \
+		    func_fatal_error "\`$deplib' is not a valid libtool archive"
+		  test "$absdir" != "$libdir" && \
+		    func_warning "\`$deplib' seems to be moved"
+
+		  path="-L$absdir"
+		fi
+		;;
+	      esac
+	      case " $deplibs " in
+	      *" $path "*) ;;
+	      *) deplibs="$path $deplibs" ;;
+	      esac
+	    done
+	  fi # link_all_deplibs != no
+	fi # linkmode = lib
+      done # for deplib in $libs
+      if test "$pass" = link; then
+	if test "$linkmode" = "prog"; then
+	  compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+	  finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+	else
+	  compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	fi
+      fi
+      dependency_libs="$newdependency_libs"
+      if test "$pass" = dlpreopen; then
+	# Link the dlpreopened libraries before other libraries
+	for deplib in $save_deplibs; do
+	  deplibs="$deplib $deplibs"
+	done
+      fi
+      if test "$pass" != dlopen; then
+	if test "$pass" != conv; then
+	  # Make sure lib_search_path contains only unique directories.
+	  lib_search_path=
+	  for dir in $newlib_search_path; do
+	    case "$lib_search_path " in
+	    *" $dir "*) ;;
+	    *) lib_search_path+=" $dir" ;;
+	    esac
+	  done
+	  newlib_search_path=
+	fi
+
+	if test "$linkmode,$pass" != "prog,link"; then
+	  vars="deplibs"
+	else
+	  vars="compile_deplibs finalize_deplibs"
+	fi
+	for var in $vars dependency_libs; do
+	  # Add libraries to $var in reverse order
+	  eval tmp_libs=\"\$$var\"
+	  new_libs=
+	  for deplib in $tmp_libs; do
+	    # FIXME: Pedantically, this is the right thing to do, so
+	    #        that some nasty dependency loop isn't accidentally
+	    #        broken:
+	    #new_libs="$deplib $new_libs"
+	    # Pragmatically, this seems to cause very few problems in
+	    # practice:
+	    case $deplib in
+	    -L*) new_libs="$deplib $new_libs" ;;
+	    -R*) ;;
+	    *)
+	      # And here is the reason: when a library appears more
+	      # than once as an explicit dependence of a library, or
+	      # is implicitly linked in more than once by the
+	      # compiler, it is considered special, and multiple
+	      # occurrences thereof are not removed.  Compare this
+	      # with having the same library being listed as a
+	      # dependency of multiple other libraries: in this case,
+	      # we know (pedantically, we assume) the library does not
+	      # need to be listed more than once, so we keep only the
+	      # last copy.  This is not always right, but it is rare
+	      # enough that we require users that really mean to play
+	      # such unportable linking tricks to link the library
+	      # using -Wl,-lname, so that libtool does not consider it
+	      # for duplicate removal.
+	      case " $specialdeplibs " in
+	      *" $deplib "*) new_libs="$deplib $new_libs" ;;
+	      *)
+		case " $new_libs " in
+		*" $deplib "*) ;;
+		*) new_libs="$deplib $new_libs" ;;
+		esac
+		;;
+	      esac
+	      ;;
+	    esac
+	  done
+	  tmp_libs=
+	  for deplib in $new_libs; do
+	    case $deplib in
+	    -L*)
+	      case " $tmp_libs " in
+	      *" $deplib "*) ;;
+	      *) tmp_libs+=" $deplib" ;;
+	      esac
+	      ;;
+	    *) tmp_libs+=" $deplib" ;;
+	    esac
+	  done
+	  eval $var=\"$tmp_libs\"
+	done # for var
+      fi
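The loop above rebuilds each list by prepending, so the result comes out in reverse order, drops repeated entries so only the last occurrence of an ordinary library survives, and leaves anything recorded in $specialdeplibs duplicated on purpose. A standalone sketch of that policy on a hypothetical link line:

    deps="-la -lb -la -lc -lb"        # hypothetical left-to-right link order
    special=" -lb "                   # duplicates of these must be preserved
    new=
    for d in $deps; do
      case "$special" in
      *" $d "*) new="$d $new" ;;      # special: always keep
      *)
        case " $new " in
        *" $d "*) ;;                  # already queued: keep only the last copy
        *) new="$d $new" ;;
        esac ;;
      esac
    done
    echo "$new"                       # -lb -lc -lb -la  (reversed, deduplicated)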
+      # Last step: remove runtime libs from dependency_libs
+      # (they stay in deplibs)
+      tmp_libs=
+      for i in $dependency_libs ; do
+	case " $predeps $postdeps $compiler_lib_search_path " in
+	*" $i "*)
+	  i=""
+	  ;;
+	esac
+	if test -n "$i" ; then
+	  tmp_libs+=" $i"
+	fi
+      done
+      dependency_libs=$tmp_libs
+    done # for pass
+    if test "$linkmode" = prog; then
+      dlfiles="$newdlfiles"
+    fi
+    if test "$linkmode" = prog || test "$linkmode" = lib; then
+      dlprefiles="$newdlprefiles"
+    fi
+
+    case $linkmode in
+    oldlib)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for archives"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for archives" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for archives"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for archives"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info/-version-number' is ignored for archives"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for archives"
+
+      test -n "$export_symbols$export_symbols_regex" && \
+	func_warning "\`-export-symbols' is ignored for archives"
+
+      # Now set the variables for building old libraries.
+      build_libtool_libs=no
+      oldlibs="$output"
+      objs+="$old_deplibs"
+      ;;
+
+    lib)
+      # Make sure we only generate libraries of the form `libNAME.la'.
+      case $outputname in
+      lib*)
+	func_stripname 'lib' '.la' "$outputname"
+	name=$func_stripname_result
+	eval shared_ext=\"$shrext_cmds\"
+	eval libname=\"$libname_spec\"
+	;;
+      *)
+	test "$module" = no && \
+	  func_fatal_help "libtool library \`$output' must begin with \`lib'"
+
+	if test "$need_lib_prefix" != no; then
+	  # Add the "lib" prefix for modules if required
+	  func_stripname '' '.la' "$outputname"
+	  name=$func_stripname_result
+	  eval shared_ext=\"$shrext_cmds\"
+	  eval libname=\"$libname_spec\"
+	else
+	  func_stripname '' '.la' "$outputname"
+	  libname=$func_stripname_result
+	fi
+	;;
+      esac
+
+      if test -n "$objs"; then
+	if test "$deplibs_check_method" != pass_all; then
+	  func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
+	else
+	  echo
+	  $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+	  $ECHO "*** objects $objs is not portable!"
+	  libobjs+=" $objs"
+	fi
+      fi
+
+      test "$dlself" != no && \
+	func_warning "\`-dlopen self' is ignored for libtool libraries"
+
+      set dummy $rpath
+      shift
+      test "$#" -gt 1 && \
+	func_warning "ignoring multiple \`-rpath's for a libtool library"
+
+      install_libdir="$1"
+
+      oldlibs=
+      if test -z "$rpath"; then
+	if test "$build_libtool_libs" = yes; then
+	  # Building a libtool convenience library.
+	  # Some compilers have problems with a `.al' extension so
+	  # convenience libraries should have the same extension that an
+	  # archive normally would.
+	  oldlibs="$output_objdir/$libname.$libext $oldlibs"
+	  build_libtool_libs=convenience
+	  build_old_libs=yes
+	fi
+
+	test -n "$vinfo" && \
+	  func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
+
+	test -n "$release" && \
+	  func_warning "\`-release' is ignored for convenience libraries"
+      else
+
+	# Parse the version information argument.
+	save_ifs="$IFS"; IFS=':'
+	set dummy $vinfo 0 0 0
+	shift
+	IFS="$save_ifs"
+
+	test -n "$7" && \
+	  func_fatal_help "too many parameters to \`-version-info'"
+
+	# convert absolute version numbers to libtool ages
+	# this retains compatibility with .la files and attempts
+	# to make the code below a bit more comprehensible
+
+	case $vinfo_number in
+	yes)
+	  number_major="$1"
+	  number_minor="$2"
+	  number_revision="$3"
+	  #
+	  # There are really only two kinds -- those that
+	  # use the current revision as the major version
+	  # and those that subtract age and use age as
+	  # a minor version.  But, then there is irix
+	  # which has an extra 1 added just for fun
+	  #
+	  case $version_type in
+	  # correct linux to gnu/linux during the next big refactor
+	  darwin|linux|osf|windows|none)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_revision"
+	    ;;
+	  freebsd-aout|freebsd-elf|qnx|sunos)
+	    current="$number_major"
+	    revision="$number_minor"
+	    age="0"
+	    ;;
+	  irix|nonstopux)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_minor"
+	    lt_irix_increment=no
+	    ;;
+	  esac
+	  ;;
+	no)
+	  current="$1"
+	  revision="$2"
+	  age="$3"
+	  ;;
+	esac
+
+	# Check that each of the things are valid numbers.
+	case $current in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "CURRENT \`$current' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $revision in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "REVISION \`$revision' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $age in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "AGE \`$age' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	if test "$age" -gt "$current"; then
+	  func_error "AGE \`$age' is greater than the current interface number \`$current'"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	fi
+
+	# Calculate the version variables.
+	major=
+	versuffix=
+	verstring=
+	case $version_type in
+	none) ;;
+
+	darwin)
+	  # Like Linux, but with the current version available in
+	  # verstring for coding it into the library header
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  # Darwin ld doesn't like 0 for these options...
+	  func_arith $current + 1
+	  minor_current=$func_arith_result
+	  xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
+	  verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+	  ;;
+
+	freebsd-aout)
+	  major=".$current"
+	  versuffix=".$current.$revision";
+	  ;;
+
+	freebsd-elf)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	irix | nonstopux)
+	  if test "X$lt_irix_increment" = "Xno"; then
+	    func_arith $current - $age
+	  else
+	    func_arith $current - $age + 1
+	  fi
+	  major=$func_arith_result
+
+	  case $version_type in
+	    nonstopux) verstring_prefix=nonstopux ;;
+	    *)         verstring_prefix=sgi ;;
+	  esac
+	  verstring="$verstring_prefix$major.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$revision
+	  while test "$loop" -ne 0; do
+	    func_arith $revision - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring_prefix$major.$iface:$verstring"
+	  done
+
+	  # Before this point, $major must not contain `.'.
+	  major=.$major
+	  versuffix="$major.$revision"
+	  ;;
+
+	linux) # correct to gnu/linux during the next big refactor
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  ;;
+
+	osf)
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix=".$current.$age.$revision"
+	  verstring="$current.$age.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$age
+	  while test "$loop" -ne 0; do
+	    func_arith $current - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring:${iface}.0"
+	  done
+
+	  # Make executables depend on our current version.
+	  verstring+=":${current}.0"
+	  ;;
+
+	qnx)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	sunos)
+	  major=".$current"
+	  versuffix=".$current.$revision"
+	  ;;
+
+	windows)
+	  # Use '-' rather than '.', since we only want one
+	  # extension on DOS 8.3 filesystems.
+	  func_arith $current - $age
+	  major=$func_arith_result
+	  versuffix="-$major"
+	  ;;
+
+	*)
+	  func_fatal_configuration "unknown library version type \`$version_type'"
+	  ;;
+	esac
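To make the CURRENT:REVISION:AGE arithmetic concrete, here is a worked example for the `linux' branch above, assuming a hypothetical libfoo built with `-version-info 3:2:1' (so current=3, revision=2, age=1) and the usual GNU/Linux library_names/soname specs:

    current=3 revision=2 age=1
    major=.`expr $current - $age`        # .2
    versuffix="$major.$age.$revision"    # .2.1.2
    echo "libfoo.so$versuffix"           # libfoo.so.2.1.2  (real file)
    echo "libfoo.so$major"               # libfoo.so.2      (soname)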
+
+	# Clear the version info if we defaulted, and they specified a release.
+	if test -z "$vinfo" && test -n "$release"; then
+	  major=
+	  case $version_type in
+	  darwin)
+	    # we can't check for "0.0" in archive_cmds due to quoting
+	    # problems, so we reset it completely
+	    verstring=
+	    ;;
+	  *)
+	    verstring="0.0"
+	    ;;
+	  esac
+	  if test "$need_version" = no; then
+	    versuffix=
+	  else
+	    versuffix=".0.0"
+	  fi
+	fi
+
+	# Remove version info from name if versioning should be avoided
+	if test "$avoid_version" = yes && test "$need_version" = no; then
+	  major=
+	  versuffix=
+	  verstring=""
+	fi
+
+	# Check to see if the archive will have undefined symbols.
+	if test "$allow_undefined" = yes; then
+	  if test "$allow_undefined_flag" = unsupported; then
+	    func_warning "undefined symbols not allowed in $host shared libraries"
+	    build_libtool_libs=no
+	    build_old_libs=yes
+	  fi
+	else
+	  # Don't allow undefined symbols.
+	  allow_undefined_flag="$no_undefined_flag"
+	fi
+
+      fi
+
+      func_generate_dlsyms "$libname" "$libname" "yes"
+      libobjs+=" $symfileobj"
+      test "X$libobjs" = "X " && libobjs=
+
+      if test "$opt_mode" != relink; then
+	# Remove our outputs, but don't remove object files since they
+	# may have been created when compiling PIC objects.
+	removelist=
+	tempremovelist=`$ECHO "$output_objdir/*"`
+	for p in $tempremovelist; do
+	  case $p in
+	    *.$objext | *.gcno)
+	       ;;
+	    $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
+	       if test "X$precious_files_regex" != "X"; then
+		 if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+		 then
+		   continue
+		 fi
+	       fi
+	       removelist+=" $p"
+	       ;;
+	    *) ;;
+	  esac
+	done
+	test -n "$removelist" && \
+	  func_show_eval "${RM}r \$removelist"
+      fi
+
+      # Now set the variables for building old libraries.
+      if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+	oldlibs+=" $output_objdir/$libname.$libext"
+
+	# Transform .lo files to .o files.
+	oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+      fi
+
+      # Eliminate all temporary directories.
+      #for path in $notinst_path; do
+      #	lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+      #	deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+      #	dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+      #done
+
+      if test -n "$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	temp_xrpath=
+	for libdir in $xrpath; do
+	  func_replace_sysroot "$libdir"
+	  temp_xrpath+=" -R$func_replace_sysroot_result"
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath+=" $libdir" ;;
+	  esac
+	done
+	if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+	  dependency_libs="$temp_xrpath $dependency_libs"
+	fi
+      fi
+
+      # Make sure dlfiles contains only unique files that won't be dlpreopened
+      old_dlfiles="$dlfiles"
+      dlfiles=
+      for lib in $old_dlfiles; do
+	case " $dlprefiles $dlfiles " in
+	*" $lib "*) ;;
+	*) dlfiles+=" $lib" ;;
+	esac
+      done
+
+      # Make sure dlprefiles contains only unique files
+      old_dlprefiles="$dlprefiles"
+      dlprefiles=
+      for lib in $old_dlprefiles; do
+	case "$dlprefiles " in
+	*" $lib "*) ;;
+	*) dlprefiles+=" $lib" ;;
+	esac
+      done
+
+      if test "$build_libtool_libs" = yes; then
+	if test -n "$rpath"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # these systems don't actually have a c library (as such)!
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C library is in the System framework
+	    deplibs+=" System.ltframework"
+	    ;;
+	  *-*-netbsd*)
+	    # Don't link with libc until the a.out ld.so is fixed.
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    ;;
+	  *)
+	    # Add libc to deplibs on all other systems if necessary.
+	    if test "$build_libtool_need_lc" = "yes"; then
+	      deplibs+=" -lc"
+	    fi
+	    ;;
+	  esac
+	fi
+
+	# Transform deplibs into only deplibs that can be linked in shared.
+	name_save=$name
+	libname_save=$libname
+	release_save=$release
+	versuffix_save=$versuffix
+	major_save=$major
+	# I'm not sure if I'm treating the release correctly.  I think
+	# release should show up in the -l (ie -lgmp5) so we don't want to
+	# add it in twice.  Is that correct?
+	release=""
+	versuffix=""
+	major=""
+	newdeplibs=
+	droppeddeps=no
+	case $deplibs_check_method in
+	pass_all)
+	  # Don't check for shared/static.  Everything works.
+	  # This might be a little naive.  We might want to check
+	  # whether the library exists or not.  But this is on
+	  # osf3 & osf4 and I'm not really sure... Just
+	  # implementing what was already the behavior.
+	  newdeplibs=$deplibs
+	  ;;
+	test_compile)
+	  # This code stresses the "libraries are programs" paradigm to its
+	  # limits. Maybe even breaks it.  We compile a program, linking it
+	  # against the deplibs as a proxy for the library.  Then we can check
+	  # whether they linked in statically or dynamically with ldd.
+	  $opt_dry_run || $RM conftest.c
+	  cat > conftest.c <<EOF
+	  int main() { return 0; }
+EOF
+	  $opt_dry_run || $RM conftest
+	  if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
+	    ldd_output=`ldd conftest`
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		  case " $predeps $postdeps " in
+		  *" $i "*)
+		    newdeplibs+=" $i"
+		    i=""
+		    ;;
+		  esac
+		fi
+		if test -n "$i" ; then
+		  libname=`eval "\\$ECHO \"$libname_spec\""`
+		  deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		  set dummy $deplib_matches; shift
+		  deplib_match=$1
+		  if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		    newdeplibs+=" $i"
+		  else
+		    droppeddeps=yes
+		    echo
+		    $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		    echo "*** I have the capability to make that library automatically link in when"
+		    echo "*** you link to this library.  But I can only do this if you have a"
+		    echo "*** shared version of the library, which I believe you do not have"
+		    echo "*** because a test_compile did reveal that the linker did not use it for"
+		    echo "*** its dynamic dependency list that programs get resolved with at runtime."
+		  fi
+		fi
+		;;
+	      *)
+		newdeplibs+=" $i"
+		;;
+	      esac
+	    done
+	  else
+	    # Error occurred in the first compile.  Let's try to salvage
+	    # the situation: Compile a separate program for each library.
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		$opt_dry_run || $RM conftest
+		if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
+		  ldd_output=`ldd conftest`
+		  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		    case " $predeps $postdeps " in
+		    *" $i "*)
+		      newdeplibs+=" $i"
+		      i=""
+		      ;;
+		    esac
+		  fi
+		  if test -n "$i" ; then
+		    libname=`eval "\\$ECHO \"$libname_spec\""`
+		    deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		    set dummy $deplib_matches; shift
+		    deplib_match=$1
+		    if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		      newdeplibs+=" $i"
+		    else
+		      droppeddeps=yes
+		      echo
+		      $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		      echo "*** I have the capability to make that library automatically link in when"
+		      echo "*** you link to this library.  But I can only do this if you have a"
+		      echo "*** shared version of the library, which you do not appear to have"
+		      echo "*** because a test_compile did reveal that the linker did not use this one"
+		      echo "*** as a dynamic dependency that programs can get resolved with at runtime."
+		    fi
+		  fi
+		else
+		  droppeddeps=yes
+		  echo
+		  $ECHO "*** Warning!  Library $i is needed by this library but I was not able to"
+		  echo "*** make it link in!  You will probably need to install it or some"
+		  echo "*** library that it depends on before this library will be fully"
+		  echo "*** functional.  Installing it before continuing would be even better."
+		fi
+		;;
+	      *)
+		newdeplibs+=" $i"
+		;;
+	      esac
+	    done
+	  fi
+	  ;;
+	file_magic*)
+	  set dummy $deplibs_check_method; shift
+	  file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs+=" $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		if test -n "$file_magic_glob"; then
+		  libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
+		else
+		  libnameglob=$libname
+		fi
+		test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  if test "$want_nocaseglob" = yes; then
+		    shopt -s nocaseglob
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		    $nocaseglob
+		  else
+		    potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+		  fi
+		  for potent_lib in $potential_libs; do
+		      # Follow soft links.
+		      if ls -lLd "$potent_lib" 2>/dev/null |
+			 $GREP " -> " >/dev/null; then
+			continue
+		      fi
+		      # The statement above tries to avoid entering an
+		      # endless loop below, in case of cyclic links.
+		      # We might still enter an endless loop, since a link
+		      # loop can be closed while we follow links,
+		      # but so what?
+		      potlib="$potent_lib"
+		      while test -h "$potlib" 2>/dev/null; do
+			potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
+			case $potliblink in
+			[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
+			*) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+			esac
+		      done
+		      if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+			 $SED -e 10q |
+			 $EGREP "$file_magic_regex" > /dev/null; then
+			newdeplibs+=" $a_deplib"
+			a_deplib=""
+			break 2
+		      fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a file magic. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs+=" $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	match_pattern*)
+	  set dummy $deplibs_check_method; shift
+	  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs+=" $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+		  for potent_lib in $potential_libs; do
+		    potlib="$potent_lib" # see symlink-check above in file_magic test
+		    if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+		       $EGREP "$match_pattern_regex" > /dev/null; then
+		      newdeplibs+=" $a_deplib"
+		      a_deplib=""
+		      break 2
+		    fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I did check the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a regex pattern. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs+=" $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	none | unknown | *)
+	  newdeplibs=""
+	  tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+	  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	    for i in $predeps $postdeps ; do
+	      # can't use Xsed below, because $i might contain '/'
+	      tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"`
+	    done
+	  fi
+	  case $tmp_deplibs in
+	  *[!\	\ ]*)
+	    echo
+	    if test "X$deplibs_check_method" = "Xnone"; then
+	      echo "*** Warning: inter-library dependencies are not supported on this platform."
+	    else
+	      echo "*** Warning: inter-library dependencies are not known to be supported."
+	    fi
+	    echo "*** All declared inter-library dependencies are being dropped."
+	    droppeddeps=yes
+	    ;;
+	  esac
+	  ;;
+	esac
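For the `test_compile' branch above: a throwaway program is linked against the candidate `-l' flags and the `ldd' output is checked to decide whether each one really resolved to a shared object. A minimal standalone sketch of that probe, using plain cc, -lm and fixed file names as stand-ins for $LTCC, the real deplibs and the conftest handling (whether -lm actually appears in the ldd output depends on the toolchain):

    printf 'int main(void) { return 0; }\n' > conftest.c
    if cc -o conftest conftest.c -lm; then
      if ldd ./conftest | grep 'libm\.so' >/dev/null; then
        echo "-lm resolved to a shared library: keep it"
      else
        echo "-lm is not in the dynamic dependency list: drop it"
      fi
    fi
    rm -f conftest conftest.c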
+	versuffix=$versuffix_save
+	major=$major_save
+	release=$release_save
+	libname=$libname_save
+	name=$name_save
+
+	case $host in
+	*-*-rhapsody* | *-*-darwin1.[012])
+	  # On Rhapsody replace the C library with the System framework
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+	  ;;
+	esac
+
+	if test "$droppeddeps" = yes; then
+	  if test "$module" = yes; then
+	    echo
+	    echo "*** Warning: libtool could not satisfy all declared inter-library"
+	    $ECHO "*** dependencies of module $libname.  Therefore, libtool will create"
+	    echo "*** a static module, that should work as long as the dlopening"
+	    echo "*** application is linked with the -dlopen flag."
+	    if test -z "$global_symbol_pipe"; then
+	      echo
+	      echo "*** However, this would only work if libtool was able to extract symbol"
+	      echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+	      echo "*** not find such a program.  So, this module is probably useless."
+	      echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	    fi
+	    if test "$build_old_libs" = no; then
+	      oldlibs="$output_objdir/$libname.$libext"
+	      build_libtool_libs=module
+	      build_old_libs=yes
+	    else
+	      build_libtool_libs=no
+	    fi
+	  else
+	    echo "*** The inter-library dependencies that have been dropped here will be"
+	    echo "*** automatically added whenever a program is linked with this library"
+	    echo "*** or is declared to -dlopen it."
+
+	    if test "$allow_undefined" = no; then
+	      echo
+	      echo "*** Since this library must not contain undefined symbols,"
+	      echo "*** because either the platform does not support them or"
+	      echo "*** it was explicitly requested with -no-undefined,"
+	      echo "*** libtool will only create a static version of it."
+	      if test "$build_old_libs" = no; then
+		oldlibs="$output_objdir/$libname.$libext"
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  fi
+	fi
+	# Done checking deplibs!
+	deplibs=$newdeplibs
+      fi
+      # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+      case $host in
+	*-*-darwin*)
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  ;;
+      esac
+
+      # Move library search paths that coincide with paths to not-yet-installed
+      # libraries to the beginning of the library search list.
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs+=" -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs+=" $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs+=" $deplib" ;;
+	esac
+      done
+      deplibs="$new_libs"
+
+      # All the library-specific variables (install_libdir is set above).
+      library_names=
+      old_library=
+      dlname=
+
+      # Test again, we may have decided not to build it any more
+      if test "$build_libtool_libs" = yes; then
+	# Remove ${wl} instances when linking with ld.
+	# FIXME: should test the right _cmds variable.
+	case $archive_cmds in
+	  *\$LD\ *) wl= ;;
+        esac
+	if test "$hardcode_into_libs" = yes; then
+	  # Hardcode the library paths
+	  hardcode_libdirs=
+	  dep_rpath=
+	  rpath="$finalize_rpath"
+	  test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+	  for libdir in $rpath; do
+	    if test -n "$hardcode_libdir_flag_spec"; then
+	      if test -n "$hardcode_libdir_separator"; then
+		func_replace_sysroot "$libdir"
+		libdir=$func_replace_sysroot_result
+		if test -z "$hardcode_libdirs"; then
+		  hardcode_libdirs="$libdir"
+		else
+		  # Just accumulate the unique libdirs.
+		  case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+		  *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		    ;;
+		  *)
+		    hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		    ;;
+		  esac
+		fi
+	      else
+		eval flag=\"$hardcode_libdir_flag_spec\"
+		dep_rpath+=" $flag"
+	      fi
+	    elif test -n "$runpath_var"; then
+	      case "$perm_rpath " in
+	      *" $libdir "*) ;;
+	      *) perm_rpath+=" $libdir" ;;
+	      esac
+	    fi
+	  done
+	  # Substitute the hardcoded libdirs into the rpath.
+	  if test -n "$hardcode_libdir_separator" &&
+	     test -n "$hardcode_libdirs"; then
+	    libdir="$hardcode_libdirs"
+	    eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
+	  fi
+	  if test -n "$runpath_var" && test -n "$perm_rpath"; then
+	    # We should set the runpath_var.
+	    rpath=
+	    for dir in $perm_rpath; do
+	      rpath+="$dir:"
+	    done
+	    eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+	  fi
+	  test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+	fi
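When $hardcode_libdir_separator is set, the loop above collects each rpath directory once, joined by that separator, and $hardcode_libdir_flag_spec is then expanded a single time over the joined list (libdir is re-pointed at it just before the eval). A sketch of that accumulation with hypothetical directories and a GNU-ld-style spec:

    sep=':'
    hardcode_spec='-Wl,-rpath -Wl,$libdir'    # hypothetical flag spec
    joined=
    for dir in /opt/foo/lib /usr/local/lib /opt/foo/lib; do
      case "$sep$joined$sep" in
      *"$sep$dir$sep"*) ;;                    # keep each directory only once
      *) joined=${joined:+$joined$sep}$dir ;;
      esac
    done
    libdir=$joined
    eval "flag=\"$hardcode_spec\""
    echo "$flag"      # -Wl,-rpath -Wl,/opt/foo/lib:/usr/local/lib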
+
+	shlibpath="$finalize_shlibpath"
+	test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+	if test -n "$shlibpath"; then
+	  eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+	fi
+
+	# Get the real and link names of the library.
+	eval shared_ext=\"$shrext_cmds\"
+	eval library_names=\"$library_names_spec\"
+	set dummy $library_names
+	shift
+	realname="$1"
+	shift
+
+	if test -n "$soname_spec"; then
+	  eval soname=\"$soname_spec\"
+	else
+	  soname="$realname"
+	fi
+	if test -z "$dlname"; then
+	  dlname=$soname
+	fi
+
+	lib="$output_objdir/$realname"
+	linknames=
+	for link
+	do
+	  linknames+=" $link"
+	done
+
+	# Use standard objects if they are pic
+	test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	test "X$libobjs" = "X " && libobjs=
+
+	delfiles=
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+	  export_symbols="$output_objdir/$libname.uexp"
+	  delfiles+=" $export_symbols"
+	fi
+
+	orig_export_symbols=
+	case $host_os in
+	cygwin* | mingw* | cegcc*)
+	  if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+	    # exporting using user supplied symfile
+	    if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
+	      # and it's NOT already a .def file. Must figure out
+	      # which of the given symbols are data symbols and tag
+	      # them as such. So, trigger use of export_symbols_cmds.
+	      # export_symbols gets reassigned inside the "prepare
+	      # the list of exported symbols" if statement, so the
+	      # include_expsyms logic still works.
+	      orig_export_symbols="$export_symbols"
+	      export_symbols=
+	      always_export_symbols=yes
+	    fi
+	  fi
+	  ;;
+	esac
+
+	# Prepare the list of exported symbols
+	if test -z "$export_symbols"; then
+	  if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
+	    func_verbose "generating symbol list for \`$libname.la'"
+	    export_symbols="$output_objdir/$libname.exp"
+	    $opt_dry_run || $RM $export_symbols
+	    cmds=$export_symbols_cmds
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd1 in $cmds; do
+	      IFS="$save_ifs"
+	      # Take the normal branch if the nm_file_list_spec branch
+	      # doesn't work or if tool conversion is not needed.
+	      case $nm_file_list_spec~$to_tool_file_cmd in
+		*~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
+		  try_normal_branch=yes
+		  eval cmd=\"$cmd1\"
+		  func_len " $cmd"
+		  len=$func_len_result
+		  ;;
+		*)
+		  try_normal_branch=no
+		  ;;
+	      esac
+	      if test "$try_normal_branch" = yes \
+		 && { test "$len" -lt "$max_cmd_len" \
+		      || test "$max_cmd_len" -le -1; }
+	      then
+		func_show_eval "$cmd" 'exit $?'
+		skipped_export=false
+	      elif test -n "$nm_file_list_spec"; then
+		func_basename "$output"
+		output_la=$func_basename_result
+		save_libobjs=$libobjs
+		save_output=$output
+		output=${output_objdir}/${output_la}.nm
+		func_to_tool_file "$output"
+		libobjs=$nm_file_list_spec$func_to_tool_file_result
+		delfiles+=" $output"
+		func_verbose "creating $NM input file list: $output"
+		for obj in $save_libobjs; do
+		  func_to_tool_file "$obj"
+		  $ECHO "$func_to_tool_file_result"
+		done > "$output"
+		eval cmd=\"$cmd1\"
+		func_show_eval "$cmd" 'exit $?'
+		output=$save_output
+		libobjs=$save_libobjs
+		skipped_export=false
+	      else
+		# The command line is too long to execute in one step.
+		func_verbose "using reloadable object file for export list..."
+		skipped_export=:
+		# Break out early, otherwise skipped_export may be
+		# set to false by a later but shorter cmd.
+		break
+	      fi
+	    done
+	    IFS="$save_ifs"
+	    if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+	fi
+
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  tmp_export_symbols="$export_symbols"
+	  test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	  $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	fi
+
+	if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+	  # The given exports_symbols file has to be filtered, so filter it.
+	  func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	  # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	  # 's' commands which not all seds can handle. GNU sed should be fine
+	  # though. Also, the filter scales superlinearly with the number of
+	  # global variables. join(1) would be nice here, but unfortunately
+	  # isn't a blessed tool.
+	  $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	  delfiles+=" $export_symbols $output_objdir/$libname.filter"
+	  export_symbols=$output_objdir/$libname.def
+	  $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	fi
+
+	tmp_deplibs=
+	for test_deplib in $deplibs; do
+	  case " $convenience " in
+	  *" $test_deplib "*) ;;
+	  *)
+	    tmp_deplibs+=" $test_deplib"
+	    ;;
+	  esac
+	done
+	deplibs="$tmp_deplibs"
+
+	if test -n "$convenience"; then
+	  if test -n "$whole_archive_flag_spec" &&
+	    test "$compiler_needs_object" = yes &&
+	    test -z "$libobjs"; then
+	    # extract the archives, so we have objects to list.
+	    # TODO: could optimize this to just extract one archive.
+	    whole_archive_flag_spec=
+	  fi
+	  if test -n "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  else
+	    gentop="$output_objdir/${outputname}x"
+	    generated+=" $gentop"
+
+	    func_extract_archives $gentop $convenience
+	    libobjs+=" $func_extract_archives_result"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	fi
+
+	if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+	  eval flag=\"$thread_safe_flag_spec\"
+	  linker_flags+=" $flag"
+	fi
+
+	# Make a backup of the uninstalled library when relinking
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+	fi
+
+	# Do each of the archive commands.
+	if test "$module" = yes && test -n "$module_cmds" ; then
+	  if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	    eval test_cmds=\"$module_expsym_cmds\"
+	    cmds=$module_expsym_cmds
+	  else
+	    eval test_cmds=\"$module_cmds\"
+	    cmds=$module_cmds
+	  fi
+	else
+	  if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	    eval test_cmds=\"$archive_expsym_cmds\"
+	    cmds=$archive_expsym_cmds
+	  else
+	    eval test_cmds=\"$archive_cmds\"
+	    cmds=$archive_cmds
+	  fi
+	fi
+
+	if test "X$skipped_export" != "X:" &&
+	   func_len " $test_cmds" &&
+	   len=$func_len_result &&
+	   test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  :
+	else
+	  # The command line is too long to link in one step, link piecewise
+	  # or, if using GNU ld and skipped_export is not :, use a linker
+	  # script.
+
+	  # Save the value of $output and $libobjs because we want to
+	  # use them later.  If we have whole_archive_flag_spec, we
+	  # want to use save_libobjs as it was before
+	  # whole_archive_flag_spec was expanded, because we can't
+	  # assume the linker understands whole_archive_flag_spec.
+	  # This may have to be revisited, in case too many
+	  # convenience libraries get linked in and end up exceeding
+	  # the spec.
+	  if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	  fi
+	  save_output=$output
+	  func_basename "$output"
+	  output_la=$func_basename_result
+
+	  # Clear the reloadable object creation command queue and
+	  # initialize k to one.
+	  test_cmds=
+	  concat_cmds=
+	  objlist=
+	  last_robj=
+	  k=1
+
+	  if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
+	    output=${output_objdir}/${output_la}.lnkscript
+	    func_verbose "creating GNU ld script: $output"
+	    echo 'INPUT (' > $output
+	    for obj in $save_libobjs
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    echo ')' >> $output
+	    delfiles+=" $output"
+	    func_to_tool_file "$output"
+	    output=$func_to_tool_file_result
+	  elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+	    output=${output_objdir}/${output_la}.lnk
+	    func_verbose "creating linker input file list: $output"
+	    : > $output
+	    set x $save_libobjs
+	    shift
+	    firstobj=
+	    if test "$compiler_needs_object" = yes; then
+	      firstobj="$1 "
+	      shift
+	    fi
+	    for obj
+	    do
+	      func_to_tool_file "$obj"
+	      $ECHO "$func_to_tool_file_result" >> $output
+	    done
+	    delfiles+=" $output"
+	    func_to_tool_file "$output"
+	    output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+	  else
+	    if test -n "$save_libobjs"; then
+	      func_verbose "creating reloadable object files..."
+	      output=$output_objdir/$output_la-${k}.$objext
+	      eval test_cmds=\"$reload_cmds\"
+	      func_len " $test_cmds"
+	      len0=$func_len_result
+	      len=$len0
+
+	      # Loop over the list of objects to be linked.
+	      for obj in $save_libobjs
+	      do
+		func_len " $obj"
+		func_arith $len + $func_len_result
+		len=$func_arith_result
+		if test "X$objlist" = X ||
+		   test "$len" -lt "$max_cmd_len"; then
+		  objlist+=" $obj"
+		else
+		  # The command $test_cmds is almost too long, add a
+		  # command to the queue.
+		  if test "$k" -eq 1 ; then
+		    # The first file doesn't have a previous command to add.
+		    reload_objs=$objlist
+		    eval concat_cmds=\"$reload_cmds\"
+		  else
+		    # All subsequent reloadable object files will link in
+		    # the last one created.
+		    reload_objs="$objlist $last_robj"
+		    eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+		  fi
+		  last_robj=$output_objdir/$output_la-${k}.$objext
+		  func_arith $k + 1
+		  k=$func_arith_result
+		  output=$output_objdir/$output_la-${k}.$objext
+		  objlist=" $obj"
+		  func_len " $last_robj"
+		  func_arith $len0 + $func_len_result
+		  len=$func_arith_result
+		fi
+	      done
+	      # Handle the remaining objects by creating one last
+	      # reloadable object file.  All subsequent reloadable object
+	      # files will link in the last one created.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      reload_objs="$objlist $last_robj"
+	      eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+	      if test -n "$last_robj"; then
+	        eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+	      fi
+	      delfiles+=" $output"
+
+	    else
+	      output=
+	    fi
+
+	    if ${skipped_export-false}; then
+	      func_verbose "generating symbol list for \`$libname.la'"
+	      export_symbols="$output_objdir/$libname.exp"
+	      $opt_dry_run || $RM $export_symbols
+	      libobjs=$output
+	      # Append the command to create the export file.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+	      if test -n "$last_robj"; then
+		eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+	      fi
+	    fi
+
+	    test -n "$save_libobjs" &&
+	      func_verbose "creating a temporary reloadable object file: $output"
+
+	    # Loop through the commands generated above and execute them.
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd in $concat_cmds; do
+	      IFS="$save_ifs"
+	      $opt_silent || {
+		  func_quote_for_expand "$cmd"
+		  eval "func_echo $func_quote_for_expand_result"
+	      }
+	      $opt_dry_run || eval "$cmd" || {
+		lt_exit=$?
+
+		# Restore the uninstalled library and exit
+		if test "$opt_mode" = relink; then
+		  ( cd "$output_objdir" && \
+		    $RM "${realname}T" && \
+		    $MV "${realname}U" "$realname" )
+		fi
+
+		exit $lt_exit
+	      }
+	    done
+	    IFS="$save_ifs"
+
+	    if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+
+          if ${skipped_export-false}; then
+	    if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	      tmp_export_symbols="$export_symbols"
+	      test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	      $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	    fi
+
+	    if test -n "$orig_export_symbols"; then
+	      # The given export_symbols file has to be filtered, so filter it.
+	      func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	      # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	      # 's' commands which not all seds can handle. GNU sed should be fine
+	      # though. Also, the filter scales superlinearly with the number of
+	      # global variables. join(1) would be nice here, but unfortunately
+	      # isn't a blessed tool.
+	      $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	      delfiles+=" $export_symbols $output_objdir/$libname.filter"
+	      export_symbols=$output_objdir/$libname.def
+	      $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	    fi
+	  fi
+
+	  libobjs=$output
+	  # Restore the value of output.
+	  output=$save_output
+
+	  if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	  # Expand the library linking commands again to reset the
+	  # value of $libobjs for piecewise linking.
+
+	  # Do each of the archive commands.
+	  if test "$module" = yes && test -n "$module_cmds" ; then
+	    if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	      cmds=$module_expsym_cmds
+	    else
+	      cmds=$module_cmds
+	    fi
+	  else
+	    if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	      cmds=$archive_expsym_cmds
+	    else
+	      cmds=$archive_cmds
+	    fi
+	  fi
+	fi
+
+	if test -n "$delfiles"; then
+	  # Append the command to remove temporary files to $cmds.
+	  eval cmds=\"\$cmds~\$RM $delfiles\"
+	fi
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  libobjs+=" $func_extract_archives_result"
+	  test "X$libobjs" = "X " && libobjs=
+	fi
+
+	save_ifs="$IFS"; IFS='~'
+	for cmd in $cmds; do
+	  IFS="$save_ifs"
+	  eval cmd=\"$cmd\"
+	  $opt_silent || {
+	    func_quote_for_expand "$cmd"
+	    eval "func_echo $func_quote_for_expand_result"
+	  }
+	  $opt_dry_run || eval "$cmd" || {
+	    lt_exit=$?
+
+	    # Restore the uninstalled library and exit
+	    if test "$opt_mode" = relink; then
+	      ( cd "$output_objdir" && \
+	        $RM "${realname}T" && \
+		$MV "${realname}U" "$realname" )
+	    fi
+
+	    exit $lt_exit
+	  }
+	done
+	IFS="$save_ifs"
+
+	# Restore the uninstalled library and exit
+	if test "$opt_mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+	  if test -n "$convenience"; then
+	    if test -z "$whole_archive_flag_spec"; then
+	      func_show_eval '${RM}r "$gentop"'
+	    fi
+	  fi
+
+	  exit $EXIT_SUCCESS
+	fi
+
+	# Create links to the real library.
+	for linkname in $linknames; do
+	  if test "$realname" != "$linkname"; then
+	    func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+	  fi
+	done
+
+	# If -module or -export-dynamic was specified, set the dlname.
+	if test "$module" = yes || test "$export_dynamic" = yes; then
+	  # On all known operating systems, these are identical.
+	  dlname="$soname"
+	fi
+      fi
+      ;;
+
+    obj)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for objects"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for objects" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for objects"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for objects"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for objects"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for objects"
+
+      case $output in
+      *.lo)
+	test -n "$objs$old_deplibs" && \
+	  func_fatal_error "cannot build library object \`$output' from non-libtool objects"
+
+	libobj=$output
+	func_lo2o "$libobj"
+	obj=$func_lo2o_result
+	;;
+      *)
+	libobj=
+	obj="$output"
+	;;
+      esac
+
+      # Delete the old objects.
+      $opt_dry_run || $RM $obj $libobj
+
+      # Objects from convenience libraries.  This assumes
+      # single-version convenience libraries.  Whenever we create
+      # different ones for PIC/non-PIC, we'll have to duplicate
+      # the extraction.
+      reload_conv_objs=
+      gentop=
+      # reload_cmds runs $LD directly, so let us get rid of
+      # -Wl from whole_archive_flag_spec and hope we can get by with
+      # turning commas into spaces.
+      wl=
+
+      if test -n "$convenience"; then
+	if test -n "$whole_archive_flag_spec"; then
+	  eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+	  reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+	else
+	  gentop="$output_objdir/${obj}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $convenience
+	  reload_conv_objs="$reload_objs $func_extract_archives_result"
+	fi
+      fi
+
+      # If we're not building shared, we need to use non_pic_objs
+      test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
+
+      # Create the old-style object.
+      reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+      output="$obj"
+      func_execute_cmds "$reload_cmds" 'exit $?'
+
+      # Exit if we aren't doing a library object file.
+      if test -z "$libobj"; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$build_libtool_libs" != yes; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	# Create an invalid libtool object if no PIC, so that we don't
+	# accidentally link it into a program.
+	# $show "echo timestamp > $libobj"
+	# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+	exit $EXIT_SUCCESS
+      fi
+
+      if test -n "$pic_flag" || test "$pic_mode" != default; then
+	# Only do commands if we really have different PIC objects.
+	reload_objs="$libobjs $reload_conv_objs"
+	output="$libobj"
+	func_execute_cmds "$reload_cmds" 'exit $?'
+      fi
+
+      if test -n "$gentop"; then
+	func_show_eval '${RM}r "$gentop"'
+      fi
+
+      exit $EXIT_SUCCESS
+      ;;
+
+    prog)
+      case $host in
+	*cygwin*) func_stripname '' '.exe' "$output"
+	          output=$func_stripname_result.exe;;
+      esac
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for programs"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for programs"
+
+      test "$preload" = yes \
+        && test "$dlopen_support" = unknown \
+	&& test "$dlopen_self" = unknown \
+	&& test "$dlopen_self_static" = unknown && \
+	  func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+      case $host in
+      *-*-rhapsody* | *-*-darwin1.[012])
+	# On Rhapsody, replace the C library with the System framework
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	;;
+      esac
+
+      case $host in
+      *-*-darwin*)
+	# Don't allow lazy linking; it breaks C++ global constructors,
+	# but it is supposedly fixed on 10.4 or later (yay!).
+	if test "$tagname" = CXX ; then
+	  case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+	    10.[0123])
+	      compile_command+=" ${wl}-bind_at_load"
+	      finalize_command+=" ${wl}-bind_at_load"
+	    ;;
+	  esac
+	fi
+	# Time to change all our "foo.ltframework" stuff back to "-framework foo"
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	;;
+      esac
+
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $compile_deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs+=" -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $compile_deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs+=" $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs+=" $deplib" ;;
+	esac
+      done
+      compile_deplibs="$new_libs"
+
+
+      compile_command+=" $compile_deplibs"
+      finalize_command+=" $finalize_deplibs"
+
+      if test -n "$rpath$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	for libdir in $rpath $xrpath; do
+	  # This is the magic to use -rpath.
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath+=" $libdir" ;;
+	  esac
+	done
+      fi
+
+      # Now hardcode the library paths
+      rpath=
+      hardcode_libdirs=
+      for libdir in $compile_rpath $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath+=" $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) perm_rpath+=" $libdir" ;;
+	  esac
+	fi
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$libdir:"*) ;;
+	  ::) dllsearchpath=$libdir;;
+	  *) dllsearchpath+=":$libdir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath+=":$testbindir";;
+	  esac
+	  ;;
+	esac
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      compile_rpath="$rpath"
+
+      rpath=
+      hardcode_libdirs=
+      for libdir in $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs+="$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath+=" $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$finalize_perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_perm_rpath+=" $libdir" ;;
+	  esac
+	fi
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      finalize_rpath="$rpath"
+
+      if test -n "$libobjs" && test "$build_old_libs" = yes; then
+	# Transform all the library objects into standard objects.
+	compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+      fi
+
+      func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
+
+      # template prelinking step
+      if test -n "$prelink_cmds"; then
+	func_execute_cmds "$prelink_cmds" 'exit $?'
+      fi
+
+      wrappers_required=yes
+      case $host in
+      *cegcc* | *mingw32ce*)
+        # Disable wrappers for cegcc and mingw32ce hosts; we are cross-compiling anyway.
+        wrappers_required=no
+        ;;
+      *cygwin* | *mingw* )
+        if test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      *)
+        if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      esac
+      if test "$wrappers_required" = no; then
+	# Replace the output file specification.
+	compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	link_command="$compile_command$compile_rpath"
+
+	# We have no uninstalled library dependencies, so finalize right now.
+	exit_status=0
+	func_show_eval "$link_command" 'exit_status=$?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	# Delete the generated files.
+	if test -f "$output_objdir/${outputname}S.${objext}"; then
+	  func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+	fi
+
+	exit $exit_status
+      fi
+
+      if test -n "$compile_shlibpath$finalize_shlibpath"; then
+	compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+      fi
+      if test -n "$finalize_shlibpath"; then
+	finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+      fi
+
+      compile_var=
+      finalize_var=
+      if test -n "$runpath_var"; then
+	if test -n "$perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $perm_rpath; do
+	    rpath+="$dir:"
+	  done
+	  compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+	if test -n "$finalize_perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $finalize_perm_rpath; do
+	    rpath+="$dir:"
+	  done
+	  finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+      fi
+
+      if test "$no_install" = yes; then
+	# We don't need to create a wrapper script.
+	link_command="$compile_var$compile_command$compile_rpath"
+	# Replace the output file specification.
+	link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	# Delete the old output file.
+	$opt_dry_run || $RM $output
+	# Link the executable and exit
+	func_show_eval "$link_command" 'exit $?'
+
+	if test -n "$postlink_cmds"; then
+	  func_to_tool_file "$output"
+	  postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	  func_execute_cmds "$postlink_cmds" 'exit $?'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$hardcode_action" = relink; then
+	# Fast installation is not supported
+	link_command="$compile_var$compile_command$compile_rpath"
+	relink_command="$finalize_var$finalize_command$finalize_rpath"
+
+	func_warning "this platform does not like uninstalled shared libraries"
+	func_warning "\`$output' will be relinked during installation"
+      else
+	if test "$fast_install" != no; then
+	  link_command="$finalize_var$compile_command$finalize_rpath"
+	  if test "$fast_install" = yes; then
+	    relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+	  else
+	    # fast_install is set to needless
+	    relink_command=
+	  fi
+	else
+	  link_command="$compile_var$compile_command$compile_rpath"
+	  relink_command="$finalize_var$finalize_command$finalize_rpath"
+	fi
+      fi
+
+      # Replace the output file specification.
+      link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+      # Delete the old output files.
+      $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+      func_show_eval "$link_command" 'exit $?'
+
+      if test -n "$postlink_cmds"; then
+	func_to_tool_file "$output_objdir/$outputname"
+	postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+	func_execute_cmds "$postlink_cmds" 'exit $?'
+      fi
+
+      # Now create the wrapper script.
+      func_verbose "creating $output"
+
+      # Quote the relink command for shipping.
+      if test -n "$relink_command"; then
+	# Preserve any variables that may affect compiler behavior
+	for var in $variables_saved_for_relink; do
+	  if eval test -z \"\${$var+set}\"; then
+	    relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	  elif eval var_value=\$$var; test -z "$var_value"; then
+	    relink_command="$var=; export $var; $relink_command"
+	  else
+	    func_quote_for_eval "$var_value"
+	    relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	  fi
+	done
+	relink_command="(cd `pwd`; $relink_command)"
+	relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      fi
+
+      # Only actually do things if not in dry run mode.
+      $opt_dry_run || {
+	# win32 will think the script is a binary if it has
+	# a .exe suffix, so we strip it off here.
+	case $output in
+	  *.exe) func_stripname '' '.exe' "$output"
+	         output=$func_stripname_result ;;
+	esac
+	# test for cygwin because mv fails w/o .exe extensions
+	case $host in
+	  *cygwin*)
+	    exeext=.exe
+	    func_stripname '' '.exe' "$outputname"
+	    outputname=$func_stripname_result ;;
+	  *) exeext= ;;
+	esac
+	case $host in
+	  *cygwin* | *mingw* )
+	    func_dirname_and_basename "$output" "" "."
+	    output_name=$func_basename_result
+	    output_path=$func_dirname_result
+	    cwrappersource="$output_path/$objdir/lt-$output_name.c"
+	    cwrapper="$output_path/$output_name.exe"
+	    $RM $cwrappersource $cwrapper
+	    trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_cwrapperexe_src > $cwrappersource
+
+	    # The wrapper executable is built using the $host compiler,
+	    # because it contains $host paths and files. If cross-
+	    # compiling, it, like the target executable, must be
+	    # executed on the $host or under an emulation environment.
+	    $opt_dry_run || {
+	      $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+	      $STRIP $cwrapper
+	    }
+
+	    # Now, create the wrapper script for func_source use:
+	    func_ltwrapper_scriptname $cwrapper
+	    $RM $func_ltwrapper_scriptname_result
+	    trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+	    $opt_dry_run || {
+	      # note: this script will not be executed, so do not chmod.
+	      if test "x$build" = "x$host" ; then
+		$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+	      else
+		func_emit_wrapper no > $func_ltwrapper_scriptname_result
+	      fi
+	    }
+	  ;;
+	  * )
+	    $RM $output
+	    trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_wrapper no > $output
+	    chmod +x $output
+	  ;;
+	esac
+      }
+      exit $EXIT_SUCCESS
+      ;;
+    esac
+
+    # See if we need to build an old-fashioned archive.
+    for oldlib in $oldlibs; do
+
+      if test "$build_libtool_libs" = convenience; then
+	oldobjs="$libobjs_save $symfileobj"
+	addlibs="$convenience"
+	build_libtool_libs=no
+      else
+	if test "$build_libtool_libs" = module; then
+	  oldobjs="$libobjs_save"
+	  build_libtool_libs=no
+	else
+	  oldobjs="$old_deplibs $non_pic_objects"
+	  if test "$preload" = yes && test -f "$symfileobj"; then
+	    oldobjs+=" $symfileobj"
+	  fi
+	fi
+	addlibs="$old_convenience"
+      fi
+
+      if test -n "$addlibs"; then
+	gentop="$output_objdir/${outputname}x"
+	generated+=" $gentop"
+
+	func_extract_archives $gentop $addlibs
+	oldobjs+=" $func_extract_archives_result"
+      fi
+
+      # Do each command in the archive commands.
+      if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
+	cmds=$old_archive_from_new_cmds
+      else
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  oldobjs+=" $func_extract_archives_result"
+	fi
+
+	# POSIX demands no paths to be encoded in archives.  We have
+	# to avoid creating archives with duplicate basenames if we
+	# might have to extract them afterwards, e.g., when creating a
+	# static archive out of a convenience library, or when linking
+	# the entirety of a libtool archive into another (currently
+	# not supported by libtool).
+	if (for obj in $oldobjs
+	    do
+	      func_basename "$obj"
+	      $ECHO "$func_basename_result"
+	    done | sort | sort -uc >/dev/null 2>&1); then
+	  :
+	else
+	  echo "copying selected object files to avoid basename conflicts..."
+	  gentop="$output_objdir/${outputname}x"
+	  generated+=" $gentop"
+	  func_mkdir_p "$gentop"
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  counter=1
+	  for obj in $save_oldobjs
+	  do
+	    func_basename "$obj"
+	    objbase="$func_basename_result"
+	    case " $oldobjs " in
+	    " ") oldobjs=$obj ;;
+	    *[\ /]"$objbase "*)
+	      while :; do
+		# Make sure we don't pick an alternate name that also
+		# overlaps.
+		newobj=lt$counter-$objbase
+		func_arith $counter + 1
+		counter=$func_arith_result
+		case " $oldobjs " in
+		*[\ /]"$newobj "*) ;;
+		*) if test ! -f "$gentop/$newobj"; then break; fi ;;
+		esac
+	      done
+	      func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+	      oldobjs+=" $gentop/$newobj"
+	      ;;
+	    *) oldobjs+=" $obj" ;;
+	    esac
+	  done
+	fi
+	func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+	tool_oldlib=$func_to_tool_file_result
+	eval cmds=\"$old_archive_cmds\"
+
+	func_len " $cmds"
+	len=$func_len_result
+	if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  cmds=$old_archive_cmds
+	elif test -n "$archiver_list_spec"; then
+	  func_verbose "using command file archive linking..."
+	  for obj in $oldobjs
+	  do
+	    func_to_tool_file "$obj"
+	    $ECHO "$func_to_tool_file_result"
+	  done > $output_objdir/$libname.libcmd
+	  func_to_tool_file "$output_objdir/$libname.libcmd"
+	  oldobjs=" $archiver_list_spec$func_to_tool_file_result"
+	  cmds=$old_archive_cmds
+	else
+	  # the command line is too long to link in one step, link in parts
+	  func_verbose "using piecewise archive linking..."
+	  save_RANLIB=$RANLIB
+	  RANLIB=:
+	  objlist=
+	  concat_cmds=
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  # Is there a better way of finding the last object in the list?
+	  for obj in $save_oldobjs
+	  do
+	    last_oldobj=$obj
+	  done
+	  eval test_cmds=\"$old_archive_cmds\"
+	  func_len " $test_cmds"
+	  len0=$func_len_result
+	  len=$len0
+	  for obj in $save_oldobjs
+	  do
+	    func_len " $obj"
+	    func_arith $len + $func_len_result
+	    len=$func_arith_result
+	    objlist+=" $obj"
+	    if test "$len" -lt "$max_cmd_len"; then
+	      :
+	    else
+	      # the above command should be used before it gets too long
+	      oldobjs=$objlist
+	      if test "$obj" = "$last_oldobj" ; then
+		RANLIB=$save_RANLIB
+	      fi
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+	      objlist=
+	      len=$len0
+	    fi
+	  done
+	  RANLIB=$save_RANLIB
+	  oldobjs=$objlist
+	  if test "X$oldobjs" = "X" ; then
+	    eval cmds=\"\$concat_cmds\"
+	  else
+	    eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+	  fi
+	fi
+      fi
+      func_execute_cmds "$cmds" 'exit $?'
+    done
+
+    test -n "$generated" && \
+      func_show_eval "${RM}r$generated"
+
+    # Now create the libtool archive.
+    case $output in
+    *.la)
+      old_library=
+      test "$build_old_libs" = yes && old_library="$libname.$libext"
+      func_verbose "creating $output"
+
+      # Preserve any variables that may affect compiler behavior
+      for var in $variables_saved_for_relink; do
+	if eval test -z \"\${$var+set}\"; then
+	  relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	elif eval var_value=\$$var; test -z "$var_value"; then
+	  relink_command="$var=; export $var; $relink_command"
+	else
+	  func_quote_for_eval "$var_value"
+	  relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	fi
+      done
+      # Quote the link command for shipping.
+      relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+      relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      if test "$hardcode_automatic" = yes ; then
+	relink_command=
+      fi
+
+      # Only create the output if not a dry run.
+      $opt_dry_run || {
+	for installed in no yes; do
+	  if test "$installed" = yes; then
+	    if test -z "$install_libdir"; then
+	      break
+	    fi
+	    output="$output_objdir/$outputname"i
+	    # Replace all uninstalled libtool libraries with the installed ones
+	    newdependency_libs=
+	    for deplib in $dependency_libs; do
+	      case $deplib in
+	      *.la)
+		func_basename "$deplib"
+		name="$func_basename_result"
+		func_resolve_sysroot "$deplib"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$deplib' is not a valid libtool archive"
+		newdependency_libs+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      -L*)
+		func_stripname -L '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		newdependency_libs+=" -L$func_replace_sysroot_result"
+		;;
+	      -R*)
+		func_stripname -R '' "$deplib"
+		func_replace_sysroot "$func_stripname_result"
+		newdependency_libs+=" -R$func_replace_sysroot_result"
+		;;
+	      *) newdependency_libs+=" $deplib" ;;
+	      esac
+	    done
+	    dependency_libs="$newdependency_libs"
+	    newdlfiles=
+
+	    for lib in $dlfiles; do
+	      case $lib in
+	      *.la)
+	        func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlfiles+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      *) newdlfiles+=" $lib" ;;
+	      esac
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+	      *.la)
+		# Only pass preopened files to the pseudo-archive (for
+		# eventual linking with the app. that links it) if we
+		# didn't already link the preopened objects directly into
+		# the library:
+		func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlprefiles+=" ${lt_sysroot:+=}$libdir/$name"
+		;;
+	      esac
+	    done
+	    dlprefiles="$newdlprefiles"
+	  else
+	    newdlfiles=
+	    for lib in $dlfiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlfiles+=" $abs"
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlprefiles+=" $abs"
+	    done
+	    dlprefiles="$newdlprefiles"
+	  fi
+	  $RM $output
+	  # place dlname in correct position for cygwin
+	  # In fact, it would be nice if we could use this code for all target
+	  # systems that can't hard-code library paths into their executables
+	  # and that have no shared library path variable independent of PATH,
+	  # but it turns out we can't easily determine that from inspecting
+	  # libtool variables, so we have to hard-code the OSs to which it
+	  # applies here; at the moment, that means platforms that use the PE
+	  # object format with DLL files.  See the long comment at the top of
+	  # tests/bindir.at for full details.
+	  tdlname=$dlname
+	  case $host,$output,$installed,$module,$dlname in
+	    *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+	      # If a -bindir argument was supplied, place the dll there.
+	      if test "x$bindir" != x ;
+	      then
+		func_relative_path "$install_libdir" "$bindir"
+		tdlname=$func_relative_path_result$dlname
+	      else
+		# Otherwise fall back on heuristic.
+		tdlname=../bin/$dlname
+	      fi
+	      ;;
+	  esac
+	  $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that can not go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+	  if test "$installed" = no && test "$need_relink" = yes; then
+	    $ECHO >> $output "\
+relink_command=\"$relink_command\""
+	  fi
+	done
+      }
+
+      # Do a symbolic link so that the libtool archive can be found in
+      # LD_LIBRARY_PATH before the program is installed.
+      func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+      ;;
+    esac
+    exit $EXIT_SUCCESS
+}
+
+{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
+    func_mode_link ${1+"$@"}
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+    $opt_debug
+    RM="$nonopt"
+    files=
+    rmforce=
+    exit_status=0
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    for arg
+    do
+      case $arg in
+      -f) RM+=" $arg"; rmforce=yes ;;
+      -*) RM+=" $arg" ;;
+      *) files+=" $arg" ;;
+      esac
+    done
+
+    test -z "$RM" && \
+      func_fatal_help "you must specify an RM program"
+
+    rmdirs=
+
+    for file in $files; do
+      func_dirname "$file" "" "."
+      dir="$func_dirname_result"
+      if test "X$dir" = X.; then
+	odir="$objdir"
+      else
+	odir="$dir/$objdir"
+      fi
+      func_basename "$file"
+      name="$func_basename_result"
+      test "$opt_mode" = uninstall && odir="$dir"
+
+      # Remember odir for removal later, being careful to avoid duplicates
+      if test "$opt_mode" = clean; then
+	case " $rmdirs " in
+	  *" $odir "*) ;;
+	  *) rmdirs+=" $odir" ;;
+	esac
+      fi
+
+      # Don't error if the file doesn't exist and rm -f was used.
+      if { test -L "$file"; } >/dev/null 2>&1 ||
+	 { test -h "$file"; } >/dev/null 2>&1 ||
+	 test -f "$file"; then
+	:
+      elif test -d "$file"; then
+	exit_status=1
+	continue
+      elif test "$rmforce" = yes; then
+	continue
+      fi
+
+      rmfiles="$file"
+
+      case $name in
+      *.la)
+	# Possibly a libtool archive, so verify it.
+	if func_lalib_p "$file"; then
+	  func_source $dir/$name
+
+	  # Delete the libtool libraries and symlinks.
+	  for n in $library_names; do
+	    rmfiles+=" $odir/$n"
+	  done
+	  test -n "$old_library" && rmfiles+=" $odir/$old_library"
+
+	  case "$opt_mode" in
+	  clean)
+	    case " $library_names " in
+	    *" $dlname "*) ;;
+	    *) test -n "$dlname" && rmfiles+=" $odir/$dlname" ;;
+	    esac
+	    test -n "$libdir" && rmfiles+=" $odir/$name $odir/${name}i"
+	    ;;
+	  uninstall)
+	    if test -n "$library_names"; then
+	      # Do each command in the postuninstall commands.
+	      func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+
+	    if test -n "$old_library"; then
+	      # Do each command in the old_postuninstall commands.
+	      func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+	    # FIXME: should reinstall the best remaining shared library.
+	    ;;
+	  esac
+	fi
+	;;
+
+      *.lo)
+	# Possibly a libtool object, so verify it.
+	if func_lalib_p "$file"; then
+
+	  # Read the .lo file
+	  func_source $dir/$name
+
+	  # Add PIC object to the list of files to remove.
+	  if test -n "$pic_object" &&
+	     test "$pic_object" != none; then
+	    rmfiles+=" $dir/$pic_object"
+	  fi
+
+	  # Add non-PIC object to the list of files to remove.
+	  if test -n "$non_pic_object" &&
+	     test "$non_pic_object" != none; then
+	    rmfiles+=" $dir/$non_pic_object"
+	  fi
+	fi
+	;;
+
+      *)
+	if test "$opt_mode" = clean ; then
+	  noexename=$name
+	  case $file in
+	  *.exe)
+	    func_stripname '' '.exe' "$file"
+	    file=$func_stripname_result
+	    func_stripname '' '.exe' "$name"
+	    noexename=$func_stripname_result
+	    # $file with .exe has already been added to rmfiles,
+	    # add $file without .exe
+	    rmfiles+=" $file"
+	    ;;
+	  esac
+	  # Do a test to see if this is a libtool program.
+	  if func_ltwrapper_p "$file"; then
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      relink_command=
+	      func_source $func_ltwrapper_scriptname_result
+	      rmfiles+=" $func_ltwrapper_scriptname_result"
+	    else
+	      relink_command=
+	      func_source $dir/$noexename
+	    fi
+
+	    # note $name still contains .exe if it was in $file originally
+	    # as does the version of $file that was added into $rmfiles
+	    rmfiles+=" $odir/$name $odir/${name}S.${objext}"
+	    if test "$fast_install" = yes && test -n "$relink_command"; then
+	      rmfiles+=" $odir/lt-$name"
+	    fi
+	    if test "X$noexename" != "X$name" ; then
+	      rmfiles+=" $odir/lt-${noexename}.c"
+	    fi
+	  fi
+	fi
+	;;
+      esac
+      func_show_eval "$RM $rmfiles" 'exit_status=1'
+    done
+
+    # Try to remove the ${objdir}s in the directories where we deleted files
+    for dir in $rmdirs; do
+      if test -d "$dir"; then
+	func_show_eval "rmdir $dir >/dev/null 2>&1"
+      fi
+    done
+
+    exit $exit_status
+}
+
+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
+    func_mode_uninstall ${1+"$@"}
+
+test -z "$opt_mode" && {
+  help="$generic_help"
+  func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+  func_fatal_help "invalid operation mode \`$opt_mode'"
+
+if test -n "$exec_cmd"; then
+  eval exec "$exec_cmd"
+  exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# in which we disable both kinds of libraries.  Given conflicting
+# choices, we go for a static library, that is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them.  This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fallback to a static-only
+# configuration.  But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+# vi:sw=2
+
+
+# ### BEGIN LIBTOOL TAG CONFIG: CXX
+
+# The linker used to build libraries.
+LD="/usr/bin/ld -m elf_x86_64"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC="g++"
+
+# Is the compiler the GNU compiler?
+with_gcc=yes
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=" -fno-builtin"
+
+# Additional compiler flags for building library objects.
+pic_flag=" -fPIC -DPIC"
+
+# How to pass a linker flag through the compiler.
+wl="-Wl,"
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag="-static"
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec="\${wl}--export-dynamic"
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec="\${wl}--whole-archive\$convenience \${wl}--no-whole-archive"
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object="no"
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds="\$CC \$pic_flag -shared -nostdlib \$predep_objects \$libobjs \$deplibs \$postdep_objects \$compiler_flags \${wl}-soname \$wl\$soname -o \$lib"
+archive_expsym_cmds="\$CC \$pic_flag -shared -nostdlib \$predep_objects \$libobjs \$deplibs \$postdep_objects \$compiler_flags \${wl}-soname \$wl\$soname \${wl}-retain-symbols-file \$wl\$export_symbols -o \$lib"
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld="yes"
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec="\${wl}-rpath \${wl}\$libdir"
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=unsupported
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=immediate
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs="/usr/lib/gcc/x86_64-linux-gnu/4.6 /usr/lib/gcc/x86_64-linux-gnu/4.6/../../../x86_64-linux-gnu /usr/lib/gcc/x86_64-linux-gnu/4.6/../../../../lib /lib/x86_64-linux-gnu /lib/../lib /usr/lib/x86_64-linux-gnu /usr/lib/../lib /usr/lib/gcc/x86_64-linux-gnu/4.6/../../.."
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects="/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/4.6/crtbeginS.o"
+postdep_objects="/usr/lib/gcc/x86_64-linux-gnu/4.6/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/4.6/../../../x86_64-linux-gnu/crtn.o"
+predeps=""
+postdeps="-lstdc++ -lm -lgcc_s -lc -lgcc_s"
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path="-L/usr/lib/gcc/x86_64-linux-gnu/4.6 -L/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/4.6/../../.."
+
+# ### END LIBTOOL TAG CONFIG: CXX
+
+# ### BEGIN LIBTOOL TAG CONFIG: F77
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: F77
+
+# ### BEGIN LIBTOOL TAG CONFIG: FC
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=no
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=no
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=no
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=no
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=no
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=no
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=unknown
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=no
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: FC
+
+# ### BEGIN LIBTOOL TAG CONFIG: GO
+
+# The linker used to build libraries.
+LD="/usr/bin/ld -m elf_x86_64"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: GO
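+
+# Illustrative note (a hypothetical expansion, not generated by libtool):
+# the escaped variables above are expanded when the tag is used, so with
+# LD="/usr/bin/ld -m elf_x86_64" and reload_flag=" -r", reload_cmds runs
+# roughly
+#
+#   /usr/bin/ld -m elf_x86_64 -r -o $output$reload_objs
+#
+# i.e. the objects are combined into a single relocatable object file.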
+
+# ### BEGIN LIBTOOL TAG CONFIG: GCJ
+
+# The linker used to build libraries.
+LD="/usr/bin/ld -m elf_x86_64"
+
+# How to create reloadable object files.
+reload_flag=" -r"
+reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
+
+# Commands used to build an old-style archive.
+old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=""
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=no
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: GCJ
+
+# ### BEGIN LIBTOOL TAG CONFIG: RC
+
+# The linker used to build libraries.
+LD=""
+
+# How to create reloadable object files.
+reload_flag=""
+reload_cmds=""
+
+# Commands used to build an old-style archive.
+old_archive_cmds=""
+
+# A language specific compiler.
+CC=""
+
+# Is the compiler the GNU compiler?
+with_gcc=
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=""
+
+# Additional compiler flags for building library objects.
+pic_flag=""
+
+# How to pass a linker flag through the compiler.
+wl=""
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=""
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o="yes"
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=""
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=""
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=""
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=""
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=""
+
+# Commands used to build a shared archive.
+archive_cmds=""
+archive_expsym_cmds=""
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=""
+module_expsym_cmds=""
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=""
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=""
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=""
+
+# Flag to hardcode $libdir into a binary during linking.
+# This must work even if $libdir does not exist
+hardcode_libdir_flag_spec=""
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=""
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=
+
+# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting ${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=
+
+# The commands to list exported symbols.
+export_symbols_cmds=""
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=""
+
+# Symbols that must always be exported.
+include_expsyms=""
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=""
+
+# Commands necessary for finishing linking programs.
+postlink_cmds=""
+
+# Specify filename containing input files.
+file_list_spec=""
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=
+
+# The directories searched by this compiler when creating a shared library.
+compiler_lib_search_dirs=""
+
+# Dependencies to place before and after the objects being linked to
+# create a shared library.
+predep_objects=""
+postdep_objects=""
+predeps=""
+postdeps=""
+
+# The library search path used internally by the compiler when linking
+# a shared library.
+compiler_lib_search_path=""
+
+# ### END LIBTOOL TAG CONFIG: RC
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/libtoolize b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/libtoolize
new file mode 100755
index 0000000..ef723b5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/libtoolize
@@ -0,0 +1,2555 @@
+#! /bin/sh
+# Generated from libtoolize.m4sh by GNU Autoconf 2.68.
+
+# libtoolize (GNU libtool) 2.4.2
+# Written by Gary V. Vaughan <gary@gnu.org>, 2003
+
+# Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# Libtoolize is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Libtoolize is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with libtoolize; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]...
+#
+# Prepare a package to use libtool.
+#
+#   -c, --copy          copy files rather than symlinking them
+#       --debug         enable verbose shell tracing
+#   -n, --dry-run       print commands rather than running them
+#   -f, --force         replace existing files
+#   -i, --install       copy missing auxiliary files
+#       --ltdl[=DIR]    install libltdl sources [default: libltdl]
+#       --no-warn       don't display warning messages
+#       --nonrecursive  prepare ltdl for non-recursive make
+#   -q, --quiet         work silently
+#       --recursive     prepare ltdl for recursive make
+#       --subproject    prepare ltdl to configure and build independently
+#   -v, --verbose       verbosely report processing
+#       --version       print version information and exit
+#   -h, --help          print short or long help message
+#
+# The following space or comma delimited options can be passed to $progname
+# via the environment variable LIBTOOLIZE_OPTIONS; unknown environment
+# options are ignored:
+#
+#   --debug             enable verbose shell tracing
+#   --no-warn           don't display warning messages
+#   --quiet             work silently
+#   --verbose           verbosely report processing
+#
+# You must `cd' to the top directory of your package before you run
+# `$progname'.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#       host-triplet:	x86_64-unknown-linux-gnu
+#       $progname:	(GNU libtool) 2.4.2
+#       automake:		$automake_version
+#       autoconf:		$autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+: ${TAR=tar}
+
+PROGRAM=libtoolize
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested=""
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   break 2
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${EGREP="/bin/grep -E"}
+: ${FGREP="/bin/grep -F"}
+: ${GREP="/bin/grep"}
+: ${LN_S="ln -s"}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SED="/bin/sed"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+    func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+} # func_dirname may be replaced by extended shell implementation
+
+
+# func_basename file
+func_basename ()
+{
+    func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+} # func_basename may be replaced by extended shell implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+    # Extract subdirectory from the argument.
+    func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+    if test "X$func_dirname_result" = "X${1}"; then
+      func_dirname_result="${3}"
+    else
+      func_dirname_result="$func_dirname_result${2}"
+    fi
+    func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+} # func_dirname_and_basename may be replaced by extended shell implementation
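+
+# Hypothetical usage sketch (example path only): after
+#
+#   func_dirname_and_basename '/usr/lib/libexample.la' '' '.'
+#
+# $func_dirname_result is "/usr/lib" and $func_basename_result is
+# "libexample.la"; for a bare "libexample.la" the dirname result falls
+# back to the "." replacement argument.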
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+    case ${2} in
+      .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+      *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+    esac
+} # func_stripname may be replaced by extended shell implementation
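+
+# Hypothetical usage sketch (example values only):
+#
+#   func_stripname 'lib' '.la' 'libexample.la'
+#
+# leaves "example" in $func_stripname_result; the leading dot in the
+# suffix is matched literally, as described above.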
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
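+
+# Hypothetical usage sketch (example paths only):
+#
+#   func_normal_abspath '/usr//local/./lib/'   # result: /usr/local/lib
+#   func_normal_abspath '/tmp/../var/log'      # result: /var/log
+#
+# Doubled slashes, "." components and ".." components are cancelled out;
+# only a leading "//" namespace prefix is preserved.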
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
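+
+# Hypothetical usage sketch (example directories only):
+#
+#   func_relative_path '/usr/local/lib' '/usr/local/share/doc'
+#
+# sets $func_relative_path_result to "../share/doc/", a relative path with
+# a trailing slash so a filename can be appended directly; if both
+# arguments normalise to the same directory the result is empty.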
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=${PATH_SEPARATOR-:}
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes.  A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same.  If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$', and `\'-escape that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon-delimited
+	# list in case some portion of the path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
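+
+# Hypothetical usage sketch (example path only): if only "." exists,
+#
+#   func_mkdir_p './build/sub dir/objs'
+#
+# assembles the colon-delimited list
+# "./build:./build/sub dir:./build/sub dir/objs" and runs $MKDIR on each
+# entry in turn, ignoring races where another process creates one first.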
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoid race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution, and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
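+
+# Hypothetical usage sketch (example string only): after
+#
+#   func_quote_for_eval 'hello world $HOME'
+#
+# $func_quote_for_eval_unquoted_result is 'hello world \$HOME' and
+# $func_quote_for_eval_result is '"hello world \$HOME"', so a later eval
+# sees the literal text rather than an expanded $HOME.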
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result.  All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+  case $1 in
+  [0-9]* | *[!a-zA-Z0-9_]*)
+    func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+    ;;
+  * )
+    func_tr_sh_result=$1
+    ;;
+  esac
+}
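+
+# Hypothetical usage sketch (example name only):
+#
+#   func_tr_sh '3com-lib.so'
+#
+# sets $func_tr_sh_result to "_3com_lib_so": the leading digit gains a "_"
+# prefix and every character outside a-zA-Z0-9_ becomes "_".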
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $opt_debug
+
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $opt_debug
+
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+	:print
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/
+	p
+	d
+     }
+     /^# .* home page:/b print
+     /^# General help using/b print
+     ' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    $opt_debug
+
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+    my_sed_short_opt='1s/^\(..\).*$/\1/;q'
+    my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
+
+    func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
+    func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
+} # func_split_short_opt may be replaced by extended shell implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+    my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
+    my_sed_long_arg='1s/^--[^=]*=//'
+
+    func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
+    func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
+} # func_split_long_opt may be replaced by extended shell implementation
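+
+# Hypothetical usage sketch (example option only): after
+#
+#   func_split_long_opt '--ltdl=libltdl'
+#
+# $func_split_long_opt_name is "--ltdl" and $func_split_long_opt_arg is
+# "libltdl"; the option parser below uses this to turn "--opt=value" into
+# a separate "--opt value" pair.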
+
+exit_cmd=:
+
+
+
+
+
+# ltdl can be installed to be self-contained (subproject, the default);
+# or to be configured by a parent project, either with a recursive or
+# nonrecursive automake-driven make:
+ltdl_mode=
+
+# Locations for important files:
+ltdldir=
+
+# Parse environment options
+{
+  my_sed_env_opt='1s/^\([^,:; ]*\).*$/\1/;q'
+  my_sed_env_rest='1s/^[^,:; ]*[,:; ]*\(.*\)$/\1/;q'
+
+  while test -n "$LIBTOOLIZE_OPTIONS"; do
+    opt=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_opt"`
+    LIBTOOLIZE_OPTIONS=`echo "$LIBTOOLIZE_OPTIONS" | sed "$my_sed_env_rest"`
+
+    case $opt in
+      --debug|--no-warn|--quiet|--verbose)
+		envopts="${envopts+$envopts }$opt"			  ;;
+      --*)	env_warning="${env_warning+$env_warning
+}unrecognized environment option \`$opt'" 				  ;;
+      *)	func_fatal_help "garbled LIBTOOLIZE_OPTIONS near \`$opt'" ;;
+    esac
+  done
+
+  test -n "$envopts" && {
+    func_quote_for_eval "$envopts"
+    eval set dummy "$func_quote_for_eval_result" ${1+"$@"}
+    shift
+  }
+}
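+
+# Hypothetical sketch (example value only): with
+#
+#   LIBTOOLIZE_OPTIONS='--quiet,--debug'
+#
+# the loop above peels off "--quiet" and then "--debug", both recognized,
+# and prepends them to the positional parameters before normal option
+# parsing; an unrecognized "--foo" would instead be queued as a warning.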
+
+
+
+# Option defaults:
+opt_debug=:
+opt_copy=false
+opt_force=false
+opt_install=false
+opt_dry_run=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+opt_nonrecursive=false
+opt_recursive=false
+opt_standalone=false
+opt_ltdl="false"
+
+
+# Parse options once, thoroughly.  This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+  # this just eases exit handling
+  while test $# -gt 0; do
+    opt="$1"
+    shift
+    case $opt in
+      --debug|-x)	opt_debug='set -x'
+			func_echo "enabling shell trace mode"
+			$opt_debug
+			;;
+      --copy|-c)
+			opt_copy=:
+			;;
+      --force|-f)
+			opt_force=:
+			;;
+      --install|-i)
+			opt_install=:
+			;;
+      --dry-run|--dryrun|-n)
+			opt_dry_run=:
+CP="func_echo_all $CP"
+test -n "$LN_S" && LN_S="func_echo_all $LN_S"
+MKDIR="func_echo_all $MKDIR"
+RM="func_echo_all $RM"
+TAR="func_echo_all $TAR"
+			;;
+      --quiet|--automake|-q)
+			opt_quiet=:
+			;;
+      --verbose|-v)
+			opt_verbose=:
+			;;
+      --no-warning|--no-warn)
+			opt_warning=false
+			;;
+      --nonrecursive|--non-recursive)
+			opt_nonrecursive=:
+			;;
+      --recursive)
+			opt_recursive=:
+			;;
+      --standalone)
+			opt_standalone=:
+			;;
+      --ltdl)
+			optarg="$1"
+			if test $# -gt 0; then
+			    case $optarg in # ((
+			        -*) ;;
+			        *) opt_ltdl="$optarg"; shift ;;
+			    esac
+			fi
+# This is tricky, since we're overloading $opt_ltdl to be the
+# optarg for --ltdl during option processing, but then stashing
+# the (optional) optarg in $ltdldir and reusing $opt_ltdl to
+# indicate that --ltdl was seen during option processing.  Also,
+# be careful that --ltdl=foo --ltdl=bar results in ltdldir=bar:
+case $opt_ltdl in
+          false|:) ;;  # a bare '--ltdl' followed by another option
+  *)       ltdldir=`$ECHO "$optarg" | $SED 's,/*$,,'` ;;
+esac
+opt_ltdl=:
+			;;
+
+      -\?|-h)		func_usage				;;
+      --help)		func_help				;;
+      --version)	func_version				;;
+
+      # Separate optargs to long options:
+      --*=*)
+			func_split_long_opt "$opt"
+			set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      # Separate non-argument short options:
+      -\?*|-h*|-c*|-f*|-i*|-n*|-q*|-v*)
+			func_split_short_opt "$opt"
+			set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+			shift
+			;;
+
+      --)		break					;;
+      -*)		func_fatal_help "unrecognized option \`$opt'" ;;
+      *)		set dummy "$opt" ${1+"$@"};	shift; break  ;;
+    esac
+  done
+
+  # Validate options:
+
+  # show any warnings saved by LIBTOOLIZE_OPTIONS parsing
+  test -n "$env_warning" &&
+    echo "$env_warning" |while read line; do func_warning "$line"; done
+
+  # validate $opt_nonrecursive, $opt_recursive and $opt_standalone
+  if $opt_nonrecursive; then
+    if $opt_recursive || $opt_standalone; then
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    fi
+    ltdl_mode=nonrecursive
+  elif $opt_recursive; then
+    $opt_standalone &&
+      func_error "you can have at most one of --non-recursive, --recursive and --standalone"
+    ltdl_mode=recursive
+  elif $opt_standalone; then
+    ltdl_mode=standalone
+  fi
+
+  # any remaining arguments are an error
+  test $# -gt 0 &&
+    func_fatal_help "unknown additional arguments: \`${1+}'"
+
+
+  # Bail if the options were screwed
+  $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+# func_echo_once msg_var
+# Calls func_echo with the value of MSG_VAR, and then sets MSG_VAR="" so
+# that subsequent calls have no effect.
+func_echo_once ()
+{
+    $opt_debug
+    if test -n "$1"; then
+      eval my_msg=\$$1
+
+      if test -n "$my_msg"; then
+        func_echo "$my_msg"
+        eval $1=""
+      fi
+    fi
+}
+
+
+# func_copy srcfile destfile [msg_var]
+# A wrapper for func_copy_cb that accepts arguments in the same order
+# as the cp(1) shell command.
+func_copy ()
+{
+    $opt_debug
+
+    test -f "$1" || \
+      { func_error "\`$1' not copied:  not a regular file"; return 1; }
+
+    func_dirname_and_basename "$1"
+    my_f1=$func_basename_result
+
+    if test -d "$2"; then
+
+      func_copy_cb "$my_f1" \
+	`$ECHO "$1" | $SED "$dirname"` "$2" "$3"
+
+    else
+
+      # Supporting this would mean changing the timestamp:
+      func_dirname_and_basename "$2"
+      my_tname=$func_basename_result
+      test "X$my_f1" = "X$my_tname" \
+        || func_fatal_error "func_copy() cannot change filename on copy"
+
+      func_copy_cb "$my_f1" \
+        `$ECHO "$1" | $SED "$dirname"` \
+        `$ECHO "$2" | $SED "$dirname"` \
+	"$3"
+
+    fi
+
+    return $copy_return_status # set in func_copy_cb
+}
+
+
+# func_copy_cb filename srcdir destdir [msg_var]
+# If option `--copy' was specified, or soft-linking SRCFILE to DESTFILE fails,
+# then try to copy SRCFILE to DESTFILE (without changing the timestamp if
+# possible).
+func_copy_cb ()
+{
+    $opt_debug
+    my_file="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    copy_return_status=1
+
+    # Libtool is probably misinstalled if this happens:
+    test -f "$my_srcdir/$my_file" ||
+        func_fatal_error "\`$my_file' not found in \`$my_srcdir'"
+
+    case $opt_verbose in
+      false) my_copy_msg="file \`$my_destdir/$my_file'"     ;;
+      *)     my_copy_msg="file from \`$my_srcdir/$my_file'" ;;
+    esac
+    func_mkdir_p `$ECHO "$my_destdir/$my_file" | $SED "$dirname"`
+
+    $RM "$my_destdir/$my_file"
+    if $opt_copy; then
+      if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+           | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1; } \
+	 && touch "$my_destdir/$my_file"; then
+	$opt_quiet || func_echo_once "$my_msg_var"
+	$opt_quiet || func_echo "copying $my_copy_msg"
+	copy_return_status=0
+      fi
+    else
+      if test "$my_file" = "aclocal.m4"; then
+	if { ( cd "$my_srcdir" && $TAR chf - "$my_file" ) 2>/dev/null \
+	     | ( umask 0 && cd "$my_destdir" && $TAR xf - ) >/dev/null 2>&1 ; }
+	then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "copying $my_copy_msg"
+	  copy_return_status=0
+	fi
+      else
+	if $LN_S "$my_srcdir/$my_file" "$my_destdir/$my_file"; then
+	  $opt_quiet || func_echo_once "$my_msg_var"
+	  $opt_quiet || func_echo "linking $my_copy_msg"
+	  copy_return_status=0
+	fi
+      fi
+    fi
+    if test "$copy_return_status" != 0; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      func_error "can not copy \`$my_srcdir/$my_file' to \`$my_destdir/'"
+      exit_status=$EXIT_FAILURE
+    fi
+}
+
+
+# func_copy_some_files srcfile_spec srcdir destdir [msg_var] [cb=func_copy_cb]
+# Call COPY_CB for each regular file in SRCDIR named by the ':' delimited
+# names in SRCFILE_SPEC.  The odd calling convention is needed to allow
+# spaces in file and directory names.
+func_copy_some_files ()
+{
+    $opt_debug
+    my_srcfile_spec="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_copy_cb="${5-func_copy_cb}"
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for my_filename in $my_srcfile_spec; do
+      IFS="$my_save_IFS"
+      if test -f "$my_srcdir/$my_filename"; then
+        if test "X$my_copy_cb" = Xfunc_copy_cb; then
+	  $opt_force || if test -f "$my_destdir/$my_filename"; then
+	    $opt_quiet || func_echo_once "$my_msg_var"
+	    $opt_quiet \
+	      || func_error "\`$my_destdir/$my_filename' exists: use \`--force' to overwrite"
+	    continue
+	  fi
+        fi
+      else
+	func_echo_once "$my_msg_var"
+	func_fatal_error "\`$my_filename' not found in \`$my_srcdir'"
+      fi
+
+      $my_copy_cb "$my_filename" "$my_srcdir" "$my_destdir" "$my_msg_var"
+    done
+    IFS="$my_save_IFS"
+}
+
+
+# func_fixup_Makefile srcfile srcdir destdir
+func_fixup_Makefile ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_fixup_non_subpackage_script="\
+      s,(LIBOBJS),(ltdl_LIBOBJS),g
+      s,(LTLIBOBJS),(ltdl_LTLIBOBJS),g
+      s,libltdl/configure.ac,,
+      s,libltdl/configure,,
+      s,libltdl/aclocal.m4,,
+      s,libltdl/config-h.in,,
+      s,libltdl/Makefile.am,,
+      s,libltdl/Makefile.in,,
+      /^[	 ]*\\\\\$/d"
+    case $my_filename in
+      Makefile.am)
+	my_fixup_non_subpackage_script=`echo "$my_fixup_non_subpackage_script" | \
+		sed 's,libltdl/,,'`
+	my_fixup_inc_paths_script= ;;
+      Makefile.inc)
+	repl=$ltdldir
+	repl_uscore=`$ECHO "$repl" | $SED 's,[/.+-],_,g'`
+	my_fixup_inc_paths_script="\
+	  s,libltdl_,@repl_uscore@_,
+	  s,libltdl/,@repl@/,
+	  s,: libltdl/,: @repl@/,
+	  s, -Ilibltdl , -I@repl@ ,
+	  s,\\\$(libltdl_,\$(@repl_uscore@_,
+	  s,)/libltdl ,)/@repl@ ,
+	  s,@repl_uscore@,${repl_uscore},g
+	  s,@repl@,${repl},g"
+	;;
+    esac
+
+    $RM "$my_destdir/$my_filename" 2>/dev/null
+    $opt_quiet || func_echo "creating file \`$my_destdir/$my_filename'"
+    if $opt_dry_run; then :;
+    else
+      $SED "$my_fixup_non_subpackage_script
+	    $my_fixup_inc_paths_script" \
+	< "$my_srcdir/$my_filename" > "$my_destdir/$my_filename" ||
+	func_fatal_error "cannot create $my_destdir/$my_filename"
+    fi
+}
+
+# func_scan_files
+# Scan configure.(ac|in) and aclocal.m4 (if present) for use of libltdl
+# and libtool, possibly running some of these tools if necessary.
+# Libtoolize affects the contents of aclocal.m4, and should be run before
+# aclocal, so we can't use configure --trace which relies on a consistent
+# configure.(ac|in) and aclocal.m4.
+func_scan_files ()
+{
+    $opt_debug
+    # Prefer configure.ac to configure.in
+    test -f configure.ac && configure_ac=configure.ac
+    test -f "$configure_ac" || configure_ac=
+
+    # Set local variables to reflect contents of configure.ac
+    my_sed_scan_configure_ac='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,;
+	/AC_INIT/ {
+	    s,^.*$,seen_autoconf=:,
+	    p
+	}
+	d'
+    test -z "$configure_ac" \
+        || eval `$SED "$my_sed_scan_configure_ac" "$configure_ac"`
+
+    $seen_autoconf || {
+	my_configure_ac=
+	test -n "$configure_ac" && my_configure_ac="$configure_ac: "
+        func_verbose "${my_configure_ac}not using Autoconf"
+
+	# Make sure ltdldir and ltdl_mode have sensible defaults
+        # since we return early here:
+	test -n "$ltdldir" || ltdldir=libltdl
+	test -n "$ltdl_mode" || ltdl_mode=subproject
+
+	return
+    }
+
+    # ---------------------------------------------------- #
+    # Probe macro usage in configure.ac and/or aclocal.m4. #
+    # ---------------------------------------------------- #
+
+    my_sed_traces='s,#.*$,,; s,^dnl .*$,,; s, dnl .*$,,
+        s,^.*AC_REQUIRE(.*$,,; s,^.*m4_require(.*$,,;
+	s,^.*m4_define(.*$,,
+	s,^.*A[CU]_DEFUN(.*$,,; s,^.*m4_defun(.*$,,
+	/AC_CONFIG_AUX_DIR(/ {
+	    s,^.*AC_CONFIG_AUX_DIR([[	 ]*\([^])]*\).*$,ac_auxdir=\1,
+	    p
+        }
+	/AC_CONFIG_MACRO_DIR(/ {
+	    s,^.*AC_CONFIG_MACRO_DIR([[	 ]*\([^])]*\).*$,ac_macrodir=\1,
+	    p
+        }
+	/_LT_CONFIG_LTDL_DIR(/d
+	/LT_CONFIG_LTDL_DIR(/ {
+	    s,^.*LT_CONFIG_LTDL_DIR([[	 ]*\([^])]*\).*$,ac_ltdldir=\1,
+	    p
+	}
+	/\[A[CM]_PROG_LIBTOOL/d
+	/A[CM]_PROG_LIBTOOL/ {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/the.*option into.*LT_INIT.*parameter/d
+	/\[LT_INIT/d
+	/LT_INIT/		 {
+	    s,^.*$,seen_libtool=:,
+	    p
+	}
+	/\[LTDL_INIT/d
+	/LTDL_INIT/          {
+	    s,^.*LTDL_INIT([[	 ]*\([^])]*\).*$,ltdl_options="\1",
+	    s,^.*LTDL_INIT[	 ]*$,seen_ltdl=:,
+	    p
+	}
+	/LT_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_LIB_LTDL/        {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	/AC_WITH_LTDL/       {
+	    s,^.*$,seen_ltdl=:,
+	    p
+	}
+	d'
+    eval `cat aclocal.m4 "$configure_ac" 2>/dev/null | $SED "$my_sed_traces"`
+
+
+    # ----------------- #
+    # Validate ltdldir. #
+    # ----------------- #
+
+    ac_ltdldir=`$ECHO "$ac_ltdldir" | $SED 's,/*$,,'`
+
+    # If $configure_ac contains LT_CONFIG_LTDL_DIR, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ac_ltdldir" in
+      *\$*)
+        func_fatal_error "can not handle variables in LT_CONFIG_LTDL_DIR"
+        ;;
+    esac
+
+    # If neither --ltdl nor LT_CONFIG_LTDL_DIR are specified, default to
+    # `libltdl'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given! (If LT_CONFIG_LTDL_DIR is not specified
+    # we suggest adding it later in this code.)
+    case x$ac_ltdldir,x$ltdldir in
+      x,x)	ltdldir=libltdl		;;
+      x*,x)	ltdldir=$ac_ltdldir	;;
+      x,x*)	ltdldir=$ltdldir	;;
+      *)
+        test x"$ac_ltdldir" = x"$ltdldir" || \
+	    func_fatal_error "--ltdl='$ltdldir' does not match LT_CONFIG_LTDL_DIR($ac_ltdldir)"
+	;;
+    esac
+
+
+    # ------------------- #
+    # Validate ltdl_mode. #
+    # ------------------- #
+
+    test -n "$ltdl_options" && seen_ltdl=:
+
+    # If $configure_ac contains LTDL_INIT, check that its
+    # arguments were not given in terms of a shell variable!
+    case "$ltdl_options" in
+      *\$*)
+        func_fatal_error "can not handle variables in LTDL_INIT"
+        ;;
+    esac
+
+    # Extract mode name from ltdl_options
+    # FIXME: Diagnose multiple conflicting modes in ltdl_options
+    ac_ltdl_mode=
+    case " $ltdl_options " in
+      *" nonrecursive "*)  ac_ltdl_mode=nonrecursive	;;
+      *" recursive "*)     ac_ltdl_mode=recursive	;;
+      *" subproject "*)    ac_ltdl_mode=subproject	;;
+    esac
+
+    # If neither --ltdl nor an LTDL_INIT mode are specified, default to
+    # `subproject'.  If both are specified, they must be the same.  Otherwise,
+    # take the one that is given!
+    case x$ac_ltdl_mode,x$ltdl_mode in
+      x,x)	ltdl_mode=subproject	;;
+      x*,x)	ltdl_mode=$ac_ltdl_mode	;;
+      x,x*)	ltdl_mode=$ltdl_mode	;;
+      *)
+        test x"$ac_ltdl_mode" = x"$ltdl_mode" || \
+	    func_fatal_error "--$ltdl_mode does not match LTDL_INIT($ac_ltdl_mode)"
+	;;
+    esac
+
+    # ---------------- #
+    # Validate auxdir. #
+    # ---------------- #
+
+    if test -n "$ac_auxdir"; then
+      # If $configure_ac contains AC_CONFIG_AUX_DIR, check that it was
+      # not given in terms of a shell variable!
+      case "$ac_auxdir" in
+      *\$*)
+        func_fatal_error "can not handle variables in AC_CONFIG_AUX_DIR"
+        ;;
+      *)
+	auxdir=$ac_auxdir
+	;;
+      esac
+    else
+      # Try to discover auxdir the same way it is discovered by configure.
+      # Note that we default to the current directory.
+      for dir in . .. ../..; do
+        if test -f "$dir/install-sh"; then
+          auxdir=$dir
+          break
+        elif test -f "$dir/install.sh"; then
+          auxdir="$dir"
+          break
+        fi
+      done
+    fi
+
+    # Just use the current directory if all else fails.
+    test -n "$auxdir" || auxdir=.
+
+
+    # ------------------------------ #
+    # Find local m4 macro directory. #
+    # ------------------------------ #
+
+    # Hunt for ACLOCAL_AMFLAGS in `Makefile.am' for a `-I' argument.
+
+    my_sed_aclocal_flags='
+        /^[	 ]*ACLOCAL_[A-Z_]*FLAGS[	 ]*=[	 ]*/ {
+	    s,,,
+	    q
+	}
+	d'
+    if test -f Makefile.am; then
+      my_macrodir_is_next=false
+      for arg in `$SED "$my_sed_aclocal_flags" Makefile.am`; do
+        if $my_macrodir_is_next; then
+          am_macrodir="$arg"
+          break
+        else
+	  case $arg in
+	    -I) my_macrodir_is_next=: ;;
+	    -I*)
+	      am_macrodir=`$ECHO "$arg" | sed 's,^-I,,'`
+	      break
+	      ;;
+	    *) my_macrodir_is_next=false ;;
+	  esac
+        fi
+      done
+    fi
+
+    macrodir="$ac_macrodir"
+    test -z "$macrodir" && macrodir="$am_macrodir"
+
+    if test -n "$am_macrodir" && test -n "$ac_macrodir"; then
+      test "$am_macrodir" = "$ac_macrodir" \
+        || func_fatal_error "AC_CONFIG_MACRO_DIR([$ac_macrodir]) conflicts with ACLOCAL_AMFLAGS=-I $am_macrodir."
+    fi
+}
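# A worked sketch of the traces above, for a hypothetical configure.ac:
#
#   AC_INIT([demo], [1.0])           ->  seen_autoconf=:
#   AC_CONFIG_AUX_DIR([build-aux])   ->  ac_auxdir=build-aux
#   AC_CONFIG_MACRO_DIR([m4])        ->  ac_macrodir=m4
#   LT_CONFIG_LTDL_DIR([libltdl])    ->  ac_ltdldir=libltdl
#   LT_INIT([dlopen])                ->  seen_libtool=:
#
# The sed scripts emit plain shell assignments, which the surrounding
# eval then executes to set the corresponding variables.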
+
+# func_included_files searchfile
+# Output SEARCHFILE and, recursively, any files that it m4_includes.
+func_included_files ()
+{
+    $opt_debug
+    my_searchfile="$1"
+
+    my_include_regex=
+    my_sed_include='
+        /^m4_include(\[.*\])$/ {
+	    s,^m4_include(\[\(.*\)\])$,\1,
+	    p
+	}
+        d'
+
+    if test -f "$my_searchfile"; then
+      $ECHO "$my_searchfile"
+
+      # Only recurse when we don't care if all the variables we use get
+      # trashed, since they are in global scope.
+      for my_filename in `$SED "$my_sed_include" "$my_searchfile"`; do
+	func_included_files $my_filename
+      done
+    fi
+}
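# An illustrative sketch: if aclocal.m4 consists solely of the line
#
#   m4_include([m4/libtool.m4])
#
# then `func_included_files aclocal.m4' prints `aclocal.m4' followed by
# `m4/libtool.m4', recursing in turn into anything m4/libtool.m4 m4_includes.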
+
+
+# func_serial filename [macro_regex]
+# Output the value of the serial number comment in FILENAME, where the
+# comment line must also match MACRO_REGEX, if given.
+func_serial ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_macro_regex="$2"
+    my_sed_serial='
+	/^# serial [1-9][0-9.]*[	 ]*'"$my_macro_regex"'[	 ]*$/ {
+	    s,^# serial \([1-9][0-9.]*\).*$,\1,
+	    q
+	}
+	d'
+
+    # Search FILENAME and all the files it m4_includes for a serial number
+    # in the file that AC_DEFUNs MACRO_REGEX.
+    my_serial=
+    func_dirname_and_basename "$my_filename"
+    my_filebase=$func_basename_result
+    for my_file in `func_included_files "$my_filename"`; do
+      if test -z "$my_macro_regex" ||
+         test "$my_filename" = aclocal.m4 ||
+         test "X$my_macro_regex" = "X$my_filebase" ||
+         func_grep '^AC_DEFUN(\['"$my_macro_regex" "$my_file"
+      then
+        my_serial=`$SED -e "$my_sed_serial" "$my_file"`
+	break
+      fi
+    done
+
+    # If the file has no serial number, we assume it's ancient.
+    test -n "$my_serial" || my_serial=0
+
+    $ECHO "$my_serial"
+}
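# An illustrative sketch: macro files advertise their revision with a
# comment such as
#
#   # serial 57 LT_INIT
#
# so `func_serial m4/libtool.m4 LT_INIT' would print `57' for such a file,
# and `0' when no matching serial comment is found.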
+
+
+# func_serial_max serial1 serial2
+# Compare (possibly multi-part, '.' delimited) serial numbers, and
+# return the largest in $func_serial_max_result.  If they are the
+# same, func_serial_max_result will be empty.
+func_serial_max ()
+{
+    $opt_debug
+    my_serial1="$1"
+    my_serial2="$2"
+
+    my_sed_dot='s/\..*$//g'
+    my_sed_rest='s/^[0-9][1-9]*\.*//'
+    my_sed_digits='s/[^0-9.]//g'
+
+    # In case they turn out to be the same, we'll set it to empty
+    func_serial_max_result=
+
+    test "X$1$2" = X`$ECHO "$1$2" | $SED "$my_sed_digits"` || {
+      func_error "serial numbers \`$1' or \`$2' contain non-digit chars"
+      return
+    }
+
+    while test -n "$my_serial1$my_serial2"; do
+      my_serial1_part=`$ECHO "$my_serial1" | $SED "$my_sed_dot"`
+      my_serial2_part=`$ECHO "$my_serial2" | $SED "$my_sed_dot"`
+
+      test -z "$my_serial1_part$my_serial2_part" \
+        && break
+
+      test -z "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      test -z "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial1_part" -gt "$my_serial2_part" \
+        && { func_serial_max_result="$1"; break; }
+
+      test "$my_serial2_part" -gt "$my_serial1_part" \
+        && { func_serial_max_result="$2"; break; }
+
+      my_serial1=`$ECHO "$my_serial1" | $SED "$my_sed_rest"`
+      my_serial2=`$ECHO "$my_serial2" | $SED "$my_sed_rest"`
+    done
+}
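# A usage sketch, assuming the helpers above are sourced and $ECHO and $SED
# are set; the comparison runs part by part over the '.'-delimited values:
#
#   func_serial_max 2.4.2 2.2     ->  func_serial_max_result=2.4.2
#   func_serial_max 2.4 2.4       ->  func_serial_max_result left empty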
+
+
+# func_serial_update_check srcfile src_serial destfile dest_serial
+# Unless SRC_SERIAL is newer than DEST_SERIAL, set
+# $func_serial_update_check_result to 'false'.
+func_serial_update_check ()
+{
+    $opt_debug
+    my_srcfile="$1"
+    my_src_serial="$2"
+    my_destfile="$3"
+    my_dest_serial="$4"
+    my_update_p=:
+
+    if test -f "$my_destfile"; then
+      test "X$my_src_serial" = "X0" && {
+        func_warning "no serial number on \`$my_srcfile', not copying."
+	return
+      }
+
+      # Determine whether the destination has an older serial.
+      func_serial_max "$my_src_serial" "$my_dest_serial"
+      test "X$my_src_serial" = "X$func_serial_max_result" || my_update_p=false
+
+      test "X$my_src_serial" = "X$func_serial_max_result" \
+        && func_verbose "\`$my_srcfile' is serial $my_src_serial, greater than $my_dest_serial in \`$my_destfile'"
+
+      if test "X$my_dest_serial" = "X$func_serial_max_result"; then
+        func_verbose "\`$my_srcfile' is serial $my_src_serial, less than $my_dest_serial in \`$my_destfile'"
+	$opt_force || if test -n "$ac_macrodir$ac_ltdldir"; then
+           func_error "\`$my_destfile' is newer: use \`--force' to overwrite"
+        fi
+      fi
+    fi
+
+    func_serial_update_check_result="$my_update_p"
+}
+
+
+# func_aclocal_update_check filename
+# Unless the serial number of FILENAME is newer than the matching serial
+# number in aclocal.m4, set $func_aclocal_update_check_result to 'false'.
+func_aclocal_update_check ()
+{
+    $opt_debug
+    my_srcfile="$aclocaldir/$1"
+    my_destfile="aclocal.m4"
+
+    case $need in
+      libtool.m4)
+	my_src_serial=`func_serial "$my_srcfile" LT_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LT_INIT`
+
+	# Strictly, this libtoolize ought not to have to deal with ancient
+	# serial formats, but we accept them here to be complete:
+	test "X$my_src_serial" = "X0" &&
+	  my_src_serial=`func_serial "$my_srcfile" 'A[CM]_PROG_LIBTOOL'`
+	test "X$my_dest_serial" = "X0" &&
+	  my_dest_serial=`func_serial "$my_destfile" 'A[CM]_PROG_LIBTOOL'`
+	;;
+      ltdl.m4)
+	my_src_serial=`func_serial "$my_srcfile" LTDL_INIT`
+	my_dest_serial=`func_serial "$my_destfile" LTDL_INIT`
+	;;
+      *)
+	my_src_serial=`func_serial "$my_srcfile" "$need"`
+	my_dest_serial=`func_serial "$my_destfile" "$need"`
+	;;
+    esac
+
+    func_serial_update_check \
+      "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+
+    func_aclocal_update_check_result="$func_serial_update_check_result"
+}
+
+
+# func_serial_update filename srcdir destdir [msg_var] [macro_re] [old_macro_re]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer serial number, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.  If given, MACRO_REGEX or
+# OLD_MACRO_REGEX must match any text after "# serial N" in both files.
+func_serial_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_msg_var="$4"
+    my_macro_regex="$5"
+    my_old_macro_regex="$6"
+
+    my_serial_update_p=:
+    my_return_status=1
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`func_serial "$my_srcfile" "$my_macro_regex"`
+      my_dest_serial=`func_serial "$my_destfile" "$my_macro_regex"`
+
+      # Strictly, this libtoolize ought not to have to deal with ancient
+      # serial formats, but we accept them here to be complete:
+      test "X$my_src_serial" = "X0" &&
+        my_src_serial=`func_serial "$my_srcfile" "$my_old_macro_regex"`
+
+      test "X$my_dest_serial" = "X0" &&
+        my_dest_serial=`func_serial "$my_destfile" "$my_old_macro_regex"`
+
+      func_serial_update_check \
+        "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_serial_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_serial_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+      my_return_status=$?
+    elif $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      $opt_quiet || func_echo_once "$my_msg_var"
+      $opt_quiet \
+        || func_echo "\`$my_destfile' is already up to date."
+    fi
+
+    # Do this after the copy for hand-maintained `aclocal.m4', in case
+    # it has `m4_include([DESTFILE])', so the copy effectively already
+    # updated `aclocal.m4'.
+    my_included_files=`func_included_files aclocal.m4`
+    case `echo " $my_included_files " | $NL2SP` in
+
+      # Skip included files:
+      *" $my_destfile "*) ;;
+
+      # Otherwise compare to aclocal.m4 serial number (func_serial
+      # returns 0 for older macro serial numbers before we provided
+      # serial tags, so the update message will be correctly given
+      # if aclocal.m4 contains an untagged --i.e older-- macro file):
+      *)
+        if test -f aclocal.m4; then
+          func_serial_max \
+              "$my_src_serial" `func_serial aclocal.m4 "$my_macro_regex"`
+          if test "X$my_src_serial" = "X$func_serial_max_result"; then
+              func_echo_once "$my_msg_var"
+	      func_echo "You should add the contents of \`$my_destfile' to \`aclocal.m4'."
+          fi
+        fi
+        ;;
+    esac
+    return $my_return_status
+}
+
+
+# func_keyword_update filename srcdir destdir sed_script [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision according to the serial number extracted by
+# SED_SCRIPT, or DESTFILE does not yet exist, or the user specified
+# `--force' at the command line.
+func_keyword_update ()
+{
+    $opt_debug
+    my_filename="$1"
+    my_srcdir="$2"
+    my_destdir="$3"
+    my_sed_script="$4"
+    my_msg_var="$5"
+
+    my_srcfile="$my_srcdir/$my_filename"
+    my_destfile="$my_destdir/$my_filename"
+
+    my_keyword_update_p=:
+
+    test -f "$my_srcfile" || func_fatal_error "\`$my_srcfile' does not exist."
+
+    if test -f "$my_destfile"; then
+      my_src_serial=`$SED -e "$my_sed_script" "$my_srcfile"`
+      test -z "$my_src_serial" && {
+        func_warning "no serial number in \`$my_srcfile', not copying."
+	return
+      }
+
+      my_dest_serial=`$SED -e "$my_sed_script" "$my_destfile"`
+      test -n "$my_dest_serial" || my_dest_serial=0
+
+      func_serial_update_check \
+         "$my_srcfile" "$my_src_serial" "$my_destfile" "$my_dest_serial"
+      my_keyword_update_p="$func_serial_update_check_result"
+    fi
+
+    if $my_keyword_update_p || $opt_force; then
+      func_copy "$my_srcfile" "$my_destfile" "$my_msg_var"
+    elif $opt_verbose || $opt_force && test "X$my_dest_serial" = "X$my_src_serial"; then
+      func_echo_once "$my_msg_var"
+      func_echo "\`$my_destfile' is already up to date."
+    fi
+}
+
+
+# func_ltmain_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer revision, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_ltmain_update ()
+{
+    $opt_debug
+    my_sed_ltmain='
+	/^package_revision='\''*[0-9][1-9.]*'\''*/ {
+	    s,^package_revision='\''*\([0-9.]*\)'\''*[	 ]*$,\1,
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_ltmain" "$4"
+
+    return $my_return_status
+}
+
+
+# func_config_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_config_update ()
+{
+    $opt_debug
+    my_sed_config='
+	/^timestamp='\''*[0-9][1-9-]*'\''*/ {
+	    s,^timestamp='\''*\([0-9-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_config" "$4"
+
+    return $my_return_status
+}
+
+
+# func_install_update filename srcdir destdir [msg_var]
+# Copy the FILENAME from a SRCDIR to DESTDIR provided that either FILENAME
+# has a newer timestamp, or DESTFILE does not yet exist, or the user
+# specified `--force' at the command line.
+func_install_update ()
+{
+    $opt_debug
+    my_sed_install='
+	/^scriptversion='\''*[0-9][1-9.-]*'\''*/ {
+	    s,[#;].*,,
+	    s,^scriptversion='\''*\([0-9.-]*\)'\''*,\1,
+	    s/-/./g
+	    p
+	}
+	d'
+
+    func_keyword_update "$1" "$2" "$3" "$my_sed_install" "$4"
+
+    return $my_return_status
+}
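# An illustrative sketch of the keyword lines the three wrappers above key
# on (the values shown are examples, not tied to any particular release):
#
#   ltmain.sh:     package_revision=1.3337      ->  serial 1.3337
#   config.guess:  timestamp='2012-02-10'       ->  serial 2012.02.10
#   install-sh:    scriptversion=2011-11-20.07  ->  serial 2011.11.20.07
#
# Hyphens are rewritten to dots so func_serial_update_check can compare the
# values as ordinary multi-part serial numbers.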
+
+
+# func_massage_aclocal_DATA [glob_exclude]
+# @aclocal_DATA\@ is substituted as per its value in Makefile.am;
+# this function massages it into a suitable format for func_copy_some_files.
+func_massage_aclocal_DATA ()
+{
+    $opt_debug
+    pkgmacro_files=     # GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgmacro_files from the value used in Makefile.am.
+    for my_filename in m4/argz.m4 m4/libtool.m4 m4/ltdl.m4 m4/ltoptions.m4 m4/ltsugar.m4 m4/ltversion.m4 m4/lt~obsolete.m4; do
+      func_dirname_and_basename "$my_filename"
+      my_filename=$func_basename_result
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      pkgmacro_files="$pkgmacro_files:$my_filename"
+    done
+
+    # strip spurious leading `:'
+    pkgmacro_files=`$ECHO "$pkgmacro_files" | $SED 's,^:*,,'`
+}
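# An illustrative sketch: the call made by func_install_pkgmacro_files
# below, `func_massage_aclocal_DATA 'argz.m4|libtool.m4|ltdl.m4'`, leaves
#
#   pkgmacro_files=ltoptions.m4:ltsugar.m4:ltversion.m4:lt~obsolete.m4
#
# i.e. the basenames of the remaining bundled macro files joined with ':',
# with the specially handled files excluded.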
+
+
+# func_install_pkgmacro_subproject
+# Unless --quiet was passed, display a message. Then copy pkgmacro_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgmacro_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$subproject_macrodir/$file" && func_verbose "rm -f '$subproject_macrodir/$file'"
+      rm -f "$subproject_macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test "x$macrodir" != "x$subproject_macrodir"; then
+      pkgmacro_header="putting macros in \`$subproject_macrodir'."
+    elif test -n "$subproject_macrodir"; then
+      pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$subproject_macrodir'."
+    fi
+
+    func_copy_some_files "argz.m4:libtool.m4:ltdl.m4:$pkgmacro_files" \
+      "$aclocaldir" "$subproject_macrodir" pkgmacro_header
+}
+
+
+# func_install_pkgmacro_parent
+# Unless --quiet was passed, or AC_CONFIG_MACRO_DIR was not seen, display
+# a message.  Then update appropriate macros if newer ones are available
+# from the libtool installation tree.
+func_install_pkgmacro_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgmacro_files; do
+      test -f "$macrodir/$file" && func_verbose "rm -f '$macrodir/$file'"
+      rm -f "$macrodir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified a macrodir.
+    $opt_quiet || if test -n "$ac_macrodir"; then
+      my_pkgmacro_header="putting macros in AC_CONFIG_MACRO_DIR, \`$ac_macrodir'."
+    elif test -n "$macrodir"; then
+      my_pkgmacro_header="putting macros in \`$macrodir'."
+    fi
+
+    if $opt_ltdl; then
+      func_serial_update argz.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header argz.m4
+    else
+      func_verbose "Not copying \`$macrodir/argz.m4', libltdl not used."
+    fi
+
+    func_serial_update  libtool.m4 "$aclocaldir" "$macrodir" \
+      my_pkgmacro_header LT_INIT 'A[CM]_PROG_LIBTOOL'
+
+    if $opt_ltdl; then
+      func_serial_update ltdl.m4 "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header 'LTDL_INIT'
+    else
+      func_verbose "Not copying \`$macrodir/ltdl.m4', libltdl not used."
+    fi
+
+    my_save_IFS="$IFS"
+    IFS=:
+    for file in $pkgmacro_files; do
+      IFS="$my_save_IFS"
+      func_serial_update "$file" "$aclocaldir" "$macrodir" \
+        my_pkgmacro_header "$file"
+    done
+    IFS="$my_save_IFS"
+}
+
+
+# func_install_pkgmacro_files
+# Install copies of the libtool and libltdl m4 macros into this package.
+func_install_pkgmacro_files ()
+{
+    $opt_debug
+
+    # argz.m4, libtool.m4 and ltdl.m4 are handled specially:
+    func_massage_aclocal_DATA 'argz.m4|libtool.m4|ltdl.m4'
+
+  # 1. Parent has separate macrodir to subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test -n "$macrodir" && test "x$macrodir" != "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_parent
+      func_install_pkgmacro_subproject
+
+  # 2. Parent shares macrodir with subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$macrodir" = "x$subproject_macrodir"
+    then
+      func_install_pkgmacro_subproject
+
+  # 3. Not a subproject, but macrodir was specified in parent:
+    elif test -n "$macrodir"; then
+      func_install_pkgmacro_parent
+
+  # 4. AC_CONFIG_MACRO_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_MACRO_DIR not defined, not copying libtool macros."
+    fi
+}
+
+
+# func_massage_pkgltdl_files [glob_exclude]
+# @pkgltdl_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgltdl_files ()
+{
+    $opt_debug
+    pkgltdl_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgltdl_files from the value used in Makefile.am
+    for my_filename in libltdl/COPYING.LIB libltdl/README libltdl/Makefile.inc libltdl/Makefile.am libltdl/configure.ac libltdl/aclocal.m4 libltdl/Makefile.in libltdl/config-h.in libltdl/configure libltdl/argz_.h libltdl/argz.c libltdl/loaders/dld_link.c libltdl/loaders/dlopen.c libltdl/loaders/dyld.c libltdl/loaders/load_add_on.c libltdl/loaders/loadlibrary.c libltdl/loaders/shl_load.c libltdl/lt__dirent.c libltdl/lt__strl.c libltdl/libltdl/lt__alloc.h libltdl/libltdl/lt__dirent.h libltdl/libltdl/lt__glibc.h libltdl/libltdl/lt__private.h libltdl/libltdl/lt__strl.h libltdl/libltdl/lt_dlloader.h libltdl/libltdl/lt_error.h libltdl/libltdl/lt_system.h libltdl/libltdl/slist.h libltdl/loaders/preopen.c libltdl/lt__alloc.c libltdl/lt_dlloader.c libltdl/lt_error.c libltdl/ltdl.c libltdl/ltdl.h libltdl/slist.c; do
+
+      # Strip surplus leading 'libltdl/':
+      my_filename=`expr "X$my_filename" : 'Xlibltdl/\(.*\)'`
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgltdl_files: in
+        *:$my_filename:*) ;;
+	*) pkgltdl_files="$pkgltdl_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgltdl_files=`$ECHO "$pkgltdl_files" | $SED 's,^:*,,'`
+}
+
+
+# func_install_pkgltdl_files
+# Install copies of the libltdl files into this package.  Any auxiliary
+# or m4 macro files needed in the libltdl tree will also be copied by
+# func_install_pkgconfig_files and func_install_pkgmacro_files resp.
+func_install_pkgltdl_files ()
+{
+    $opt_debug
+    $opt_ltdl || return
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgltdl_files; do
+      test -f "$ltdldir/$file" && func_verbose "rm -f '$ltdldir/$file'"
+      rm -f "$ltdldir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified `--ltdl'.
+    $opt_quiet || if test -n "$ac_ltdldir"; then
+      pkgltdl_header="putting libltdl files in LT_CONFIG_LTDL_DIR, \`$ac_ltdldir'."
+    elif test -n "$ltdldir"; then
+      pkgltdl_header="putting libltdl files in \`$ltdldir'."
+    fi
+
+    # These files are handled specially, depending on ltdl_mode:
+    if test "x$ltdl_mode" = "xsubproject"; then
+      func_massage_pkgltdl_files 'Makefile.inc'
+    else
+      func_massage_pkgltdl_files 'Makefile.am|Makefile.in*|aclocal.m4|config*'
+    fi
+
+    func_copy_some_files "$pkgltdl_files" \
+      "$pkgltdldir/libltdl" "$ltdldir" pkgltdl_header
+
+    # For recursive ltdl modes, copy a suitable Makefile.{am,inc}:
+    case $ltdl_mode in
+      recursive)
+        func_fixup_Makefile "Makefile.am" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+      nonrecursive)
+        func_fixup_Makefile "Makefile.inc" "$pkgltdldir/libltdl" "$ltdldir"
+        ;;
+    esac
+}
+
+
+# func_massage_pkgconfig_files [glob_exclude]
+# @pkgconfig_files\@ is substituted as per its value in Makefile.am; this
+# function massages it into a suitable format for func_copy_some_files.
+func_massage_pkgconfig_files ()
+{
+    $opt_debug
+    pkgconfig_files=	# GLOBAL VAR
+
+    my_glob_exclude="$1"
+
+    # Massage a value for pkgconfig_files from the value used in Makefile.am
+    for my_filename in config/compile config/config.guess config/config.sub config/depcomp config/install-sh config/missing config/ltmain.sh; do
+
+      # ignore excluded filenames
+      if test -n "$my_glob_exclude"; then
+	my_cont=false
+	eval 'case $my_filename in '$my_glob_exclude') my_cont=: ;; esac'
+	$my_cont && continue
+      fi
+
+      # ignore duplicates
+      case :$pkgconfig_files: in
+        *:$my_filename:*) ;;
+	*) pkgconfig_files="$pkgconfig_files:$my_filename" ;;
+      esac
+    done
+
+    # strip spurious leading `:'
+    pkgconfig_files=`$ECHO "$pkgconfig_files" | $SED 's,^:*,,'`
+}
+
+
+# func_install_pkgconfig_subproject
+# Unless --quiet was passed, display a message. Then copy pkgconfig_files
+# from libtool installation tree to subproject libltdl tree.
+func_install_pkgconfig_subproject ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$subproject_auxdir/$file" && func_verbose "rm -f '$subproject_auxdir/$file'"
+      rm -f "$subproject_auxdir/$file"
+    done
+
+    # Copy all the files from installed libltdl to this project, if the
+    # user specified an auxdir.
+    $opt_quiet || if test "x$ac_auxdir" = "x$subproject_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$subproject_auxdir'."
+    elif test -n "$auxdir"; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    func_copy_some_files "$pkgconfig_files" \
+      "$pkgdatadir" "$ltdldir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_parent
+# Unless --quiet was passed, or AC_CONFIG_AUX_DIR was not seen, display a
+# message.  Then update appropriate auxiliary files if newer ones are
+# available from the libtool installation tree.
+func_install_pkgconfig_parent ()
+{
+    $opt_debug
+
+    # Remove any lingering files that may have been installed by some
+    # previous libtoolize release:
+    $opt_force && for file in $all_pkgconfig_files; do
+      test -f "$auxdir/$file" && func_verbose "rm -f '$auxdir/$file'"
+      rm -f "$auxdir/$file"
+    done
+
+    if test -n "$ac_auxdir"; then
+      pkgconfig_header="putting auxiliary files in AC_CONFIG_AUX_DIR, \`$ac_auxdir'."
+    elif test -n "$auxdir" || test "x$ltdldir" = "x."; then
+      pkgconfig_header="putting auxiliary files in \`$auxdir'."
+    fi
+
+    if $opt_install; then
+      func_config_update config.guess \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_config_update config.sub \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+      func_install_update install-sh \
+        "$pkgdatadir/config" "$auxdir" pkgconfig_header
+    fi
+    func_ltmain_update ltmain.sh \
+      "$pkgdatadir/config" "$auxdir" pkgconfig_header
+}
+
+
+# func_install_pkgconfig_files
+# Install copies of the auxiliary files into this package according to
+# whether libltdl is included as a subproject, and whether the parent
+# shares the AC_CONFIG_AUX_DIR setting.
+func_install_pkgconfig_files ()
+{
+    $opt_debug
+    func_massage_pkgconfig_files
+
+  # 1. Parent shares auxdir with subproject ltdl:
+    if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" &&
+       test "x$ac_auxdir" = "x$subproject_auxdir"
+    then
+      func_install_pkgconfig_subproject
+
+  # 2. Parent has separate auxdir to subproject ltdl:
+    elif $opt_ltdl && test "x$ltdl_mode" = "xsubproject"
+       # && test "x$auxdir" != "x$subproject_auxdir" is implied
+    then
+      if $seen_autoconf; then
+	func_install_pkgconfig_parent
+      fi
+      func_install_pkgconfig_subproject
+
+  # 3. Not subproject, but AC_CONFIG_AUX_DIR was used in parent:
+    elif test -n "$ac_auxdir" || test "x$auxdir" = "x."; then
+      func_install_pkgconfig_parent
+
+  # 4. AC_CONFIG_AUX_DIR was not specified:
+    else
+      func_verbose "AC_CONFIG_AUX_DIR not defined, not copying libtool auxiliary files."
+    fi
+}
+
+
+# func_nonemptydir_p dirvar
+# DIRVAR is the name of a variable to evaluate.  Unless DIRVAR names
+# a directory that exists and is non-empty, abort with a diagnostic.
+func_nonemptydir_p ()
+{
+    $opt_debug
+    my_dirvar="$1"
+    my_dir=`eval echo "\\\$$my_dirvar"`
+
+    # Is it a directory at all?
+    test -d "$my_dir" \
+      || func_fatal_error "\$$my_dirvar is not a directory: \`$my_dir'"
+
+    # Check that the directory's contents can be ls'ed.
+    test -n "`{ cd $my_dir && ls; } 2>/dev/null`" \
+        || func_fatal_error "can not list files: \`$my_dir'"
+}
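# A usage sketch: with `pkgdatadir=/usr/share/libtool' set (a hypothetical
# location), `func_nonemptydir_p pkgdatadir' returns silently when that
# directory exists and has listable contents, and aborts with a fatal
# diagnostic otherwise.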
+
+
+# func_check_macros
+# Sanity check macros from aclocal.m4 against installed versions.
+func_check_macros ()
+{
+    $opt_debug
+    $opt_quiet && return
+    $seen_autoconf || return
+
+    ac_config_macro_dir_advised=false
+
+    if test -n "$ac_macrodir$ltdldir" && test -z "$macrodir"; then
+      my_ac_config_macro_srcdir="$aclocaldir"
+      if $opt_ltdl && test "$macrodir" != "$subproject_macrodir"; then
+	my_ac_config_macro_srcdir="$subproject_macrodir"
+      fi
+
+      my_needed="libtool.m4 ltoptions.m4 ltversion.m4 ltsugar.m4 lt~obsolete.m4"
+      $opt_ltdl && my_needed="$my_needed argz.m4 ltdl.m4"
+
+      if test -f "aclocal.m4"; then
+	for need in $my_needed; do
+	  func_aclocal_update_check $need
+	  $func_aclocal_update_check_result && my_missing="$my_missing $need"
+	done
+      else
+        my_missing="$my_needed"
+      fi
+
+      if test -n "$my_missing"; then
+        func_echo "You should add the contents of the following files to \`aclocal.m4':"
+        for need in $my_missing; do
+	  func_echo "  \`$my_ac_config_macro_srcdir/$need'"
+        done
+
+        if test "$my_ac_config_macro_srcdir" != "$aclocaldir"; then
+          func_echo "or else add \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' to $configure_ac."
+	  ac_config_macro_dir_advised=:
+        fi
+      fi
+    fi
+
+    ## ---------------------------------------------------------- ##
+    ## Since we return early here when --no-warn was given:       ##
+    ## DO NOT PUT ANYTHING BUT UPGRADE ADVICE MESSAGES BELOW HERE ##
+    ## ---------------------------------------------------------- ##
+
+    $opt_warning || return
+
+    $seen_libtool ||
+      func_echo "Remember to add \`LT_INIT' to $configure_ac."
+
+    # Suggest using LTDL_INIT if appropriate:
+    $opt_ltdl && if test x$seen_ltdl != x:; then
+      case $ltdl_mode in
+	subproject) ltdl_init_args=""               ;;
+	*)          ltdl_init_args="([$ltdl_mode])" ;;
+      esac
+      func_echo "Remember to add \`LTDL_INIT$ltdl_init_args' to $configure_ac."
+    fi
+
+    if $opt_ltdl; then
+      # Remind the user to call LT_CONFIG_LTDL_DIR:
+      test -n "$ac_ltdldir" ||
+        func_echo "Remember to add \`LT_CONFIG_LTDL_DIR([$ltdldir])' to \`$configure_ac'."
+
+      # For subproject mode, offer some suggestions for avoiding duplicate
+      # files in a project that uses libltdl:
+      if test "x$ltdl_mode" = "xsubproject"; then
+        test "$subproject_auxdir" = "$auxdir" ||
+          func_echo "Consider using \`AC_CONFIG_AUX_DIR([$subproject_auxdir])' in $configure_ac."
+        $ac_config_macro_dir_advised || test "$subproject_macrodir" = "$macrodir" ||
+          func_echo "Consider using \`AC_CONFIG_MACRO_DIR([$subproject_macrodir])' in $configure_ac."
+	ac_config_macro_dir_advised=:
+      fi
+    fi
+
+    # Suggest modern idioms for storing autoconf macros:
+    $ac_config_macro_dir_advised || if test -z "$ac_macrodir" || test x"$macrodir" = x.; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([m4])' to $configure_ac and"
+      func_echo "rerunning $progname, to keep the correct libtool macros in-tree."
+      ac_config_macro_dir_advised=:
+
+    elif test -z "$ac_macrodir$ltdldir"; then
+      func_echo "Consider adding \`AC_CONFIG_MACRO_DIR([$macrodir])' to $configure_ac,"
+      func_echo "and rerunning $progname and aclocal."
+      ac_config_macro_dir_advised=:
+    fi
+
+    if test -z "$am_macrodir$macrodir"; then
+      func_echo "Consider adding \`-I m4' to ACLOCAL_AMFLAGS in Makefile.am."
+
+    elif test -z "$am_macrodir"; then
+      if $opt_ltdl && test "x$ltdl_mode" = "xsubproject" && test "$subproject_macrodir" != "$macrodir"; then
+	func_echo "Consider adding \`-I $subproject_macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      else
+        func_echo "Consider adding \`-I $macrodir' to ACLOCAL_AMFLAGS in Makefile.am."
+      fi
+    fi
+
+    # Don't trace for this, we're just checking the user didn't invoke it
+    # directly from configure.ac.
+    $SED 's,dnl .*$,,; s,# .*$,,' "$configure_ac" | grep AC_PROG_RANLIB >/dev/null &&
+      func_echo "\`AC_PROG_RANLIB' is rendered obsolete by \`LT_INIT'"
+
+    # FIXME: Ensure ltmain.sh, libtool.m4 and ltdl.m4 are from the same release
+}
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+{
+  # Lists of all files libtoolize has ever installed.  These are removed
+  # before installing the latest files when --force was passed to help
+  # ensure a clean upgrade.
+  # Do not remove config.guess nor config.sub, we don't install them
+  # without --install, and the project may not be using Automake.
+  all_pkgconfig_files="ltmain.sh"
+  all_pkgmacro_files="argz.m4 libtool.m4 ltdl.m4 ltoptions.m4 ltsugar.m4 ltversion.in ltversion.m4 lt~obsolete.m4"
+  all_pkgltdl_files="COPYING.LIB Makefile Makefile.in Makefile.inc Makefile.am README acinclude.m4 aclocal.m4 argz_.h argz.c config.h.in config-h.in configure configure.ac configure.in libltdl/lt__alloc.h libltdl/lt__dirent.h libltdl/lt__glibc.h libltdl/lt__private.h libltdl/lt__strl.h libltdl/lt_dlloader.h libltdl/lt_error.h libltdl/lt_system.h libltdl/slist.h loaders/dld_link.c loaders/dlopen.c loaders/dyld.c loaders/load_add_on.c loaders/loadlibrary.c loaders/preopen.c loaders/shl_load.c lt__alloc.c lt__dirent.c lt__strl.c lt_dlloader.c lt_error.c ltdl.c ltdl.h slist.c"
+
+  # Locations for important files:
+  prefix=/
+  datadir=//share
+  pkgdatadir=//share/libtool
+  pkgltdldir=//share/libtool
+  aclocaldir=//share/aclocal
+  auxdir=
+  macrodir=
+  configure_ac=configure.in
+
+  seen_autoconf=false
+  seen_libtool=false
+  seen_ltdl=false
+
+  # test EBCDIC or ASCII
+  case `echo X|tr X '\101'` in
+   A) # ASCII based system
+      # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+    SP2NL='tr \040 \012'
+    NL2SP='tr \015\012 \040\040'
+    ;;
+   *) # EBCDIC based system
+    SP2NL='tr \100 \n'
+    NL2SP='tr \r\n \100\100'
+    ;;
+  esac
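# An illustrative sketch: on an ASCII host, $NL2SP flattens carriage returns
# and newlines into spaces, which is how the m4_include file list is turned
# into a single line for `case' matching earlier in this script, e.g.
#
#   printf 'aclocal.m4\nm4/libtool.m4\n' | $NL2SP   ->  `aclocal.m4 m4/libtool.m4 '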
+
+  # Allow the user to override the master libtoolize repository:
+  if test -n "$_lt_pkgdatadir"; then
+    pkgltdldir="$_lt_pkgdatadir"
+    pkgdatadir="$_lt_pkgdatadir/libltdl"
+    aclocaldir="$_lt_pkgdatadir/libltdl/m4"
+  fi
+  func_nonemptydir_p pkgltdldir
+  func_nonemptydir_p pkgdatadir
+  func_nonemptydir_p aclocaldir
+
+  func_scan_files
+
+  case $ltdldir in
+  .) ltdlprefix= ;;
+  *) ltdlprefix=$ltdldir/ ;;
+  esac
+  subproject_auxdir=${ltdlprefix}config
+  subproject_macrodir=${ltdlprefix}m4
+
+  # :::BE CAREFUL HERE:::
+  # func_check_macros needs to check whether --ltdl was specified when
+  # LTDL_INIT was not seen, so we can't just use one variable for both
+  # conditions, or that check will be impossible.   No need to clutter the
+  # rest of the code with '$opt_ltdl || $seen_ltdl' though, because we CAN
+  # safely set opt_ltdl to true if LTDL_INIT was seen:
+  $seen_ltdl && opt_ltdl=:
+
+  func_install_pkgconfig_files
+  func_install_pkgmacro_files
+  func_install_pkgltdl_files
+
+  func_check_macros
+}
+
+exit $exit_status
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/m4 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/m4
new file mode 100755
index 0000000..6b57310
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/m4
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/make b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/make
new file mode 100755
index 0000000..5f25959
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/bin/make
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.a b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.a
new file mode 100644
index 0000000..f6a46f6
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.a
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.la b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.la
new file mode 100755
index 0000000..df1c342
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.la
@@ -0,0 +1,41 @@
+# libltdl.la - a libtool library file
+# Generated by libtool (GNU libtool) 2.4.2
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='libltdl.so.7'
+
+# Names of this library.
+library_names='libltdl.so.7.3.0 libltdl.so.7 libltdl.so'
+
+# The name of the static archive.
+old_library='libltdl.a'
+
+# Linker flags that can not go in dependency_libs.
+inherited_linker_flags=''
+
+# Libraries that this one depends upon.
+dependency_libs=' -ldl'
+
+# Names of additional weak libraries provided by this library
+weak_library_names=''
+
+# Version information for libltdl.
+current=10
+age=3
+revision=0
+
+# Is this an already installed library?
+installed=yes
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=no
+
+# Files to dlopen/dlpreopen
+dlopen=''
+dlpreopen=''
+
+# Directory that this library needs to be installed in:
+libdir='/x86_64-unknown-linux-gnu/lib'
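# Note on the version triple above: on this platform libtool derives the
# shared object names from current/age/revision as
# lib<name>.so.(current - age).(age).(revision), so current=10, age=3,
# revision=0 yields libltdl.so.7.3.0, matching `dlname' and `library_names'.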
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so
new file mode 120000
index 0000000..2fac5cb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so
@@ -0,0 +1 @@
+libltdl.so.7.3.0
\ No newline at end of file
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so.7 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so.7
new file mode 120000
index 0000000..2fac5cb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so.7
@@ -0,0 +1 @@
+libltdl.so.7.3.0
\ No newline at end of file
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so.7.3.0 b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so.7.3.0
new file mode 100755
index 0000000..ef8375a
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/host/x86_64-unknown-linux-gnu/lib/libltdl.so.7.3.0
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.tar.gz b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.tar.gz
new file mode 100644
index 0000000..2bacf53
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.tar.gz
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.url b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.url
new file mode 100644
index 0000000..9be2ca5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.url
@@ -0,0 +1 @@
+ftp://ftp.gnu.org/pub/gnu/autoconf/autoconf-2.68.tar.gz
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.version b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.version
new file mode 100644
index 0000000..264f2ce
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/autoconf/autoconf.version
@@ -0,0 +1 @@
+2.68
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.tar.gz b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.tar.gz
new file mode 100644
index 0000000..5c4e8c4
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.tar.gz
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.url b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.url
new file mode 100644
index 0000000..f2b1411
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.url
@@ -0,0 +1 @@
+ftp://ftp.gnu.org/pub/gnu/automake/automake-1.14.1.tar.gz
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.version b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.version
new file mode 100644
index 0000000..63e799c
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/automake/automake.version
@@ -0,0 +1 @@
+1.14.1
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/build b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/build
new file mode 100755
index 0000000..ecf4609
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/build
@@ -0,0 +1,119 @@
+#!/bin/sh
+
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is a convenience script for building, for the current
+#      build host system, the minimal, core set of GNU autotools on
+#      which other projects' build systems depend.
+#
+
+#
+# usage
+#
+# Display program usage.
+#
+usage() {
+    name=`basename ${0}`
+
+    echo "Usage: ${name} [ options ] [ -- <package> ... ]"
+
+    if [ $1 -ne 0 ]; then
+        echo "Try '${name} -h' for more information."
+    fi
+
+    if [ $1 -ne 1 ]; then
+        echo ""
+        echo "  -h, --help       Print this help, then exit."
+        echo ""
+    fi
+
+    exit $1
+}
+
+# Parse out any command line options
+
+while [ ${#} -gt 0 ]; do
+    if [ "${1}" = "-h" ] || [ "${1}" = "--help" ]; then
+        usage 0
+
+    elif [ "${1}" = "--" ]; then
+        shift 1
+        break
+
+    else
+        usage 1
+
+    fi
+done
+
+# Determine what packages to build
+
+if [ ${#} -gt 0 ]; then
+    SUBDIRS="${*}"
+
+else
+    SUBDIRS="`cat packages`"
+
+fi
+
+stem=tools/packages
+abs_srcdir=`pwd`
+abs_top_srcdir=`echo ${abs_srcdir} | sed -e s,/${stem},,g`
+
+abs_top_hostdir="${abs_top_srcdir}/tools/host"
+
+# Figure out what sort of build host we are running on, stripping off
+# any trailing version number information typically included on Darwin
+# / Mac OS X.
+
+host=`../../autoconf/config.guess | sed -e 's,[[:digit:].]*$,,g'`
+
+echo "================================================================================"
+echo "Building GNU autotools for ${host}..."
+
+for subdir in ${SUBDIRS}; do
+  echo "--------------------------------------------------------------------------------"
+  version=`cat ${subdir}/${subdir}.version`
+  package="${subdir}-${version}"
+
+  cd ${subdir} || exit ${?}
+
+  if [ ! -d "${package}" ]; then
+      echo "Expanding ${subdir}..."
+      tar -xf ${subdir}.tar.?z || exit ${?}
+  fi
+
+  echo "Setting up ${subdir}..."
+  mkdir -p build/${host} || exit ${?}
+  cd build/${host} || exit ${?}
+
+  echo "Configuring ${package}..."
+  PATH=${abs_top_hostdir}/bin:${abs_top_hostdir}/${host}:${PATH} ${abs_srcdir}/${subdir}/${package}/configure --prefix=/ --exec-prefix=/${host} || exit ${?}
+  cd ../.. || exit ${?}
+
+  echo "Building ${package}..."
+  make -C build/${host} all || exit ${?}
+
+  echo "Installing ${package}..."
+  make -C build/${host} DESTDIR=${abs_top_hostdir} install || exit ${?}
+  echo "Done."
+  cd ${abs_srcdir} || exit ${?}
+done
+
+echo "================================================================================"
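# A usage sketch (paths as laid out by this import): run from the packages
# directory to build just m4 and autoconf for the current build host, or
# omit the package arguments to build everything listed in the `packages'
# file.
#
#   cd nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages
#   ./build -- m4 autoconf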
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/clean b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/clean
new file mode 100755
index 0000000..9849365
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/clean
@@ -0,0 +1,104 @@
+#!/bin/sh
+
+#
+#    Copyright 2014-2016 Nest Labs Inc. All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+
+#
+#    Description:
+#      This file is a convenience script for cleaning up, for the
+#      current build host system, the intermediate build results for
+#      the minimal, core set of GNU autotools on which other projects'
+#      build systems depend.
+#
+
+#
+# usage
+#
+# Display program usage.
+#
+usage() {
+    name=`basename ${0}`
+
+    echo "Usage: ${name} [ options ] [ -- <package> ... ]"
+
+    if [ $1 -ne 0 ]; then
+        echo "Try '${name} -h' for more information."
+    fi
+
+    if [ $1 -ne 1 ]; then
+        echo ""
+        echo "  -h, --help       Print this help, then exit."
+        echo ""
+    fi
+
+    exit $1
+}
+
+# Parse out any command line options
+
+while [ ${#} -gt 0 ]; do
+    if [ "${1}" = "-h" ] || [ "${1}" = "--help" ]; then
+        usage 0
+
+    elif [ "${1}" = "--" ]; then
+        shift 1
+        break
+
+    else
+        usage 1
+
+    fi
+done
+
+# Determine what packages to clean
+
+if [ ${#} -gt 0 ]; then
+    SUBDIRS="${*}"
+
+else
+    SUBDIRS="`cat packages`"
+
+fi
+
+stem=tools/packages
+abs_srcdir=`pwd`
+abs_top_srcdir=`echo ${abs_srcdir} | sed -e s,/${stem},,g`
+
+abs_top_hostdir="${abs_top_srcdir}/tools/host"
+
+# Figure out what sort of build host we are running on, stripping off
+# any trailing version number information typically included on Darwin
+# / Mac OS X.
+
+host=`../../autoconf/config.guess | sed -e 's,[[:digit:].]*$,,g'`
+
+echo "================================================================================"
+echo "Cleaning GNU autotools for ${host}..."
+
+for subdir in ${SUBDIRS}; do
+  echo "--------------------------------------------------------------------------------"
+  version=`cat ${subdir}/${subdir}.version`
+  package="${subdir}-${version}"
+
+  echo "Cleaning ${subdir}..."
+  rm -r -f ${subdir}/build/${host}
+  rm -r -f ${subdir}/${package}
+
+  echo "Done."
+  cd ${abs_srcdir} || exit ${?}
+done
+
+echo "================================================================================"
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.tar.xz b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.tar.xz
new file mode 100644
index 0000000..c980a09
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.tar.xz
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.url b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.url
new file mode 100644
index 0000000..de881be
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.url
@@ -0,0 +1 @@
+ftp://ftp.gnu.org/pub/gnu/coreutils/coreutils-8.21.tar.xz
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.version b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.version
new file mode 100644
index 0000000..671d705
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/coreutils/coreutils.version
@@ -0,0 +1 @@
+8.21
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.tar.gz b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.tar.gz
new file mode 100644
index 0000000..4b31ba0
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.tar.gz
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.url b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.url
new file mode 100644
index 0000000..b3bb3c5
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.url
@@ -0,0 +1 @@
+ftp://ftp.gnu.org/pub/gnu/libtool/libtool-2.4.2.tar.gz
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.version b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.version
new file mode 100644
index 0000000..8e8299d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/libtool/libtool.version
@@ -0,0 +1 @@
+2.4.2
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.tar.gz b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.tar.gz
new file mode 100644
index 0000000..e667bd1
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.tar.gz
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.url b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.url
new file mode 100644
index 0000000..0b1282d
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.url
@@ -0,0 +1 @@
+ftp://ftp.gnu.org/pub/gnu/m4/m4-1.4.17.tar.gz
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.version b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.version
new file mode 100644
index 0000000..04e0d3f
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/m4/m4.version
@@ -0,0 +1 @@
+1.4.17
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.tar.gz b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.tar.gz
new file mode 100644
index 0000000..adb50bb
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.tar.gz
Binary files differ
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.url b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.url
new file mode 100644
index 0000000..729ec98
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.url
@@ -0,0 +1 @@
+ftp://ftp.gnu.org/pub/gnu/make/make-3.82.tar.gz
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.version b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.version
new file mode 100644
index 0000000..818a025
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/make/make.version
@@ -0,0 +1 @@
+3.82
diff --git a/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/packages b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/packages
new file mode 100644
index 0000000..28d8733
--- /dev/null
+++ b/nl-unit-test/third_party/nlbuild-autotools/repo/tools/packages/packages
@@ -0,0 +1,6 @@
+m4
+autoconf
+automake
+libtool
+make
+coreutils